pemix09 commited on
Commit
4721322
·
verified ·
1 Parent(s): a6d6f5c

Upload folder using huggingface_hub

Browse files
Files changed (43) hide show
  1. .DS_Store +0 -0
  2. .gitattributes +1 -0
  3. compare_model_before_after_conversion_to_tflite.py +130 -0
  4. convert_to_tflite.py +56 -0
  5. models/.DS_Store +0 -0
  6. models/flan_t5_custom/config.json +61 -0
  7. models/flan_t5_custom/generation_config.json +9 -0
  8. models/flan_t5_custom/model.safetensors +3 -0
  9. models/flan_t5_custom/special_tokens_map.json +125 -0
  10. models/flan_t5_custom/spiece.model +3 -0
  11. models/flan_t5_custom/tokenizer.json +0 -0
  12. models/flan_t5_custom/tokenizer_config.json +947 -0
  13. models/summarizer.tflite +3 -0
  14. scans_to_verify_summary/SwiadectwoMaturalne1.jpg +3 -0
  15. summarizer_learn.py +121 -0
  16. tmp_results/.DS_Store +0 -0
  17. tmp_results/checkpoint-3000/config.json +61 -0
  18. tmp_results/checkpoint-3000/generation_config.json +9 -0
  19. tmp_results/checkpoint-3000/model.safetensors +3 -0
  20. tmp_results/checkpoint-3000/optimizer.pt +3 -0
  21. tmp_results/checkpoint-3000/rng_state.pth +3 -0
  22. tmp_results/checkpoint-3000/scheduler.pt +3 -0
  23. tmp_results/checkpoint-3000/special_tokens_map.json +125 -0
  24. tmp_results/checkpoint-3000/spiece.model +3 -0
  25. tmp_results/checkpoint-3000/tokenizer.json +0 -0
  26. tmp_results/checkpoint-3000/tokenizer_config.json +940 -0
  27. tmp_results/checkpoint-3000/trainer_state.json +2238 -0
  28. tmp_results/checkpoint-3000/training_args.bin +3 -0
  29. tmp_results/checkpoint-3405/config.json +61 -0
  30. tmp_results/checkpoint-3405/generation_config.json +9 -0
  31. tmp_results/checkpoint-3405/model.safetensors +3 -0
  32. tmp_results/checkpoint-3405/optimizer.pt +3 -0
  33. tmp_results/checkpoint-3405/rng_state.pth +3 -0
  34. tmp_results/checkpoint-3405/scheduler.pt +3 -0
  35. tmp_results/checkpoint-3405/special_tokens_map.json +125 -0
  36. tmp_results/checkpoint-3405/spiece.model +3 -0
  37. tmp_results/checkpoint-3405/tokenizer.json +0 -0
  38. tmp_results/checkpoint-3405/tokenizer_config.json +940 -0
  39. tmp_results/checkpoint-3405/trainer_state.json +2526 -0
  40. tmp_results/checkpoint-3405/training_args.bin +3 -0
  41. tmp_results/runs/Jan03_12-06-34_macbook-16.local/events.out.tfevents.1767438395.macbook-16.local.99236.0 +3 -0
  42. verify_converted_to_tflite.py +100 -0
  43. verify_summarizer_before_converting_to_tflite.py +124 -0
.DS_Store ADDED
Binary file (6.15 kB). View file
 
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ scans_to_verify_summary/SwiadectwoMaturalne1.jpg filter=lfs diff=lfs merge=lfs -text
compare_model_before_after_conversion_to_tflite.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import torch
3
+ import numpy as np
4
+ import tensorflow as tf
5
+ import pytesseract
6
+ from pathlib import Path
7
+ from PIL import Image
8
+ from pdf2image import convert_from_path
9
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
10
+
11
+ # --- KONFIGURACJA ---
12
+ pytesseract.pytesseract.tesseract_cmd = r'/opt/homebrew/bin/tesseract'
13
+
14
+ SUMMARIZER_DIR = Path(__file__).resolve().parent
15
+ BASE_DIR = SUMMARIZER_DIR.parent
16
+ PT_MODEL_PATH = SUMMARIZER_DIR / "models" / "flan_t5_custom"
17
+ TFLITE_MODEL_PATH = SUMMARIZER_DIR / "models" / "summarizer.tflite"
18
+ VERIFY_DIR = SUMMARIZER_DIR / "scans_to_verify_summary"
19
+
20
+ MAX_LEN = 256 # Musi być zgodne z ostatnią konwersją
21
+ device = "mps" if torch.backends.mps.is_available() else "cpu"
22
+
23
+
24
+ # --- ŁADOWANIE ---
25
+
26
def load_pt_model():
    """Load the fine-tuned PyTorch seq2seq model and its tokenizer from disk."""
    print(f"🚀 Ładowanie modelu PyTorch z: {PT_MODEL_PATH}")
    tok = AutoTokenizer.from_pretrained(PT_MODEL_PATH)
    seq2seq = AutoModelForSeq2SeqLM.from_pretrained(PT_MODEL_PATH)
    return tok, seq2seq.to(device)
31
+
32
+
33
def load_tflite_model():
    """Load the converted TFLite model and pre-allocate its tensors."""
    print(f"🚀 Ładowanie modelu TFLite z: {TFLITE_MODEL_PATH}")
    itp = tf.lite.Interpreter(model_path=str(TFLITE_MODEL_PATH))
    itp.allocate_tensors()
    return itp
38
+
39
+
40
+ # --- GENEROWANIE ---
41
+
42
def generate_pytorch(prompt, tokenizer, model):
    """Greedy-decode *prompt* with the PyTorch model (reference output).

    Greedy decoding (num_beams=1, do_sample=False) so the result is
    directly comparable with the manual TFLite greedy loop.
    """
    encoded = tokenizer(prompt, return_tensors="pt", max_length=MAX_LEN, truncation=True).to(device)
    generated = model.generate(**encoded, max_new_tokens=128, num_beams=1, do_sample=False)
    return tokenizer.decode(generated[0], skip_special_tokens=True)
46
+
47
+
48
def generate_tflite(prompt, interpreter, tokenizer):
    """Greedy-decode *prompt* with the TFLite interpreter.

    The converted model takes fixed-shape (1, MAX_LEN) int32 encoder and
    decoder id tensors and returns logits; decoding is a manual
    token-by-token greedy loop.  Token id 0 is the decoder start token
    and id 1 is EOS (matches the model's generation_config.json).
    """
    input_ids = tokenizer.encode(prompt, max_length=MAX_LEN, truncation=True, padding="max_length")
    input_ids = np.array([input_ids], dtype=np.int32)
    decoder_input_ids = np.zeros((1, MAX_LEN), dtype=np.int32)
    output_tokens = [0]  # decoder_start_token_id

    input_details = interpreter.get_input_details()
    output_details = interpreter.get_output_details()

    # Resolve tensor indices by name ONCE instead of re-scanning the
    # detail list (and re-matching names) on every decoding step.
    encoder_idx = None
    decoder_idx = None
    for detail in input_details:
        name = detail['name']
        if "input_ids" in name and "decoder" not in name:
            encoder_idx = detail['index']
        elif "decoder_input_ids" in name:
            decoder_idx = detail['index']
    logits_idx = output_details[0]['index']

    for _ in range(MAX_LEN - 1):
        # Earlier positions were filled on previous iterations; only the
        # newest token needs to be written into the decoder buffer.
        decoder_input_ids[0, len(output_tokens) - 1] = output_tokens[-1]

        if encoder_idx is not None:
            interpreter.set_tensor(encoder_idx, input_ids)
        if decoder_idx is not None:
            interpreter.set_tensor(decoder_idx, decoder_input_ids)

        interpreter.invoke()
        logits = interpreter.get_tensor(logits_idx)

        # Logits at the position of the last fed token predict the next one.
        next_token_logits = logits[0, len(output_tokens) - 1, :]
        next_token = int(np.argmax(next_token_logits))

        if next_token == 1:  # EOS
            break
        output_tokens.append(next_token)
        if len(output_tokens) >= 128:  # safety cap, mirrors max_new_tokens=128
            break

    return tokenizer.decode(output_tokens, skip_special_tokens=True)
80
+
81
+
82
+ # --- OCR ---
83
+
84
def perform_ocr(file_path):
    """Run Tesseract OCR (Polish + English) on an image or PDF.

    PDFs are rasterised page by page and the recognised text concatenated.
    On any failure an error string is returned instead of raising.
    """
    try:
        if file_path.suffix.lower() != ".pdf":
            return pytesseract.image_to_string(Image.open(file_path), lang='pol+eng')
        pages = convert_from_path(file_path)
        return "".join(pytesseract.image_to_string(page, lang='pol+eng') for page in pages)
    except Exception as e:
        return f"Błąd OCR: {e}"
92
+
93
+
94
+ # --- MAIN ---
95
+
96
def main():
    """OCR every scan in VERIFY_DIR and compare PyTorch vs TFLite summaries.

    For each file both a 'headline' and a 'summarize' prompt are run
    through both backends and the outputs printed side by side, with a
    simple exact-match agreement check.
    """
    tokenizer, pt_model = load_pt_model()
    tflite_interpreter = load_tflite_model()

    supported = [".jpg", ".jpeg", ".png", ".pdf"]
    files = [path for path in VERIFY_DIR.glob("*") if path.suffix.lower() in supported]
    if not files:
        print(f"ℹ️ Brak plików w {VERIFY_DIR}")
        return

    for file_path in files:
        print(f"\n" + "█" * 60)
        print(f"📄 PLIK: {file_path.name}")
        ocr_text = perform_ocr(file_path).strip()

        for task in ["headline", "summarize"]:
            prompt = f"{task}: {ocr_text}"
            print(f"\n🔍 ZADANIE: {task.upper()}")

            pt_res = generate_pytorch(prompt, tokenizer, pt_model)
            tfl_res = generate_tflite(prompt, tflite_interpreter, tokenizer)

            print(f"{'PyTorch:':<10} {pt_res}")
            print(f"{'TFLite:':<10} {tfl_res}")

            # Simple agreement check between the two backends.
            if pt_res.strip() == tfl_res.strip():
                print("✅ ZGODNOŚĆ: 100%")
            else:
                print("⚠️ ROZBIEŻNOŚĆ WYKRYTA")


if __name__ == "__main__":
    main()
convert_to_tflite.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import tensorflow as tf
3
+ from transformers import TFT5ForConditionalGeneration, AutoTokenizer
4
+ from pathlib import Path
5
+
6
+ BASE_DIR = Path(__file__).resolve().parent.parent
7
+ MODEL_INPUT_DIR = BASE_DIR / "summarizer" / "models" / "flan_t5_custom"
8
+ TFLITE_OUTPUT_FILE = BASE_DIR / "summarizer" / "models" / "summarizer.tflite"
9
+
10
+ # USTAWIAMY IDENTYCZNE WARTOŚCI - to rozwiązuje błąd "not broadcastable"
11
+ MAX_LEN = 256
12
+
13
+
14
def convert():
    """Export the fine-tuned T5 checkpoint to a single TFLite flatbuffer.

    Encoder and decoder inputs are frozen to the same fixed shape
    (1, MAX_LEN); the exported graph returns raw logits.
    """
    print(f"🚀 Konwersja z wyrównaniem kształtów do {MAX_LEN}...")

    model = TFT5ForConditionalGeneration.from_pretrained(MODEL_INPUT_DIR, from_pt=True)
    tokenizer = AutoTokenizer.from_pretrained(MODEL_INPUT_DIR)

    class T5MergedModel(tf.Module):
        """Wraps encoder + decoder behind one fixed-signature callable."""

        def __init__(self, wrapped):
            super().__init__()
            self.model = wrapped

        @tf.function(input_signature=[
            tf.TensorSpec([1, MAX_LEN], tf.int32, name="input_ids"),
            tf.TensorSpec([1, MAX_LEN], tf.int32, name="decoder_input_ids"),
        ])
        def __call__(self, input_ids, decoder_input_ids):
            # training=False is essential: it strips training-only graph nodes.
            out = self.model(input_ids=input_ids, decoder_input_ids=decoder_input_ids, training=False)
            return out.logits

    t5_module = T5MergedModel(model)
    converter = tf.lite.TFLiteConverter.from_concrete_functions(
        [t5_module.__call__.get_concrete_function()], t5_module
    )

    # T5 uses ops outside the TFLite builtin set, so allow TF select ops.
    converter.target_spec.supported_ops = [
        tf.lite.OpsSet.TFLITE_BUILTINS,
        tf.lite.OpsSet.SELECT_TF_OPS,
    ]

    # Optimise for size and stability while keeping float32 weights.
    converter.optimizations = [tf.lite.Optimize.DEFAULT]
    converter.target_spec.supported_types = [tf.float32]

    tflite_model = converter.convert()
    with open(TFLITE_OUTPUT_FILE, "wb") as f:
        f.write(tflite_model)

    print(f"✨ Model gotowy: {TFLITE_OUTPUT_FILE}")


if __name__ == "__main__":
    convert()
models/.DS_Store ADDED
Binary file (6.15 kB). View file
 
models/flan_t5_custom/config.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "T5ForConditionalGeneration"
4
+ ],
5
+ "classifier_dropout": 0.0,
6
+ "d_ff": 1024,
7
+ "d_kv": 64,
8
+ "d_model": 512,
9
+ "decoder_start_token_id": 0,
10
+ "dense_act_fn": "gelu_new",
11
+ "dropout_rate": 0.1,
12
+ "dtype": "float32",
13
+ "eos_token_id": 1,
14
+ "feed_forward_proj": "gated-gelu",
15
+ "initializer_factor": 1.0,
16
+ "is_encoder_decoder": true,
17
+ "is_gated_act": true,
18
+ "layer_norm_epsilon": 1e-06,
19
+ "model_type": "t5",
20
+ "n_positions": 512,
21
+ "num_decoder_layers": 8,
22
+ "num_heads": 6,
23
+ "num_layers": 8,
24
+ "output_past": true,
25
+ "pad_token_id": 0,
26
+ "relative_attention_max_distance": 128,
27
+ "relative_attention_num_buckets": 32,
28
+ "task_specific_params": {
29
+ "summarization": {
30
+ "early_stopping": true,
31
+ "length_penalty": 2.0,
32
+ "max_length": 200,
33
+ "min_length": 30,
34
+ "no_repeat_ngram_size": 3,
35
+ "num_beams": 4,
36
+ "prefix": "summarize: "
37
+ },
38
+ "translation_en_to_de": {
39
+ "early_stopping": true,
40
+ "max_length": 300,
41
+ "num_beams": 4,
42
+ "prefix": "translate English to German: "
43
+ },
44
+ "translation_en_to_fr": {
45
+ "early_stopping": true,
46
+ "max_length": 300,
47
+ "num_beams": 4,
48
+ "prefix": "translate English to French: "
49
+ },
50
+ "translation_en_to_ro": {
51
+ "early_stopping": true,
52
+ "max_length": 300,
53
+ "num_beams": 4,
54
+ "prefix": "translate English to Romanian: "
55
+ }
56
+ },
57
+ "tie_word_embeddings": false,
58
+ "transformers_version": "4.57.3",
59
+ "use_cache": true,
60
+ "vocab_size": 32128
61
+ }
models/flan_t5_custom/generation_config.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "decoder_start_token_id": 0,
4
+ "eos_token_id": [
5
+ 1
6
+ ],
7
+ "pad_token_id": 0,
8
+ "transformers_version": "4.57.3"
9
+ }
models/flan_t5_custom/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2b3a2db3625722dbe2a012701d1a2671dcc1f6a501e40cb8ccce8b33aba70610
3
+ size 307867048
models/flan_t5_custom/special_tokens_map.json ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<extra_id_0>",
4
+ "<extra_id_1>",
5
+ "<extra_id_2>",
6
+ "<extra_id_3>",
7
+ "<extra_id_4>",
8
+ "<extra_id_5>",
9
+ "<extra_id_6>",
10
+ "<extra_id_7>",
11
+ "<extra_id_8>",
12
+ "<extra_id_9>",
13
+ "<extra_id_10>",
14
+ "<extra_id_11>",
15
+ "<extra_id_12>",
16
+ "<extra_id_13>",
17
+ "<extra_id_14>",
18
+ "<extra_id_15>",
19
+ "<extra_id_16>",
20
+ "<extra_id_17>",
21
+ "<extra_id_18>",
22
+ "<extra_id_19>",
23
+ "<extra_id_20>",
24
+ "<extra_id_21>",
25
+ "<extra_id_22>",
26
+ "<extra_id_23>",
27
+ "<extra_id_24>",
28
+ "<extra_id_25>",
29
+ "<extra_id_26>",
30
+ "<extra_id_27>",
31
+ "<extra_id_28>",
32
+ "<extra_id_29>",
33
+ "<extra_id_30>",
34
+ "<extra_id_31>",
35
+ "<extra_id_32>",
36
+ "<extra_id_33>",
37
+ "<extra_id_34>",
38
+ "<extra_id_35>",
39
+ "<extra_id_36>",
40
+ "<extra_id_37>",
41
+ "<extra_id_38>",
42
+ "<extra_id_39>",
43
+ "<extra_id_40>",
44
+ "<extra_id_41>",
45
+ "<extra_id_42>",
46
+ "<extra_id_43>",
47
+ "<extra_id_44>",
48
+ "<extra_id_45>",
49
+ "<extra_id_46>",
50
+ "<extra_id_47>",
51
+ "<extra_id_48>",
52
+ "<extra_id_49>",
53
+ "<extra_id_50>",
54
+ "<extra_id_51>",
55
+ "<extra_id_52>",
56
+ "<extra_id_53>",
57
+ "<extra_id_54>",
58
+ "<extra_id_55>",
59
+ "<extra_id_56>",
60
+ "<extra_id_57>",
61
+ "<extra_id_58>",
62
+ "<extra_id_59>",
63
+ "<extra_id_60>",
64
+ "<extra_id_61>",
65
+ "<extra_id_62>",
66
+ "<extra_id_63>",
67
+ "<extra_id_64>",
68
+ "<extra_id_65>",
69
+ "<extra_id_66>",
70
+ "<extra_id_67>",
71
+ "<extra_id_68>",
72
+ "<extra_id_69>",
73
+ "<extra_id_70>",
74
+ "<extra_id_71>",
75
+ "<extra_id_72>",
76
+ "<extra_id_73>",
77
+ "<extra_id_74>",
78
+ "<extra_id_75>",
79
+ "<extra_id_76>",
80
+ "<extra_id_77>",
81
+ "<extra_id_78>",
82
+ "<extra_id_79>",
83
+ "<extra_id_80>",
84
+ "<extra_id_81>",
85
+ "<extra_id_82>",
86
+ "<extra_id_83>",
87
+ "<extra_id_84>",
88
+ "<extra_id_85>",
89
+ "<extra_id_86>",
90
+ "<extra_id_87>",
91
+ "<extra_id_88>",
92
+ "<extra_id_89>",
93
+ "<extra_id_90>",
94
+ "<extra_id_91>",
95
+ "<extra_id_92>",
96
+ "<extra_id_93>",
97
+ "<extra_id_94>",
98
+ "<extra_id_95>",
99
+ "<extra_id_96>",
100
+ "<extra_id_97>",
101
+ "<extra_id_98>",
102
+ "<extra_id_99>"
103
+ ],
104
+ "eos_token": {
105
+ "content": "</s>",
106
+ "lstrip": false,
107
+ "normalized": false,
108
+ "rstrip": false,
109
+ "single_word": false
110
+ },
111
+ "pad_token": {
112
+ "content": "<pad>",
113
+ "lstrip": false,
114
+ "normalized": false,
115
+ "rstrip": false,
116
+ "single_word": false
117
+ },
118
+ "unk_token": {
119
+ "content": "<unk>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false
124
+ }
125
+ }
models/flan_t5_custom/spiece.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
3
+ size 791656
models/flan_t5_custom/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
models/flan_t5_custom/tokenizer_config.json ADDED
@@ -0,0 +1,947 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": null,
3
+ "added_tokens_decoder": {
4
+ "0": {
5
+ "content": "<pad>",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "1": {
13
+ "content": "</s>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "2": {
21
+ "content": "<unk>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false,
26
+ "special": true
27
+ },
28
+ "32000": {
29
+ "content": "<extra_id_99>",
30
+ "lstrip": false,
31
+ "normalized": false,
32
+ "rstrip": false,
33
+ "single_word": false,
34
+ "special": true
35
+ },
36
+ "32001": {
37
+ "content": "<extra_id_98>",
38
+ "lstrip": false,
39
+ "normalized": false,
40
+ "rstrip": false,
41
+ "single_word": false,
42
+ "special": true
43
+ },
44
+ "32002": {
45
+ "content": "<extra_id_97>",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false,
50
+ "special": true
51
+ },
52
+ "32003": {
53
+ "content": "<extra_id_96>",
54
+ "lstrip": false,
55
+ "normalized": false,
56
+ "rstrip": false,
57
+ "single_word": false,
58
+ "special": true
59
+ },
60
+ "32004": {
61
+ "content": "<extra_id_95>",
62
+ "lstrip": false,
63
+ "normalized": false,
64
+ "rstrip": false,
65
+ "single_word": false,
66
+ "special": true
67
+ },
68
+ "32005": {
69
+ "content": "<extra_id_94>",
70
+ "lstrip": false,
71
+ "normalized": false,
72
+ "rstrip": false,
73
+ "single_word": false,
74
+ "special": true
75
+ },
76
+ "32006": {
77
+ "content": "<extra_id_93>",
78
+ "lstrip": false,
79
+ "normalized": false,
80
+ "rstrip": false,
81
+ "single_word": false,
82
+ "special": true
83
+ },
84
+ "32007": {
85
+ "content": "<extra_id_92>",
86
+ "lstrip": false,
87
+ "normalized": false,
88
+ "rstrip": false,
89
+ "single_word": false,
90
+ "special": true
91
+ },
92
+ "32008": {
93
+ "content": "<extra_id_91>",
94
+ "lstrip": false,
95
+ "normalized": false,
96
+ "rstrip": false,
97
+ "single_word": false,
98
+ "special": true
99
+ },
100
+ "32009": {
101
+ "content": "<extra_id_90>",
102
+ "lstrip": false,
103
+ "normalized": false,
104
+ "rstrip": false,
105
+ "single_word": false,
106
+ "special": true
107
+ },
108
+ "32010": {
109
+ "content": "<extra_id_89>",
110
+ "lstrip": false,
111
+ "normalized": false,
112
+ "rstrip": false,
113
+ "single_word": false,
114
+ "special": true
115
+ },
116
+ "32011": {
117
+ "content": "<extra_id_88>",
118
+ "lstrip": false,
119
+ "normalized": false,
120
+ "rstrip": false,
121
+ "single_word": false,
122
+ "special": true
123
+ },
124
+ "32012": {
125
+ "content": "<extra_id_87>",
126
+ "lstrip": false,
127
+ "normalized": false,
128
+ "rstrip": false,
129
+ "single_word": false,
130
+ "special": true
131
+ },
132
+ "32013": {
133
+ "content": "<extra_id_86>",
134
+ "lstrip": false,
135
+ "normalized": false,
136
+ "rstrip": false,
137
+ "single_word": false,
138
+ "special": true
139
+ },
140
+ "32014": {
141
+ "content": "<extra_id_85>",
142
+ "lstrip": false,
143
+ "normalized": false,
144
+ "rstrip": false,
145
+ "single_word": false,
146
+ "special": true
147
+ },
148
+ "32015": {
149
+ "content": "<extra_id_84>",
150
+ "lstrip": false,
151
+ "normalized": false,
152
+ "rstrip": false,
153
+ "single_word": false,
154
+ "special": true
155
+ },
156
+ "32016": {
157
+ "content": "<extra_id_83>",
158
+ "lstrip": false,
159
+ "normalized": false,
160
+ "rstrip": false,
161
+ "single_word": false,
162
+ "special": true
163
+ },
164
+ "32017": {
165
+ "content": "<extra_id_82>",
166
+ "lstrip": false,
167
+ "normalized": false,
168
+ "rstrip": false,
169
+ "single_word": false,
170
+ "special": true
171
+ },
172
+ "32018": {
173
+ "content": "<extra_id_81>",
174
+ "lstrip": false,
175
+ "normalized": false,
176
+ "rstrip": false,
177
+ "single_word": false,
178
+ "special": true
179
+ },
180
+ "32019": {
181
+ "content": "<extra_id_80>",
182
+ "lstrip": false,
183
+ "normalized": false,
184
+ "rstrip": false,
185
+ "single_word": false,
186
+ "special": true
187
+ },
188
+ "32020": {
189
+ "content": "<extra_id_79>",
190
+ "lstrip": false,
191
+ "normalized": false,
192
+ "rstrip": false,
193
+ "single_word": false,
194
+ "special": true
195
+ },
196
+ "32021": {
197
+ "content": "<extra_id_78>",
198
+ "lstrip": false,
199
+ "normalized": false,
200
+ "rstrip": false,
201
+ "single_word": false,
202
+ "special": true
203
+ },
204
+ "32022": {
205
+ "content": "<extra_id_77>",
206
+ "lstrip": false,
207
+ "normalized": false,
208
+ "rstrip": false,
209
+ "single_word": false,
210
+ "special": true
211
+ },
212
+ "32023": {
213
+ "content": "<extra_id_76>",
214
+ "lstrip": false,
215
+ "normalized": false,
216
+ "rstrip": false,
217
+ "single_word": false,
218
+ "special": true
219
+ },
220
+ "32024": {
221
+ "content": "<extra_id_75>",
222
+ "lstrip": false,
223
+ "normalized": false,
224
+ "rstrip": false,
225
+ "single_word": false,
226
+ "special": true
227
+ },
228
+ "32025": {
229
+ "content": "<extra_id_74>",
230
+ "lstrip": false,
231
+ "normalized": false,
232
+ "rstrip": false,
233
+ "single_word": false,
234
+ "special": true
235
+ },
236
+ "32026": {
237
+ "content": "<extra_id_73>",
238
+ "lstrip": false,
239
+ "normalized": false,
240
+ "rstrip": false,
241
+ "single_word": false,
242
+ "special": true
243
+ },
244
+ "32027": {
245
+ "content": "<extra_id_72>",
246
+ "lstrip": false,
247
+ "normalized": false,
248
+ "rstrip": false,
249
+ "single_word": false,
250
+ "special": true
251
+ },
252
+ "32028": {
253
+ "content": "<extra_id_71>",
254
+ "lstrip": false,
255
+ "normalized": false,
256
+ "rstrip": false,
257
+ "single_word": false,
258
+ "special": true
259
+ },
260
+ "32029": {
261
+ "content": "<extra_id_70>",
262
+ "lstrip": false,
263
+ "normalized": false,
264
+ "rstrip": false,
265
+ "single_word": false,
266
+ "special": true
267
+ },
268
+ "32030": {
269
+ "content": "<extra_id_69>",
270
+ "lstrip": false,
271
+ "normalized": false,
272
+ "rstrip": false,
273
+ "single_word": false,
274
+ "special": true
275
+ },
276
+ "32031": {
277
+ "content": "<extra_id_68>",
278
+ "lstrip": false,
279
+ "normalized": false,
280
+ "rstrip": false,
281
+ "single_word": false,
282
+ "special": true
283
+ },
284
+ "32032": {
285
+ "content": "<extra_id_67>",
286
+ "lstrip": false,
287
+ "normalized": false,
288
+ "rstrip": false,
289
+ "single_word": false,
290
+ "special": true
291
+ },
292
+ "32033": {
293
+ "content": "<extra_id_66>",
294
+ "lstrip": false,
295
+ "normalized": false,
296
+ "rstrip": false,
297
+ "single_word": false,
298
+ "special": true
299
+ },
300
+ "32034": {
301
+ "content": "<extra_id_65>",
302
+ "lstrip": false,
303
+ "normalized": false,
304
+ "rstrip": false,
305
+ "single_word": false,
306
+ "special": true
307
+ },
308
+ "32035": {
309
+ "content": "<extra_id_64>",
310
+ "lstrip": false,
311
+ "normalized": false,
312
+ "rstrip": false,
313
+ "single_word": false,
314
+ "special": true
315
+ },
316
+ "32036": {
317
+ "content": "<extra_id_63>",
318
+ "lstrip": false,
319
+ "normalized": false,
320
+ "rstrip": false,
321
+ "single_word": false,
322
+ "special": true
323
+ },
324
+ "32037": {
325
+ "content": "<extra_id_62>",
326
+ "lstrip": false,
327
+ "normalized": false,
328
+ "rstrip": false,
329
+ "single_word": false,
330
+ "special": true
331
+ },
332
+ "32038": {
333
+ "content": "<extra_id_61>",
334
+ "lstrip": false,
335
+ "normalized": false,
336
+ "rstrip": false,
337
+ "single_word": false,
338
+ "special": true
339
+ },
340
+ "32039": {
341
+ "content": "<extra_id_60>",
342
+ "lstrip": false,
343
+ "normalized": false,
344
+ "rstrip": false,
345
+ "single_word": false,
346
+ "special": true
347
+ },
348
+ "32040": {
349
+ "content": "<extra_id_59>",
350
+ "lstrip": false,
351
+ "normalized": false,
352
+ "rstrip": false,
353
+ "single_word": false,
354
+ "special": true
355
+ },
356
+ "32041": {
357
+ "content": "<extra_id_58>",
358
+ "lstrip": false,
359
+ "normalized": false,
360
+ "rstrip": false,
361
+ "single_word": false,
362
+ "special": true
363
+ },
364
+ "32042": {
365
+ "content": "<extra_id_57>",
366
+ "lstrip": false,
367
+ "normalized": false,
368
+ "rstrip": false,
369
+ "single_word": false,
370
+ "special": true
371
+ },
372
+ "32043": {
373
+ "content": "<extra_id_56>",
374
+ "lstrip": false,
375
+ "normalized": false,
376
+ "rstrip": false,
377
+ "single_word": false,
378
+ "special": true
379
+ },
380
+ "32044": {
381
+ "content": "<extra_id_55>",
382
+ "lstrip": false,
383
+ "normalized": false,
384
+ "rstrip": false,
385
+ "single_word": false,
386
+ "special": true
387
+ },
388
+ "32045": {
389
+ "content": "<extra_id_54>",
390
+ "lstrip": false,
391
+ "normalized": false,
392
+ "rstrip": false,
393
+ "single_word": false,
394
+ "special": true
395
+ },
396
+ "32046": {
397
+ "content": "<extra_id_53>",
398
+ "lstrip": false,
399
+ "normalized": false,
400
+ "rstrip": false,
401
+ "single_word": false,
402
+ "special": true
403
+ },
404
+ "32047": {
405
+ "content": "<extra_id_52>",
406
+ "lstrip": false,
407
+ "normalized": false,
408
+ "rstrip": false,
409
+ "single_word": false,
410
+ "special": true
411
+ },
412
+ "32048": {
413
+ "content": "<extra_id_51>",
414
+ "lstrip": false,
415
+ "normalized": false,
416
+ "rstrip": false,
417
+ "single_word": false,
418
+ "special": true
419
+ },
420
+ "32049": {
421
+ "content": "<extra_id_50>",
422
+ "lstrip": false,
423
+ "normalized": false,
424
+ "rstrip": false,
425
+ "single_word": false,
426
+ "special": true
427
+ },
428
+ "32050": {
429
+ "content": "<extra_id_49>",
430
+ "lstrip": false,
431
+ "normalized": false,
432
+ "rstrip": false,
433
+ "single_word": false,
434
+ "special": true
435
+ },
436
+ "32051": {
437
+ "content": "<extra_id_48>",
438
+ "lstrip": false,
439
+ "normalized": false,
440
+ "rstrip": false,
441
+ "single_word": false,
442
+ "special": true
443
+ },
444
+ "32052": {
445
+ "content": "<extra_id_47>",
446
+ "lstrip": false,
447
+ "normalized": false,
448
+ "rstrip": false,
449
+ "single_word": false,
450
+ "special": true
451
+ },
452
+ "32053": {
453
+ "content": "<extra_id_46>",
454
+ "lstrip": false,
455
+ "normalized": false,
456
+ "rstrip": false,
457
+ "single_word": false,
458
+ "special": true
459
+ },
460
+ "32054": {
461
+ "content": "<extra_id_45>",
462
+ "lstrip": false,
463
+ "normalized": false,
464
+ "rstrip": false,
465
+ "single_word": false,
466
+ "special": true
467
+ },
468
+ "32055": {
469
+ "content": "<extra_id_44>",
470
+ "lstrip": false,
471
+ "normalized": false,
472
+ "rstrip": false,
473
+ "single_word": false,
474
+ "special": true
475
+ },
476
+ "32056": {
477
+ "content": "<extra_id_43>",
478
+ "lstrip": false,
479
+ "normalized": false,
480
+ "rstrip": false,
481
+ "single_word": false,
482
+ "special": true
483
+ },
484
+ "32057": {
485
+ "content": "<extra_id_42>",
486
+ "lstrip": false,
487
+ "normalized": false,
488
+ "rstrip": false,
489
+ "single_word": false,
490
+ "special": true
491
+ },
492
+ "32058": {
493
+ "content": "<extra_id_41>",
494
+ "lstrip": false,
495
+ "normalized": false,
496
+ "rstrip": false,
497
+ "single_word": false,
498
+ "special": true
499
+ },
500
+ "32059": {
501
+ "content": "<extra_id_40>",
502
+ "lstrip": false,
503
+ "normalized": false,
504
+ "rstrip": false,
505
+ "single_word": false,
506
+ "special": true
507
+ },
508
+ "32060": {
509
+ "content": "<extra_id_39>",
510
+ "lstrip": false,
511
+ "normalized": false,
512
+ "rstrip": false,
513
+ "single_word": false,
514
+ "special": true
515
+ },
516
+ "32061": {
517
+ "content": "<extra_id_38>",
518
+ "lstrip": false,
519
+ "normalized": false,
520
+ "rstrip": false,
521
+ "single_word": false,
522
+ "special": true
523
+ },
524
+ "32062": {
525
+ "content": "<extra_id_37>",
526
+ "lstrip": false,
527
+ "normalized": false,
528
+ "rstrip": false,
529
+ "single_word": false,
530
+ "special": true
531
+ },
532
+ "32063": {
533
+ "content": "<extra_id_36>",
534
+ "lstrip": false,
535
+ "normalized": false,
536
+ "rstrip": false,
537
+ "single_word": false,
538
+ "special": true
539
+ },
540
+ "32064": {
541
+ "content": "<extra_id_35>",
542
+ "lstrip": false,
543
+ "normalized": false,
544
+ "rstrip": false,
545
+ "single_word": false,
546
+ "special": true
547
+ },
548
+ "32065": {
549
+ "content": "<extra_id_34>",
550
+ "lstrip": false,
551
+ "normalized": false,
552
+ "rstrip": false,
553
+ "single_word": false,
554
+ "special": true
555
+ },
556
+ "32066": {
557
+ "content": "<extra_id_33>",
558
+ "lstrip": false,
559
+ "normalized": false,
560
+ "rstrip": false,
561
+ "single_word": false,
562
+ "special": true
563
+ },
564
+ "32067": {
565
+ "content": "<extra_id_32>",
566
+ "lstrip": false,
567
+ "normalized": false,
568
+ "rstrip": false,
569
+ "single_word": false,
570
+ "special": true
571
+ },
572
+ "32068": {
573
+ "content": "<extra_id_31>",
574
+ "lstrip": false,
575
+ "normalized": false,
576
+ "rstrip": false,
577
+ "single_word": false,
578
+ "special": true
579
+ },
580
+ "32069": {
581
+ "content": "<extra_id_30>",
582
+ "lstrip": false,
583
+ "normalized": false,
584
+ "rstrip": false,
585
+ "single_word": false,
586
+ "special": true
587
+ },
588
+ "32070": {
589
+ "content": "<extra_id_29>",
590
+ "lstrip": false,
591
+ "normalized": false,
592
+ "rstrip": false,
593
+ "single_word": false,
594
+ "special": true
595
+ },
596
+ "32071": {
597
+ "content": "<extra_id_28>",
598
+ "lstrip": false,
599
+ "normalized": false,
600
+ "rstrip": false,
601
+ "single_word": false,
602
+ "special": true
603
+ },
604
+ "32072": {
605
+ "content": "<extra_id_27>",
606
+ "lstrip": false,
607
+ "normalized": false,
608
+ "rstrip": false,
609
+ "single_word": false,
610
+ "special": true
611
+ },
612
+ "32073": {
613
+ "content": "<extra_id_26>",
614
+ "lstrip": false,
615
+ "normalized": false,
616
+ "rstrip": false,
617
+ "single_word": false,
618
+ "special": true
619
+ },
620
+ "32074": {
621
+ "content": "<extra_id_25>",
622
+ "lstrip": false,
623
+ "normalized": false,
624
+ "rstrip": false,
625
+ "single_word": false,
626
+ "special": true
627
+ },
628
+ "32075": {
629
+ "content": "<extra_id_24>",
630
+ "lstrip": false,
631
+ "normalized": false,
632
+ "rstrip": false,
633
+ "single_word": false,
634
+ "special": true
635
+ },
636
+ "32076": {
637
+ "content": "<extra_id_23>",
638
+ "lstrip": false,
639
+ "normalized": false,
640
+ "rstrip": false,
641
+ "single_word": false,
642
+ "special": true
643
+ },
644
+ "32077": {
645
+ "content": "<extra_id_22>",
646
+ "lstrip": false,
647
+ "normalized": false,
648
+ "rstrip": false,
649
+ "single_word": false,
650
+ "special": true
651
+ },
652
+ "32078": {
653
+ "content": "<extra_id_21>",
654
+ "lstrip": false,
655
+ "normalized": false,
656
+ "rstrip": false,
657
+ "single_word": false,
658
+ "special": true
659
+ },
660
+ "32079": {
661
+ "content": "<extra_id_20>",
662
+ "lstrip": false,
663
+ "normalized": false,
664
+ "rstrip": false,
665
+ "single_word": false,
666
+ "special": true
667
+ },
668
+ "32080": {
669
+ "content": "<extra_id_19>",
670
+ "lstrip": false,
671
+ "normalized": false,
672
+ "rstrip": false,
673
+ "single_word": false,
674
+ "special": true
675
+ },
676
+ "32081": {
677
+ "content": "<extra_id_18>",
678
+ "lstrip": false,
679
+ "normalized": false,
680
+ "rstrip": false,
681
+ "single_word": false,
682
+ "special": true
683
+ },
684
+ "32082": {
685
+ "content": "<extra_id_17>",
686
+ "lstrip": false,
687
+ "normalized": false,
688
+ "rstrip": false,
689
+ "single_word": false,
690
+ "special": true
691
+ },
692
+ "32083": {
693
+ "content": "<extra_id_16>",
694
+ "lstrip": false,
695
+ "normalized": false,
696
+ "rstrip": false,
697
+ "single_word": false,
698
+ "special": true
699
+ },
700
+ "32084": {
701
+ "content": "<extra_id_15>",
702
+ "lstrip": false,
703
+ "normalized": false,
704
+ "rstrip": false,
705
+ "single_word": false,
706
+ "special": true
707
+ },
708
+ "32085": {
709
+ "content": "<extra_id_14>",
710
+ "lstrip": false,
711
+ "normalized": false,
712
+ "rstrip": false,
713
+ "single_word": false,
714
+ "special": true
715
+ },
716
+ "32086": {
717
+ "content": "<extra_id_13>",
718
+ "lstrip": false,
719
+ "normalized": false,
720
+ "rstrip": false,
721
+ "single_word": false,
722
+ "special": true
723
+ },
724
+ "32087": {
725
+ "content": "<extra_id_12>",
726
+ "lstrip": false,
727
+ "normalized": false,
728
+ "rstrip": false,
729
+ "single_word": false,
730
+ "special": true
731
+ },
732
+ "32088": {
733
+ "content": "<extra_id_11>",
734
+ "lstrip": false,
735
+ "normalized": false,
736
+ "rstrip": false,
737
+ "single_word": false,
738
+ "special": true
739
+ },
740
+ "32089": {
741
+ "content": "<extra_id_10>",
742
+ "lstrip": false,
743
+ "normalized": false,
744
+ "rstrip": false,
745
+ "single_word": false,
746
+ "special": true
747
+ },
748
+ "32090": {
749
+ "content": "<extra_id_9>",
750
+ "lstrip": false,
751
+ "normalized": false,
752
+ "rstrip": false,
753
+ "single_word": false,
754
+ "special": true
755
+ },
756
+ "32091": {
757
+ "content": "<extra_id_8>",
758
+ "lstrip": false,
759
+ "normalized": false,
760
+ "rstrip": false,
761
+ "single_word": false,
762
+ "special": true
763
+ },
764
+ "32092": {
765
+ "content": "<extra_id_7>",
766
+ "lstrip": false,
767
+ "normalized": false,
768
+ "rstrip": false,
769
+ "single_word": false,
770
+ "special": true
771
+ },
772
+ "32093": {
773
+ "content": "<extra_id_6>",
774
+ "lstrip": false,
775
+ "normalized": false,
776
+ "rstrip": false,
777
+ "single_word": false,
778
+ "special": true
779
+ },
780
+ "32094": {
781
+ "content": "<extra_id_5>",
782
+ "lstrip": false,
783
+ "normalized": false,
784
+ "rstrip": false,
785
+ "single_word": false,
786
+ "special": true
787
+ },
788
+ "32095": {
789
+ "content": "<extra_id_4>",
790
+ "lstrip": false,
791
+ "normalized": false,
792
+ "rstrip": false,
793
+ "single_word": false,
794
+ "special": true
795
+ },
796
+ "32096": {
797
+ "content": "<extra_id_3>",
798
+ "lstrip": false,
799
+ "normalized": false,
800
+ "rstrip": false,
801
+ "single_word": false,
802
+ "special": true
803
+ },
804
+ "32097": {
805
+ "content": "<extra_id_2>",
806
+ "lstrip": false,
807
+ "normalized": false,
808
+ "rstrip": false,
809
+ "single_word": false,
810
+ "special": true
811
+ },
812
+ "32098": {
813
+ "content": "<extra_id_1>",
814
+ "lstrip": false,
815
+ "normalized": false,
816
+ "rstrip": false,
817
+ "single_word": false,
818
+ "special": true
819
+ },
820
+ "32099": {
821
+ "content": "<extra_id_0>",
822
+ "lstrip": false,
823
+ "normalized": false,
824
+ "rstrip": false,
825
+ "single_word": false,
826
+ "special": true
827
+ }
828
+ },
829
+ "additional_special_tokens": [
830
+ "<extra_id_0>",
831
+ "<extra_id_1>",
832
+ "<extra_id_2>",
833
+ "<extra_id_3>",
834
+ "<extra_id_4>",
835
+ "<extra_id_5>",
836
+ "<extra_id_6>",
837
+ "<extra_id_7>",
838
+ "<extra_id_8>",
839
+ "<extra_id_9>",
840
+ "<extra_id_10>",
841
+ "<extra_id_11>",
842
+ "<extra_id_12>",
843
+ "<extra_id_13>",
844
+ "<extra_id_14>",
845
+ "<extra_id_15>",
846
+ "<extra_id_16>",
847
+ "<extra_id_17>",
848
+ "<extra_id_18>",
849
+ "<extra_id_19>",
850
+ "<extra_id_20>",
851
+ "<extra_id_21>",
852
+ "<extra_id_22>",
853
+ "<extra_id_23>",
854
+ "<extra_id_24>",
855
+ "<extra_id_25>",
856
+ "<extra_id_26>",
857
+ "<extra_id_27>",
858
+ "<extra_id_28>",
859
+ "<extra_id_29>",
860
+ "<extra_id_30>",
861
+ "<extra_id_31>",
862
+ "<extra_id_32>",
863
+ "<extra_id_33>",
864
+ "<extra_id_34>",
865
+ "<extra_id_35>",
866
+ "<extra_id_36>",
867
+ "<extra_id_37>",
868
+ "<extra_id_38>",
869
+ "<extra_id_39>",
870
+ "<extra_id_40>",
871
+ "<extra_id_41>",
872
+ "<extra_id_42>",
873
+ "<extra_id_43>",
874
+ "<extra_id_44>",
875
+ "<extra_id_45>",
876
+ "<extra_id_46>",
877
+ "<extra_id_47>",
878
+ "<extra_id_48>",
879
+ "<extra_id_49>",
880
+ "<extra_id_50>",
881
+ "<extra_id_51>",
882
+ "<extra_id_52>",
883
+ "<extra_id_53>",
884
+ "<extra_id_54>",
885
+ "<extra_id_55>",
886
+ "<extra_id_56>",
887
+ "<extra_id_57>",
888
+ "<extra_id_58>",
889
+ "<extra_id_59>",
890
+ "<extra_id_60>",
891
+ "<extra_id_61>",
892
+ "<extra_id_62>",
893
+ "<extra_id_63>",
894
+ "<extra_id_64>",
895
+ "<extra_id_65>",
896
+ "<extra_id_66>",
897
+ "<extra_id_67>",
898
+ "<extra_id_68>",
899
+ "<extra_id_69>",
900
+ "<extra_id_70>",
901
+ "<extra_id_71>",
902
+ "<extra_id_72>",
903
+ "<extra_id_73>",
904
+ "<extra_id_74>",
905
+ "<extra_id_75>",
906
+ "<extra_id_76>",
907
+ "<extra_id_77>",
908
+ "<extra_id_78>",
909
+ "<extra_id_79>",
910
+ "<extra_id_80>",
911
+ "<extra_id_81>",
912
+ "<extra_id_82>",
913
+ "<extra_id_83>",
914
+ "<extra_id_84>",
915
+ "<extra_id_85>",
916
+ "<extra_id_86>",
917
+ "<extra_id_87>",
918
+ "<extra_id_88>",
919
+ "<extra_id_89>",
920
+ "<extra_id_90>",
921
+ "<extra_id_91>",
922
+ "<extra_id_92>",
923
+ "<extra_id_93>",
924
+ "<extra_id_94>",
925
+ "<extra_id_95>",
926
+ "<extra_id_96>",
927
+ "<extra_id_97>",
928
+ "<extra_id_98>",
929
+ "<extra_id_99>"
930
+ ],
931
+ "clean_up_tokenization_spaces": false,
932
+ "eos_token": "</s>",
933
+ "extra_ids": 100,
934
+ "extra_special_tokens": {},
935
+ "max_length": 128,
936
+ "model_max_length": 512,
937
+ "pad_to_multiple_of": null,
938
+ "pad_token": "<pad>",
939
+ "pad_token_type_id": 0,
940
+ "padding_side": "right",
941
+ "sp_model_kwargs": {},
942
+ "stride": 0,
943
+ "tokenizer_class": "T5Tokenizer",
944
+ "truncation_side": "right",
945
+ "truncation_strategy": "longest_first",
946
+ "unk_token": "<unk>"
947
+ }
models/summarizer.tflite ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ae687886645a601cb23480eac60ba81d33a5bdfb97d0d8ed6b38bd857a1e4bae
3
+ size 311089352
scans_to_verify_summary/SwiadectwoMaturalne1.jpg ADDED

Git LFS Details

  • SHA256: 70b926f57d89d451124f1d26c24ff23535ff05ca2e24b23c76d2eeab0e06b8d9
  • Pointer size: 131 Bytes
  • Size of remote file: 382 kB
summarizer_learn.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import torch
3
+ from pathlib import Path
4
+ from datasets import Dataset
5
+ from transformers import (
6
+ AutoTokenizer,
7
+ AutoModelForSeq2SeqLM,
8
+ DataCollatorForSeq2Seq,
9
+ Seq2SeqTrainingArguments,
10
+ Seq2SeqTrainer
11
+ )
12
+
13
# --- PATH CONFIGURATION ---
# Go one level up from the 'summarizer' folder to the project root.
BASE_DIR = Path(__file__).resolve().parent.parent
DATA_ROOT = BASE_DIR / "content"     # raw OCR text inputs (*.txt, searched recursively)
TITLE_ROOT = BASE_DIR / "titles"     # target headlines; mirrors DATA_ROOT's relative layout
SUMMARY_ROOT = BASE_DIR / "summary"  # target summaries; mirrors DATA_ROOT's relative layout

MODEL_ID = "google/flan-t5-small"    # base checkpoint to fine-tune
OUTPUT_MODEL_DIR = BASE_DIR / "summarizer" / "models" / "flan_t5_custom"

MAX_INPUT_LEN = 512    # max source tokens (matches the saved config's n_positions = 512)
MAX_TARGET_LEN = 128   # max target/generated tokens
26
def load_data():
    """Build instruction-tuning pairs (input_text -> target_text) from disk.

    Walks DATA_ROOT recursively for *.txt files. For each file, a matching
    file at the same relative path under TITLE_ROOT yields a "headline:"
    pair and one under SUMMARY_ROOT yields a "summarize:" pair. Files with
    empty OCR content are skipped.
    """
    inputs, targets = [], []

    print(f"📂 Szukam danych w: {DATA_ROOT}")

    for source_file in DATA_ROOT.rglob("*.txt"):
        relative = source_file.relative_to(DATA_ROOT)

        # Raw OCR text is the input feature for both tasks.
        ocr_content = source_file.read_text(encoding="utf-8").strip()
        if not ocr_content:
            continue

        # Each (target-root, instruction-prefix) contributes one pair
        # when the corresponding target file exists.
        for target_root, prefix in ((TITLE_ROOT, "headline"), (SUMMARY_ROOT, "summarize")):
            target_file = target_root / relative
            if target_file.exists():
                inputs.append(f"{prefix}: {ocr_content}")
                targets.append(target_file.read_text(encoding="utf-8").strip())

    return Dataset.from_dict({"input_text": inputs, "target_text": targets})
58
+
59
def main():
    """Fine-tune FLAN-T5 on headline/summary pairs and save the result.

    Loads pairs via load_data(), splits 90/10 into train/test, tokenizes,
    trains a Seq2SeqTrainer for 15 epochs, then writes the model and
    tokenizer to OUTPUT_MODEL_DIR. Returns early if no data was found.
    """
    # 1. Data preparation
    raw_dataset = load_data()
    if len(raw_dataset) == 0:
        print("❌ Nie znaleziono plików w content/titles/summary. Sprawdź ścieżki.")
        return

    dataset = raw_dataset.train_test_split(test_size=0.1)

    # 2. Tokenizer and model
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_ID)

    def preprocess(examples):
        # Truncate only — no static padding. DataCollatorForSeq2Seq pads
        # each batch dynamically and fills label padding with -100 so the
        # loss ignores padded positions; statically padding labels with
        # the <pad> id (0), as before, made the loss score padding tokens.
        model_inputs = tokenizer(
            examples["input_text"], max_length=MAX_INPUT_LEN, truncation=True
        )
        labels = tokenizer(
            text_target=examples["target_text"],
            max_length=MAX_TARGET_LEN,
            truncation=True,
        )
        model_inputs["labels"] = labels["input_ids"]
        return model_inputs

    tokenized_dataset = dataset.map(preprocess, batched=True)

    # 3. Training arguments
    training_args = Seq2SeqTrainingArguments(
        output_dir="./tmp_results",
        eval_strategy="epoch",  # renamed from evaluation_strategy in recent transformers
        learning_rate=3e-4,
        per_device_train_batch_size=8,
        per_device_eval_batch_size=8,
        weight_decay=0.01,
        save_total_limit=2,
        num_train_epochs=15,
        predict_with_generate=True,
        fp16=False,
        logging_steps=10,
        # Evaluation-time generation settings.
        generation_max_length=MAX_TARGET_LEN,
        generation_num_beams=4,
    )

    # 4. Trainer
    trainer = Seq2SeqTrainer(
        model=model,
        args=training_args,
        train_dataset=tokenized_dataset["train"],
        eval_dataset=tokenized_dataset["test"],
        tokenizer=tokenizer,
        data_collator=DataCollatorForSeq2Seq(tokenizer, model=model),
    )

    print(f"🚀 Rozpoczynam uczenie na {len(raw_dataset)} przykładach...")
    trainer.train()

    # 5. Save the fine-tuned model and tokenizer side by side.
    os.makedirs(OUTPUT_MODEL_DIR, exist_ok=True)
    model.save_pretrained(OUTPUT_MODEL_DIR)
    tokenizer.save_pretrained(OUTPUT_MODEL_DIR)
    print(f"✨ Model wyuczony i zapisany w: {OUTPUT_MODEL_DIR}")


if __name__ == "__main__":
    main()
tmp_results/.DS_Store ADDED
Binary file (8.2 kB). View file
 
tmp_results/checkpoint-3000/config.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "T5ForConditionalGeneration"
4
+ ],
5
+ "classifier_dropout": 0.0,
6
+ "d_ff": 1024,
7
+ "d_kv": 64,
8
+ "d_model": 512,
9
+ "decoder_start_token_id": 0,
10
+ "dense_act_fn": "gelu_new",
11
+ "dropout_rate": 0.1,
12
+ "dtype": "float32",
13
+ "eos_token_id": 1,
14
+ "feed_forward_proj": "gated-gelu",
15
+ "initializer_factor": 1.0,
16
+ "is_encoder_decoder": true,
17
+ "is_gated_act": true,
18
+ "layer_norm_epsilon": 1e-06,
19
+ "model_type": "t5",
20
+ "n_positions": 512,
21
+ "num_decoder_layers": 8,
22
+ "num_heads": 6,
23
+ "num_layers": 8,
24
+ "output_past": true,
25
+ "pad_token_id": 0,
26
+ "relative_attention_max_distance": 128,
27
+ "relative_attention_num_buckets": 32,
28
+ "task_specific_params": {
29
+ "summarization": {
30
+ "early_stopping": true,
31
+ "length_penalty": 2.0,
32
+ "max_length": 200,
33
+ "min_length": 30,
34
+ "no_repeat_ngram_size": 3,
35
+ "num_beams": 4,
36
+ "prefix": "summarize: "
37
+ },
38
+ "translation_en_to_de": {
39
+ "early_stopping": true,
40
+ "max_length": 300,
41
+ "num_beams": 4,
42
+ "prefix": "translate English to German: "
43
+ },
44
+ "translation_en_to_fr": {
45
+ "early_stopping": true,
46
+ "max_length": 300,
47
+ "num_beams": 4,
48
+ "prefix": "translate English to French: "
49
+ },
50
+ "translation_en_to_ro": {
51
+ "early_stopping": true,
52
+ "max_length": 300,
53
+ "num_beams": 4,
54
+ "prefix": "translate English to Romanian: "
55
+ }
56
+ },
57
+ "tie_word_embeddings": false,
58
+ "transformers_version": "4.57.3",
59
+ "use_cache": true,
60
+ "vocab_size": 32128
61
+ }
tmp_results/checkpoint-3000/generation_config.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "decoder_start_token_id": 0,
4
+ "eos_token_id": [
5
+ 1
6
+ ],
7
+ "pad_token_id": 0,
8
+ "transformers_version": "4.57.3"
9
+ }
tmp_results/checkpoint-3000/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3e722fb3e0a8c1b5149360222cc8e30e7f2a5c37480bd6cdd04c672c99dd6283
3
+ size 307867048
tmp_results/checkpoint-3000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dee0095da2b543a823055baa4998a5c0fb010018c262f19cbfca257bf7ee3fad
3
+ size 615844491
tmp_results/checkpoint-3000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:70cdeeb84a5ac0d9321002a0360ebe56b500ad720a39b1b70977779db2dec847
3
+ size 14455
tmp_results/checkpoint-3000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b13bdc819ce523f450286fd3a4aa378d243a5ce2ce1f99328bfde200acf8691d
3
+ size 1465
tmp_results/checkpoint-3000/special_tokens_map.json ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<extra_id_0>",
4
+ "<extra_id_1>",
5
+ "<extra_id_2>",
6
+ "<extra_id_3>",
7
+ "<extra_id_4>",
8
+ "<extra_id_5>",
9
+ "<extra_id_6>",
10
+ "<extra_id_7>",
11
+ "<extra_id_8>",
12
+ "<extra_id_9>",
13
+ "<extra_id_10>",
14
+ "<extra_id_11>",
15
+ "<extra_id_12>",
16
+ "<extra_id_13>",
17
+ "<extra_id_14>",
18
+ "<extra_id_15>",
19
+ "<extra_id_16>",
20
+ "<extra_id_17>",
21
+ "<extra_id_18>",
22
+ "<extra_id_19>",
23
+ "<extra_id_20>",
24
+ "<extra_id_21>",
25
+ "<extra_id_22>",
26
+ "<extra_id_23>",
27
+ "<extra_id_24>",
28
+ "<extra_id_25>",
29
+ "<extra_id_26>",
30
+ "<extra_id_27>",
31
+ "<extra_id_28>",
32
+ "<extra_id_29>",
33
+ "<extra_id_30>",
34
+ "<extra_id_31>",
35
+ "<extra_id_32>",
36
+ "<extra_id_33>",
37
+ "<extra_id_34>",
38
+ "<extra_id_35>",
39
+ "<extra_id_36>",
40
+ "<extra_id_37>",
41
+ "<extra_id_38>",
42
+ "<extra_id_39>",
43
+ "<extra_id_40>",
44
+ "<extra_id_41>",
45
+ "<extra_id_42>",
46
+ "<extra_id_43>",
47
+ "<extra_id_44>",
48
+ "<extra_id_45>",
49
+ "<extra_id_46>",
50
+ "<extra_id_47>",
51
+ "<extra_id_48>",
52
+ "<extra_id_49>",
53
+ "<extra_id_50>",
54
+ "<extra_id_51>",
55
+ "<extra_id_52>",
56
+ "<extra_id_53>",
57
+ "<extra_id_54>",
58
+ "<extra_id_55>",
59
+ "<extra_id_56>",
60
+ "<extra_id_57>",
61
+ "<extra_id_58>",
62
+ "<extra_id_59>",
63
+ "<extra_id_60>",
64
+ "<extra_id_61>",
65
+ "<extra_id_62>",
66
+ "<extra_id_63>",
67
+ "<extra_id_64>",
68
+ "<extra_id_65>",
69
+ "<extra_id_66>",
70
+ "<extra_id_67>",
71
+ "<extra_id_68>",
72
+ "<extra_id_69>",
73
+ "<extra_id_70>",
74
+ "<extra_id_71>",
75
+ "<extra_id_72>",
76
+ "<extra_id_73>",
77
+ "<extra_id_74>",
78
+ "<extra_id_75>",
79
+ "<extra_id_76>",
80
+ "<extra_id_77>",
81
+ "<extra_id_78>",
82
+ "<extra_id_79>",
83
+ "<extra_id_80>",
84
+ "<extra_id_81>",
85
+ "<extra_id_82>",
86
+ "<extra_id_83>",
87
+ "<extra_id_84>",
88
+ "<extra_id_85>",
89
+ "<extra_id_86>",
90
+ "<extra_id_87>",
91
+ "<extra_id_88>",
92
+ "<extra_id_89>",
93
+ "<extra_id_90>",
94
+ "<extra_id_91>",
95
+ "<extra_id_92>",
96
+ "<extra_id_93>",
97
+ "<extra_id_94>",
98
+ "<extra_id_95>",
99
+ "<extra_id_96>",
100
+ "<extra_id_97>",
101
+ "<extra_id_98>",
102
+ "<extra_id_99>"
103
+ ],
104
+ "eos_token": {
105
+ "content": "</s>",
106
+ "lstrip": false,
107
+ "normalized": false,
108
+ "rstrip": false,
109
+ "single_word": false
110
+ },
111
+ "pad_token": {
112
+ "content": "<pad>",
113
+ "lstrip": false,
114
+ "normalized": false,
115
+ "rstrip": false,
116
+ "single_word": false
117
+ },
118
+ "unk_token": {
119
+ "content": "<unk>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false
124
+ }
125
+ }
tmp_results/checkpoint-3000/spiece.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
3
+ size 791656
tmp_results/checkpoint-3000/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tmp_results/checkpoint-3000/tokenizer_config.json ADDED
@@ -0,0 +1,940 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": null,
3
+ "added_tokens_decoder": {
4
+ "0": {
5
+ "content": "<pad>",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "1": {
13
+ "content": "</s>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "2": {
21
+ "content": "<unk>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false,
26
+ "special": true
27
+ },
28
+ "32000": {
29
+ "content": "<extra_id_99>",
30
+ "lstrip": false,
31
+ "normalized": false,
32
+ "rstrip": false,
33
+ "single_word": false,
34
+ "special": true
35
+ },
36
+ "32001": {
37
+ "content": "<extra_id_98>",
38
+ "lstrip": false,
39
+ "normalized": false,
40
+ "rstrip": false,
41
+ "single_word": false,
42
+ "special": true
43
+ },
44
+ "32002": {
45
+ "content": "<extra_id_97>",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false,
50
+ "special": true
51
+ },
52
+ "32003": {
53
+ "content": "<extra_id_96>",
54
+ "lstrip": false,
55
+ "normalized": false,
56
+ "rstrip": false,
57
+ "single_word": false,
58
+ "special": true
59
+ },
60
+ "32004": {
61
+ "content": "<extra_id_95>",
62
+ "lstrip": false,
63
+ "normalized": false,
64
+ "rstrip": false,
65
+ "single_word": false,
66
+ "special": true
67
+ },
68
+ "32005": {
69
+ "content": "<extra_id_94>",
70
+ "lstrip": false,
71
+ "normalized": false,
72
+ "rstrip": false,
73
+ "single_word": false,
74
+ "special": true
75
+ },
76
+ "32006": {
77
+ "content": "<extra_id_93>",
78
+ "lstrip": false,
79
+ "normalized": false,
80
+ "rstrip": false,
81
+ "single_word": false,
82
+ "special": true
83
+ },
84
+ "32007": {
85
+ "content": "<extra_id_92>",
86
+ "lstrip": false,
87
+ "normalized": false,
88
+ "rstrip": false,
89
+ "single_word": false,
90
+ "special": true
91
+ },
92
+ "32008": {
93
+ "content": "<extra_id_91>",
94
+ "lstrip": false,
95
+ "normalized": false,
96
+ "rstrip": false,
97
+ "single_word": false,
98
+ "special": true
99
+ },
100
+ "32009": {
101
+ "content": "<extra_id_90>",
102
+ "lstrip": false,
103
+ "normalized": false,
104
+ "rstrip": false,
105
+ "single_word": false,
106
+ "special": true
107
+ },
108
+ "32010": {
109
+ "content": "<extra_id_89>",
110
+ "lstrip": false,
111
+ "normalized": false,
112
+ "rstrip": false,
113
+ "single_word": false,
114
+ "special": true
115
+ },
116
+ "32011": {
117
+ "content": "<extra_id_88>",
118
+ "lstrip": false,
119
+ "normalized": false,
120
+ "rstrip": false,
121
+ "single_word": false,
122
+ "special": true
123
+ },
124
+ "32012": {
125
+ "content": "<extra_id_87>",
126
+ "lstrip": false,
127
+ "normalized": false,
128
+ "rstrip": false,
129
+ "single_word": false,
130
+ "special": true
131
+ },
132
+ "32013": {
133
+ "content": "<extra_id_86>",
134
+ "lstrip": false,
135
+ "normalized": false,
136
+ "rstrip": false,
137
+ "single_word": false,
138
+ "special": true
139
+ },
140
+ "32014": {
141
+ "content": "<extra_id_85>",
142
+ "lstrip": false,
143
+ "normalized": false,
144
+ "rstrip": false,
145
+ "single_word": false,
146
+ "special": true
147
+ },
148
+ "32015": {
149
+ "content": "<extra_id_84>",
150
+ "lstrip": false,
151
+ "normalized": false,
152
+ "rstrip": false,
153
+ "single_word": false,
154
+ "special": true
155
+ },
156
+ "32016": {
157
+ "content": "<extra_id_83>",
158
+ "lstrip": false,
159
+ "normalized": false,
160
+ "rstrip": false,
161
+ "single_word": false,
162
+ "special": true
163
+ },
164
+ "32017": {
165
+ "content": "<extra_id_82>",
166
+ "lstrip": false,
167
+ "normalized": false,
168
+ "rstrip": false,
169
+ "single_word": false,
170
+ "special": true
171
+ },
172
+ "32018": {
173
+ "content": "<extra_id_81>",
174
+ "lstrip": false,
175
+ "normalized": false,
176
+ "rstrip": false,
177
+ "single_word": false,
178
+ "special": true
179
+ },
180
+ "32019": {
181
+ "content": "<extra_id_80>",
182
+ "lstrip": false,
183
+ "normalized": false,
184
+ "rstrip": false,
185
+ "single_word": false,
186
+ "special": true
187
+ },
188
+ "32020": {
189
+ "content": "<extra_id_79>",
190
+ "lstrip": false,
191
+ "normalized": false,
192
+ "rstrip": false,
193
+ "single_word": false,
194
+ "special": true
195
+ },
196
+ "32021": {
197
+ "content": "<extra_id_78>",
198
+ "lstrip": false,
199
+ "normalized": false,
200
+ "rstrip": false,
201
+ "single_word": false,
202
+ "special": true
203
+ },
204
+ "32022": {
205
+ "content": "<extra_id_77>",
206
+ "lstrip": false,
207
+ "normalized": false,
208
+ "rstrip": false,
209
+ "single_word": false,
210
+ "special": true
211
+ },
212
+ "32023": {
213
+ "content": "<extra_id_76>",
214
+ "lstrip": false,
215
+ "normalized": false,
216
+ "rstrip": false,
217
+ "single_word": false,
218
+ "special": true
219
+ },
220
+ "32024": {
221
+ "content": "<extra_id_75>",
222
+ "lstrip": false,
223
+ "normalized": false,
224
+ "rstrip": false,
225
+ "single_word": false,
226
+ "special": true
227
+ },
228
+ "32025": {
229
+ "content": "<extra_id_74>",
230
+ "lstrip": false,
231
+ "normalized": false,
232
+ "rstrip": false,
233
+ "single_word": false,
234
+ "special": true
235
+ },
236
+ "32026": {
237
+ "content": "<extra_id_73>",
238
+ "lstrip": false,
239
+ "normalized": false,
240
+ "rstrip": false,
241
+ "single_word": false,
242
+ "special": true
243
+ },
244
+ "32027": {
245
+ "content": "<extra_id_72>",
246
+ "lstrip": false,
247
+ "normalized": false,
248
+ "rstrip": false,
249
+ "single_word": false,
250
+ "special": true
251
+ },
252
+ "32028": {
253
+ "content": "<extra_id_71>",
254
+ "lstrip": false,
255
+ "normalized": false,
256
+ "rstrip": false,
257
+ "single_word": false,
258
+ "special": true
259
+ },
260
+ "32029": {
261
+ "content": "<extra_id_70>",
262
+ "lstrip": false,
263
+ "normalized": false,
264
+ "rstrip": false,
265
+ "single_word": false,
266
+ "special": true
267
+ },
268
+ "32030": {
269
+ "content": "<extra_id_69>",
270
+ "lstrip": false,
271
+ "normalized": false,
272
+ "rstrip": false,
273
+ "single_word": false,
274
+ "special": true
275
+ },
276
+ "32031": {
277
+ "content": "<extra_id_68>",
278
+ "lstrip": false,
279
+ "normalized": false,
280
+ "rstrip": false,
281
+ "single_word": false,
282
+ "special": true
283
+ },
284
+ "32032": {
285
+ "content": "<extra_id_67>",
286
+ "lstrip": false,
287
+ "normalized": false,
288
+ "rstrip": false,
289
+ "single_word": false,
290
+ "special": true
291
+ },
292
+ "32033": {
293
+ "content": "<extra_id_66>",
294
+ "lstrip": false,
295
+ "normalized": false,
296
+ "rstrip": false,
297
+ "single_word": false,
298
+ "special": true
299
+ },
300
+ "32034": {
301
+ "content": "<extra_id_65>",
302
+ "lstrip": false,
303
+ "normalized": false,
304
+ "rstrip": false,
305
+ "single_word": false,
306
+ "special": true
307
+ },
308
+ "32035": {
309
+ "content": "<extra_id_64>",
310
+ "lstrip": false,
311
+ "normalized": false,
312
+ "rstrip": false,
313
+ "single_word": false,
314
+ "special": true
315
+ },
316
+ "32036": {
317
+ "content": "<extra_id_63>",
318
+ "lstrip": false,
319
+ "normalized": false,
320
+ "rstrip": false,
321
+ "single_word": false,
322
+ "special": true
323
+ },
324
+ "32037": {
325
+ "content": "<extra_id_62>",
326
+ "lstrip": false,
327
+ "normalized": false,
328
+ "rstrip": false,
329
+ "single_word": false,
330
+ "special": true
331
+ },
332
+ "32038": {
333
+ "content": "<extra_id_61>",
334
+ "lstrip": false,
335
+ "normalized": false,
336
+ "rstrip": false,
337
+ "single_word": false,
338
+ "special": true
339
+ },
340
+ "32039": {
341
+ "content": "<extra_id_60>",
342
+ "lstrip": false,
343
+ "normalized": false,
344
+ "rstrip": false,
345
+ "single_word": false,
346
+ "special": true
347
+ },
348
+ "32040": {
349
+ "content": "<extra_id_59>",
350
+ "lstrip": false,
351
+ "normalized": false,
352
+ "rstrip": false,
353
+ "single_word": false,
354
+ "special": true
355
+ },
356
+ "32041": {
357
+ "content": "<extra_id_58>",
358
+ "lstrip": false,
359
+ "normalized": false,
360
+ "rstrip": false,
361
+ "single_word": false,
362
+ "special": true
363
+ },
364
+ "32042": {
365
+ "content": "<extra_id_57>",
366
+ "lstrip": false,
367
+ "normalized": false,
368
+ "rstrip": false,
369
+ "single_word": false,
370
+ "special": true
371
+ },
372
+ "32043": {
373
+ "content": "<extra_id_56>",
374
+ "lstrip": false,
375
+ "normalized": false,
376
+ "rstrip": false,
377
+ "single_word": false,
378
+ "special": true
379
+ },
380
+ "32044": {
381
+ "content": "<extra_id_55>",
382
+ "lstrip": false,
383
+ "normalized": false,
384
+ "rstrip": false,
385
+ "single_word": false,
386
+ "special": true
387
+ },
388
+ "32045": {
389
+ "content": "<extra_id_54>",
390
+ "lstrip": false,
391
+ "normalized": false,
392
+ "rstrip": false,
393
+ "single_word": false,
394
+ "special": true
395
+ },
396
+ "32046": {
397
+ "content": "<extra_id_53>",
398
+ "lstrip": false,
399
+ "normalized": false,
400
+ "rstrip": false,
401
+ "single_word": false,
402
+ "special": true
403
+ },
404
+ "32047": {
405
+ "content": "<extra_id_52>",
406
+ "lstrip": false,
407
+ "normalized": false,
408
+ "rstrip": false,
409
+ "single_word": false,
410
+ "special": true
411
+ },
412
+ "32048": {
413
+ "content": "<extra_id_51>",
414
+ "lstrip": false,
415
+ "normalized": false,
416
+ "rstrip": false,
417
+ "single_word": false,
418
+ "special": true
419
+ },
420
+ "32049": {
421
+ "content": "<extra_id_50>",
422
+ "lstrip": false,
423
+ "normalized": false,
424
+ "rstrip": false,
425
+ "single_word": false,
426
+ "special": true
427
+ },
428
+ "32050": {
429
+ "content": "<extra_id_49>",
430
+ "lstrip": false,
431
+ "normalized": false,
432
+ "rstrip": false,
433
+ "single_word": false,
434
+ "special": true
435
+ },
436
+ "32051": {
437
+ "content": "<extra_id_48>",
438
+ "lstrip": false,
439
+ "normalized": false,
440
+ "rstrip": false,
441
+ "single_word": false,
442
+ "special": true
443
+ },
444
+ "32052": {
445
+ "content": "<extra_id_47>",
446
+ "lstrip": false,
447
+ "normalized": false,
448
+ "rstrip": false,
449
+ "single_word": false,
450
+ "special": true
451
+ },
452
+ "32053": {
453
+ "content": "<extra_id_46>",
454
+ "lstrip": false,
455
+ "normalized": false,
456
+ "rstrip": false,
457
+ "single_word": false,
458
+ "special": true
459
+ },
460
+ "32054": {
461
+ "content": "<extra_id_45>",
462
+ "lstrip": false,
463
+ "normalized": false,
464
+ "rstrip": false,
465
+ "single_word": false,
466
+ "special": true
467
+ },
468
+ "32055": {
469
+ "content": "<extra_id_44>",
470
+ "lstrip": false,
471
+ "normalized": false,
472
+ "rstrip": false,
473
+ "single_word": false,
474
+ "special": true
475
+ },
476
+ "32056": {
477
+ "content": "<extra_id_43>",
478
+ "lstrip": false,
479
+ "normalized": false,
480
+ "rstrip": false,
481
+ "single_word": false,
482
+ "special": true
483
+ },
484
+ "32057": {
485
+ "content": "<extra_id_42>",
486
+ "lstrip": false,
487
+ "normalized": false,
488
+ "rstrip": false,
489
+ "single_word": false,
490
+ "special": true
491
+ },
492
+ "32058": {
493
+ "content": "<extra_id_41>",
494
+ "lstrip": false,
495
+ "normalized": false,
496
+ "rstrip": false,
497
+ "single_word": false,
498
+ "special": true
499
+ },
500
+ "32059": {
501
+ "content": "<extra_id_40>",
502
+ "lstrip": false,
503
+ "normalized": false,
504
+ "rstrip": false,
505
+ "single_word": false,
506
+ "special": true
507
+ },
508
+ "32060": {
509
+ "content": "<extra_id_39>",
510
+ "lstrip": false,
511
+ "normalized": false,
512
+ "rstrip": false,
513
+ "single_word": false,
514
+ "special": true
515
+ },
516
+ "32061": {
517
+ "content": "<extra_id_38>",
518
+ "lstrip": false,
519
+ "normalized": false,
520
+ "rstrip": false,
521
+ "single_word": false,
522
+ "special": true
523
+ },
524
+ "32062": {
525
+ "content": "<extra_id_37>",
526
+ "lstrip": false,
527
+ "normalized": false,
528
+ "rstrip": false,
529
+ "single_word": false,
530
+ "special": true
531
+ },
532
+ "32063": {
533
+ "content": "<extra_id_36>",
534
+ "lstrip": false,
535
+ "normalized": false,
536
+ "rstrip": false,
537
+ "single_word": false,
538
+ "special": true
539
+ },
540
+ "32064": {
541
+ "content": "<extra_id_35>",
542
+ "lstrip": false,
543
+ "normalized": false,
544
+ "rstrip": false,
545
+ "single_word": false,
546
+ "special": true
547
+ },
548
+ "32065": {
549
+ "content": "<extra_id_34>",
550
+ "lstrip": false,
551
+ "normalized": false,
552
+ "rstrip": false,
553
+ "single_word": false,
554
+ "special": true
555
+ },
556
+ "32066": {
557
+ "content": "<extra_id_33>",
558
+ "lstrip": false,
559
+ "normalized": false,
560
+ "rstrip": false,
561
+ "single_word": false,
562
+ "special": true
563
+ },
564
+ "32067": {
565
+ "content": "<extra_id_32>",
566
+ "lstrip": false,
567
+ "normalized": false,
568
+ "rstrip": false,
569
+ "single_word": false,
570
+ "special": true
571
+ },
572
+ "32068": {
573
+ "content": "<extra_id_31>",
574
+ "lstrip": false,
575
+ "normalized": false,
576
+ "rstrip": false,
577
+ "single_word": false,
578
+ "special": true
579
+ },
580
+ "32069": {
581
+ "content": "<extra_id_30>",
582
+ "lstrip": false,
583
+ "normalized": false,
584
+ "rstrip": false,
585
+ "single_word": false,
586
+ "special": true
587
+ },
588
+ "32070": {
589
+ "content": "<extra_id_29>",
590
+ "lstrip": false,
591
+ "normalized": false,
592
+ "rstrip": false,
593
+ "single_word": false,
594
+ "special": true
595
+ },
596
+ "32071": {
597
+ "content": "<extra_id_28>",
598
+ "lstrip": false,
599
+ "normalized": false,
600
+ "rstrip": false,
601
+ "single_word": false,
602
+ "special": true
603
+ },
604
+ "32072": {
605
+ "content": "<extra_id_27>",
606
+ "lstrip": false,
607
+ "normalized": false,
608
+ "rstrip": false,
609
+ "single_word": false,
610
+ "special": true
611
+ },
612
+ "32073": {
613
+ "content": "<extra_id_26>",
614
+ "lstrip": false,
615
+ "normalized": false,
616
+ "rstrip": false,
617
+ "single_word": false,
618
+ "special": true
619
+ },
620
+ "32074": {
621
+ "content": "<extra_id_25>",
622
+ "lstrip": false,
623
+ "normalized": false,
624
+ "rstrip": false,
625
+ "single_word": false,
626
+ "special": true
627
+ },
628
+ "32075": {
629
+ "content": "<extra_id_24>",
630
+ "lstrip": false,
631
+ "normalized": false,
632
+ "rstrip": false,
633
+ "single_word": false,
634
+ "special": true
635
+ },
636
+ "32076": {
637
+ "content": "<extra_id_23>",
638
+ "lstrip": false,
639
+ "normalized": false,
640
+ "rstrip": false,
641
+ "single_word": false,
642
+ "special": true
643
+ },
644
+ "32077": {
645
+ "content": "<extra_id_22>",
646
+ "lstrip": false,
647
+ "normalized": false,
648
+ "rstrip": false,
649
+ "single_word": false,
650
+ "special": true
651
+ },
652
+ "32078": {
653
+ "content": "<extra_id_21>",
654
+ "lstrip": false,
655
+ "normalized": false,
656
+ "rstrip": false,
657
+ "single_word": false,
658
+ "special": true
659
+ },
660
+ "32079": {
661
+ "content": "<extra_id_20>",
662
+ "lstrip": false,
663
+ "normalized": false,
664
+ "rstrip": false,
665
+ "single_word": false,
666
+ "special": true
667
+ },
668
+ "32080": {
669
+ "content": "<extra_id_19>",
670
+ "lstrip": false,
671
+ "normalized": false,
672
+ "rstrip": false,
673
+ "single_word": false,
674
+ "special": true
675
+ },
676
+ "32081": {
677
+ "content": "<extra_id_18>",
678
+ "lstrip": false,
679
+ "normalized": false,
680
+ "rstrip": false,
681
+ "single_word": false,
682
+ "special": true
683
+ },
684
+ "32082": {
685
+ "content": "<extra_id_17>",
686
+ "lstrip": false,
687
+ "normalized": false,
688
+ "rstrip": false,
689
+ "single_word": false,
690
+ "special": true
691
+ },
692
+ "32083": {
693
+ "content": "<extra_id_16>",
694
+ "lstrip": false,
695
+ "normalized": false,
696
+ "rstrip": false,
697
+ "single_word": false,
698
+ "special": true
699
+ },
700
+ "32084": {
701
+ "content": "<extra_id_15>",
702
+ "lstrip": false,
703
+ "normalized": false,
704
+ "rstrip": false,
705
+ "single_word": false,
706
+ "special": true
707
+ },
708
+ "32085": {
709
+ "content": "<extra_id_14>",
710
+ "lstrip": false,
711
+ "normalized": false,
712
+ "rstrip": false,
713
+ "single_word": false,
714
+ "special": true
715
+ },
716
+ "32086": {
717
+ "content": "<extra_id_13>",
718
+ "lstrip": false,
719
+ "normalized": false,
720
+ "rstrip": false,
721
+ "single_word": false,
722
+ "special": true
723
+ },
724
+ "32087": {
725
+ "content": "<extra_id_12>",
726
+ "lstrip": false,
727
+ "normalized": false,
728
+ "rstrip": false,
729
+ "single_word": false,
730
+ "special": true
731
+ },
732
+ "32088": {
733
+ "content": "<extra_id_11>",
734
+ "lstrip": false,
735
+ "normalized": false,
736
+ "rstrip": false,
737
+ "single_word": false,
738
+ "special": true
739
+ },
740
+ "32089": {
741
+ "content": "<extra_id_10>",
742
+ "lstrip": false,
743
+ "normalized": false,
744
+ "rstrip": false,
745
+ "single_word": false,
746
+ "special": true
747
+ },
748
+ "32090": {
749
+ "content": "<extra_id_9>",
750
+ "lstrip": false,
751
+ "normalized": false,
752
+ "rstrip": false,
753
+ "single_word": false,
754
+ "special": true
755
+ },
756
+ "32091": {
757
+ "content": "<extra_id_8>",
758
+ "lstrip": false,
759
+ "normalized": false,
760
+ "rstrip": false,
761
+ "single_word": false,
762
+ "special": true
763
+ },
764
+ "32092": {
765
+ "content": "<extra_id_7>",
766
+ "lstrip": false,
767
+ "normalized": false,
768
+ "rstrip": false,
769
+ "single_word": false,
770
+ "special": true
771
+ },
772
+ "32093": {
773
+ "content": "<extra_id_6>",
774
+ "lstrip": false,
775
+ "normalized": false,
776
+ "rstrip": false,
777
+ "single_word": false,
778
+ "special": true
779
+ },
780
+ "32094": {
781
+ "content": "<extra_id_5>",
782
+ "lstrip": false,
783
+ "normalized": false,
784
+ "rstrip": false,
785
+ "single_word": false,
786
+ "special": true
787
+ },
788
+ "32095": {
789
+ "content": "<extra_id_4>",
790
+ "lstrip": false,
791
+ "normalized": false,
792
+ "rstrip": false,
793
+ "single_word": false,
794
+ "special": true
795
+ },
796
+ "32096": {
797
+ "content": "<extra_id_3>",
798
+ "lstrip": false,
799
+ "normalized": false,
800
+ "rstrip": false,
801
+ "single_word": false,
802
+ "special": true
803
+ },
804
+ "32097": {
805
+ "content": "<extra_id_2>",
806
+ "lstrip": false,
807
+ "normalized": false,
808
+ "rstrip": false,
809
+ "single_word": false,
810
+ "special": true
811
+ },
812
+ "32098": {
813
+ "content": "<extra_id_1>",
814
+ "lstrip": false,
815
+ "normalized": false,
816
+ "rstrip": false,
817
+ "single_word": false,
818
+ "special": true
819
+ },
820
+ "32099": {
821
+ "content": "<extra_id_0>",
822
+ "lstrip": false,
823
+ "normalized": false,
824
+ "rstrip": false,
825
+ "single_word": false,
826
+ "special": true
827
+ }
828
+ },
829
+ "additional_special_tokens": [
830
+ "<extra_id_0>",
831
+ "<extra_id_1>",
832
+ "<extra_id_2>",
833
+ "<extra_id_3>",
834
+ "<extra_id_4>",
835
+ "<extra_id_5>",
836
+ "<extra_id_6>",
837
+ "<extra_id_7>",
838
+ "<extra_id_8>",
839
+ "<extra_id_9>",
840
+ "<extra_id_10>",
841
+ "<extra_id_11>",
842
+ "<extra_id_12>",
843
+ "<extra_id_13>",
844
+ "<extra_id_14>",
845
+ "<extra_id_15>",
846
+ "<extra_id_16>",
847
+ "<extra_id_17>",
848
+ "<extra_id_18>",
849
+ "<extra_id_19>",
850
+ "<extra_id_20>",
851
+ "<extra_id_21>",
852
+ "<extra_id_22>",
853
+ "<extra_id_23>",
854
+ "<extra_id_24>",
855
+ "<extra_id_25>",
856
+ "<extra_id_26>",
857
+ "<extra_id_27>",
858
+ "<extra_id_28>",
859
+ "<extra_id_29>",
860
+ "<extra_id_30>",
861
+ "<extra_id_31>",
862
+ "<extra_id_32>",
863
+ "<extra_id_33>",
864
+ "<extra_id_34>",
865
+ "<extra_id_35>",
866
+ "<extra_id_36>",
867
+ "<extra_id_37>",
868
+ "<extra_id_38>",
869
+ "<extra_id_39>",
870
+ "<extra_id_40>",
871
+ "<extra_id_41>",
872
+ "<extra_id_42>",
873
+ "<extra_id_43>",
874
+ "<extra_id_44>",
875
+ "<extra_id_45>",
876
+ "<extra_id_46>",
877
+ "<extra_id_47>",
878
+ "<extra_id_48>",
879
+ "<extra_id_49>",
880
+ "<extra_id_50>",
881
+ "<extra_id_51>",
882
+ "<extra_id_52>",
883
+ "<extra_id_53>",
884
+ "<extra_id_54>",
885
+ "<extra_id_55>",
886
+ "<extra_id_56>",
887
+ "<extra_id_57>",
888
+ "<extra_id_58>",
889
+ "<extra_id_59>",
890
+ "<extra_id_60>",
891
+ "<extra_id_61>",
892
+ "<extra_id_62>",
893
+ "<extra_id_63>",
894
+ "<extra_id_64>",
895
+ "<extra_id_65>",
896
+ "<extra_id_66>",
897
+ "<extra_id_67>",
898
+ "<extra_id_68>",
899
+ "<extra_id_69>",
900
+ "<extra_id_70>",
901
+ "<extra_id_71>",
902
+ "<extra_id_72>",
903
+ "<extra_id_73>",
904
+ "<extra_id_74>",
905
+ "<extra_id_75>",
906
+ "<extra_id_76>",
907
+ "<extra_id_77>",
908
+ "<extra_id_78>",
909
+ "<extra_id_79>",
910
+ "<extra_id_80>",
911
+ "<extra_id_81>",
912
+ "<extra_id_82>",
913
+ "<extra_id_83>",
914
+ "<extra_id_84>",
915
+ "<extra_id_85>",
916
+ "<extra_id_86>",
917
+ "<extra_id_87>",
918
+ "<extra_id_88>",
919
+ "<extra_id_89>",
920
+ "<extra_id_90>",
921
+ "<extra_id_91>",
922
+ "<extra_id_92>",
923
+ "<extra_id_93>",
924
+ "<extra_id_94>",
925
+ "<extra_id_95>",
926
+ "<extra_id_96>",
927
+ "<extra_id_97>",
928
+ "<extra_id_98>",
929
+ "<extra_id_99>"
930
+ ],
931
+ "clean_up_tokenization_spaces": false,
932
+ "eos_token": "</s>",
933
+ "extra_ids": 100,
934
+ "extra_special_tokens": {},
935
+ "model_max_length": 512,
936
+ "pad_token": "<pad>",
937
+ "sp_model_kwargs": {},
938
+ "tokenizer_class": "T5Tokenizer",
939
+ "unk_token": "<unk>"
940
+ }
tmp_results/checkpoint-3000/trainer_state.json ADDED
@@ -0,0 +1,2238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 13.215859030837004,
6
+ "eval_steps": 500,
7
+ "global_step": 3000,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.04405286343612335,
14
+ "grad_norm": 32.503074645996094,
15
+ "learning_rate": 0.0002992070484581498,
16
+ "loss": 18.6527,
17
+ "step": 10
18
+ },
19
+ {
20
+ "epoch": 0.0881057268722467,
21
+ "grad_norm": 14.659343719482422,
22
+ "learning_rate": 0.0002983259911894273,
23
+ "loss": 6.941,
24
+ "step": 20
25
+ },
26
+ {
27
+ "epoch": 0.13215859030837004,
28
+ "grad_norm": 13.076040267944336,
29
+ "learning_rate": 0.00029744493392070483,
30
+ "loss": 4.3562,
31
+ "step": 30
32
+ },
33
+ {
34
+ "epoch": 0.1762114537444934,
35
+ "grad_norm": 21.742950439453125,
36
+ "learning_rate": 0.00029656387665198236,
37
+ "loss": 3.5182,
38
+ "step": 40
39
+ },
40
+ {
41
+ "epoch": 0.22026431718061673,
42
+ "grad_norm": 19.481538772583008,
43
+ "learning_rate": 0.0002956828193832599,
44
+ "loss": 2.6543,
45
+ "step": 50
46
+ },
47
+ {
48
+ "epoch": 0.2643171806167401,
49
+ "grad_norm": 8.22918701171875,
50
+ "learning_rate": 0.0002948017621145374,
51
+ "loss": 1.7678,
52
+ "step": 60
53
+ },
54
+ {
55
+ "epoch": 0.30837004405286345,
56
+ "grad_norm": 5.820700168609619,
57
+ "learning_rate": 0.00029392070484581494,
58
+ "loss": 1.2462,
59
+ "step": 70
60
+ },
61
+ {
62
+ "epoch": 0.3524229074889868,
63
+ "grad_norm": 2.173941135406494,
64
+ "learning_rate": 0.00029303964757709247,
65
+ "loss": 1.3665,
66
+ "step": 80
67
+ },
68
+ {
69
+ "epoch": 0.3964757709251101,
70
+ "grad_norm": 1.8419830799102783,
71
+ "learning_rate": 0.00029215859030837,
72
+ "loss": 1.1257,
73
+ "step": 90
74
+ },
75
+ {
76
+ "epoch": 0.44052863436123346,
77
+ "grad_norm": 1.7700544595718384,
78
+ "learning_rate": 0.0002912775330396475,
79
+ "loss": 1.0317,
80
+ "step": 100
81
+ },
82
+ {
83
+ "epoch": 0.4845814977973568,
84
+ "grad_norm": 1.2313016653060913,
85
+ "learning_rate": 0.0002903964757709251,
86
+ "loss": 0.8453,
87
+ "step": 110
88
+ },
89
+ {
90
+ "epoch": 0.5286343612334802,
91
+ "grad_norm": 1.3880990743637085,
92
+ "learning_rate": 0.00028951541850220263,
93
+ "loss": 0.8675,
94
+ "step": 120
95
+ },
96
+ {
97
+ "epoch": 0.5726872246696035,
98
+ "grad_norm": 2.3586199283599854,
99
+ "learning_rate": 0.00028863436123348016,
100
+ "loss": 0.8509,
101
+ "step": 130
102
+ },
103
+ {
104
+ "epoch": 0.6167400881057269,
105
+ "grad_norm": 1.3902242183685303,
106
+ "learning_rate": 0.0002877533039647577,
107
+ "loss": 0.8528,
108
+ "step": 140
109
+ },
110
+ {
111
+ "epoch": 0.6607929515418502,
112
+ "grad_norm": 1.0430246591567993,
113
+ "learning_rate": 0.0002868722466960352,
114
+ "loss": 0.8726,
115
+ "step": 150
116
+ },
117
+ {
118
+ "epoch": 0.7048458149779736,
119
+ "grad_norm": 1.3441425561904907,
120
+ "learning_rate": 0.00028599118942731274,
121
+ "loss": 1.1029,
122
+ "step": 160
123
+ },
124
+ {
125
+ "epoch": 0.748898678414097,
126
+ "grad_norm": 1.18771231174469,
127
+ "learning_rate": 0.00028511013215859026,
128
+ "loss": 0.9229,
129
+ "step": 170
130
+ },
131
+ {
132
+ "epoch": 0.7929515418502202,
133
+ "grad_norm": 0.8050010800361633,
134
+ "learning_rate": 0.0002842290748898678,
135
+ "loss": 0.8363,
136
+ "step": 180
137
+ },
138
+ {
139
+ "epoch": 0.8370044052863436,
140
+ "grad_norm": 1.1800554990768433,
141
+ "learning_rate": 0.0002833480176211453,
142
+ "loss": 0.8128,
143
+ "step": 190
144
+ },
145
+ {
146
+ "epoch": 0.8810572687224669,
147
+ "grad_norm": 1.3873122930526733,
148
+ "learning_rate": 0.00028246696035242285,
149
+ "loss": 0.8275,
150
+ "step": 200
151
+ },
152
+ {
153
+ "epoch": 0.9251101321585903,
154
+ "grad_norm": 1.1561434268951416,
155
+ "learning_rate": 0.0002815859030837004,
156
+ "loss": 0.8961,
157
+ "step": 210
158
+ },
159
+ {
160
+ "epoch": 0.9691629955947136,
161
+ "grad_norm": 0.6641005277633667,
162
+ "learning_rate": 0.00028070484581497795,
163
+ "loss": 0.6599,
164
+ "step": 220
165
+ },
166
+ {
167
+ "epoch": 1.0,
168
+ "eval_loss": 0.6834425330162048,
169
+ "eval_runtime": 5.6075,
170
+ "eval_samples_per_second": 36.023,
171
+ "eval_steps_per_second": 4.637,
172
+ "step": 227
173
+ },
174
+ {
175
+ "epoch": 1.013215859030837,
176
+ "grad_norm": 1.0736805200576782,
177
+ "learning_rate": 0.0002798237885462555,
178
+ "loss": 0.7283,
179
+ "step": 230
180
+ },
181
+ {
182
+ "epoch": 1.0572687224669604,
183
+ "grad_norm": 1.069932460784912,
184
+ "learning_rate": 0.000278942731277533,
185
+ "loss": 0.825,
186
+ "step": 240
187
+ },
188
+ {
189
+ "epoch": 1.1013215859030836,
190
+ "grad_norm": 1.3098456859588623,
191
+ "learning_rate": 0.0002780616740088106,
192
+ "loss": 0.7152,
193
+ "step": 250
194
+ },
195
+ {
196
+ "epoch": 1.145374449339207,
197
+ "grad_norm": 1.0547797679901123,
198
+ "learning_rate": 0.0002771806167400881,
199
+ "loss": 0.746,
200
+ "step": 260
201
+ },
202
+ {
203
+ "epoch": 1.1894273127753303,
204
+ "grad_norm": 1.58526611328125,
205
+ "learning_rate": 0.00027629955947136564,
206
+ "loss": 0.6655,
207
+ "step": 270
208
+ },
209
+ {
210
+ "epoch": 1.2334801762114538,
211
+ "grad_norm": 1.4090569019317627,
212
+ "learning_rate": 0.0002754185022026431,
213
+ "loss": 0.7397,
214
+ "step": 280
215
+ },
216
+ {
217
+ "epoch": 1.277533039647577,
218
+ "grad_norm": 1.3417810201644897,
219
+ "learning_rate": 0.00027453744493392064,
220
+ "loss": 0.6534,
221
+ "step": 290
222
+ },
223
+ {
224
+ "epoch": 1.3215859030837005,
225
+ "grad_norm": 0.7320300936698914,
226
+ "learning_rate": 0.0002736563876651982,
227
+ "loss": 0.719,
228
+ "step": 300
229
+ },
230
+ {
231
+ "epoch": 1.3656387665198237,
232
+ "grad_norm": 1.7867811918258667,
233
+ "learning_rate": 0.00027277533039647575,
234
+ "loss": 0.8878,
235
+ "step": 310
236
+ },
237
+ {
238
+ "epoch": 1.4096916299559472,
239
+ "grad_norm": 1.0332417488098145,
240
+ "learning_rate": 0.0002718942731277533,
241
+ "loss": 0.7058,
242
+ "step": 320
243
+ },
244
+ {
245
+ "epoch": 1.4537444933920705,
246
+ "grad_norm": 1.1942096948623657,
247
+ "learning_rate": 0.0002710132158590308,
248
+ "loss": 0.6599,
249
+ "step": 330
250
+ },
251
+ {
252
+ "epoch": 1.497797356828194,
253
+ "grad_norm": 1.0352708101272583,
254
+ "learning_rate": 0.00027013215859030833,
255
+ "loss": 0.6487,
256
+ "step": 340
257
+ },
258
+ {
259
+ "epoch": 1.5418502202643172,
260
+ "grad_norm": 1.2984694242477417,
261
+ "learning_rate": 0.0002692511013215859,
262
+ "loss": 0.7225,
263
+ "step": 350
264
+ },
265
+ {
266
+ "epoch": 1.5859030837004404,
267
+ "grad_norm": 1.1419997215270996,
268
+ "learning_rate": 0.00026837004405286344,
269
+ "loss": 0.7233,
270
+ "step": 360
271
+ },
272
+ {
273
+ "epoch": 1.6299559471365639,
274
+ "grad_norm": 1.5873011350631714,
275
+ "learning_rate": 0.00026748898678414097,
276
+ "loss": 0.5429,
277
+ "step": 370
278
+ },
279
+ {
280
+ "epoch": 1.6740088105726874,
281
+ "grad_norm": 1.058026671409607,
282
+ "learning_rate": 0.0002666079295154185,
283
+ "loss": 0.7621,
284
+ "step": 380
285
+ },
286
+ {
287
+ "epoch": 1.7180616740088106,
288
+ "grad_norm": 1.424886703491211,
289
+ "learning_rate": 0.000265726872246696,
290
+ "loss": 0.7103,
291
+ "step": 390
292
+ },
293
+ {
294
+ "epoch": 1.7621145374449338,
295
+ "grad_norm": 0.9987337589263916,
296
+ "learning_rate": 0.00026484581497797355,
297
+ "loss": 0.6882,
298
+ "step": 400
299
+ },
300
+ {
301
+ "epoch": 1.8061674008810573,
302
+ "grad_norm": 1.0241808891296387,
303
+ "learning_rate": 0.0002639647577092511,
304
+ "loss": 0.6754,
305
+ "step": 410
306
+ },
307
+ {
308
+ "epoch": 1.8502202643171806,
309
+ "grad_norm": 0.7069824934005737,
310
+ "learning_rate": 0.0002630837004405286,
311
+ "loss": 0.6426,
312
+ "step": 420
313
+ },
314
+ {
315
+ "epoch": 1.894273127753304,
316
+ "grad_norm": 1.18909752368927,
317
+ "learning_rate": 0.00026220264317180613,
318
+ "loss": 0.7879,
319
+ "step": 430
320
+ },
321
+ {
322
+ "epoch": 1.9383259911894273,
323
+ "grad_norm": 0.8950007557868958,
324
+ "learning_rate": 0.00026132158590308366,
325
+ "loss": 0.7197,
326
+ "step": 440
327
+ },
328
+ {
329
+ "epoch": 1.9823788546255505,
330
+ "grad_norm": 1.3497512340545654,
331
+ "learning_rate": 0.00026044052863436124,
332
+ "loss": 0.6892,
333
+ "step": 450
334
+ },
335
+ {
336
+ "epoch": 2.0,
337
+ "eval_loss": 0.6154947876930237,
338
+ "eval_runtime": 5.3228,
339
+ "eval_samples_per_second": 37.95,
340
+ "eval_steps_per_second": 4.885,
341
+ "step": 454
342
+ },
343
+ {
344
+ "epoch": 2.026431718061674,
345
+ "grad_norm": 1.0325676202774048,
346
+ "learning_rate": 0.00025955947136563877,
347
+ "loss": 0.7104,
348
+ "step": 460
349
+ },
350
+ {
351
+ "epoch": 2.0704845814977975,
352
+ "grad_norm": 1.4701021909713745,
353
+ "learning_rate": 0.0002586784140969163,
354
+ "loss": 0.6249,
355
+ "step": 470
356
+ },
357
+ {
358
+ "epoch": 2.1145374449339207,
359
+ "grad_norm": 1.2472504377365112,
360
+ "learning_rate": 0.0002577973568281938,
361
+ "loss": 0.7115,
362
+ "step": 480
363
+ },
364
+ {
365
+ "epoch": 2.158590308370044,
366
+ "grad_norm": 1.01516592502594,
367
+ "learning_rate": 0.00025691629955947135,
368
+ "loss": 0.6039,
369
+ "step": 490
370
+ },
371
+ {
372
+ "epoch": 2.202643171806167,
373
+ "grad_norm": 1.3985668420791626,
374
+ "learning_rate": 0.0002560352422907489,
375
+ "loss": 0.5976,
376
+ "step": 500
377
+ },
378
+ {
379
+ "epoch": 2.246696035242291,
380
+ "grad_norm": 0.6047684550285339,
381
+ "learning_rate": 0.0002551541850220264,
382
+ "loss": 0.5158,
383
+ "step": 510
384
+ },
385
+ {
386
+ "epoch": 2.290748898678414,
387
+ "grad_norm": 0.8428493142127991,
388
+ "learning_rate": 0.00025427312775330393,
389
+ "loss": 0.6338,
390
+ "step": 520
391
+ },
392
+ {
393
+ "epoch": 2.3348017621145374,
394
+ "grad_norm": 1.0199517011642456,
395
+ "learning_rate": 0.00025339207048458146,
396
+ "loss": 0.469,
397
+ "step": 530
398
+ },
399
+ {
400
+ "epoch": 2.3788546255506606,
401
+ "grad_norm": 0.8641414642333984,
402
+ "learning_rate": 0.000252511013215859,
403
+ "loss": 0.583,
404
+ "step": 540
405
+ },
406
+ {
407
+ "epoch": 2.4229074889867843,
408
+ "grad_norm": 0.8442863821983337,
409
+ "learning_rate": 0.00025162995594713657,
410
+ "loss": 0.6108,
411
+ "step": 550
412
+ },
413
+ {
414
+ "epoch": 2.4669603524229076,
415
+ "grad_norm": 0.8864941000938416,
416
+ "learning_rate": 0.0002507488986784141,
417
+ "loss": 0.5572,
418
+ "step": 560
419
+ },
420
+ {
421
+ "epoch": 2.511013215859031,
422
+ "grad_norm": 0.9025411605834961,
423
+ "learning_rate": 0.0002498678414096916,
424
+ "loss": 0.6174,
425
+ "step": 570
426
+ },
427
+ {
428
+ "epoch": 2.555066079295154,
429
+ "grad_norm": 0.8481220602989197,
430
+ "learning_rate": 0.00024898678414096915,
431
+ "loss": 0.6118,
432
+ "step": 580
433
+ },
434
+ {
435
+ "epoch": 2.5991189427312777,
436
+ "grad_norm": 0.9391738772392273,
437
+ "learning_rate": 0.0002481057268722467,
438
+ "loss": 0.569,
439
+ "step": 590
440
+ },
441
+ {
442
+ "epoch": 2.643171806167401,
443
+ "grad_norm": 1.0381453037261963,
444
+ "learning_rate": 0.0002472246696035242,
445
+ "loss": 0.4904,
446
+ "step": 600
447
+ },
448
+ {
449
+ "epoch": 2.6872246696035242,
450
+ "grad_norm": 1.023573398590088,
451
+ "learning_rate": 0.00024634361233480173,
452
+ "loss": 0.5969,
453
+ "step": 610
454
+ },
455
+ {
456
+ "epoch": 2.7312775330396475,
457
+ "grad_norm": 1.2105042934417725,
458
+ "learning_rate": 0.00024546255506607926,
459
+ "loss": 0.6589,
460
+ "step": 620
461
+ },
462
+ {
463
+ "epoch": 2.7753303964757707,
464
+ "grad_norm": 1.1160320043563843,
465
+ "learning_rate": 0.0002445814977973568,
466
+ "loss": 0.5242,
467
+ "step": 630
468
+ },
469
+ {
470
+ "epoch": 2.8193832599118944,
471
+ "grad_norm": 1.1934391260147095,
472
+ "learning_rate": 0.00024370044052863436,
473
+ "loss": 0.6548,
474
+ "step": 640
475
+ },
476
+ {
477
+ "epoch": 2.8634361233480177,
478
+ "grad_norm": 1.1788102388381958,
479
+ "learning_rate": 0.0002428193832599119,
480
+ "loss": 0.5828,
481
+ "step": 650
482
+ },
483
+ {
484
+ "epoch": 2.907488986784141,
485
+ "grad_norm": 1.2889748811721802,
486
+ "learning_rate": 0.00024193832599118942,
487
+ "loss": 0.6468,
488
+ "step": 660
489
+ },
490
+ {
491
+ "epoch": 2.951541850220264,
492
+ "grad_norm": 0.8837119340896606,
493
+ "learning_rate": 0.00024105726872246695,
494
+ "loss": 0.6694,
495
+ "step": 670
496
+ },
497
+ {
498
+ "epoch": 2.995594713656388,
499
+ "grad_norm": 1.0216575860977173,
500
+ "learning_rate": 0.00024017621145374447,
501
+ "loss": 0.633,
502
+ "step": 680
503
+ },
504
+ {
505
+ "epoch": 3.0,
506
+ "eval_loss": 0.592506468296051,
507
+ "eval_runtime": 5.3852,
508
+ "eval_samples_per_second": 37.51,
509
+ "eval_steps_per_second": 4.828,
510
+ "step": 681
511
+ },
512
+ {
513
+ "epoch": 3.039647577092511,
514
+ "grad_norm": 1.58785080909729,
515
+ "learning_rate": 0.00023929515418502203,
516
+ "loss": 0.4512,
517
+ "step": 690
518
+ },
519
+ {
520
+ "epoch": 3.0837004405286343,
521
+ "grad_norm": 1.0036600828170776,
522
+ "learning_rate": 0.00023841409691629955,
523
+ "loss": 0.9613,
524
+ "step": 700
525
+ },
526
+ {
527
+ "epoch": 3.1277533039647576,
528
+ "grad_norm": 0.9956134557723999,
529
+ "learning_rate": 0.00023753303964757708,
530
+ "loss": 0.479,
531
+ "step": 710
532
+ },
533
+ {
534
+ "epoch": 3.171806167400881,
535
+ "grad_norm": 1.1154946088790894,
536
+ "learning_rate": 0.0002366519823788546,
537
+ "loss": 0.5444,
538
+ "step": 720
539
+ },
540
+ {
541
+ "epoch": 3.2158590308370045,
542
+ "grad_norm": 1.3544610738754272,
543
+ "learning_rate": 0.00023577092511013214,
544
+ "loss": 0.5163,
545
+ "step": 730
546
+ },
547
+ {
548
+ "epoch": 3.2599118942731278,
549
+ "grad_norm": 0.7720727920532227,
550
+ "learning_rate": 0.0002348898678414097,
551
+ "loss": 0.5317,
552
+ "step": 740
553
+ },
554
+ {
555
+ "epoch": 3.303964757709251,
556
+ "grad_norm": 0.9804306030273438,
557
+ "learning_rate": 0.00023400881057268722,
558
+ "loss": 0.5179,
559
+ "step": 750
560
+ },
561
+ {
562
+ "epoch": 3.3480176211453743,
563
+ "grad_norm": 1.0230934619903564,
564
+ "learning_rate": 0.00023312775330396474,
565
+ "loss": 0.5261,
566
+ "step": 760
567
+ },
568
+ {
569
+ "epoch": 3.392070484581498,
570
+ "grad_norm": 0.8620821237564087,
571
+ "learning_rate": 0.00023224669603524227,
572
+ "loss": 0.4998,
573
+ "step": 770
574
+ },
575
+ {
576
+ "epoch": 3.436123348017621,
577
+ "grad_norm": 0.8884461522102356,
578
+ "learning_rate": 0.0002313656387665198,
579
+ "loss": 0.5527,
580
+ "step": 780
581
+ },
582
+ {
583
+ "epoch": 3.4801762114537445,
584
+ "grad_norm": 0.7721192836761475,
585
+ "learning_rate": 0.00023048458149779735,
586
+ "loss": 0.5279,
587
+ "step": 790
588
+ },
589
+ {
590
+ "epoch": 3.5242290748898677,
591
+ "grad_norm": 1.0769802331924438,
592
+ "learning_rate": 0.00022960352422907488,
593
+ "loss": 0.5851,
594
+ "step": 800
595
+ },
596
+ {
597
+ "epoch": 3.568281938325991,
598
+ "grad_norm": 1.3999199867248535,
599
+ "learning_rate": 0.0002287224669603524,
600
+ "loss": 0.44,
601
+ "step": 810
602
+ },
603
+ {
604
+ "epoch": 3.6123348017621146,
605
+ "grad_norm": 0.9963156580924988,
606
+ "learning_rate": 0.00022784140969162993,
607
+ "loss": 0.6028,
608
+ "step": 820
609
+ },
610
+ {
611
+ "epoch": 3.656387665198238,
612
+ "grad_norm": 0.9077759981155396,
613
+ "learning_rate": 0.00022696035242290746,
614
+ "loss": 0.6824,
615
+ "step": 830
616
+ },
617
+ {
618
+ "epoch": 3.700440528634361,
619
+ "grad_norm": 0.9758647680282593,
620
+ "learning_rate": 0.00022607929515418502,
621
+ "loss": 0.5424,
622
+ "step": 840
623
+ },
624
+ {
625
+ "epoch": 3.744493392070485,
626
+ "grad_norm": 0.9838646054267883,
627
+ "learning_rate": 0.00022519823788546254,
628
+ "loss": 0.5588,
629
+ "step": 850
630
+ },
631
+ {
632
+ "epoch": 3.788546255506608,
633
+ "grad_norm": 1.1924773454666138,
634
+ "learning_rate": 0.00022431718061674007,
635
+ "loss": 0.6215,
636
+ "step": 860
637
+ },
638
+ {
639
+ "epoch": 3.8325991189427313,
640
+ "grad_norm": 1.27988600730896,
641
+ "learning_rate": 0.0002234361233480176,
642
+ "loss": 0.5336,
643
+ "step": 870
644
+ },
645
+ {
646
+ "epoch": 3.8766519823788546,
647
+ "grad_norm": 1.0098719596862793,
648
+ "learning_rate": 0.00022255506607929512,
649
+ "loss": 0.6623,
650
+ "step": 880
651
+ },
652
+ {
653
+ "epoch": 3.920704845814978,
654
+ "grad_norm": 1.301437497138977,
655
+ "learning_rate": 0.00022167400881057268,
656
+ "loss": 0.4837,
657
+ "step": 890
658
+ },
659
+ {
660
+ "epoch": 3.964757709251101,
661
+ "grad_norm": 1.3062794208526611,
662
+ "learning_rate": 0.0002207929515418502,
663
+ "loss": 0.4399,
664
+ "step": 900
665
+ },
666
+ {
667
+ "epoch": 4.0,
668
+ "eval_loss": 0.5791140198707581,
669
+ "eval_runtime": 5.2768,
670
+ "eval_samples_per_second": 38.281,
671
+ "eval_steps_per_second": 4.927,
672
+ "step": 908
673
+ },
674
+ {
675
+ "epoch": 4.008810572687224,
676
+ "grad_norm": 1.2243441343307495,
677
+ "learning_rate": 0.00021991189427312773,
678
+ "loss": 0.5225,
679
+ "step": 910
680
+ },
681
+ {
682
+ "epoch": 4.052863436123348,
683
+ "grad_norm": 1.0874862670898438,
684
+ "learning_rate": 0.00021903083700440526,
685
+ "loss": 0.5878,
686
+ "step": 920
687
+ },
688
+ {
689
+ "epoch": 4.096916299559472,
690
+ "grad_norm": 1.1561787128448486,
691
+ "learning_rate": 0.0002181497797356828,
692
+ "loss": 0.4172,
693
+ "step": 930
694
+ },
695
+ {
696
+ "epoch": 4.140969162995595,
697
+ "grad_norm": 0.9504215121269226,
698
+ "learning_rate": 0.00021726872246696034,
699
+ "loss": 0.454,
700
+ "step": 940
701
+ },
702
+ {
703
+ "epoch": 4.185022026431718,
704
+ "grad_norm": 1.0901755094528198,
705
+ "learning_rate": 0.00021638766519823787,
706
+ "loss": 0.5222,
707
+ "step": 950
708
+ },
709
+ {
710
+ "epoch": 4.229074889867841,
711
+ "grad_norm": 0.7518570423126221,
712
+ "learning_rate": 0.0002155066079295154,
713
+ "loss": 0.4048,
714
+ "step": 960
715
+ },
716
+ {
717
+ "epoch": 4.273127753303965,
718
+ "grad_norm": 0.9933887720108032,
719
+ "learning_rate": 0.00021462555066079292,
720
+ "loss": 0.4545,
721
+ "step": 970
722
+ },
723
+ {
724
+ "epoch": 4.317180616740088,
725
+ "grad_norm": 0.8956694006919861,
726
+ "learning_rate": 0.00021374449339207048,
727
+ "loss": 0.5703,
728
+ "step": 980
729
+ },
730
+ {
731
+ "epoch": 4.361233480176211,
732
+ "grad_norm": 1.0768828392028809,
733
+ "learning_rate": 0.000212863436123348,
734
+ "loss": 0.411,
735
+ "step": 990
736
+ },
737
+ {
738
+ "epoch": 4.405286343612334,
739
+ "grad_norm": 1.3219349384307861,
740
+ "learning_rate": 0.00021198237885462553,
741
+ "loss": 0.5096,
742
+ "step": 1000
743
+ },
744
+ {
745
+ "epoch": 4.4493392070484585,
746
+ "grad_norm": 0.6028145551681519,
747
+ "learning_rate": 0.00021110132158590306,
748
+ "loss": 0.5427,
749
+ "step": 1010
750
+ },
751
+ {
752
+ "epoch": 4.493392070484582,
753
+ "grad_norm": 0.6015641689300537,
754
+ "learning_rate": 0.00021022026431718059,
755
+ "loss": 0.4855,
756
+ "step": 1020
757
+ },
758
+ {
759
+ "epoch": 4.537444933920705,
760
+ "grad_norm": 0.7184689044952393,
761
+ "learning_rate": 0.00020933920704845814,
762
+ "loss": 0.4893,
763
+ "step": 1030
764
+ },
765
+ {
766
+ "epoch": 4.581497797356828,
767
+ "grad_norm": 1.445830225944519,
768
+ "learning_rate": 0.00020845814977973567,
769
+ "loss": 0.4412,
770
+ "step": 1040
771
+ },
772
+ {
773
+ "epoch": 4.6255506607929515,
774
+ "grad_norm": 0.9506711959838867,
775
+ "learning_rate": 0.0002075770925110132,
776
+ "loss": 0.5596,
777
+ "step": 1050
778
+ },
779
+ {
780
+ "epoch": 4.669603524229075,
781
+ "grad_norm": 0.9642265439033508,
782
+ "learning_rate": 0.00020669603524229072,
783
+ "loss": 0.3944,
784
+ "step": 1060
785
+ },
786
+ {
787
+ "epoch": 4.713656387665198,
788
+ "grad_norm": 0.9548330307006836,
789
+ "learning_rate": 0.00020581497797356825,
790
+ "loss": 0.4925,
791
+ "step": 1070
792
+ },
793
+ {
794
+ "epoch": 4.757709251101321,
795
+ "grad_norm": 1.5850030183792114,
796
+ "learning_rate": 0.0002049339207048458,
797
+ "loss": 0.5114,
798
+ "step": 1080
799
+ },
800
+ {
801
+ "epoch": 4.8017621145374445,
802
+ "grad_norm": 0.7429970502853394,
803
+ "learning_rate": 0.00020405286343612333,
804
+ "loss": 0.5556,
805
+ "step": 1090
806
+ },
807
+ {
808
+ "epoch": 4.845814977973569,
809
+ "grad_norm": 0.9865929484367371,
810
+ "learning_rate": 0.00020317180616740086,
811
+ "loss": 0.4612,
812
+ "step": 1100
813
+ },
814
+ {
815
+ "epoch": 4.889867841409692,
816
+ "grad_norm": 0.8113177418708801,
817
+ "learning_rate": 0.00020229074889867838,
818
+ "loss": 0.5196,
819
+ "step": 1110
820
+ },
821
+ {
822
+ "epoch": 4.933920704845815,
823
+ "grad_norm": 1.1767125129699707,
824
+ "learning_rate": 0.0002014096916299559,
825
+ "loss": 0.5321,
826
+ "step": 1120
827
+ },
828
+ {
829
+ "epoch": 4.977973568281938,
830
+ "grad_norm": 0.8367587327957153,
831
+ "learning_rate": 0.00020052863436123347,
832
+ "loss": 0.506,
833
+ "step": 1130
834
+ },
835
+ {
836
+ "epoch": 5.0,
837
+ "eval_loss": 0.5730367302894592,
838
+ "eval_runtime": 6.1337,
839
+ "eval_samples_per_second": 32.933,
840
+ "eval_steps_per_second": 4.239,
841
+ "step": 1135
842
+ },
843
+ {
844
+ "epoch": 5.022026431718062,
845
+ "grad_norm": 1.1242823600769043,
846
+ "learning_rate": 0.000199647577092511,
847
+ "loss": 0.4678,
848
+ "step": 1140
849
+ },
850
+ {
851
+ "epoch": 5.066079295154185,
852
+ "grad_norm": 1.0385881662368774,
853
+ "learning_rate": 0.00019876651982378852,
854
+ "loss": 0.3968,
855
+ "step": 1150
856
+ },
857
+ {
858
+ "epoch": 5.110132158590308,
859
+ "grad_norm": 0.9282165765762329,
860
+ "learning_rate": 0.00019788546255506605,
861
+ "loss": 0.5089,
862
+ "step": 1160
863
+ },
864
+ {
865
+ "epoch": 5.154185022026431,
866
+ "grad_norm": 1.401548147201538,
867
+ "learning_rate": 0.00019700440528634357,
868
+ "loss": 0.4457,
869
+ "step": 1170
870
+ },
871
+ {
872
+ "epoch": 5.1982378854625555,
873
+ "grad_norm": 0.6676862835884094,
874
+ "learning_rate": 0.00019612334801762113,
875
+ "loss": 0.3175,
876
+ "step": 1180
877
+ },
878
+ {
879
+ "epoch": 5.242290748898679,
880
+ "grad_norm": 1.1318411827087402,
881
+ "learning_rate": 0.00019524229074889866,
882
+ "loss": 0.4468,
883
+ "step": 1190
884
+ },
885
+ {
886
+ "epoch": 5.286343612334802,
887
+ "grad_norm": 0.706200361251831,
888
+ "learning_rate": 0.00019436123348017618,
889
+ "loss": 0.3954,
890
+ "step": 1200
891
+ },
892
+ {
893
+ "epoch": 5.330396475770925,
894
+ "grad_norm": 0.6558952927589417,
895
+ "learning_rate": 0.0001934801762114537,
896
+ "loss": 0.4318,
897
+ "step": 1210
898
+ },
899
+ {
900
+ "epoch": 5.3744493392070485,
901
+ "grad_norm": 0.59174644947052,
902
+ "learning_rate": 0.00019259911894273124,
903
+ "loss": 0.3962,
904
+ "step": 1220
905
+ },
906
+ {
907
+ "epoch": 5.418502202643172,
908
+ "grad_norm": 0.9306423664093018,
909
+ "learning_rate": 0.0001917180616740088,
910
+ "loss": 0.4161,
911
+ "step": 1230
912
+ },
913
+ {
914
+ "epoch": 5.462555066079295,
915
+ "grad_norm": 1.2412904500961304,
916
+ "learning_rate": 0.00019083700440528632,
917
+ "loss": 0.4259,
918
+ "step": 1240
919
+ },
920
+ {
921
+ "epoch": 5.506607929515418,
922
+ "grad_norm": 0.8949795961380005,
923
+ "learning_rate": 0.00018995594713656385,
924
+ "loss": 0.5512,
925
+ "step": 1250
926
+ },
927
+ {
928
+ "epoch": 5.5506607929515415,
929
+ "grad_norm": 0.9977787733078003,
930
+ "learning_rate": 0.00018907488986784137,
931
+ "loss": 0.4497,
932
+ "step": 1260
933
+ },
934
+ {
935
+ "epoch": 5.594713656387665,
936
+ "grad_norm": 1.0676085948944092,
937
+ "learning_rate": 0.0001881938325991189,
938
+ "loss": 0.4344,
939
+ "step": 1270
940
+ },
941
+ {
942
+ "epoch": 5.638766519823789,
943
+ "grad_norm": 0.6446275115013123,
944
+ "learning_rate": 0.00018731277533039648,
945
+ "loss": 0.4184,
946
+ "step": 1280
947
+ },
948
+ {
949
+ "epoch": 5.682819383259912,
950
+ "grad_norm": 1.3255438804626465,
951
+ "learning_rate": 0.000186431718061674,
952
+ "loss": 0.5441,
953
+ "step": 1290
954
+ },
955
+ {
956
+ "epoch": 5.726872246696035,
957
+ "grad_norm": 0.823581337928772,
958
+ "learning_rate": 0.0001855506607929515,
959
+ "loss": 0.5028,
960
+ "step": 1300
961
+ },
962
+ {
963
+ "epoch": 5.770925110132159,
964
+ "grad_norm": 1.0471981763839722,
965
+ "learning_rate": 0.00018466960352422904,
966
+ "loss": 0.4407,
967
+ "step": 1310
968
+ },
969
+ {
970
+ "epoch": 5.814977973568282,
971
+ "grad_norm": 1.0394315719604492,
972
+ "learning_rate": 0.00018378854625550662,
973
+ "loss": 0.4788,
974
+ "step": 1320
975
+ },
976
+ {
977
+ "epoch": 5.859030837004405,
978
+ "grad_norm": 1.4738258123397827,
979
+ "learning_rate": 0.00018290748898678414,
980
+ "loss": 0.5495,
981
+ "step": 1330
982
+ },
983
+ {
984
+ "epoch": 5.903083700440528,
985
+ "grad_norm": 1.2812182903289795,
986
+ "learning_rate": 0.00018202643171806167,
987
+ "loss": 0.4335,
988
+ "step": 1340
989
+ },
990
+ {
991
+ "epoch": 5.9471365638766525,
992
+ "grad_norm": 1.4929533004760742,
993
+ "learning_rate": 0.0001811453744493392,
994
+ "loss": 0.5097,
995
+ "step": 1350
996
+ },
997
+ {
998
+ "epoch": 5.991189427312776,
999
+ "grad_norm": 1.2788587808609009,
1000
+ "learning_rate": 0.00018026431718061673,
1001
+ "loss": 0.4702,
1002
+ "step": 1360
1003
+ },
1004
+ {
1005
+ "epoch": 6.0,
1006
+ "eval_loss": 0.5740869045257568,
1007
+ "eval_runtime": 4.9653,
1008
+ "eval_samples_per_second": 40.682,
1009
+ "eval_steps_per_second": 5.236,
1010
+ "step": 1362
1011
+ },
1012
+ {
1013
+ "epoch": 6.035242290748899,
1014
+ "grad_norm": 0.9543855786323547,
1015
+ "learning_rate": 0.00017938325991189428,
1016
+ "loss": 0.4232,
1017
+ "step": 1370
1018
+ },
1019
+ {
1020
+ "epoch": 6.079295154185022,
1021
+ "grad_norm": 1.0528812408447266,
1022
+ "learning_rate": 0.0001785022026431718,
1023
+ "loss": 0.4025,
1024
+ "step": 1380
1025
+ },
1026
+ {
1027
+ "epoch": 6.1233480176211454,
1028
+ "grad_norm": 0.9573265910148621,
1029
+ "learning_rate": 0.00017762114537444933,
1030
+ "loss": 0.4127,
1031
+ "step": 1390
1032
+ },
1033
+ {
1034
+ "epoch": 6.167400881057269,
1035
+ "grad_norm": 1.7806532382965088,
1036
+ "learning_rate": 0.00017674008810572686,
1037
+ "loss": 0.4646,
1038
+ "step": 1400
1039
+ },
1040
+ {
1041
+ "epoch": 6.211453744493392,
1042
+ "grad_norm": 1.0559179782867432,
1043
+ "learning_rate": 0.0001758590308370044,
1044
+ "loss": 0.3222,
1045
+ "step": 1410
1046
+ },
1047
+ {
1048
+ "epoch": 6.255506607929515,
1049
+ "grad_norm": 0.9502829313278198,
1050
+ "learning_rate": 0.00017497797356828194,
1051
+ "loss": 0.4697,
1052
+ "step": 1420
1053
+ },
1054
+ {
1055
+ "epoch": 6.299559471365638,
1056
+ "grad_norm": 0.6869007349014282,
1057
+ "learning_rate": 0.00017409691629955947,
1058
+ "loss": 0.4155,
1059
+ "step": 1430
1060
+ },
1061
+ {
1062
+ "epoch": 6.343612334801762,
1063
+ "grad_norm": 0.6793345808982849,
1064
+ "learning_rate": 0.000173215859030837,
1065
+ "loss": 0.4236,
1066
+ "step": 1440
1067
+ },
1068
+ {
1069
+ "epoch": 6.387665198237886,
1070
+ "grad_norm": 1.067975640296936,
1071
+ "learning_rate": 0.00017233480176211452,
1072
+ "loss": 0.3558,
1073
+ "step": 1450
1074
+ },
1075
+ {
1076
+ "epoch": 6.431718061674009,
1077
+ "grad_norm": 1.0968421697616577,
1078
+ "learning_rate": 0.00017145374449339205,
1079
+ "loss": 0.4453,
1080
+ "step": 1460
1081
+ },
1082
+ {
1083
+ "epoch": 6.475770925110132,
1084
+ "grad_norm": 1.1832313537597656,
1085
+ "learning_rate": 0.0001705726872246696,
1086
+ "loss": 0.5115,
1087
+ "step": 1470
1088
+ },
1089
+ {
1090
+ "epoch": 6.5198237885462555,
1091
+ "grad_norm": 0.9857836365699768,
1092
+ "learning_rate": 0.00016969162995594713,
1093
+ "loss": 0.4274,
1094
+ "step": 1480
1095
+ },
1096
+ {
1097
+ "epoch": 6.563876651982379,
1098
+ "grad_norm": 0.9006336331367493,
1099
+ "learning_rate": 0.00016881057268722466,
1100
+ "loss": 0.3865,
1101
+ "step": 1490
1102
+ },
1103
+ {
1104
+ "epoch": 6.607929515418502,
1105
+ "grad_norm": 1.1091986894607544,
1106
+ "learning_rate": 0.0001679295154185022,
1107
+ "loss": 0.3988,
1108
+ "step": 1500
1109
+ },
1110
+ {
1111
+ "epoch": 6.651982378854625,
1112
+ "grad_norm": 1.423886775970459,
1113
+ "learning_rate": 0.00016704845814977971,
1114
+ "loss": 0.5057,
1115
+ "step": 1510
1116
+ },
1117
+ {
1118
+ "epoch": 6.6960352422907485,
1119
+ "grad_norm": 0.9245197176933289,
1120
+ "learning_rate": 0.00016616740088105727,
1121
+ "loss": 0.3966,
1122
+ "step": 1520
1123
+ },
1124
+ {
1125
+ "epoch": 6.740088105726873,
1126
+ "grad_norm": 0.944870114326477,
1127
+ "learning_rate": 0.0001652863436123348,
1128
+ "loss": 0.4521,
1129
+ "step": 1530
1130
+ },
1131
+ {
1132
+ "epoch": 6.784140969162996,
1133
+ "grad_norm": 0.8870773315429688,
1134
+ "learning_rate": 0.00016440528634361232,
1135
+ "loss": 0.4425,
1136
+ "step": 1540
1137
+ },
1138
+ {
1139
+ "epoch": 6.828193832599119,
1140
+ "grad_norm": 0.7404115200042725,
1141
+ "learning_rate": 0.00016352422907488985,
1142
+ "loss": 0.3207,
1143
+ "step": 1550
1144
+ },
1145
+ {
1146
+ "epoch": 6.872246696035242,
1147
+ "grad_norm": 0.9958137273788452,
1148
+ "learning_rate": 0.00016264317180616738,
1149
+ "loss": 0.4244,
1150
+ "step": 1560
1151
+ },
1152
+ {
1153
+ "epoch": 6.916299559471366,
1154
+ "grad_norm": 1.0651079416275024,
1155
+ "learning_rate": 0.00016176211453744493,
1156
+ "loss": 0.4075,
1157
+ "step": 1570
1158
+ },
1159
+ {
1160
+ "epoch": 6.960352422907489,
1161
+ "grad_norm": 0.9528789520263672,
1162
+ "learning_rate": 0.00016088105726872246,
1163
+ "loss": 0.493,
1164
+ "step": 1580
1165
+ },
1166
+ {
1167
+ "epoch": 7.0,
1168
+ "eval_loss": 0.5702072381973267,
1169
+ "eval_runtime": 5.1787,
1170
+ "eval_samples_per_second": 39.006,
1171
+ "eval_steps_per_second": 5.021,
1172
+ "step": 1589
1173
+ },
1174
+ {
1175
+ "epoch": 7.004405286343612,
1176
+ "grad_norm": 1.0486853122711182,
1177
+ "learning_rate": 0.00015999999999999999,
1178
+ "loss": 0.4131,
1179
+ "step": 1590
1180
+ },
1181
+ {
1182
+ "epoch": 7.048458149779735,
1183
+ "grad_norm": 1.2176262140274048,
1184
+ "learning_rate": 0.0001591189427312775,
1185
+ "loss": 0.4417,
1186
+ "step": 1600
1187
+ },
1188
+ {
1189
+ "epoch": 7.092511013215859,
1190
+ "grad_norm": 1.187107801437378,
1191
+ "learning_rate": 0.00015823788546255504,
1192
+ "loss": 0.4372,
1193
+ "step": 1610
1194
+ },
1195
+ {
1196
+ "epoch": 7.136563876651983,
1197
+ "grad_norm": 0.9459372758865356,
1198
+ "learning_rate": 0.0001573568281938326,
1199
+ "loss": 0.356,
1200
+ "step": 1620
1201
+ },
1202
+ {
1203
+ "epoch": 7.180616740088106,
1204
+ "grad_norm": 0.8114103078842163,
1205
+ "learning_rate": 0.00015647577092511012,
1206
+ "loss": 0.308,
1207
+ "step": 1630
1208
+ },
1209
+ {
1210
+ "epoch": 7.224669603524229,
1211
+ "grad_norm": 1.035370945930481,
1212
+ "learning_rate": 0.00015559471365638765,
1213
+ "loss": 0.3738,
1214
+ "step": 1640
1215
+ },
1216
+ {
1217
+ "epoch": 7.2687224669603525,
1218
+ "grad_norm": 1.0260848999023438,
1219
+ "learning_rate": 0.00015471365638766518,
1220
+ "loss": 0.342,
1221
+ "step": 1650
1222
+ },
1223
+ {
1224
+ "epoch": 7.312775330396476,
1225
+ "grad_norm": 0.8079932928085327,
1226
+ "learning_rate": 0.00015383259911894273,
1227
+ "loss": 0.4381,
1228
+ "step": 1660
1229
+ },
1230
+ {
1231
+ "epoch": 7.356828193832599,
1232
+ "grad_norm": 1.318695068359375,
1233
+ "learning_rate": 0.00015295154185022026,
1234
+ "loss": 0.3685,
1235
+ "step": 1670
1236
+ },
1237
+ {
1238
+ "epoch": 7.400881057268722,
1239
+ "grad_norm": 1.3181859254837036,
1240
+ "learning_rate": 0.00015207048458149778,
1241
+ "loss": 0.3465,
1242
+ "step": 1680
1243
+ },
1244
+ {
1245
+ "epoch": 7.4449339207048455,
1246
+ "grad_norm": 1.0277948379516602,
1247
+ "learning_rate": 0.0001511894273127753,
1248
+ "loss": 0.3659,
1249
+ "step": 1690
1250
+ },
1251
+ {
1252
+ "epoch": 7.48898678414097,
1253
+ "grad_norm": 1.1619762182235718,
1254
+ "learning_rate": 0.00015030837004405284,
1255
+ "loss": 0.4304,
1256
+ "step": 1700
1257
+ },
1258
+ {
1259
+ "epoch": 7.533039647577093,
1260
+ "grad_norm": 1.2854048013687134,
1261
+ "learning_rate": 0.0001494273127753304,
1262
+ "loss": 0.4372,
1263
+ "step": 1710
1264
+ },
1265
+ {
1266
+ "epoch": 7.577092511013216,
1267
+ "grad_norm": 1.032459020614624,
1268
+ "learning_rate": 0.00014854625550660792,
1269
+ "loss": 0.3687,
1270
+ "step": 1720
1271
+ },
1272
+ {
1273
+ "epoch": 7.621145374449339,
1274
+ "grad_norm": 0.9430228471755981,
1275
+ "learning_rate": 0.00014766519823788545,
1276
+ "loss": 0.3967,
1277
+ "step": 1730
1278
+ },
1279
+ {
1280
+ "epoch": 7.665198237885463,
1281
+ "grad_norm": 1.2012503147125244,
1282
+ "learning_rate": 0.00014678414096916297,
1283
+ "loss": 0.4028,
1284
+ "step": 1740
1285
+ },
1286
+ {
1287
+ "epoch": 7.709251101321586,
1288
+ "grad_norm": 0.9703013896942139,
1289
+ "learning_rate": 0.00014590308370044053,
1290
+ "loss": 0.4037,
1291
+ "step": 1750
1292
+ },
1293
+ {
1294
+ "epoch": 7.753303964757709,
1295
+ "grad_norm": 1.2811229228973389,
1296
+ "learning_rate": 0.00014502202643171806,
1297
+ "loss": 0.3725,
1298
+ "step": 1760
1299
+ },
1300
+ {
1301
+ "epoch": 7.797356828193832,
1302
+ "grad_norm": 0.9879553914070129,
1303
+ "learning_rate": 0.00014414096916299558,
1304
+ "loss": 0.4385,
1305
+ "step": 1770
1306
+ },
1307
+ {
1308
+ "epoch": 7.841409691629956,
1309
+ "grad_norm": 1.4015151262283325,
1310
+ "learning_rate": 0.0001432599118942731,
1311
+ "loss": 0.4046,
1312
+ "step": 1780
1313
+ },
1314
+ {
1315
+ "epoch": 7.885462555066079,
1316
+ "grad_norm": 0.9369928240776062,
1317
+ "learning_rate": 0.00014237885462555064,
1318
+ "loss": 0.4232,
1319
+ "step": 1790
1320
+ },
1321
+ {
1322
+ "epoch": 7.929515418502203,
1323
+ "grad_norm": 0.7787442803382874,
1324
+ "learning_rate": 0.0001414977973568282,
1325
+ "loss": 0.3679,
1326
+ "step": 1800
1327
+ },
1328
+ {
1329
+ "epoch": 7.973568281938326,
1330
+ "grad_norm": 0.7212619781494141,
1331
+ "learning_rate": 0.00014061674008810572,
1332
+ "loss": 0.4299,
1333
+ "step": 1810
1334
+ },
1335
+ {
1336
+ "epoch": 8.0,
1337
+ "eval_loss": 0.57987380027771,
1338
+ "eval_runtime": 4.8534,
1339
+ "eval_samples_per_second": 41.62,
1340
+ "eval_steps_per_second": 5.357,
1341
+ "step": 1816
1342
+ },
1343
+ {
1344
+ "epoch": 8.017621145374449,
1345
+ "grad_norm": 1.1815301179885864,
1346
+ "learning_rate": 0.00013973568281938325,
1347
+ "loss": 0.4058,
1348
+ "step": 1820
1349
+ },
1350
+ {
1351
+ "epoch": 8.061674008810572,
1352
+ "grad_norm": 0.7913572192192078,
1353
+ "learning_rate": 0.00013885462555066077,
1354
+ "loss": 0.2876,
1355
+ "step": 1830
1356
+ },
1357
+ {
1358
+ "epoch": 8.105726872246697,
1359
+ "grad_norm": 0.9591747522354126,
1360
+ "learning_rate": 0.0001379735682819383,
1361
+ "loss": 0.2801,
1362
+ "step": 1840
1363
+ },
1364
+ {
1365
+ "epoch": 8.14977973568282,
1366
+ "grad_norm": 1.2883862257003784,
1367
+ "learning_rate": 0.00013709251101321585,
1368
+ "loss": 0.3435,
1369
+ "step": 1850
1370
+ },
1371
+ {
1372
+ "epoch": 8.193832599118943,
1373
+ "grad_norm": 1.2138097286224365,
1374
+ "learning_rate": 0.00013621145374449338,
1375
+ "loss": 0.4603,
1376
+ "step": 1860
1377
+ },
1378
+ {
1379
+ "epoch": 8.237885462555067,
1380
+ "grad_norm": 0.9017927050590515,
1381
+ "learning_rate": 0.0001353303964757709,
1382
+ "loss": 0.328,
1383
+ "step": 1870
1384
+ },
1385
+ {
1386
+ "epoch": 8.28193832599119,
1387
+ "grad_norm": 1.0213032960891724,
1388
+ "learning_rate": 0.00013444933920704844,
1389
+ "loss": 0.4241,
1390
+ "step": 1880
1391
+ },
1392
+ {
1393
+ "epoch": 8.325991189427313,
1394
+ "grad_norm": 0.782507598400116,
1395
+ "learning_rate": 0.00013356828193832596,
1396
+ "loss": 0.287,
1397
+ "step": 1890
1398
+ },
1399
+ {
1400
+ "epoch": 8.370044052863436,
1401
+ "grad_norm": 0.8239027261734009,
1402
+ "learning_rate": 0.00013268722466960352,
1403
+ "loss": 0.3471,
1404
+ "step": 1900
1405
+ },
1406
+ {
1407
+ "epoch": 8.41409691629956,
1408
+ "grad_norm": 0.9952473044395447,
1409
+ "learning_rate": 0.00013180616740088104,
1410
+ "loss": 0.325,
1411
+ "step": 1910
1412
+ },
1413
+ {
1414
+ "epoch": 8.458149779735683,
1415
+ "grad_norm": 0.7988440990447998,
1416
+ "learning_rate": 0.00013092511013215857,
1417
+ "loss": 0.3397,
1418
+ "step": 1920
1419
+ },
1420
+ {
1421
+ "epoch": 8.502202643171806,
1422
+ "grad_norm": 1.2881464958190918,
1423
+ "learning_rate": 0.0001300440528634361,
1424
+ "loss": 0.4655,
1425
+ "step": 1930
1426
+ },
1427
+ {
1428
+ "epoch": 8.54625550660793,
1429
+ "grad_norm": 0.9545268416404724,
1430
+ "learning_rate": 0.00012916299559471365,
1431
+ "loss": 0.4031,
1432
+ "step": 1940
1433
+ },
1434
+ {
1435
+ "epoch": 8.590308370044053,
1436
+ "grad_norm": 1.550424337387085,
1437
+ "learning_rate": 0.00012828193832599118,
1438
+ "loss": 0.3697,
1439
+ "step": 1950
1440
+ },
1441
+ {
1442
+ "epoch": 8.634361233480176,
1443
+ "grad_norm": 1.2041224241256714,
1444
+ "learning_rate": 0.0001274008810572687,
1445
+ "loss": 0.43,
1446
+ "step": 1960
1447
+ },
1448
+ {
1449
+ "epoch": 8.678414096916299,
1450
+ "grad_norm": 0.8280724287033081,
1451
+ "learning_rate": 0.00012651982378854626,
1452
+ "loss": 0.4045,
1453
+ "step": 1970
1454
+ },
1455
+ {
1456
+ "epoch": 8.722466960352422,
1457
+ "grad_norm": 0.8164283037185669,
1458
+ "learning_rate": 0.00012563876651982376,
1459
+ "loss": 0.4001,
1460
+ "step": 1980
1461
+ },
1462
+ {
1463
+ "epoch": 8.766519823788546,
1464
+ "grad_norm": 0.9470929503440857,
1465
+ "learning_rate": 0.00012475770925110132,
1466
+ "loss": 0.3767,
1467
+ "step": 1990
1468
+ },
1469
+ {
1470
+ "epoch": 8.810572687224669,
1471
+ "grad_norm": 0.7390472888946533,
1472
+ "learning_rate": 0.00012387665198237884,
1473
+ "loss": 0.4206,
1474
+ "step": 2000
1475
+ },
1476
+ {
1477
+ "epoch": 8.854625550660792,
1478
+ "grad_norm": 0.8382723927497864,
1479
+ "learning_rate": 0.00012299559471365637,
1480
+ "loss": 0.3061,
1481
+ "step": 2010
1482
+ },
1483
+ {
1484
+ "epoch": 8.898678414096917,
1485
+ "grad_norm": 1.060539722442627,
1486
+ "learning_rate": 0.00012211453744493392,
1487
+ "loss": 0.4921,
1488
+ "step": 2020
1489
+ },
1490
+ {
1491
+ "epoch": 8.94273127753304,
1492
+ "grad_norm": 0.6955994367599487,
1493
+ "learning_rate": 0.00012123348017621144,
1494
+ "loss": 0.4077,
1495
+ "step": 2030
1496
+ },
1497
+ {
1498
+ "epoch": 8.986784140969164,
1499
+ "grad_norm": 0.8158656358718872,
1500
+ "learning_rate": 0.00012035242290748898,
1501
+ "loss": 0.3759,
1502
+ "step": 2040
1503
+ },
1504
+ {
1505
+ "epoch": 9.0,
1506
+ "eval_loss": 0.582844614982605,
1507
+ "eval_runtime": 4.8405,
1508
+ "eval_samples_per_second": 41.732,
1509
+ "eval_steps_per_second": 5.371,
1510
+ "step": 2043
1511
+ },
1512
+ {
1513
+ "epoch": 9.030837004405287,
1514
+ "grad_norm": 0.9192315936088562,
1515
+ "learning_rate": 0.0001194713656387665,
1516
+ "loss": 0.3809,
1517
+ "step": 2050
1518
+ },
1519
+ {
1520
+ "epoch": 9.07488986784141,
1521
+ "grad_norm": 1.0536017417907715,
1522
+ "learning_rate": 0.00011859030837004403,
1523
+ "loss": 0.3321,
1524
+ "step": 2060
1525
+ },
1526
+ {
1527
+ "epoch": 9.118942731277533,
1528
+ "grad_norm": 1.1080108880996704,
1529
+ "learning_rate": 0.00011770925110132157,
1530
+ "loss": 0.407,
1531
+ "step": 2070
1532
+ },
1533
+ {
1534
+ "epoch": 9.162995594713657,
1535
+ "grad_norm": 0.9956775903701782,
1536
+ "learning_rate": 0.0001168281938325991,
1537
+ "loss": 0.3423,
1538
+ "step": 2080
1539
+ },
1540
+ {
1541
+ "epoch": 9.20704845814978,
1542
+ "grad_norm": 0.746013343334198,
1543
+ "learning_rate": 0.00011594713656387664,
1544
+ "loss": 0.3794,
1545
+ "step": 2090
1546
+ },
1547
+ {
1548
+ "epoch": 9.251101321585903,
1549
+ "grad_norm": 1.126372218132019,
1550
+ "learning_rate": 0.00011506607929515417,
1551
+ "loss": 0.4121,
1552
+ "step": 2100
1553
+ },
1554
+ {
1555
+ "epoch": 9.295154185022026,
1556
+ "grad_norm": 1.4978642463684082,
1557
+ "learning_rate": 0.00011418502202643172,
1558
+ "loss": 0.3358,
1559
+ "step": 2110
1560
+ },
1561
+ {
1562
+ "epoch": 9.33920704845815,
1563
+ "grad_norm": 0.7826859951019287,
1564
+ "learning_rate": 0.00011330396475770924,
1565
+ "loss": 0.2931,
1566
+ "step": 2120
1567
+ },
1568
+ {
1569
+ "epoch": 9.383259911894273,
1570
+ "grad_norm": 1.1644082069396973,
1571
+ "learning_rate": 0.00011242290748898676,
1572
+ "loss": 0.377,
1573
+ "step": 2130
1574
+ },
1575
+ {
1576
+ "epoch": 9.427312775330396,
1577
+ "grad_norm": 0.8106231093406677,
1578
+ "learning_rate": 0.00011154185022026432,
1579
+ "loss": 0.3562,
1580
+ "step": 2140
1581
+ },
1582
+ {
1583
+ "epoch": 9.47136563876652,
1584
+ "grad_norm": 1.162919282913208,
1585
+ "learning_rate": 0.00011066079295154183,
1586
+ "loss": 0.3441,
1587
+ "step": 2150
1588
+ },
1589
+ {
1590
+ "epoch": 9.515418502202643,
1591
+ "grad_norm": 0.7184136509895325,
1592
+ "learning_rate": 0.00010977973568281939,
1593
+ "loss": 0.3254,
1594
+ "step": 2160
1595
+ },
1596
+ {
1597
+ "epoch": 9.559471365638766,
1598
+ "grad_norm": 0.9587578177452087,
1599
+ "learning_rate": 0.00010889867841409691,
1600
+ "loss": 0.3533,
1601
+ "step": 2170
1602
+ },
1603
+ {
1604
+ "epoch": 9.603524229074889,
1605
+ "grad_norm": 0.8703950643539429,
1606
+ "learning_rate": 0.00010801762114537444,
1607
+ "loss": 0.3366,
1608
+ "step": 2180
1609
+ },
1610
+ {
1611
+ "epoch": 9.647577092511014,
1612
+ "grad_norm": 0.7304671406745911,
1613
+ "learning_rate": 0.00010713656387665198,
1614
+ "loss": 0.3608,
1615
+ "step": 2190
1616
+ },
1617
+ {
1618
+ "epoch": 9.691629955947137,
1619
+ "grad_norm": 1.1611542701721191,
1620
+ "learning_rate": 0.00010625550660792951,
1621
+ "loss": 0.3353,
1622
+ "step": 2200
1623
+ },
1624
+ {
1625
+ "epoch": 9.73568281938326,
1626
+ "grad_norm": 0.7281723022460938,
1627
+ "learning_rate": 0.00010537444933920705,
1628
+ "loss": 0.3082,
1629
+ "step": 2210
1630
+ },
1631
+ {
1632
+ "epoch": 9.779735682819384,
1633
+ "grad_norm": 1.1435456275939941,
1634
+ "learning_rate": 0.00010449339207048458,
1635
+ "loss": 0.4317,
1636
+ "step": 2220
1637
+ },
1638
+ {
1639
+ "epoch": 9.823788546255507,
1640
+ "grad_norm": 0.9928381443023682,
1641
+ "learning_rate": 0.0001036123348017621,
1642
+ "loss": 0.3564,
1643
+ "step": 2230
1644
+ },
1645
+ {
1646
+ "epoch": 9.86784140969163,
1647
+ "grad_norm": 0.8395977020263672,
1648
+ "learning_rate": 0.00010273127753303964,
1649
+ "loss": 0.3531,
1650
+ "step": 2240
1651
+ },
1652
+ {
1653
+ "epoch": 9.911894273127754,
1654
+ "grad_norm": 1.0142395496368408,
1655
+ "learning_rate": 0.00010185022026431717,
1656
+ "loss": 0.3896,
1657
+ "step": 2250
1658
+ },
1659
+ {
1660
+ "epoch": 9.955947136563877,
1661
+ "grad_norm": 0.6916971802711487,
1662
+ "learning_rate": 0.00010096916299559471,
1663
+ "loss": 0.3667,
1664
+ "step": 2260
1665
+ },
1666
+ {
1667
+ "epoch": 10.0,
1668
+ "grad_norm": 0.7665943503379822,
1669
+ "learning_rate": 0.00010008810572687224,
1670
+ "loss": 0.3075,
1671
+ "step": 2270
1672
+ },
1673
+ {
1674
+ "epoch": 10.0,
1675
+ "eval_loss": 0.579430878162384,
1676
+ "eval_runtime": 4.7891,
1677
+ "eval_samples_per_second": 42.179,
1678
+ "eval_steps_per_second": 5.429,
1679
+ "step": 2270
1680
+ },
1681
+ {
1682
+ "epoch": 10.044052863436123,
1683
+ "grad_norm": 1.4675018787384033,
1684
+ "learning_rate": 9.920704845814978e-05,
1685
+ "loss": 0.3251,
1686
+ "step": 2280
1687
+ },
1688
+ {
1689
+ "epoch": 10.088105726872246,
1690
+ "grad_norm": 0.6954736709594727,
1691
+ "learning_rate": 9.83259911894273e-05,
1692
+ "loss": 0.3656,
1693
+ "step": 2290
1694
+ },
1695
+ {
1696
+ "epoch": 10.13215859030837,
1697
+ "grad_norm": 1.4188182353973389,
1698
+ "learning_rate": 9.744493392070483e-05,
1699
+ "loss": 0.335,
1700
+ "step": 2300
1701
+ },
1702
+ {
1703
+ "epoch": 10.176211453744493,
1704
+ "grad_norm": 0.9333553910255432,
1705
+ "learning_rate": 9.656387665198237e-05,
1706
+ "loss": 0.2888,
1707
+ "step": 2310
1708
+ },
1709
+ {
1710
+ "epoch": 10.220264317180616,
1711
+ "grad_norm": 0.886482834815979,
1712
+ "learning_rate": 9.56828193832599e-05,
1713
+ "loss": 0.3122,
1714
+ "step": 2320
1715
+ },
1716
+ {
1717
+ "epoch": 10.26431718061674,
1718
+ "grad_norm": 0.6795399188995361,
1719
+ "learning_rate": 9.480176211453744e-05,
1720
+ "loss": 0.3765,
1721
+ "step": 2330
1722
+ },
1723
+ {
1724
+ "epoch": 10.308370044052863,
1725
+ "grad_norm": 1.3046603202819824,
1726
+ "learning_rate": 9.392070484581497e-05,
1727
+ "loss": 0.3316,
1728
+ "step": 2340
1729
+ },
1730
+ {
1731
+ "epoch": 10.352422907488986,
1732
+ "grad_norm": 1.0006519556045532,
1733
+ "learning_rate": 9.30396475770925e-05,
1734
+ "loss": 0.3659,
1735
+ "step": 2350
1736
+ },
1737
+ {
1738
+ "epoch": 10.396475770925111,
1739
+ "grad_norm": 1.1640467643737793,
1740
+ "learning_rate": 9.215859030837004e-05,
1741
+ "loss": 0.346,
1742
+ "step": 2360
1743
+ },
1744
+ {
1745
+ "epoch": 10.440528634361234,
1746
+ "grad_norm": 0.9744365811347961,
1747
+ "learning_rate": 9.127753303964756e-05,
1748
+ "loss": 0.3317,
1749
+ "step": 2370
1750
+ },
1751
+ {
1752
+ "epoch": 10.484581497797357,
1753
+ "grad_norm": 1.039802074432373,
1754
+ "learning_rate": 9.03964757709251e-05,
1755
+ "loss": 0.3162,
1756
+ "step": 2380
1757
+ },
1758
+ {
1759
+ "epoch": 10.52863436123348,
1760
+ "grad_norm": 0.9926576614379883,
1761
+ "learning_rate": 8.951541850220263e-05,
1762
+ "loss": 0.3559,
1763
+ "step": 2390
1764
+ },
1765
+ {
1766
+ "epoch": 10.572687224669604,
1767
+ "grad_norm": 1.0141366720199585,
1768
+ "learning_rate": 8.863436123348016e-05,
1769
+ "loss": 0.3196,
1770
+ "step": 2400
1771
+ },
1772
+ {
1773
+ "epoch": 10.616740088105727,
1774
+ "grad_norm": 0.5856879353523254,
1775
+ "learning_rate": 8.77533039647577e-05,
1776
+ "loss": 0.2919,
1777
+ "step": 2410
1778
+ },
1779
+ {
1780
+ "epoch": 10.66079295154185,
1781
+ "grad_norm": 0.9484356045722961,
1782
+ "learning_rate": 8.687224669603523e-05,
1783
+ "loss": 0.339,
1784
+ "step": 2420
1785
+ },
1786
+ {
1787
+ "epoch": 10.704845814977974,
1788
+ "grad_norm": 0.9014990925788879,
1789
+ "learning_rate": 8.599118942731277e-05,
1790
+ "loss": 0.3089,
1791
+ "step": 2430
1792
+ },
1793
+ {
1794
+ "epoch": 10.748898678414097,
1795
+ "grad_norm": 0.9830072522163391,
1796
+ "learning_rate": 8.51101321585903e-05,
1797
+ "loss": 0.3461,
1798
+ "step": 2440
1799
+ },
1800
+ {
1801
+ "epoch": 10.79295154185022,
1802
+ "grad_norm": 1.051647424697876,
1803
+ "learning_rate": 8.422907488986782e-05,
1804
+ "loss": 0.292,
1805
+ "step": 2450
1806
+ },
1807
+ {
1808
+ "epoch": 10.837004405286343,
1809
+ "grad_norm": 1.0580625534057617,
1810
+ "learning_rate": 8.334801762114536e-05,
1811
+ "loss": 0.4052,
1812
+ "step": 2460
1813
+ },
1814
+ {
1815
+ "epoch": 10.881057268722467,
1816
+ "grad_norm": 1.01996648311615,
1817
+ "learning_rate": 8.246696035242289e-05,
1818
+ "loss": 0.3927,
1819
+ "step": 2470
1820
+ },
1821
+ {
1822
+ "epoch": 10.92511013215859,
1823
+ "grad_norm": 0.6538860201835632,
1824
+ "learning_rate": 8.158590308370044e-05,
1825
+ "loss": 0.3451,
1826
+ "step": 2480
1827
+ },
1828
+ {
1829
+ "epoch": 10.969162995594713,
1830
+ "grad_norm": 0.9368380308151245,
1831
+ "learning_rate": 8.070484581497796e-05,
1832
+ "loss": 0.3932,
1833
+ "step": 2490
1834
+ },
1835
+ {
1836
+ "epoch": 11.0,
1837
+ "eval_loss": 0.5825287103652954,
1838
+ "eval_runtime": 4.811,
1839
+ "eval_samples_per_second": 41.987,
1840
+ "eval_steps_per_second": 5.404,
1841
+ "step": 2497
1842
+ },
1843
+ {
1844
+ "epoch": 11.013215859030836,
1845
+ "grad_norm": 0.9590967893600464,
1846
+ "learning_rate": 7.982378854625551e-05,
1847
+ "loss": 0.32,
1848
+ "step": 2500
1849
+ },
1850
+ {
1851
+ "epoch": 11.05726872246696,
1852
+ "grad_norm": 0.9905742406845093,
1853
+ "learning_rate": 7.894273127753304e-05,
1854
+ "loss": 0.3029,
1855
+ "step": 2510
1856
+ },
1857
+ {
1858
+ "epoch": 11.101321585903083,
1859
+ "grad_norm": 1.2009577751159668,
1860
+ "learning_rate": 7.806167400881057e-05,
1861
+ "loss": 0.3626,
1862
+ "step": 2520
1863
+ },
1864
+ {
1865
+ "epoch": 11.145374449339206,
1866
+ "grad_norm": 1.0607908964157104,
1867
+ "learning_rate": 7.718061674008811e-05,
1868
+ "loss": 0.314,
1869
+ "step": 2530
1870
+ },
1871
+ {
1872
+ "epoch": 11.189427312775331,
1873
+ "grad_norm": 1.1098504066467285,
1874
+ "learning_rate": 7.629955947136563e-05,
1875
+ "loss": 0.3062,
1876
+ "step": 2540
1877
+ },
1878
+ {
1879
+ "epoch": 11.233480176211454,
1880
+ "grad_norm": 0.6961995959281921,
1881
+ "learning_rate": 7.541850220264317e-05,
1882
+ "loss": 0.3499,
1883
+ "step": 2550
1884
+ },
1885
+ {
1886
+ "epoch": 11.277533039647578,
1887
+ "grad_norm": 1.0727498531341553,
1888
+ "learning_rate": 7.45374449339207e-05,
1889
+ "loss": 0.2559,
1890
+ "step": 2560
1891
+ },
1892
+ {
1893
+ "epoch": 11.321585903083701,
1894
+ "grad_norm": 1.064344048500061,
1895
+ "learning_rate": 7.365638766519823e-05,
1896
+ "loss": 0.3011,
1897
+ "step": 2570
1898
+ },
1899
+ {
1900
+ "epoch": 11.365638766519824,
1901
+ "grad_norm": 1.1059036254882812,
1902
+ "learning_rate": 7.277533039647577e-05,
1903
+ "loss": 0.3415,
1904
+ "step": 2580
1905
+ },
1906
+ {
1907
+ "epoch": 11.409691629955947,
1908
+ "grad_norm": 0.8815020322799683,
1909
+ "learning_rate": 7.18942731277533e-05,
1910
+ "loss": 0.3164,
1911
+ "step": 2590
1912
+ },
1913
+ {
1914
+ "epoch": 11.45374449339207,
1915
+ "grad_norm": 0.9667496085166931,
1916
+ "learning_rate": 7.101321585903082e-05,
1917
+ "loss": 0.3642,
1918
+ "step": 2600
1919
+ },
1920
+ {
1921
+ "epoch": 11.497797356828194,
1922
+ "grad_norm": 0.942876935005188,
1923
+ "learning_rate": 7.013215859030836e-05,
1924
+ "loss": 0.3624,
1925
+ "step": 2610
1926
+ },
1927
+ {
1928
+ "epoch": 11.541850220264317,
1929
+ "grad_norm": 1.022675633430481,
1930
+ "learning_rate": 6.925110132158589e-05,
1931
+ "loss": 0.3351,
1932
+ "step": 2620
1933
+ },
1934
+ {
1935
+ "epoch": 11.58590308370044,
1936
+ "grad_norm": 0.9919267892837524,
1937
+ "learning_rate": 6.837004405286343e-05,
1938
+ "loss": 0.3335,
1939
+ "step": 2630
1940
+ },
1941
+ {
1942
+ "epoch": 11.629955947136564,
1943
+ "grad_norm": 0.9724282026290894,
1944
+ "learning_rate": 6.748898678414096e-05,
1945
+ "loss": 0.3154,
1946
+ "step": 2640
1947
+ },
1948
+ {
1949
+ "epoch": 11.674008810572687,
1950
+ "grad_norm": 1.3246617317199707,
1951
+ "learning_rate": 6.660792951541849e-05,
1952
+ "loss": 0.4366,
1953
+ "step": 2650
1954
+ },
1955
+ {
1956
+ "epoch": 11.71806167400881,
1957
+ "grad_norm": 1.0111949443817139,
1958
+ "learning_rate": 6.572687224669603e-05,
1959
+ "loss": 0.3324,
1960
+ "step": 2660
1961
+ },
1962
+ {
1963
+ "epoch": 11.762114537444933,
1964
+ "grad_norm": 0.8399791717529297,
1965
+ "learning_rate": 6.484581497797357e-05,
1966
+ "loss": 0.2669,
1967
+ "step": 2670
1968
+ },
1969
+ {
1970
+ "epoch": 11.806167400881057,
1971
+ "grad_norm": 0.917736828327179,
1972
+ "learning_rate": 6.39647577092511e-05,
1973
+ "loss": 0.324,
1974
+ "step": 2680
1975
+ },
1976
+ {
1977
+ "epoch": 11.85022026431718,
1978
+ "grad_norm": 0.9939138293266296,
1979
+ "learning_rate": 6.308370044052864e-05,
1980
+ "loss": 0.2888,
1981
+ "step": 2690
1982
+ },
1983
+ {
1984
+ "epoch": 11.894273127753303,
1985
+ "grad_norm": 0.9510142803192139,
1986
+ "learning_rate": 6.220264317180616e-05,
1987
+ "loss": 0.3428,
1988
+ "step": 2700
1989
+ },
1990
+ {
1991
+ "epoch": 11.938325991189426,
1992
+ "grad_norm": 1.3216148614883423,
1993
+ "learning_rate": 6.132158590308369e-05,
1994
+ "loss": 0.3254,
1995
+ "step": 2710
1996
+ },
1997
+ {
1998
+ "epoch": 11.982378854625551,
1999
+ "grad_norm": 1.2755056619644165,
2000
+ "learning_rate": 6.0440528634361224e-05,
2001
+ "loss": 0.3188,
2002
+ "step": 2720
2003
+ },
2004
+ {
2005
+ "epoch": 12.0,
2006
+ "eval_loss": 0.5862967371940613,
2007
+ "eval_runtime": 5.4591,
2008
+ "eval_samples_per_second": 37.002,
2009
+ "eval_steps_per_second": 4.763,
2010
+ "step": 2724
2011
+ },
2012
+ {
2013
+ "epoch": 12.026431718061675,
2014
+ "grad_norm": 0.8617092967033386,
2015
+ "learning_rate": 5.955947136563876e-05,
2016
+ "loss": 0.2953,
2017
+ "step": 2730
2018
+ },
2019
+ {
2020
+ "epoch": 12.070484581497798,
2021
+ "grad_norm": 0.7434670329093933,
2022
+ "learning_rate": 5.86784140969163e-05,
2023
+ "loss": 0.312,
2024
+ "step": 2740
2025
+ },
2026
+ {
2027
+ "epoch": 12.114537444933921,
2028
+ "grad_norm": 0.9274753332138062,
2029
+ "learning_rate": 5.779735682819383e-05,
2030
+ "loss": 0.2664,
2031
+ "step": 2750
2032
+ },
2033
+ {
2034
+ "epoch": 12.158590308370044,
2035
+ "grad_norm": 1.058923363685608,
2036
+ "learning_rate": 5.691629955947135e-05,
2037
+ "loss": 0.3236,
2038
+ "step": 2760
2039
+ },
2040
+ {
2041
+ "epoch": 12.202643171806168,
2042
+ "grad_norm": 0.7601414918899536,
2043
+ "learning_rate": 5.6035242290748894e-05,
2044
+ "loss": 0.3076,
2045
+ "step": 2770
2046
+ },
2047
+ {
2048
+ "epoch": 12.246696035242291,
2049
+ "grad_norm": 0.7787047624588013,
2050
+ "learning_rate": 5.515418502202643e-05,
2051
+ "loss": 0.2618,
2052
+ "step": 2780
2053
+ },
2054
+ {
2055
+ "epoch": 12.290748898678414,
2056
+ "grad_norm": 0.9064326882362366,
2057
+ "learning_rate": 5.427312775330396e-05,
2058
+ "loss": 0.3305,
2059
+ "step": 2790
2060
+ },
2061
+ {
2062
+ "epoch": 12.334801762114537,
2063
+ "grad_norm": 1.0712478160858154,
2064
+ "learning_rate": 5.3392070484581496e-05,
2065
+ "loss": 0.341,
2066
+ "step": 2800
2067
+ },
2068
+ {
2069
+ "epoch": 12.37885462555066,
2070
+ "grad_norm": 0.6585920453071594,
2071
+ "learning_rate": 5.251101321585903e-05,
2072
+ "loss": 0.3476,
2073
+ "step": 2810
2074
+ },
2075
+ {
2076
+ "epoch": 12.422907488986784,
2077
+ "grad_norm": 1.1152169704437256,
2078
+ "learning_rate": 5.162995594713656e-05,
2079
+ "loss": 0.3418,
2080
+ "step": 2820
2081
+ },
2082
+ {
2083
+ "epoch": 12.466960352422907,
2084
+ "grad_norm": 0.926008403301239,
2085
+ "learning_rate": 5.074889867841409e-05,
2086
+ "loss": 0.2543,
2087
+ "step": 2830
2088
+ },
2089
+ {
2090
+ "epoch": 12.51101321585903,
2091
+ "grad_norm": 1.1506083011627197,
2092
+ "learning_rate": 4.9867841409691625e-05,
2093
+ "loss": 0.2895,
2094
+ "step": 2840
2095
+ },
2096
+ {
2097
+ "epoch": 12.555066079295154,
2098
+ "grad_norm": 0.8726121783256531,
2099
+ "learning_rate": 4.898678414096916e-05,
2100
+ "loss": 0.2917,
2101
+ "step": 2850
2102
+ },
2103
+ {
2104
+ "epoch": 12.599118942731277,
2105
+ "grad_norm": 1.1620839834213257,
2106
+ "learning_rate": 4.810572687224669e-05,
2107
+ "loss": 0.3585,
2108
+ "step": 2860
2109
+ },
2110
+ {
2111
+ "epoch": 12.6431718061674,
2112
+ "grad_norm": 1.1911215782165527,
2113
+ "learning_rate": 4.7224669603524226e-05,
2114
+ "loss": 0.3177,
2115
+ "step": 2870
2116
+ },
2117
+ {
2118
+ "epoch": 12.687224669603523,
2119
+ "grad_norm": 0.9236161708831787,
2120
+ "learning_rate": 4.6343612334801754e-05,
2121
+ "loss": 0.3203,
2122
+ "step": 2880
2123
+ },
2124
+ {
2125
+ "epoch": 12.731277533039648,
2126
+ "grad_norm": 1.0384935140609741,
2127
+ "learning_rate": 4.546255506607929e-05,
2128
+ "loss": 0.3264,
2129
+ "step": 2890
2130
+ },
2131
+ {
2132
+ "epoch": 12.775330396475772,
2133
+ "grad_norm": 1.3048256635665894,
2134
+ "learning_rate": 4.458149779735682e-05,
2135
+ "loss": 0.3544,
2136
+ "step": 2900
2137
+ },
2138
+ {
2139
+ "epoch": 12.819383259911895,
2140
+ "grad_norm": 1.127678394317627,
2141
+ "learning_rate": 4.370044052863436e-05,
2142
+ "loss": 0.3768,
2143
+ "step": 2910
2144
+ },
2145
+ {
2146
+ "epoch": 12.863436123348018,
2147
+ "grad_norm": 0.9425409436225891,
2148
+ "learning_rate": 4.2819383259911896e-05,
2149
+ "loss": 0.2778,
2150
+ "step": 2920
2151
+ },
2152
+ {
2153
+ "epoch": 12.907488986784141,
2154
+ "grad_norm": 1.2469598054885864,
2155
+ "learning_rate": 4.1938325991189416e-05,
2156
+ "loss": 0.3532,
2157
+ "step": 2930
2158
+ },
2159
+ {
2160
+ "epoch": 12.951541850220265,
2161
+ "grad_norm": 0.7975876927375793,
2162
+ "learning_rate": 4.105726872246696e-05,
2163
+ "loss": 0.3189,
2164
+ "step": 2940
2165
+ },
2166
+ {
2167
+ "epoch": 12.995594713656388,
2168
+ "grad_norm": 0.8869457840919495,
2169
+ "learning_rate": 4.017621145374449e-05,
2170
+ "loss": 0.3282,
2171
+ "step": 2950
2172
+ },
2173
+ {
2174
+ "epoch": 13.0,
2175
+ "eval_loss": 0.5989018678665161,
2176
+ "eval_runtime": 4.9822,
2177
+ "eval_samples_per_second": 40.544,
2178
+ "eval_steps_per_second": 5.219,
2179
+ "step": 2951
2180
+ },
2181
+ {
2182
+ "epoch": 13.039647577092511,
2183
+ "grad_norm": 1.1934689283370972,
2184
+ "learning_rate": 3.9295154185022025e-05,
2185
+ "loss": 0.2919,
2186
+ "step": 2960
2187
+ },
2188
+ {
2189
+ "epoch": 13.083700440528634,
2190
+ "grad_norm": 1.1812618970870972,
2191
+ "learning_rate": 3.841409691629956e-05,
2192
+ "loss": 0.3219,
2193
+ "step": 2970
2194
+ },
2195
+ {
2196
+ "epoch": 13.127753303964758,
2197
+ "grad_norm": 1.2065187692642212,
2198
+ "learning_rate": 3.753303964757709e-05,
2199
+ "loss": 0.29,
2200
+ "step": 2980
2201
+ },
2202
+ {
2203
+ "epoch": 13.17180616740088,
2204
+ "grad_norm": 0.8890476822853088,
2205
+ "learning_rate": 3.665198237885462e-05,
2206
+ "loss": 0.2462,
2207
+ "step": 2990
2208
+ },
2209
+ {
2210
+ "epoch": 13.215859030837004,
2211
+ "grad_norm": 1.2433491945266724,
2212
+ "learning_rate": 3.5770925110132154e-05,
2213
+ "loss": 0.2614,
2214
+ "step": 3000
2215
+ }
2216
+ ],
2217
+ "logging_steps": 10,
2218
+ "max_steps": 3405,
2219
+ "num_input_tokens_seen": 0,
2220
+ "num_train_epochs": 15,
2221
+ "save_steps": 500,
2222
+ "stateful_callbacks": {
2223
+ "TrainerControl": {
2224
+ "args": {
2225
+ "should_epoch_stop": false,
2226
+ "should_evaluate": false,
2227
+ "should_log": false,
2228
+ "should_save": true,
2229
+ "should_training_stop": false
2230
+ },
2231
+ "attributes": {}
2232
+ }
2233
+ },
2234
+ "total_flos": 4451705806651392.0,
2235
+ "train_batch_size": 8,
2236
+ "trial_name": null,
2237
+ "trial_params": null
2238
+ }
tmp_results/checkpoint-3000/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1edef10a07a64822c4f208cd46f60e8c0cb7451bbb616f0e25deb26d71f518ec
3
+ size 5969
tmp_results/checkpoint-3405/config.json ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "T5ForConditionalGeneration"
4
+ ],
5
+ "classifier_dropout": 0.0,
6
+ "d_ff": 1024,
7
+ "d_kv": 64,
8
+ "d_model": 512,
9
+ "decoder_start_token_id": 0,
10
+ "dense_act_fn": "gelu_new",
11
+ "dropout_rate": 0.1,
12
+ "dtype": "float32",
13
+ "eos_token_id": 1,
14
+ "feed_forward_proj": "gated-gelu",
15
+ "initializer_factor": 1.0,
16
+ "is_encoder_decoder": true,
17
+ "is_gated_act": true,
18
+ "layer_norm_epsilon": 1e-06,
19
+ "model_type": "t5",
20
+ "n_positions": 512,
21
+ "num_decoder_layers": 8,
22
+ "num_heads": 6,
23
+ "num_layers": 8,
24
+ "output_past": true,
25
+ "pad_token_id": 0,
26
+ "relative_attention_max_distance": 128,
27
+ "relative_attention_num_buckets": 32,
28
+ "task_specific_params": {
29
+ "summarization": {
30
+ "early_stopping": true,
31
+ "length_penalty": 2.0,
32
+ "max_length": 200,
33
+ "min_length": 30,
34
+ "no_repeat_ngram_size": 3,
35
+ "num_beams": 4,
36
+ "prefix": "summarize: "
37
+ },
38
+ "translation_en_to_de": {
39
+ "early_stopping": true,
40
+ "max_length": 300,
41
+ "num_beams": 4,
42
+ "prefix": "translate English to German: "
43
+ },
44
+ "translation_en_to_fr": {
45
+ "early_stopping": true,
46
+ "max_length": 300,
47
+ "num_beams": 4,
48
+ "prefix": "translate English to French: "
49
+ },
50
+ "translation_en_to_ro": {
51
+ "early_stopping": true,
52
+ "max_length": 300,
53
+ "num_beams": 4,
54
+ "prefix": "translate English to Romanian: "
55
+ }
56
+ },
57
+ "tie_word_embeddings": false,
58
+ "transformers_version": "4.57.3",
59
+ "use_cache": true,
60
+ "vocab_size": 32128
61
+ }
tmp_results/checkpoint-3405/generation_config.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "decoder_start_token_id": 0,
4
+ "eos_token_id": [
5
+ 1
6
+ ],
7
+ "pad_token_id": 0,
8
+ "transformers_version": "4.57.3"
9
+ }
tmp_results/checkpoint-3405/model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2b3a2db3625722dbe2a012701d1a2671dcc1f6a501e40cb8ccce8b33aba70610
3
+ size 307867048
tmp_results/checkpoint-3405/optimizer.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:65ae0201e11ac49a46ccecdaa995dafef5b1cd457dac98c1a055059094810d05
3
+ size 615844491
tmp_results/checkpoint-3405/rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4fe1113ae2a197d50471fb03eb02da32e0f166a18e368f89b685446286dd1aa1
3
+ size 14455
tmp_results/checkpoint-3405/scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76683b9808c402a0e771d1b0b2e073855c4da6a945f66763a6a5f129db15af14
3
+ size 1465
tmp_results/checkpoint-3405/special_tokens_map.json ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<extra_id_0>",
4
+ "<extra_id_1>",
5
+ "<extra_id_2>",
6
+ "<extra_id_3>",
7
+ "<extra_id_4>",
8
+ "<extra_id_5>",
9
+ "<extra_id_6>",
10
+ "<extra_id_7>",
11
+ "<extra_id_8>",
12
+ "<extra_id_9>",
13
+ "<extra_id_10>",
14
+ "<extra_id_11>",
15
+ "<extra_id_12>",
16
+ "<extra_id_13>",
17
+ "<extra_id_14>",
18
+ "<extra_id_15>",
19
+ "<extra_id_16>",
20
+ "<extra_id_17>",
21
+ "<extra_id_18>",
22
+ "<extra_id_19>",
23
+ "<extra_id_20>",
24
+ "<extra_id_21>",
25
+ "<extra_id_22>",
26
+ "<extra_id_23>",
27
+ "<extra_id_24>",
28
+ "<extra_id_25>",
29
+ "<extra_id_26>",
30
+ "<extra_id_27>",
31
+ "<extra_id_28>",
32
+ "<extra_id_29>",
33
+ "<extra_id_30>",
34
+ "<extra_id_31>",
35
+ "<extra_id_32>",
36
+ "<extra_id_33>",
37
+ "<extra_id_34>",
38
+ "<extra_id_35>",
39
+ "<extra_id_36>",
40
+ "<extra_id_37>",
41
+ "<extra_id_38>",
42
+ "<extra_id_39>",
43
+ "<extra_id_40>",
44
+ "<extra_id_41>",
45
+ "<extra_id_42>",
46
+ "<extra_id_43>",
47
+ "<extra_id_44>",
48
+ "<extra_id_45>",
49
+ "<extra_id_46>",
50
+ "<extra_id_47>",
51
+ "<extra_id_48>",
52
+ "<extra_id_49>",
53
+ "<extra_id_50>",
54
+ "<extra_id_51>",
55
+ "<extra_id_52>",
56
+ "<extra_id_53>",
57
+ "<extra_id_54>",
58
+ "<extra_id_55>",
59
+ "<extra_id_56>",
60
+ "<extra_id_57>",
61
+ "<extra_id_58>",
62
+ "<extra_id_59>",
63
+ "<extra_id_60>",
64
+ "<extra_id_61>",
65
+ "<extra_id_62>",
66
+ "<extra_id_63>",
67
+ "<extra_id_64>",
68
+ "<extra_id_65>",
69
+ "<extra_id_66>",
70
+ "<extra_id_67>",
71
+ "<extra_id_68>",
72
+ "<extra_id_69>",
73
+ "<extra_id_70>",
74
+ "<extra_id_71>",
75
+ "<extra_id_72>",
76
+ "<extra_id_73>",
77
+ "<extra_id_74>",
78
+ "<extra_id_75>",
79
+ "<extra_id_76>",
80
+ "<extra_id_77>",
81
+ "<extra_id_78>",
82
+ "<extra_id_79>",
83
+ "<extra_id_80>",
84
+ "<extra_id_81>",
85
+ "<extra_id_82>",
86
+ "<extra_id_83>",
87
+ "<extra_id_84>",
88
+ "<extra_id_85>",
89
+ "<extra_id_86>",
90
+ "<extra_id_87>",
91
+ "<extra_id_88>",
92
+ "<extra_id_89>",
93
+ "<extra_id_90>",
94
+ "<extra_id_91>",
95
+ "<extra_id_92>",
96
+ "<extra_id_93>",
97
+ "<extra_id_94>",
98
+ "<extra_id_95>",
99
+ "<extra_id_96>",
100
+ "<extra_id_97>",
101
+ "<extra_id_98>",
102
+ "<extra_id_99>"
103
+ ],
104
+ "eos_token": {
105
+ "content": "</s>",
106
+ "lstrip": false,
107
+ "normalized": false,
108
+ "rstrip": false,
109
+ "single_word": false
110
+ },
111
+ "pad_token": {
112
+ "content": "<pad>",
113
+ "lstrip": false,
114
+ "normalized": false,
115
+ "rstrip": false,
116
+ "single_word": false
117
+ },
118
+ "unk_token": {
119
+ "content": "<unk>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false
124
+ }
125
+ }
tmp_results/checkpoint-3405/spiece.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
3
+ size 791656
tmp_results/checkpoint-3405/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tmp_results/checkpoint-3405/tokenizer_config.json ADDED
@@ -0,0 +1,940 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_prefix_space": null,
3
+ "added_tokens_decoder": {
4
+ "0": {
5
+ "content": "<pad>",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "1": {
13
+ "content": "</s>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "2": {
21
+ "content": "<unk>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false,
26
+ "special": true
27
+ },
28
+ "32000": {
29
+ "content": "<extra_id_99>",
30
+ "lstrip": false,
31
+ "normalized": false,
32
+ "rstrip": false,
33
+ "single_word": false,
34
+ "special": true
35
+ },
36
+ "32001": {
37
+ "content": "<extra_id_98>",
38
+ "lstrip": false,
39
+ "normalized": false,
40
+ "rstrip": false,
41
+ "single_word": false,
42
+ "special": true
43
+ },
44
+ "32002": {
45
+ "content": "<extra_id_97>",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false,
50
+ "special": true
51
+ },
52
+ "32003": {
53
+ "content": "<extra_id_96>",
54
+ "lstrip": false,
55
+ "normalized": false,
56
+ "rstrip": false,
57
+ "single_word": false,
58
+ "special": true
59
+ },
60
+ "32004": {
61
+ "content": "<extra_id_95>",
62
+ "lstrip": false,
63
+ "normalized": false,
64
+ "rstrip": false,
65
+ "single_word": false,
66
+ "special": true
67
+ },
68
+ "32005": {
69
+ "content": "<extra_id_94>",
70
+ "lstrip": false,
71
+ "normalized": false,
72
+ "rstrip": false,
73
+ "single_word": false,
74
+ "special": true
75
+ },
76
+ "32006": {
77
+ "content": "<extra_id_93>",
78
+ "lstrip": false,
79
+ "normalized": false,
80
+ "rstrip": false,
81
+ "single_word": false,
82
+ "special": true
83
+ },
84
+ "32007": {
85
+ "content": "<extra_id_92>",
86
+ "lstrip": false,
87
+ "normalized": false,
88
+ "rstrip": false,
89
+ "single_word": false,
90
+ "special": true
91
+ },
92
+ "32008": {
93
+ "content": "<extra_id_91>",
94
+ "lstrip": false,
95
+ "normalized": false,
96
+ "rstrip": false,
97
+ "single_word": false,
98
+ "special": true
99
+ },
100
+ "32009": {
101
+ "content": "<extra_id_90>",
102
+ "lstrip": false,
103
+ "normalized": false,
104
+ "rstrip": false,
105
+ "single_word": false,
106
+ "special": true
107
+ },
108
+ "32010": {
109
+ "content": "<extra_id_89>",
110
+ "lstrip": false,
111
+ "normalized": false,
112
+ "rstrip": false,
113
+ "single_word": false,
114
+ "special": true
115
+ },
116
+ "32011": {
117
+ "content": "<extra_id_88>",
118
+ "lstrip": false,
119
+ "normalized": false,
120
+ "rstrip": false,
121
+ "single_word": false,
122
+ "special": true
123
+ },
124
+ "32012": {
125
+ "content": "<extra_id_87>",
126
+ "lstrip": false,
127
+ "normalized": false,
128
+ "rstrip": false,
129
+ "single_word": false,
130
+ "special": true
131
+ },
132
+ "32013": {
133
+ "content": "<extra_id_86>",
134
+ "lstrip": false,
135
+ "normalized": false,
136
+ "rstrip": false,
137
+ "single_word": false,
138
+ "special": true
139
+ },
140
+ "32014": {
141
+ "content": "<extra_id_85>",
142
+ "lstrip": false,
143
+ "normalized": false,
144
+ "rstrip": false,
145
+ "single_word": false,
146
+ "special": true
147
+ },
148
+ "32015": {
149
+ "content": "<extra_id_84>",
150
+ "lstrip": false,
151
+ "normalized": false,
152
+ "rstrip": false,
153
+ "single_word": false,
154
+ "special": true
155
+ },
156
+ "32016": {
157
+ "content": "<extra_id_83>",
158
+ "lstrip": false,
159
+ "normalized": false,
160
+ "rstrip": false,
161
+ "single_word": false,
162
+ "special": true
163
+ },
164
+ "32017": {
165
+ "content": "<extra_id_82>",
166
+ "lstrip": false,
167
+ "normalized": false,
168
+ "rstrip": false,
169
+ "single_word": false,
170
+ "special": true
171
+ },
172
+ "32018": {
173
+ "content": "<extra_id_81>",
174
+ "lstrip": false,
175
+ "normalized": false,
176
+ "rstrip": false,
177
+ "single_word": false,
178
+ "special": true
179
+ },
180
+ "32019": {
181
+ "content": "<extra_id_80>",
182
+ "lstrip": false,
183
+ "normalized": false,
184
+ "rstrip": false,
185
+ "single_word": false,
186
+ "special": true
187
+ },
188
+ "32020": {
189
+ "content": "<extra_id_79>",
190
+ "lstrip": false,
191
+ "normalized": false,
192
+ "rstrip": false,
193
+ "single_word": false,
194
+ "special": true
195
+ },
196
+ "32021": {
197
+ "content": "<extra_id_78>",
198
+ "lstrip": false,
199
+ "normalized": false,
200
+ "rstrip": false,
201
+ "single_word": false,
202
+ "special": true
203
+ },
204
+ "32022": {
205
+ "content": "<extra_id_77>",
206
+ "lstrip": false,
207
+ "normalized": false,
208
+ "rstrip": false,
209
+ "single_word": false,
210
+ "special": true
211
+ },
212
+ "32023": {
213
+ "content": "<extra_id_76>",
214
+ "lstrip": false,
215
+ "normalized": false,
216
+ "rstrip": false,
217
+ "single_word": false,
218
+ "special": true
219
+ },
220
+ "32024": {
221
+ "content": "<extra_id_75>",
222
+ "lstrip": false,
223
+ "normalized": false,
224
+ "rstrip": false,
225
+ "single_word": false,
226
+ "special": true
227
+ },
228
+ "32025": {
229
+ "content": "<extra_id_74>",
230
+ "lstrip": false,
231
+ "normalized": false,
232
+ "rstrip": false,
233
+ "single_word": false,
234
+ "special": true
235
+ },
236
+ "32026": {
237
+ "content": "<extra_id_73>",
238
+ "lstrip": false,
239
+ "normalized": false,
240
+ "rstrip": false,
241
+ "single_word": false,
242
+ "special": true
243
+ },
244
+ "32027": {
245
+ "content": "<extra_id_72>",
246
+ "lstrip": false,
247
+ "normalized": false,
248
+ "rstrip": false,
249
+ "single_word": false,
250
+ "special": true
251
+ },
252
+ "32028": {
253
+ "content": "<extra_id_71>",
254
+ "lstrip": false,
255
+ "normalized": false,
256
+ "rstrip": false,
257
+ "single_word": false,
258
+ "special": true
259
+ },
260
+ "32029": {
261
+ "content": "<extra_id_70>",
262
+ "lstrip": false,
263
+ "normalized": false,
264
+ "rstrip": false,
265
+ "single_word": false,
266
+ "special": true
267
+ },
268
+ "32030": {
269
+ "content": "<extra_id_69>",
270
+ "lstrip": false,
271
+ "normalized": false,
272
+ "rstrip": false,
273
+ "single_word": false,
274
+ "special": true
275
+ },
276
+ "32031": {
277
+ "content": "<extra_id_68>",
278
+ "lstrip": false,
279
+ "normalized": false,
280
+ "rstrip": false,
281
+ "single_word": false,
282
+ "special": true
283
+ },
284
+ "32032": {
285
+ "content": "<extra_id_67>",
286
+ "lstrip": false,
287
+ "normalized": false,
288
+ "rstrip": false,
289
+ "single_word": false,
290
+ "special": true
291
+ },
292
+ "32033": {
293
+ "content": "<extra_id_66>",
294
+ "lstrip": false,
295
+ "normalized": false,
296
+ "rstrip": false,
297
+ "single_word": false,
298
+ "special": true
299
+ },
300
+ "32034": {
301
+ "content": "<extra_id_65>",
302
+ "lstrip": false,
303
+ "normalized": false,
304
+ "rstrip": false,
305
+ "single_word": false,
306
+ "special": true
307
+ },
308
+ "32035": {
309
+ "content": "<extra_id_64>",
310
+ "lstrip": false,
311
+ "normalized": false,
312
+ "rstrip": false,
313
+ "single_word": false,
314
+ "special": true
315
+ },
316
+ "32036": {
317
+ "content": "<extra_id_63>",
318
+ "lstrip": false,
319
+ "normalized": false,
320
+ "rstrip": false,
321
+ "single_word": false,
322
+ "special": true
323
+ },
324
+ "32037": {
325
+ "content": "<extra_id_62>",
326
+ "lstrip": false,
327
+ "normalized": false,
328
+ "rstrip": false,
329
+ "single_word": false,
330
+ "special": true
331
+ },
332
+ "32038": {
333
+ "content": "<extra_id_61>",
334
+ "lstrip": false,
335
+ "normalized": false,
336
+ "rstrip": false,
337
+ "single_word": false,
338
+ "special": true
339
+ },
340
+ "32039": {
341
+ "content": "<extra_id_60>",
342
+ "lstrip": false,
343
+ "normalized": false,
344
+ "rstrip": false,
345
+ "single_word": false,
346
+ "special": true
347
+ },
348
+ "32040": {
349
+ "content": "<extra_id_59>",
350
+ "lstrip": false,
351
+ "normalized": false,
352
+ "rstrip": false,
353
+ "single_word": false,
354
+ "special": true
355
+ },
356
+ "32041": {
357
+ "content": "<extra_id_58>",
358
+ "lstrip": false,
359
+ "normalized": false,
360
+ "rstrip": false,
361
+ "single_word": false,
362
+ "special": true
363
+ },
364
+ "32042": {
365
+ "content": "<extra_id_57>",
366
+ "lstrip": false,
367
+ "normalized": false,
368
+ "rstrip": false,
369
+ "single_word": false,
370
+ "special": true
371
+ },
372
+ "32043": {
373
+ "content": "<extra_id_56>",
374
+ "lstrip": false,
375
+ "normalized": false,
376
+ "rstrip": false,
377
+ "single_word": false,
378
+ "special": true
379
+ },
380
+ "32044": {
381
+ "content": "<extra_id_55>",
382
+ "lstrip": false,
383
+ "normalized": false,
384
+ "rstrip": false,
385
+ "single_word": false,
386
+ "special": true
387
+ },
388
+ "32045": {
389
+ "content": "<extra_id_54>",
390
+ "lstrip": false,
391
+ "normalized": false,
392
+ "rstrip": false,
393
+ "single_word": false,
394
+ "special": true
395
+ },
396
+ "32046": {
397
+ "content": "<extra_id_53>",
398
+ "lstrip": false,
399
+ "normalized": false,
400
+ "rstrip": false,
401
+ "single_word": false,
402
+ "special": true
403
+ },
404
+ "32047": {
405
+ "content": "<extra_id_52>",
406
+ "lstrip": false,
407
+ "normalized": false,
408
+ "rstrip": false,
409
+ "single_word": false,
410
+ "special": true
411
+ },
412
+ "32048": {
413
+ "content": "<extra_id_51>",
414
+ "lstrip": false,
415
+ "normalized": false,
416
+ "rstrip": false,
417
+ "single_word": false,
418
+ "special": true
419
+ },
420
+ "32049": {
421
+ "content": "<extra_id_50>",
422
+ "lstrip": false,
423
+ "normalized": false,
424
+ "rstrip": false,
425
+ "single_word": false,
426
+ "special": true
427
+ },
428
+ "32050": {
429
+ "content": "<extra_id_49>",
430
+ "lstrip": false,
431
+ "normalized": false,
432
+ "rstrip": false,
433
+ "single_word": false,
434
+ "special": true
435
+ },
436
+ "32051": {
437
+ "content": "<extra_id_48>",
438
+ "lstrip": false,
439
+ "normalized": false,
440
+ "rstrip": false,
441
+ "single_word": false,
442
+ "special": true
443
+ },
444
+ "32052": {
445
+ "content": "<extra_id_47>",
446
+ "lstrip": false,
447
+ "normalized": false,
448
+ "rstrip": false,
449
+ "single_word": false,
450
+ "special": true
451
+ },
452
+ "32053": {
453
+ "content": "<extra_id_46>",
454
+ "lstrip": false,
455
+ "normalized": false,
456
+ "rstrip": false,
457
+ "single_word": false,
458
+ "special": true
459
+ },
460
+ "32054": {
461
+ "content": "<extra_id_45>",
462
+ "lstrip": false,
463
+ "normalized": false,
464
+ "rstrip": false,
465
+ "single_word": false,
466
+ "special": true
467
+ },
468
+ "32055": {
469
+ "content": "<extra_id_44>",
470
+ "lstrip": false,
471
+ "normalized": false,
472
+ "rstrip": false,
473
+ "single_word": false,
474
+ "special": true
475
+ },
476
+ "32056": {
477
+ "content": "<extra_id_43>",
478
+ "lstrip": false,
479
+ "normalized": false,
480
+ "rstrip": false,
481
+ "single_word": false,
482
+ "special": true
483
+ },
484
+ "32057": {
485
+ "content": "<extra_id_42>",
486
+ "lstrip": false,
487
+ "normalized": false,
488
+ "rstrip": false,
489
+ "single_word": false,
490
+ "special": true
491
+ },
492
+ "32058": {
493
+ "content": "<extra_id_41>",
494
+ "lstrip": false,
495
+ "normalized": false,
496
+ "rstrip": false,
497
+ "single_word": false,
498
+ "special": true
499
+ },
500
+ "32059": {
501
+ "content": "<extra_id_40>",
502
+ "lstrip": false,
503
+ "normalized": false,
504
+ "rstrip": false,
505
+ "single_word": false,
506
+ "special": true
507
+ },
508
+ "32060": {
509
+ "content": "<extra_id_39>",
510
+ "lstrip": false,
511
+ "normalized": false,
512
+ "rstrip": false,
513
+ "single_word": false,
514
+ "special": true
515
+ },
516
+ "32061": {
517
+ "content": "<extra_id_38>",
518
+ "lstrip": false,
519
+ "normalized": false,
520
+ "rstrip": false,
521
+ "single_word": false,
522
+ "special": true
523
+ },
524
+ "32062": {
525
+ "content": "<extra_id_37>",
526
+ "lstrip": false,
527
+ "normalized": false,
528
+ "rstrip": false,
529
+ "single_word": false,
530
+ "special": true
531
+ },
532
+ "32063": {
533
+ "content": "<extra_id_36>",
534
+ "lstrip": false,
535
+ "normalized": false,
536
+ "rstrip": false,
537
+ "single_word": false,
538
+ "special": true
539
+ },
540
+ "32064": {
541
+ "content": "<extra_id_35>",
542
+ "lstrip": false,
543
+ "normalized": false,
544
+ "rstrip": false,
545
+ "single_word": false,
546
+ "special": true
547
+ },
548
+ "32065": {
549
+ "content": "<extra_id_34>",
550
+ "lstrip": false,
551
+ "normalized": false,
552
+ "rstrip": false,
553
+ "single_word": false,
554
+ "special": true
555
+ },
556
+ "32066": {
557
+ "content": "<extra_id_33>",
558
+ "lstrip": false,
559
+ "normalized": false,
560
+ "rstrip": false,
561
+ "single_word": false,
562
+ "special": true
563
+ },
564
+ "32067": {
565
+ "content": "<extra_id_32>",
566
+ "lstrip": false,
567
+ "normalized": false,
568
+ "rstrip": false,
569
+ "single_word": false,
570
+ "special": true
571
+ },
572
+ "32068": {
573
+ "content": "<extra_id_31>",
574
+ "lstrip": false,
575
+ "normalized": false,
576
+ "rstrip": false,
577
+ "single_word": false,
578
+ "special": true
579
+ },
580
+ "32069": {
581
+ "content": "<extra_id_30>",
582
+ "lstrip": false,
583
+ "normalized": false,
584
+ "rstrip": false,
585
+ "single_word": false,
586
+ "special": true
587
+ },
588
+ "32070": {
589
+ "content": "<extra_id_29>",
590
+ "lstrip": false,
591
+ "normalized": false,
592
+ "rstrip": false,
593
+ "single_word": false,
594
+ "special": true
595
+ },
596
+ "32071": {
597
+ "content": "<extra_id_28>",
598
+ "lstrip": false,
599
+ "normalized": false,
600
+ "rstrip": false,
601
+ "single_word": false,
602
+ "special": true
603
+ },
604
+ "32072": {
605
+ "content": "<extra_id_27>",
606
+ "lstrip": false,
607
+ "normalized": false,
608
+ "rstrip": false,
609
+ "single_word": false,
610
+ "special": true
611
+ },
612
+ "32073": {
613
+ "content": "<extra_id_26>",
614
+ "lstrip": false,
615
+ "normalized": false,
616
+ "rstrip": false,
617
+ "single_word": false,
618
+ "special": true
619
+ },
620
+ "32074": {
621
+ "content": "<extra_id_25>",
622
+ "lstrip": false,
623
+ "normalized": false,
624
+ "rstrip": false,
625
+ "single_word": false,
626
+ "special": true
627
+ },
628
+ "32075": {
629
+ "content": "<extra_id_24>",
630
+ "lstrip": false,
631
+ "normalized": false,
632
+ "rstrip": false,
633
+ "single_word": false,
634
+ "special": true
635
+ },
636
+ "32076": {
637
+ "content": "<extra_id_23>",
638
+ "lstrip": false,
639
+ "normalized": false,
640
+ "rstrip": false,
641
+ "single_word": false,
642
+ "special": true
643
+ },
644
+ "32077": {
645
+ "content": "<extra_id_22>",
646
+ "lstrip": false,
647
+ "normalized": false,
648
+ "rstrip": false,
649
+ "single_word": false,
650
+ "special": true
651
+ },
652
+ "32078": {
653
+ "content": "<extra_id_21>",
654
+ "lstrip": false,
655
+ "normalized": false,
656
+ "rstrip": false,
657
+ "single_word": false,
658
+ "special": true
659
+ },
660
+ "32079": {
661
+ "content": "<extra_id_20>",
662
+ "lstrip": false,
663
+ "normalized": false,
664
+ "rstrip": false,
665
+ "single_word": false,
666
+ "special": true
667
+ },
668
+ "32080": {
669
+ "content": "<extra_id_19>",
670
+ "lstrip": false,
671
+ "normalized": false,
672
+ "rstrip": false,
673
+ "single_word": false,
674
+ "special": true
675
+ },
676
+ "32081": {
677
+ "content": "<extra_id_18>",
678
+ "lstrip": false,
679
+ "normalized": false,
680
+ "rstrip": false,
681
+ "single_word": false,
682
+ "special": true
683
+ },
684
+ "32082": {
685
+ "content": "<extra_id_17>",
686
+ "lstrip": false,
687
+ "normalized": false,
688
+ "rstrip": false,
689
+ "single_word": false,
690
+ "special": true
691
+ },
692
+ "32083": {
693
+ "content": "<extra_id_16>",
694
+ "lstrip": false,
695
+ "normalized": false,
696
+ "rstrip": false,
697
+ "single_word": false,
698
+ "special": true
699
+ },
700
+ "32084": {
701
+ "content": "<extra_id_15>",
702
+ "lstrip": false,
703
+ "normalized": false,
704
+ "rstrip": false,
705
+ "single_word": false,
706
+ "special": true
707
+ },
708
+ "32085": {
709
+ "content": "<extra_id_14>",
710
+ "lstrip": false,
711
+ "normalized": false,
712
+ "rstrip": false,
713
+ "single_word": false,
714
+ "special": true
715
+ },
716
+ "32086": {
717
+ "content": "<extra_id_13>",
718
+ "lstrip": false,
719
+ "normalized": false,
720
+ "rstrip": false,
721
+ "single_word": false,
722
+ "special": true
723
+ },
724
+ "32087": {
725
+ "content": "<extra_id_12>",
726
+ "lstrip": false,
727
+ "normalized": false,
728
+ "rstrip": false,
729
+ "single_word": false,
730
+ "special": true
731
+ },
732
+ "32088": {
733
+ "content": "<extra_id_11>",
734
+ "lstrip": false,
735
+ "normalized": false,
736
+ "rstrip": false,
737
+ "single_word": false,
738
+ "special": true
739
+ },
740
+ "32089": {
741
+ "content": "<extra_id_10>",
742
+ "lstrip": false,
743
+ "normalized": false,
744
+ "rstrip": false,
745
+ "single_word": false,
746
+ "special": true
747
+ },
748
+ "32090": {
749
+ "content": "<extra_id_9>",
750
+ "lstrip": false,
751
+ "normalized": false,
752
+ "rstrip": false,
753
+ "single_word": false,
754
+ "special": true
755
+ },
756
+ "32091": {
757
+ "content": "<extra_id_8>",
758
+ "lstrip": false,
759
+ "normalized": false,
760
+ "rstrip": false,
761
+ "single_word": false,
762
+ "special": true
763
+ },
764
+ "32092": {
765
+ "content": "<extra_id_7>",
766
+ "lstrip": false,
767
+ "normalized": false,
768
+ "rstrip": false,
769
+ "single_word": false,
770
+ "special": true
771
+ },
772
+ "32093": {
773
+ "content": "<extra_id_6>",
774
+ "lstrip": false,
775
+ "normalized": false,
776
+ "rstrip": false,
777
+ "single_word": false,
778
+ "special": true
779
+ },
780
+ "32094": {
781
+ "content": "<extra_id_5>",
782
+ "lstrip": false,
783
+ "normalized": false,
784
+ "rstrip": false,
785
+ "single_word": false,
786
+ "special": true
787
+ },
788
+ "32095": {
789
+ "content": "<extra_id_4>",
790
+ "lstrip": false,
791
+ "normalized": false,
792
+ "rstrip": false,
793
+ "single_word": false,
794
+ "special": true
795
+ },
796
+ "32096": {
797
+ "content": "<extra_id_3>",
798
+ "lstrip": false,
799
+ "normalized": false,
800
+ "rstrip": false,
801
+ "single_word": false,
802
+ "special": true
803
+ },
804
+ "32097": {
805
+ "content": "<extra_id_2>",
806
+ "lstrip": false,
807
+ "normalized": false,
808
+ "rstrip": false,
809
+ "single_word": false,
810
+ "special": true
811
+ },
812
+ "32098": {
813
+ "content": "<extra_id_1>",
814
+ "lstrip": false,
815
+ "normalized": false,
816
+ "rstrip": false,
817
+ "single_word": false,
818
+ "special": true
819
+ },
820
+ "32099": {
821
+ "content": "<extra_id_0>",
822
+ "lstrip": false,
823
+ "normalized": false,
824
+ "rstrip": false,
825
+ "single_word": false,
826
+ "special": true
827
+ }
828
+ },
829
+ "additional_special_tokens": [
830
+ "<extra_id_0>",
831
+ "<extra_id_1>",
832
+ "<extra_id_2>",
833
+ "<extra_id_3>",
834
+ "<extra_id_4>",
835
+ "<extra_id_5>",
836
+ "<extra_id_6>",
837
+ "<extra_id_7>",
838
+ "<extra_id_8>",
839
+ "<extra_id_9>",
840
+ "<extra_id_10>",
841
+ "<extra_id_11>",
842
+ "<extra_id_12>",
843
+ "<extra_id_13>",
844
+ "<extra_id_14>",
845
+ "<extra_id_15>",
846
+ "<extra_id_16>",
847
+ "<extra_id_17>",
848
+ "<extra_id_18>",
849
+ "<extra_id_19>",
850
+ "<extra_id_20>",
851
+ "<extra_id_21>",
852
+ "<extra_id_22>",
853
+ "<extra_id_23>",
854
+ "<extra_id_24>",
855
+ "<extra_id_25>",
856
+ "<extra_id_26>",
857
+ "<extra_id_27>",
858
+ "<extra_id_28>",
859
+ "<extra_id_29>",
860
+ "<extra_id_30>",
861
+ "<extra_id_31>",
862
+ "<extra_id_32>",
863
+ "<extra_id_33>",
864
+ "<extra_id_34>",
865
+ "<extra_id_35>",
866
+ "<extra_id_36>",
867
+ "<extra_id_37>",
868
+ "<extra_id_38>",
869
+ "<extra_id_39>",
870
+ "<extra_id_40>",
871
+ "<extra_id_41>",
872
+ "<extra_id_42>",
873
+ "<extra_id_43>",
874
+ "<extra_id_44>",
875
+ "<extra_id_45>",
876
+ "<extra_id_46>",
877
+ "<extra_id_47>",
878
+ "<extra_id_48>",
879
+ "<extra_id_49>",
880
+ "<extra_id_50>",
881
+ "<extra_id_51>",
882
+ "<extra_id_52>",
883
+ "<extra_id_53>",
884
+ "<extra_id_54>",
885
+ "<extra_id_55>",
886
+ "<extra_id_56>",
887
+ "<extra_id_57>",
888
+ "<extra_id_58>",
889
+ "<extra_id_59>",
890
+ "<extra_id_60>",
891
+ "<extra_id_61>",
892
+ "<extra_id_62>",
893
+ "<extra_id_63>",
894
+ "<extra_id_64>",
895
+ "<extra_id_65>",
896
+ "<extra_id_66>",
897
+ "<extra_id_67>",
898
+ "<extra_id_68>",
899
+ "<extra_id_69>",
900
+ "<extra_id_70>",
901
+ "<extra_id_71>",
902
+ "<extra_id_72>",
903
+ "<extra_id_73>",
904
+ "<extra_id_74>",
905
+ "<extra_id_75>",
906
+ "<extra_id_76>",
907
+ "<extra_id_77>",
908
+ "<extra_id_78>",
909
+ "<extra_id_79>",
910
+ "<extra_id_80>",
911
+ "<extra_id_81>",
912
+ "<extra_id_82>",
913
+ "<extra_id_83>",
914
+ "<extra_id_84>",
915
+ "<extra_id_85>",
916
+ "<extra_id_86>",
917
+ "<extra_id_87>",
918
+ "<extra_id_88>",
919
+ "<extra_id_89>",
920
+ "<extra_id_90>",
921
+ "<extra_id_91>",
922
+ "<extra_id_92>",
923
+ "<extra_id_93>",
924
+ "<extra_id_94>",
925
+ "<extra_id_95>",
926
+ "<extra_id_96>",
927
+ "<extra_id_97>",
928
+ "<extra_id_98>",
929
+ "<extra_id_99>"
930
+ ],
931
+ "clean_up_tokenization_spaces": false,
932
+ "eos_token": "</s>",
933
+ "extra_ids": 100,
934
+ "extra_special_tokens": {},
935
+ "model_max_length": 512,
936
+ "pad_token": "<pad>",
937
+ "sp_model_kwargs": {},
938
+ "tokenizer_class": "T5Tokenizer",
939
+ "unk_token": "<unk>"
940
+ }
tmp_results/checkpoint-3405/trainer_state.json ADDED
@@ -0,0 +1,2526 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 15.0,
6
+ "eval_steps": 500,
7
+ "global_step": 3405,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.04405286343612335,
14
+ "grad_norm": 32.503074645996094,
15
+ "learning_rate": 0.0002992070484581498,
16
+ "loss": 18.6527,
17
+ "step": 10
18
+ },
19
+ {
20
+ "epoch": 0.0881057268722467,
21
+ "grad_norm": 14.659343719482422,
22
+ "learning_rate": 0.0002983259911894273,
23
+ "loss": 6.941,
24
+ "step": 20
25
+ },
26
+ {
27
+ "epoch": 0.13215859030837004,
28
+ "grad_norm": 13.076040267944336,
29
+ "learning_rate": 0.00029744493392070483,
30
+ "loss": 4.3562,
31
+ "step": 30
32
+ },
33
+ {
34
+ "epoch": 0.1762114537444934,
35
+ "grad_norm": 21.742950439453125,
36
+ "learning_rate": 0.00029656387665198236,
37
+ "loss": 3.5182,
38
+ "step": 40
39
+ },
40
+ {
41
+ "epoch": 0.22026431718061673,
42
+ "grad_norm": 19.481538772583008,
43
+ "learning_rate": 0.0002956828193832599,
44
+ "loss": 2.6543,
45
+ "step": 50
46
+ },
47
+ {
48
+ "epoch": 0.2643171806167401,
49
+ "grad_norm": 8.22918701171875,
50
+ "learning_rate": 0.0002948017621145374,
51
+ "loss": 1.7678,
52
+ "step": 60
53
+ },
54
+ {
55
+ "epoch": 0.30837004405286345,
56
+ "grad_norm": 5.820700168609619,
57
+ "learning_rate": 0.00029392070484581494,
58
+ "loss": 1.2462,
59
+ "step": 70
60
+ },
61
+ {
62
+ "epoch": 0.3524229074889868,
63
+ "grad_norm": 2.173941135406494,
64
+ "learning_rate": 0.00029303964757709247,
65
+ "loss": 1.3665,
66
+ "step": 80
67
+ },
68
+ {
69
+ "epoch": 0.3964757709251101,
70
+ "grad_norm": 1.8419830799102783,
71
+ "learning_rate": 0.00029215859030837,
72
+ "loss": 1.1257,
73
+ "step": 90
74
+ },
75
+ {
76
+ "epoch": 0.44052863436123346,
77
+ "grad_norm": 1.7700544595718384,
78
+ "learning_rate": 0.0002912775330396475,
79
+ "loss": 1.0317,
80
+ "step": 100
81
+ },
82
+ {
83
+ "epoch": 0.4845814977973568,
84
+ "grad_norm": 1.2313016653060913,
85
+ "learning_rate": 0.0002903964757709251,
86
+ "loss": 0.8453,
87
+ "step": 110
88
+ },
89
+ {
90
+ "epoch": 0.5286343612334802,
91
+ "grad_norm": 1.3880990743637085,
92
+ "learning_rate": 0.00028951541850220263,
93
+ "loss": 0.8675,
94
+ "step": 120
95
+ },
96
+ {
97
+ "epoch": 0.5726872246696035,
98
+ "grad_norm": 2.3586199283599854,
99
+ "learning_rate": 0.00028863436123348016,
100
+ "loss": 0.8509,
101
+ "step": 130
102
+ },
103
+ {
104
+ "epoch": 0.6167400881057269,
105
+ "grad_norm": 1.3902242183685303,
106
+ "learning_rate": 0.0002877533039647577,
107
+ "loss": 0.8528,
108
+ "step": 140
109
+ },
110
+ {
111
+ "epoch": 0.6607929515418502,
112
+ "grad_norm": 1.0430246591567993,
113
+ "learning_rate": 0.0002868722466960352,
114
+ "loss": 0.8726,
115
+ "step": 150
116
+ },
117
+ {
118
+ "epoch": 0.7048458149779736,
119
+ "grad_norm": 1.3441425561904907,
120
+ "learning_rate": 0.00028599118942731274,
121
+ "loss": 1.1029,
122
+ "step": 160
123
+ },
124
+ {
125
+ "epoch": 0.748898678414097,
126
+ "grad_norm": 1.18771231174469,
127
+ "learning_rate": 0.00028511013215859026,
128
+ "loss": 0.9229,
129
+ "step": 170
130
+ },
131
+ {
132
+ "epoch": 0.7929515418502202,
133
+ "grad_norm": 0.8050010800361633,
134
+ "learning_rate": 0.0002842290748898678,
135
+ "loss": 0.8363,
136
+ "step": 180
137
+ },
138
+ {
139
+ "epoch": 0.8370044052863436,
140
+ "grad_norm": 1.1800554990768433,
141
+ "learning_rate": 0.0002833480176211453,
142
+ "loss": 0.8128,
143
+ "step": 190
144
+ },
145
+ {
146
+ "epoch": 0.8810572687224669,
147
+ "grad_norm": 1.3873122930526733,
148
+ "learning_rate": 0.00028246696035242285,
149
+ "loss": 0.8275,
150
+ "step": 200
151
+ },
152
+ {
153
+ "epoch": 0.9251101321585903,
154
+ "grad_norm": 1.1561434268951416,
155
+ "learning_rate": 0.0002815859030837004,
156
+ "loss": 0.8961,
157
+ "step": 210
158
+ },
159
+ {
160
+ "epoch": 0.9691629955947136,
161
+ "grad_norm": 0.6641005277633667,
162
+ "learning_rate": 0.00028070484581497795,
163
+ "loss": 0.6599,
164
+ "step": 220
165
+ },
166
+ {
167
+ "epoch": 1.0,
168
+ "eval_loss": 0.6834425330162048,
169
+ "eval_runtime": 5.6075,
170
+ "eval_samples_per_second": 36.023,
171
+ "eval_steps_per_second": 4.637,
172
+ "step": 227
173
+ },
174
+ {
175
+ "epoch": 1.013215859030837,
176
+ "grad_norm": 1.0736805200576782,
177
+ "learning_rate": 0.0002798237885462555,
178
+ "loss": 0.7283,
179
+ "step": 230
180
+ },
181
+ {
182
+ "epoch": 1.0572687224669604,
183
+ "grad_norm": 1.069932460784912,
184
+ "learning_rate": 0.000278942731277533,
185
+ "loss": 0.825,
186
+ "step": 240
187
+ },
188
+ {
189
+ "epoch": 1.1013215859030836,
190
+ "grad_norm": 1.3098456859588623,
191
+ "learning_rate": 0.0002780616740088106,
192
+ "loss": 0.7152,
193
+ "step": 250
194
+ },
195
+ {
196
+ "epoch": 1.145374449339207,
197
+ "grad_norm": 1.0547797679901123,
198
+ "learning_rate": 0.0002771806167400881,
199
+ "loss": 0.746,
200
+ "step": 260
201
+ },
202
+ {
203
+ "epoch": 1.1894273127753303,
204
+ "grad_norm": 1.58526611328125,
205
+ "learning_rate": 0.00027629955947136564,
206
+ "loss": 0.6655,
207
+ "step": 270
208
+ },
209
+ {
210
+ "epoch": 1.2334801762114538,
211
+ "grad_norm": 1.4090569019317627,
212
+ "learning_rate": 0.0002754185022026431,
213
+ "loss": 0.7397,
214
+ "step": 280
215
+ },
216
+ {
217
+ "epoch": 1.277533039647577,
218
+ "grad_norm": 1.3417810201644897,
219
+ "learning_rate": 0.00027453744493392064,
220
+ "loss": 0.6534,
221
+ "step": 290
222
+ },
223
+ {
224
+ "epoch": 1.3215859030837005,
225
+ "grad_norm": 0.7320300936698914,
226
+ "learning_rate": 0.0002736563876651982,
227
+ "loss": 0.719,
228
+ "step": 300
229
+ },
230
+ {
231
+ "epoch": 1.3656387665198237,
232
+ "grad_norm": 1.7867811918258667,
233
+ "learning_rate": 0.00027277533039647575,
234
+ "loss": 0.8878,
235
+ "step": 310
236
+ },
237
+ {
238
+ "epoch": 1.4096916299559472,
239
+ "grad_norm": 1.0332417488098145,
240
+ "learning_rate": 0.0002718942731277533,
241
+ "loss": 0.7058,
242
+ "step": 320
243
+ },
244
+ {
245
+ "epoch": 1.4537444933920705,
246
+ "grad_norm": 1.1942096948623657,
247
+ "learning_rate": 0.0002710132158590308,
248
+ "loss": 0.6599,
249
+ "step": 330
250
+ },
251
+ {
252
+ "epoch": 1.497797356828194,
253
+ "grad_norm": 1.0352708101272583,
254
+ "learning_rate": 0.00027013215859030833,
255
+ "loss": 0.6487,
256
+ "step": 340
257
+ },
258
+ {
259
+ "epoch": 1.5418502202643172,
260
+ "grad_norm": 1.2984694242477417,
261
+ "learning_rate": 0.0002692511013215859,
262
+ "loss": 0.7225,
263
+ "step": 350
264
+ },
265
+ {
266
+ "epoch": 1.5859030837004404,
267
+ "grad_norm": 1.1419997215270996,
268
+ "learning_rate": 0.00026837004405286344,
269
+ "loss": 0.7233,
270
+ "step": 360
271
+ },
272
+ {
273
+ "epoch": 1.6299559471365639,
274
+ "grad_norm": 1.5873011350631714,
275
+ "learning_rate": 0.00026748898678414097,
276
+ "loss": 0.5429,
277
+ "step": 370
278
+ },
279
+ {
280
+ "epoch": 1.6740088105726874,
281
+ "grad_norm": 1.058026671409607,
282
+ "learning_rate": 0.0002666079295154185,
283
+ "loss": 0.7621,
284
+ "step": 380
285
+ },
286
+ {
287
+ "epoch": 1.7180616740088106,
288
+ "grad_norm": 1.424886703491211,
289
+ "learning_rate": 0.000265726872246696,
290
+ "loss": 0.7103,
291
+ "step": 390
292
+ },
293
+ {
294
+ "epoch": 1.7621145374449338,
295
+ "grad_norm": 0.9987337589263916,
296
+ "learning_rate": 0.00026484581497797355,
297
+ "loss": 0.6882,
298
+ "step": 400
299
+ },
300
+ {
301
+ "epoch": 1.8061674008810573,
302
+ "grad_norm": 1.0241808891296387,
303
+ "learning_rate": 0.0002639647577092511,
304
+ "loss": 0.6754,
305
+ "step": 410
306
+ },
307
+ {
308
+ "epoch": 1.8502202643171806,
309
+ "grad_norm": 0.7069824934005737,
310
+ "learning_rate": 0.0002630837004405286,
311
+ "loss": 0.6426,
312
+ "step": 420
313
+ },
314
+ {
315
+ "epoch": 1.894273127753304,
316
+ "grad_norm": 1.18909752368927,
317
+ "learning_rate": 0.00026220264317180613,
318
+ "loss": 0.7879,
319
+ "step": 430
320
+ },
321
+ {
322
+ "epoch": 1.9383259911894273,
323
+ "grad_norm": 0.8950007557868958,
324
+ "learning_rate": 0.00026132158590308366,
325
+ "loss": 0.7197,
326
+ "step": 440
327
+ },
328
+ {
329
+ "epoch": 1.9823788546255505,
330
+ "grad_norm": 1.3497512340545654,
331
+ "learning_rate": 0.00026044052863436124,
332
+ "loss": 0.6892,
333
+ "step": 450
334
+ },
335
+ {
336
+ "epoch": 2.0,
337
+ "eval_loss": 0.6154947876930237,
338
+ "eval_runtime": 5.3228,
339
+ "eval_samples_per_second": 37.95,
340
+ "eval_steps_per_second": 4.885,
341
+ "step": 454
342
+ },
343
+ {
344
+ "epoch": 2.026431718061674,
345
+ "grad_norm": 1.0325676202774048,
346
+ "learning_rate": 0.00025955947136563877,
347
+ "loss": 0.7104,
348
+ "step": 460
349
+ },
350
+ {
351
+ "epoch": 2.0704845814977975,
352
+ "grad_norm": 1.4701021909713745,
353
+ "learning_rate": 0.0002586784140969163,
354
+ "loss": 0.6249,
355
+ "step": 470
356
+ },
357
+ {
358
+ "epoch": 2.1145374449339207,
359
+ "grad_norm": 1.2472504377365112,
360
+ "learning_rate": 0.0002577973568281938,
361
+ "loss": 0.7115,
362
+ "step": 480
363
+ },
364
+ {
365
+ "epoch": 2.158590308370044,
366
+ "grad_norm": 1.01516592502594,
367
+ "learning_rate": 0.00025691629955947135,
368
+ "loss": 0.6039,
369
+ "step": 490
370
+ },
371
+ {
372
+ "epoch": 2.202643171806167,
373
+ "grad_norm": 1.3985668420791626,
374
+ "learning_rate": 0.0002560352422907489,
375
+ "loss": 0.5976,
376
+ "step": 500
377
+ },
378
+ {
379
+ "epoch": 2.246696035242291,
380
+ "grad_norm": 0.6047684550285339,
381
+ "learning_rate": 0.0002551541850220264,
382
+ "loss": 0.5158,
383
+ "step": 510
384
+ },
385
+ {
386
+ "epoch": 2.290748898678414,
387
+ "grad_norm": 0.8428493142127991,
388
+ "learning_rate": 0.00025427312775330393,
389
+ "loss": 0.6338,
390
+ "step": 520
391
+ },
392
+ {
393
+ "epoch": 2.3348017621145374,
394
+ "grad_norm": 1.0199517011642456,
395
+ "learning_rate": 0.00025339207048458146,
396
+ "loss": 0.469,
397
+ "step": 530
398
+ },
399
+ {
400
+ "epoch": 2.3788546255506606,
401
+ "grad_norm": 0.8641414642333984,
402
+ "learning_rate": 0.000252511013215859,
403
+ "loss": 0.583,
404
+ "step": 540
405
+ },
406
+ {
407
+ "epoch": 2.4229074889867843,
408
+ "grad_norm": 0.8442863821983337,
409
+ "learning_rate": 0.00025162995594713657,
410
+ "loss": 0.6108,
411
+ "step": 550
412
+ },
413
+ {
414
+ "epoch": 2.4669603524229076,
415
+ "grad_norm": 0.8864941000938416,
416
+ "learning_rate": 0.0002507488986784141,
417
+ "loss": 0.5572,
418
+ "step": 560
419
+ },
420
+ {
421
+ "epoch": 2.511013215859031,
422
+ "grad_norm": 0.9025411605834961,
423
+ "learning_rate": 0.0002498678414096916,
424
+ "loss": 0.6174,
425
+ "step": 570
426
+ },
427
+ {
428
+ "epoch": 2.555066079295154,
429
+ "grad_norm": 0.8481220602989197,
430
+ "learning_rate": 0.00024898678414096915,
431
+ "loss": 0.6118,
432
+ "step": 580
433
+ },
434
+ {
435
+ "epoch": 2.5991189427312777,
436
+ "grad_norm": 0.9391738772392273,
437
+ "learning_rate": 0.0002481057268722467,
438
+ "loss": 0.569,
439
+ "step": 590
440
+ },
441
+ {
442
+ "epoch": 2.643171806167401,
443
+ "grad_norm": 1.0381453037261963,
444
+ "learning_rate": 0.0002472246696035242,
445
+ "loss": 0.4904,
446
+ "step": 600
447
+ },
448
+ {
449
+ "epoch": 2.6872246696035242,
450
+ "grad_norm": 1.023573398590088,
451
+ "learning_rate": 0.00024634361233480173,
452
+ "loss": 0.5969,
453
+ "step": 610
454
+ },
455
+ {
456
+ "epoch": 2.7312775330396475,
457
+ "grad_norm": 1.2105042934417725,
458
+ "learning_rate": 0.00024546255506607926,
459
+ "loss": 0.6589,
460
+ "step": 620
461
+ },
462
+ {
463
+ "epoch": 2.7753303964757707,
464
+ "grad_norm": 1.1160320043563843,
465
+ "learning_rate": 0.0002445814977973568,
466
+ "loss": 0.5242,
467
+ "step": 630
468
+ },
469
+ {
470
+ "epoch": 2.8193832599118944,
471
+ "grad_norm": 1.1934391260147095,
472
+ "learning_rate": 0.00024370044052863436,
473
+ "loss": 0.6548,
474
+ "step": 640
475
+ },
476
+ {
477
+ "epoch": 2.8634361233480177,
478
+ "grad_norm": 1.1788102388381958,
479
+ "learning_rate": 0.0002428193832599119,
480
+ "loss": 0.5828,
481
+ "step": 650
482
+ },
483
+ {
484
+ "epoch": 2.907488986784141,
485
+ "grad_norm": 1.2889748811721802,
486
+ "learning_rate": 0.00024193832599118942,
487
+ "loss": 0.6468,
488
+ "step": 660
489
+ },
490
+ {
491
+ "epoch": 2.951541850220264,
492
+ "grad_norm": 0.8837119340896606,
493
+ "learning_rate": 0.00024105726872246695,
494
+ "loss": 0.6694,
495
+ "step": 670
496
+ },
497
+ {
498
+ "epoch": 2.995594713656388,
499
+ "grad_norm": 1.0216575860977173,
500
+ "learning_rate": 0.00024017621145374447,
501
+ "loss": 0.633,
502
+ "step": 680
503
+ },
504
+ {
505
+ "epoch": 3.0,
506
+ "eval_loss": 0.592506468296051,
507
+ "eval_runtime": 5.3852,
508
+ "eval_samples_per_second": 37.51,
509
+ "eval_steps_per_second": 4.828,
510
+ "step": 681
511
+ },
512
+ {
513
+ "epoch": 3.039647577092511,
514
+ "grad_norm": 1.58785080909729,
515
+ "learning_rate": 0.00023929515418502203,
516
+ "loss": 0.4512,
517
+ "step": 690
518
+ },
519
+ {
520
+ "epoch": 3.0837004405286343,
521
+ "grad_norm": 1.0036600828170776,
522
+ "learning_rate": 0.00023841409691629955,
523
+ "loss": 0.9613,
524
+ "step": 700
525
+ },
526
+ {
527
+ "epoch": 3.1277533039647576,
528
+ "grad_norm": 0.9956134557723999,
529
+ "learning_rate": 0.00023753303964757708,
530
+ "loss": 0.479,
531
+ "step": 710
532
+ },
533
+ {
534
+ "epoch": 3.171806167400881,
535
+ "grad_norm": 1.1154946088790894,
536
+ "learning_rate": 0.0002366519823788546,
537
+ "loss": 0.5444,
538
+ "step": 720
539
+ },
540
+ {
541
+ "epoch": 3.2158590308370045,
542
+ "grad_norm": 1.3544610738754272,
543
+ "learning_rate": 0.00023577092511013214,
544
+ "loss": 0.5163,
545
+ "step": 730
546
+ },
547
+ {
548
+ "epoch": 3.2599118942731278,
549
+ "grad_norm": 0.7720727920532227,
550
+ "learning_rate": 0.0002348898678414097,
551
+ "loss": 0.5317,
552
+ "step": 740
553
+ },
554
+ {
555
+ "epoch": 3.303964757709251,
556
+ "grad_norm": 0.9804306030273438,
557
+ "learning_rate": 0.00023400881057268722,
558
+ "loss": 0.5179,
559
+ "step": 750
560
+ },
561
+ {
562
+ "epoch": 3.3480176211453743,
563
+ "grad_norm": 1.0230934619903564,
564
+ "learning_rate": 0.00023312775330396474,
565
+ "loss": 0.5261,
566
+ "step": 760
567
+ },
568
+ {
569
+ "epoch": 3.392070484581498,
570
+ "grad_norm": 0.8620821237564087,
571
+ "learning_rate": 0.00023224669603524227,
572
+ "loss": 0.4998,
573
+ "step": 770
574
+ },
575
+ {
576
+ "epoch": 3.436123348017621,
577
+ "grad_norm": 0.8884461522102356,
578
+ "learning_rate": 0.0002313656387665198,
579
+ "loss": 0.5527,
580
+ "step": 780
581
+ },
582
+ {
583
+ "epoch": 3.4801762114537445,
584
+ "grad_norm": 0.7721192836761475,
585
+ "learning_rate": 0.00023048458149779735,
586
+ "loss": 0.5279,
587
+ "step": 790
588
+ },
589
+ {
590
+ "epoch": 3.5242290748898677,
591
+ "grad_norm": 1.0769802331924438,
592
+ "learning_rate": 0.00022960352422907488,
593
+ "loss": 0.5851,
594
+ "step": 800
595
+ },
596
+ {
597
+ "epoch": 3.568281938325991,
598
+ "grad_norm": 1.3999199867248535,
599
+ "learning_rate": 0.0002287224669603524,
600
+ "loss": 0.44,
601
+ "step": 810
602
+ },
603
+ {
604
+ "epoch": 3.6123348017621146,
605
+ "grad_norm": 0.9963156580924988,
606
+ "learning_rate": 0.00022784140969162993,
607
+ "loss": 0.6028,
608
+ "step": 820
609
+ },
610
+ {
611
+ "epoch": 3.656387665198238,
612
+ "grad_norm": 0.9077759981155396,
613
+ "learning_rate": 0.00022696035242290746,
614
+ "loss": 0.6824,
615
+ "step": 830
616
+ },
617
+ {
618
+ "epoch": 3.700440528634361,
619
+ "grad_norm": 0.9758647680282593,
620
+ "learning_rate": 0.00022607929515418502,
621
+ "loss": 0.5424,
622
+ "step": 840
623
+ },
624
+ {
625
+ "epoch": 3.744493392070485,
626
+ "grad_norm": 0.9838646054267883,
627
+ "learning_rate": 0.00022519823788546254,
628
+ "loss": 0.5588,
629
+ "step": 850
630
+ },
631
+ {
632
+ "epoch": 3.788546255506608,
633
+ "grad_norm": 1.1924773454666138,
634
+ "learning_rate": 0.00022431718061674007,
635
+ "loss": 0.6215,
636
+ "step": 860
637
+ },
638
+ {
639
+ "epoch": 3.8325991189427313,
640
+ "grad_norm": 1.27988600730896,
641
+ "learning_rate": 0.0002234361233480176,
642
+ "loss": 0.5336,
643
+ "step": 870
644
+ },
645
+ {
646
+ "epoch": 3.8766519823788546,
647
+ "grad_norm": 1.0098719596862793,
648
+ "learning_rate": 0.00022255506607929512,
649
+ "loss": 0.6623,
650
+ "step": 880
651
+ },
652
+ {
653
+ "epoch": 3.920704845814978,
654
+ "grad_norm": 1.301437497138977,
655
+ "learning_rate": 0.00022167400881057268,
656
+ "loss": 0.4837,
657
+ "step": 890
658
+ },
659
+ {
660
+ "epoch": 3.964757709251101,
661
+ "grad_norm": 1.3062794208526611,
662
+ "learning_rate": 0.0002207929515418502,
663
+ "loss": 0.4399,
664
+ "step": 900
665
+ },
666
+ {
667
+ "epoch": 4.0,
668
+ "eval_loss": 0.5791140198707581,
669
+ "eval_runtime": 5.2768,
670
+ "eval_samples_per_second": 38.281,
671
+ "eval_steps_per_second": 4.927,
672
+ "step": 908
673
+ },
674
+ {
675
+ "epoch": 4.008810572687224,
676
+ "grad_norm": 1.2243441343307495,
677
+ "learning_rate": 0.00021991189427312773,
678
+ "loss": 0.5225,
679
+ "step": 910
680
+ },
681
+ {
682
+ "epoch": 4.052863436123348,
683
+ "grad_norm": 1.0874862670898438,
684
+ "learning_rate": 0.00021903083700440526,
685
+ "loss": 0.5878,
686
+ "step": 920
687
+ },
688
+ {
689
+ "epoch": 4.096916299559472,
690
+ "grad_norm": 1.1561787128448486,
691
+ "learning_rate": 0.0002181497797356828,
692
+ "loss": 0.4172,
693
+ "step": 930
694
+ },
695
+ {
696
+ "epoch": 4.140969162995595,
697
+ "grad_norm": 0.9504215121269226,
698
+ "learning_rate": 0.00021726872246696034,
699
+ "loss": 0.454,
700
+ "step": 940
701
+ },
702
+ {
703
+ "epoch": 4.185022026431718,
704
+ "grad_norm": 1.0901755094528198,
705
+ "learning_rate": 0.00021638766519823787,
706
+ "loss": 0.5222,
707
+ "step": 950
708
+ },
709
+ {
710
+ "epoch": 4.229074889867841,
711
+ "grad_norm": 0.7518570423126221,
712
+ "learning_rate": 0.0002155066079295154,
713
+ "loss": 0.4048,
714
+ "step": 960
715
+ },
716
+ {
717
+ "epoch": 4.273127753303965,
718
+ "grad_norm": 0.9933887720108032,
719
+ "learning_rate": 0.00021462555066079292,
720
+ "loss": 0.4545,
721
+ "step": 970
722
+ },
723
+ {
724
+ "epoch": 4.317180616740088,
725
+ "grad_norm": 0.8956694006919861,
726
+ "learning_rate": 0.00021374449339207048,
727
+ "loss": 0.5703,
728
+ "step": 980
729
+ },
730
+ {
731
+ "epoch": 4.361233480176211,
732
+ "grad_norm": 1.0768828392028809,
733
+ "learning_rate": 0.000212863436123348,
734
+ "loss": 0.411,
735
+ "step": 990
736
+ },
737
+ {
738
+ "epoch": 4.405286343612334,
739
+ "grad_norm": 1.3219349384307861,
740
+ "learning_rate": 0.00021198237885462553,
741
+ "loss": 0.5096,
742
+ "step": 1000
743
+ },
744
+ {
745
+ "epoch": 4.4493392070484585,
746
+ "grad_norm": 0.6028145551681519,
747
+ "learning_rate": 0.00021110132158590306,
748
+ "loss": 0.5427,
749
+ "step": 1010
750
+ },
751
+ {
752
+ "epoch": 4.493392070484582,
753
+ "grad_norm": 0.6015641689300537,
754
+ "learning_rate": 0.00021022026431718059,
755
+ "loss": 0.4855,
756
+ "step": 1020
757
+ },
758
+ {
759
+ "epoch": 4.537444933920705,
760
+ "grad_norm": 0.7184689044952393,
761
+ "learning_rate": 0.00020933920704845814,
762
+ "loss": 0.4893,
763
+ "step": 1030
764
+ },
765
+ {
766
+ "epoch": 4.581497797356828,
767
+ "grad_norm": 1.445830225944519,
768
+ "learning_rate": 0.00020845814977973567,
769
+ "loss": 0.4412,
770
+ "step": 1040
771
+ },
772
+ {
773
+ "epoch": 4.6255506607929515,
774
+ "grad_norm": 0.9506711959838867,
775
+ "learning_rate": 0.0002075770925110132,
776
+ "loss": 0.5596,
777
+ "step": 1050
778
+ },
779
+ {
780
+ "epoch": 4.669603524229075,
781
+ "grad_norm": 0.9642265439033508,
782
+ "learning_rate": 0.00020669603524229072,
783
+ "loss": 0.3944,
784
+ "step": 1060
785
+ },
786
+ {
787
+ "epoch": 4.713656387665198,
788
+ "grad_norm": 0.9548330307006836,
789
+ "learning_rate": 0.00020581497797356825,
790
+ "loss": 0.4925,
791
+ "step": 1070
792
+ },
793
+ {
794
+ "epoch": 4.757709251101321,
795
+ "grad_norm": 1.5850030183792114,
796
+ "learning_rate": 0.0002049339207048458,
797
+ "loss": 0.5114,
798
+ "step": 1080
799
+ },
800
+ {
801
+ "epoch": 4.8017621145374445,
802
+ "grad_norm": 0.7429970502853394,
803
+ "learning_rate": 0.00020405286343612333,
804
+ "loss": 0.5556,
805
+ "step": 1090
806
+ },
807
+ {
808
+ "epoch": 4.845814977973569,
809
+ "grad_norm": 0.9865929484367371,
810
+ "learning_rate": 0.00020317180616740086,
811
+ "loss": 0.4612,
812
+ "step": 1100
813
+ },
814
+ {
815
+ "epoch": 4.889867841409692,
816
+ "grad_norm": 0.8113177418708801,
817
+ "learning_rate": 0.00020229074889867838,
818
+ "loss": 0.5196,
819
+ "step": 1110
820
+ },
821
+ {
822
+ "epoch": 4.933920704845815,
823
+ "grad_norm": 1.1767125129699707,
824
+ "learning_rate": 0.0002014096916299559,
825
+ "loss": 0.5321,
826
+ "step": 1120
827
+ },
828
+ {
829
+ "epoch": 4.977973568281938,
830
+ "grad_norm": 0.8367587327957153,
831
+ "learning_rate": 0.00020052863436123347,
832
+ "loss": 0.506,
833
+ "step": 1130
834
+ },
835
+ {
836
+ "epoch": 5.0,
837
+ "eval_loss": 0.5730367302894592,
838
+ "eval_runtime": 6.1337,
839
+ "eval_samples_per_second": 32.933,
840
+ "eval_steps_per_second": 4.239,
841
+ "step": 1135
842
+ },
843
+ {
844
+ "epoch": 5.022026431718062,
845
+ "grad_norm": 1.1242823600769043,
846
+ "learning_rate": 0.000199647577092511,
847
+ "loss": 0.4678,
848
+ "step": 1140
849
+ },
850
+ {
851
+ "epoch": 5.066079295154185,
852
+ "grad_norm": 1.0385881662368774,
853
+ "learning_rate": 0.00019876651982378852,
854
+ "loss": 0.3968,
855
+ "step": 1150
856
+ },
857
+ {
858
+ "epoch": 5.110132158590308,
859
+ "grad_norm": 0.9282165765762329,
860
+ "learning_rate": 0.00019788546255506605,
861
+ "loss": 0.5089,
862
+ "step": 1160
863
+ },
864
+ {
865
+ "epoch": 5.154185022026431,
866
+ "grad_norm": 1.401548147201538,
867
+ "learning_rate": 0.00019700440528634357,
868
+ "loss": 0.4457,
869
+ "step": 1170
870
+ },
871
+ {
872
+ "epoch": 5.1982378854625555,
873
+ "grad_norm": 0.6676862835884094,
874
+ "learning_rate": 0.00019612334801762113,
875
+ "loss": 0.3175,
876
+ "step": 1180
877
+ },
878
+ {
879
+ "epoch": 5.242290748898679,
880
+ "grad_norm": 1.1318411827087402,
881
+ "learning_rate": 0.00019524229074889866,
882
+ "loss": 0.4468,
883
+ "step": 1190
884
+ },
885
+ {
886
+ "epoch": 5.286343612334802,
887
+ "grad_norm": 0.706200361251831,
888
+ "learning_rate": 0.00019436123348017618,
889
+ "loss": 0.3954,
890
+ "step": 1200
891
+ },
892
+ {
893
+ "epoch": 5.330396475770925,
894
+ "grad_norm": 0.6558952927589417,
895
+ "learning_rate": 0.0001934801762114537,
896
+ "loss": 0.4318,
897
+ "step": 1210
898
+ },
899
+ {
900
+ "epoch": 5.3744493392070485,
901
+ "grad_norm": 0.59174644947052,
902
+ "learning_rate": 0.00019259911894273124,
903
+ "loss": 0.3962,
904
+ "step": 1220
905
+ },
906
+ {
907
+ "epoch": 5.418502202643172,
908
+ "grad_norm": 0.9306423664093018,
909
+ "learning_rate": 0.0001917180616740088,
910
+ "loss": 0.4161,
911
+ "step": 1230
912
+ },
913
+ {
914
+ "epoch": 5.462555066079295,
915
+ "grad_norm": 1.2412904500961304,
916
+ "learning_rate": 0.00019083700440528632,
917
+ "loss": 0.4259,
918
+ "step": 1240
919
+ },
920
+ {
921
+ "epoch": 5.506607929515418,
922
+ "grad_norm": 0.8949795961380005,
923
+ "learning_rate": 0.00018995594713656385,
924
+ "loss": 0.5512,
925
+ "step": 1250
926
+ },
927
+ {
928
+ "epoch": 5.5506607929515415,
929
+ "grad_norm": 0.9977787733078003,
930
+ "learning_rate": 0.00018907488986784137,
931
+ "loss": 0.4497,
932
+ "step": 1260
933
+ },
934
+ {
935
+ "epoch": 5.594713656387665,
936
+ "grad_norm": 1.0676085948944092,
937
+ "learning_rate": 0.0001881938325991189,
938
+ "loss": 0.4344,
939
+ "step": 1270
940
+ },
941
+ {
942
+ "epoch": 5.638766519823789,
943
+ "grad_norm": 0.6446275115013123,
944
+ "learning_rate": 0.00018731277533039648,
945
+ "loss": 0.4184,
946
+ "step": 1280
947
+ },
948
+ {
949
+ "epoch": 5.682819383259912,
950
+ "grad_norm": 1.3255438804626465,
951
+ "learning_rate": 0.000186431718061674,
952
+ "loss": 0.5441,
953
+ "step": 1290
954
+ },
955
+ {
956
+ "epoch": 5.726872246696035,
957
+ "grad_norm": 0.823581337928772,
958
+ "learning_rate": 0.0001855506607929515,
959
+ "loss": 0.5028,
960
+ "step": 1300
961
+ },
962
+ {
963
+ "epoch": 5.770925110132159,
964
+ "grad_norm": 1.0471981763839722,
965
+ "learning_rate": 0.00018466960352422904,
966
+ "loss": 0.4407,
967
+ "step": 1310
968
+ },
969
+ {
970
+ "epoch": 5.814977973568282,
971
+ "grad_norm": 1.0394315719604492,
972
+ "learning_rate": 0.00018378854625550662,
973
+ "loss": 0.4788,
974
+ "step": 1320
975
+ },
976
+ {
977
+ "epoch": 5.859030837004405,
978
+ "grad_norm": 1.4738258123397827,
979
+ "learning_rate": 0.00018290748898678414,
980
+ "loss": 0.5495,
981
+ "step": 1330
982
+ },
983
+ {
984
+ "epoch": 5.903083700440528,
985
+ "grad_norm": 1.2812182903289795,
986
+ "learning_rate": 0.00018202643171806167,
987
+ "loss": 0.4335,
988
+ "step": 1340
989
+ },
990
+ {
991
+ "epoch": 5.9471365638766525,
992
+ "grad_norm": 1.4929533004760742,
993
+ "learning_rate": 0.0001811453744493392,
994
+ "loss": 0.5097,
995
+ "step": 1350
996
+ },
997
+ {
998
+ "epoch": 5.991189427312776,
999
+ "grad_norm": 1.2788587808609009,
1000
+ "learning_rate": 0.00018026431718061673,
1001
+ "loss": 0.4702,
1002
+ "step": 1360
1003
+ },
1004
+ {
1005
+ "epoch": 6.0,
1006
+ "eval_loss": 0.5740869045257568,
1007
+ "eval_runtime": 4.9653,
1008
+ "eval_samples_per_second": 40.682,
1009
+ "eval_steps_per_second": 5.236,
1010
+ "step": 1362
1011
+ },
1012
+ {
1013
+ "epoch": 6.035242290748899,
1014
+ "grad_norm": 0.9543855786323547,
1015
+ "learning_rate": 0.00017938325991189428,
1016
+ "loss": 0.4232,
1017
+ "step": 1370
1018
+ },
1019
+ {
1020
+ "epoch": 6.079295154185022,
1021
+ "grad_norm": 1.0528812408447266,
1022
+ "learning_rate": 0.0001785022026431718,
1023
+ "loss": 0.4025,
1024
+ "step": 1380
1025
+ },
1026
+ {
1027
+ "epoch": 6.1233480176211454,
1028
+ "grad_norm": 0.9573265910148621,
1029
+ "learning_rate": 0.00017762114537444933,
1030
+ "loss": 0.4127,
1031
+ "step": 1390
1032
+ },
1033
+ {
1034
+ "epoch": 6.167400881057269,
1035
+ "grad_norm": 1.7806532382965088,
1036
+ "learning_rate": 0.00017674008810572686,
1037
+ "loss": 0.4646,
1038
+ "step": 1400
1039
+ },
1040
+ {
1041
+ "epoch": 6.211453744493392,
1042
+ "grad_norm": 1.0559179782867432,
1043
+ "learning_rate": 0.0001758590308370044,
1044
+ "loss": 0.3222,
1045
+ "step": 1410
1046
+ },
1047
+ {
1048
+ "epoch": 6.255506607929515,
1049
+ "grad_norm": 0.9502829313278198,
1050
+ "learning_rate": 0.00017497797356828194,
1051
+ "loss": 0.4697,
1052
+ "step": 1420
1053
+ },
1054
+ {
1055
+ "epoch": 6.299559471365638,
1056
+ "grad_norm": 0.6869007349014282,
1057
+ "learning_rate": 0.00017409691629955947,
1058
+ "loss": 0.4155,
1059
+ "step": 1430
1060
+ },
1061
+ {
1062
+ "epoch": 6.343612334801762,
1063
+ "grad_norm": 0.6793345808982849,
1064
+ "learning_rate": 0.000173215859030837,
1065
+ "loss": 0.4236,
1066
+ "step": 1440
1067
+ },
1068
+ {
1069
+ "epoch": 6.387665198237886,
1070
+ "grad_norm": 1.067975640296936,
1071
+ "learning_rate": 0.00017233480176211452,
1072
+ "loss": 0.3558,
1073
+ "step": 1450
1074
+ },
1075
+ {
1076
+ "epoch": 6.431718061674009,
1077
+ "grad_norm": 1.0968421697616577,
1078
+ "learning_rate": 0.00017145374449339205,
1079
+ "loss": 0.4453,
1080
+ "step": 1460
1081
+ },
1082
+ {
1083
+ "epoch": 6.475770925110132,
1084
+ "grad_norm": 1.1832313537597656,
1085
+ "learning_rate": 0.0001705726872246696,
1086
+ "loss": 0.5115,
1087
+ "step": 1470
1088
+ },
1089
+ {
1090
+ "epoch": 6.5198237885462555,
1091
+ "grad_norm": 0.9857836365699768,
1092
+ "learning_rate": 0.00016969162995594713,
1093
+ "loss": 0.4274,
1094
+ "step": 1480
1095
+ },
1096
+ {
1097
+ "epoch": 6.563876651982379,
1098
+ "grad_norm": 0.9006336331367493,
1099
+ "learning_rate": 0.00016881057268722466,
1100
+ "loss": 0.3865,
1101
+ "step": 1490
1102
+ },
1103
+ {
1104
+ "epoch": 6.607929515418502,
1105
+ "grad_norm": 1.1091986894607544,
1106
+ "learning_rate": 0.0001679295154185022,
1107
+ "loss": 0.3988,
1108
+ "step": 1500
1109
+ },
1110
+ {
1111
+ "epoch": 6.651982378854625,
1112
+ "grad_norm": 1.423886775970459,
1113
+ "learning_rate": 0.00016704845814977971,
1114
+ "loss": 0.5057,
1115
+ "step": 1510
1116
+ },
1117
+ {
1118
+ "epoch": 6.6960352422907485,
1119
+ "grad_norm": 0.9245197176933289,
1120
+ "learning_rate": 0.00016616740088105727,
1121
+ "loss": 0.3966,
1122
+ "step": 1520
1123
+ },
1124
+ {
1125
+ "epoch": 6.740088105726873,
1126
+ "grad_norm": 0.944870114326477,
1127
+ "learning_rate": 0.0001652863436123348,
1128
+ "loss": 0.4521,
1129
+ "step": 1530
1130
+ },
1131
+ {
1132
+ "epoch": 6.784140969162996,
1133
+ "grad_norm": 0.8870773315429688,
1134
+ "learning_rate": 0.00016440528634361232,
1135
+ "loss": 0.4425,
1136
+ "step": 1540
1137
+ },
1138
+ {
1139
+ "epoch": 6.828193832599119,
1140
+ "grad_norm": 0.7404115200042725,
1141
+ "learning_rate": 0.00016352422907488985,
1142
+ "loss": 0.3207,
1143
+ "step": 1550
1144
+ },
1145
+ {
1146
+ "epoch": 6.872246696035242,
1147
+ "grad_norm": 0.9958137273788452,
1148
+ "learning_rate": 0.00016264317180616738,
1149
+ "loss": 0.4244,
1150
+ "step": 1560
1151
+ },
1152
+ {
1153
+ "epoch": 6.916299559471366,
1154
+ "grad_norm": 1.0651079416275024,
1155
+ "learning_rate": 0.00016176211453744493,
1156
+ "loss": 0.4075,
1157
+ "step": 1570
1158
+ },
1159
+ {
1160
+ "epoch": 6.960352422907489,
1161
+ "grad_norm": 0.9528789520263672,
1162
+ "learning_rate": 0.00016088105726872246,
1163
+ "loss": 0.493,
1164
+ "step": 1580
1165
+ },
1166
+ {
1167
+ "epoch": 7.0,
1168
+ "eval_loss": 0.5702072381973267,
1169
+ "eval_runtime": 5.1787,
1170
+ "eval_samples_per_second": 39.006,
1171
+ "eval_steps_per_second": 5.021,
1172
+ "step": 1589
1173
+ },
1174
+ {
1175
+ "epoch": 7.004405286343612,
1176
+ "grad_norm": 1.0486853122711182,
1177
+ "learning_rate": 0.00015999999999999999,
1178
+ "loss": 0.4131,
1179
+ "step": 1590
1180
+ },
1181
+ {
1182
+ "epoch": 7.048458149779735,
1183
+ "grad_norm": 1.2176262140274048,
1184
+ "learning_rate": 0.0001591189427312775,
1185
+ "loss": 0.4417,
1186
+ "step": 1600
1187
+ },
1188
+ {
1189
+ "epoch": 7.092511013215859,
1190
+ "grad_norm": 1.187107801437378,
1191
+ "learning_rate": 0.00015823788546255504,
1192
+ "loss": 0.4372,
1193
+ "step": 1610
1194
+ },
1195
+ {
1196
+ "epoch": 7.136563876651983,
1197
+ "grad_norm": 0.9459372758865356,
1198
+ "learning_rate": 0.0001573568281938326,
1199
+ "loss": 0.356,
1200
+ "step": 1620
1201
+ },
1202
+ {
1203
+ "epoch": 7.180616740088106,
1204
+ "grad_norm": 0.8114103078842163,
1205
+ "learning_rate": 0.00015647577092511012,
1206
+ "loss": 0.308,
1207
+ "step": 1630
1208
+ },
1209
+ {
1210
+ "epoch": 7.224669603524229,
1211
+ "grad_norm": 1.035370945930481,
1212
+ "learning_rate": 0.00015559471365638765,
1213
+ "loss": 0.3738,
1214
+ "step": 1640
1215
+ },
1216
+ {
1217
+ "epoch": 7.2687224669603525,
1218
+ "grad_norm": 1.0260848999023438,
1219
+ "learning_rate": 0.00015471365638766518,
1220
+ "loss": 0.342,
1221
+ "step": 1650
1222
+ },
1223
+ {
1224
+ "epoch": 7.312775330396476,
1225
+ "grad_norm": 0.8079932928085327,
1226
+ "learning_rate": 0.00015383259911894273,
1227
+ "loss": 0.4381,
1228
+ "step": 1660
1229
+ },
1230
+ {
1231
+ "epoch": 7.356828193832599,
1232
+ "grad_norm": 1.318695068359375,
1233
+ "learning_rate": 0.00015295154185022026,
1234
+ "loss": 0.3685,
1235
+ "step": 1670
1236
+ },
1237
+ {
1238
+ "epoch": 7.400881057268722,
1239
+ "grad_norm": 1.3181859254837036,
1240
+ "learning_rate": 0.00015207048458149778,
1241
+ "loss": 0.3465,
1242
+ "step": 1680
1243
+ },
1244
+ {
1245
+ "epoch": 7.4449339207048455,
1246
+ "grad_norm": 1.0277948379516602,
1247
+ "learning_rate": 0.0001511894273127753,
1248
+ "loss": 0.3659,
1249
+ "step": 1690
1250
+ },
1251
+ {
1252
+ "epoch": 7.48898678414097,
1253
+ "grad_norm": 1.1619762182235718,
1254
+ "learning_rate": 0.00015030837004405284,
1255
+ "loss": 0.4304,
1256
+ "step": 1700
1257
+ },
1258
+ {
1259
+ "epoch": 7.533039647577093,
1260
+ "grad_norm": 1.2854048013687134,
1261
+ "learning_rate": 0.0001494273127753304,
1262
+ "loss": 0.4372,
1263
+ "step": 1710
1264
+ },
1265
+ {
1266
+ "epoch": 7.577092511013216,
1267
+ "grad_norm": 1.032459020614624,
1268
+ "learning_rate": 0.00014854625550660792,
1269
+ "loss": 0.3687,
1270
+ "step": 1720
1271
+ },
1272
+ {
1273
+ "epoch": 7.621145374449339,
1274
+ "grad_norm": 0.9430228471755981,
1275
+ "learning_rate": 0.00014766519823788545,
1276
+ "loss": 0.3967,
1277
+ "step": 1730
1278
+ },
1279
+ {
1280
+ "epoch": 7.665198237885463,
1281
+ "grad_norm": 1.2012503147125244,
1282
+ "learning_rate": 0.00014678414096916297,
1283
+ "loss": 0.4028,
1284
+ "step": 1740
1285
+ },
1286
+ {
1287
+ "epoch": 7.709251101321586,
1288
+ "grad_norm": 0.9703013896942139,
1289
+ "learning_rate": 0.00014590308370044053,
1290
+ "loss": 0.4037,
1291
+ "step": 1750
1292
+ },
1293
+ {
1294
+ "epoch": 7.753303964757709,
1295
+ "grad_norm": 1.2811229228973389,
1296
+ "learning_rate": 0.00014502202643171806,
1297
+ "loss": 0.3725,
1298
+ "step": 1760
1299
+ },
1300
+ {
1301
+ "epoch": 7.797356828193832,
1302
+ "grad_norm": 0.9879553914070129,
1303
+ "learning_rate": 0.00014414096916299558,
1304
+ "loss": 0.4385,
1305
+ "step": 1770
1306
+ },
1307
+ {
1308
+ "epoch": 7.841409691629956,
1309
+ "grad_norm": 1.4015151262283325,
1310
+ "learning_rate": 0.0001432599118942731,
1311
+ "loss": 0.4046,
1312
+ "step": 1780
1313
+ },
1314
+ {
1315
+ "epoch": 7.885462555066079,
1316
+ "grad_norm": 0.9369928240776062,
1317
+ "learning_rate": 0.00014237885462555064,
1318
+ "loss": 0.4232,
1319
+ "step": 1790
1320
+ },
1321
+ {
1322
+ "epoch": 7.929515418502203,
1323
+ "grad_norm": 0.7787442803382874,
1324
+ "learning_rate": 0.0001414977973568282,
1325
+ "loss": 0.3679,
1326
+ "step": 1800
1327
+ },
1328
+ {
1329
+ "epoch": 7.973568281938326,
1330
+ "grad_norm": 0.7212619781494141,
1331
+ "learning_rate": 0.00014061674008810572,
1332
+ "loss": 0.4299,
1333
+ "step": 1810
1334
+ },
1335
+ {
1336
+ "epoch": 8.0,
1337
+ "eval_loss": 0.57987380027771,
1338
+ "eval_runtime": 4.8534,
1339
+ "eval_samples_per_second": 41.62,
1340
+ "eval_steps_per_second": 5.357,
1341
+ "step": 1816
1342
+ },
1343
+ {
1344
+ "epoch": 8.017621145374449,
1345
+ "grad_norm": 1.1815301179885864,
1346
+ "learning_rate": 0.00013973568281938325,
1347
+ "loss": 0.4058,
1348
+ "step": 1820
1349
+ },
1350
+ {
1351
+ "epoch": 8.061674008810572,
1352
+ "grad_norm": 0.7913572192192078,
1353
+ "learning_rate": 0.00013885462555066077,
1354
+ "loss": 0.2876,
1355
+ "step": 1830
1356
+ },
1357
+ {
1358
+ "epoch": 8.105726872246697,
1359
+ "grad_norm": 0.9591747522354126,
1360
+ "learning_rate": 0.0001379735682819383,
1361
+ "loss": 0.2801,
1362
+ "step": 1840
1363
+ },
1364
+ {
1365
+ "epoch": 8.14977973568282,
1366
+ "grad_norm": 1.2883862257003784,
1367
+ "learning_rate": 0.00013709251101321585,
1368
+ "loss": 0.3435,
1369
+ "step": 1850
1370
+ },
1371
+ {
1372
+ "epoch": 8.193832599118943,
1373
+ "grad_norm": 1.2138097286224365,
1374
+ "learning_rate": 0.00013621145374449338,
1375
+ "loss": 0.4603,
1376
+ "step": 1860
1377
+ },
1378
+ {
1379
+ "epoch": 8.237885462555067,
1380
+ "grad_norm": 0.9017927050590515,
1381
+ "learning_rate": 0.0001353303964757709,
1382
+ "loss": 0.328,
1383
+ "step": 1870
1384
+ },
1385
+ {
1386
+ "epoch": 8.28193832599119,
1387
+ "grad_norm": 1.0213032960891724,
1388
+ "learning_rate": 0.00013444933920704844,
1389
+ "loss": 0.4241,
1390
+ "step": 1880
1391
+ },
1392
+ {
1393
+ "epoch": 8.325991189427313,
1394
+ "grad_norm": 0.782507598400116,
1395
+ "learning_rate": 0.00013356828193832596,
1396
+ "loss": 0.287,
1397
+ "step": 1890
1398
+ },
1399
+ {
1400
+ "epoch": 8.370044052863436,
1401
+ "grad_norm": 0.8239027261734009,
1402
+ "learning_rate": 0.00013268722466960352,
1403
+ "loss": 0.3471,
1404
+ "step": 1900
1405
+ },
1406
+ {
1407
+ "epoch": 8.41409691629956,
1408
+ "grad_norm": 0.9952473044395447,
1409
+ "learning_rate": 0.00013180616740088104,
1410
+ "loss": 0.325,
1411
+ "step": 1910
1412
+ },
1413
+ {
1414
+ "epoch": 8.458149779735683,
1415
+ "grad_norm": 0.7988440990447998,
1416
+ "learning_rate": 0.00013092511013215857,
1417
+ "loss": 0.3397,
1418
+ "step": 1920
1419
+ },
1420
+ {
1421
+ "epoch": 8.502202643171806,
1422
+ "grad_norm": 1.2881464958190918,
1423
+ "learning_rate": 0.0001300440528634361,
1424
+ "loss": 0.4655,
1425
+ "step": 1930
1426
+ },
1427
+ {
1428
+ "epoch": 8.54625550660793,
1429
+ "grad_norm": 0.9545268416404724,
1430
+ "learning_rate": 0.00012916299559471365,
1431
+ "loss": 0.4031,
1432
+ "step": 1940
1433
+ },
1434
+ {
1435
+ "epoch": 8.590308370044053,
1436
+ "grad_norm": 1.550424337387085,
1437
+ "learning_rate": 0.00012828193832599118,
1438
+ "loss": 0.3697,
1439
+ "step": 1950
1440
+ },
1441
+ {
1442
+ "epoch": 8.634361233480176,
1443
+ "grad_norm": 1.2041224241256714,
1444
+ "learning_rate": 0.0001274008810572687,
1445
+ "loss": 0.43,
1446
+ "step": 1960
1447
+ },
1448
+ {
1449
+ "epoch": 8.678414096916299,
1450
+ "grad_norm": 0.8280724287033081,
1451
+ "learning_rate": 0.00012651982378854626,
1452
+ "loss": 0.4045,
1453
+ "step": 1970
1454
+ },
1455
+ {
1456
+ "epoch": 8.722466960352422,
1457
+ "grad_norm": 0.8164283037185669,
1458
+ "learning_rate": 0.00012563876651982376,
1459
+ "loss": 0.4001,
1460
+ "step": 1980
1461
+ },
1462
+ {
1463
+ "epoch": 8.766519823788546,
1464
+ "grad_norm": 0.9470929503440857,
1465
+ "learning_rate": 0.00012475770925110132,
1466
+ "loss": 0.3767,
1467
+ "step": 1990
1468
+ },
1469
+ {
1470
+ "epoch": 8.810572687224669,
1471
+ "grad_norm": 0.7390472888946533,
1472
+ "learning_rate": 0.00012387665198237884,
1473
+ "loss": 0.4206,
1474
+ "step": 2000
1475
+ },
1476
+ {
1477
+ "epoch": 8.854625550660792,
1478
+ "grad_norm": 0.8382723927497864,
1479
+ "learning_rate": 0.00012299559471365637,
1480
+ "loss": 0.3061,
1481
+ "step": 2010
1482
+ },
1483
+ {
1484
+ "epoch": 8.898678414096917,
1485
+ "grad_norm": 1.060539722442627,
1486
+ "learning_rate": 0.00012211453744493392,
1487
+ "loss": 0.4921,
1488
+ "step": 2020
1489
+ },
1490
+ {
1491
+ "epoch": 8.94273127753304,
1492
+ "grad_norm": 0.6955994367599487,
1493
+ "learning_rate": 0.00012123348017621144,
1494
+ "loss": 0.4077,
1495
+ "step": 2030
1496
+ },
1497
+ {
1498
+ "epoch": 8.986784140969164,
1499
+ "grad_norm": 0.8158656358718872,
1500
+ "learning_rate": 0.00012035242290748898,
1501
+ "loss": 0.3759,
1502
+ "step": 2040
1503
+ },
1504
+ {
1505
+ "epoch": 9.0,
1506
+ "eval_loss": 0.582844614982605,
1507
+ "eval_runtime": 4.8405,
1508
+ "eval_samples_per_second": 41.732,
1509
+ "eval_steps_per_second": 5.371,
1510
+ "step": 2043
1511
+ },
1512
+ {
1513
+ "epoch": 9.030837004405287,
1514
+ "grad_norm": 0.9192315936088562,
1515
+ "learning_rate": 0.0001194713656387665,
1516
+ "loss": 0.3809,
1517
+ "step": 2050
1518
+ },
1519
+ {
1520
+ "epoch": 9.07488986784141,
1521
+ "grad_norm": 1.0536017417907715,
1522
+ "learning_rate": 0.00011859030837004403,
1523
+ "loss": 0.3321,
1524
+ "step": 2060
1525
+ },
1526
+ {
1527
+ "epoch": 9.118942731277533,
1528
+ "grad_norm": 1.1080108880996704,
1529
+ "learning_rate": 0.00011770925110132157,
1530
+ "loss": 0.407,
1531
+ "step": 2070
1532
+ },
1533
+ {
1534
+ "epoch": 9.162995594713657,
1535
+ "grad_norm": 0.9956775903701782,
1536
+ "learning_rate": 0.0001168281938325991,
1537
+ "loss": 0.3423,
1538
+ "step": 2080
1539
+ },
1540
+ {
1541
+ "epoch": 9.20704845814978,
1542
+ "grad_norm": 0.746013343334198,
1543
+ "learning_rate": 0.00011594713656387664,
1544
+ "loss": 0.3794,
1545
+ "step": 2090
1546
+ },
1547
+ {
1548
+ "epoch": 9.251101321585903,
1549
+ "grad_norm": 1.126372218132019,
1550
+ "learning_rate": 0.00011506607929515417,
1551
+ "loss": 0.4121,
1552
+ "step": 2100
1553
+ },
1554
+ {
1555
+ "epoch": 9.295154185022026,
1556
+ "grad_norm": 1.4978642463684082,
1557
+ "learning_rate": 0.00011418502202643172,
1558
+ "loss": 0.3358,
1559
+ "step": 2110
1560
+ },
1561
+ {
1562
+ "epoch": 9.33920704845815,
1563
+ "grad_norm": 0.7826859951019287,
1564
+ "learning_rate": 0.00011330396475770924,
1565
+ "loss": 0.2931,
1566
+ "step": 2120
1567
+ },
1568
+ {
1569
+ "epoch": 9.383259911894273,
1570
+ "grad_norm": 1.1644082069396973,
1571
+ "learning_rate": 0.00011242290748898676,
1572
+ "loss": 0.377,
1573
+ "step": 2130
1574
+ },
1575
+ {
1576
+ "epoch": 9.427312775330396,
1577
+ "grad_norm": 0.8106231093406677,
1578
+ "learning_rate": 0.00011154185022026432,
1579
+ "loss": 0.3562,
1580
+ "step": 2140
1581
+ },
1582
+ {
1583
+ "epoch": 9.47136563876652,
1584
+ "grad_norm": 1.162919282913208,
1585
+ "learning_rate": 0.00011066079295154183,
1586
+ "loss": 0.3441,
1587
+ "step": 2150
1588
+ },
1589
+ {
1590
+ "epoch": 9.515418502202643,
1591
+ "grad_norm": 0.7184136509895325,
1592
+ "learning_rate": 0.00010977973568281939,
1593
+ "loss": 0.3254,
1594
+ "step": 2160
1595
+ },
1596
+ {
1597
+ "epoch": 9.559471365638766,
1598
+ "grad_norm": 0.9587578177452087,
1599
+ "learning_rate": 0.00010889867841409691,
1600
+ "loss": 0.3533,
1601
+ "step": 2170
1602
+ },
1603
+ {
1604
+ "epoch": 9.603524229074889,
1605
+ "grad_norm": 0.8703950643539429,
1606
+ "learning_rate": 0.00010801762114537444,
1607
+ "loss": 0.3366,
1608
+ "step": 2180
1609
+ },
1610
+ {
1611
+ "epoch": 9.647577092511014,
1612
+ "grad_norm": 0.7304671406745911,
1613
+ "learning_rate": 0.00010713656387665198,
1614
+ "loss": 0.3608,
1615
+ "step": 2190
1616
+ },
1617
+ {
1618
+ "epoch": 9.691629955947137,
1619
+ "grad_norm": 1.1611542701721191,
1620
+ "learning_rate": 0.00010625550660792951,
1621
+ "loss": 0.3353,
1622
+ "step": 2200
1623
+ },
1624
+ {
1625
+ "epoch": 9.73568281938326,
1626
+ "grad_norm": 0.7281723022460938,
1627
+ "learning_rate": 0.00010537444933920705,
1628
+ "loss": 0.3082,
1629
+ "step": 2210
1630
+ },
1631
+ {
1632
+ "epoch": 9.779735682819384,
1633
+ "grad_norm": 1.1435456275939941,
1634
+ "learning_rate": 0.00010449339207048458,
1635
+ "loss": 0.4317,
1636
+ "step": 2220
1637
+ },
1638
+ {
1639
+ "epoch": 9.823788546255507,
1640
+ "grad_norm": 0.9928381443023682,
1641
+ "learning_rate": 0.0001036123348017621,
1642
+ "loss": 0.3564,
1643
+ "step": 2230
1644
+ },
1645
+ {
1646
+ "epoch": 9.86784140969163,
1647
+ "grad_norm": 0.8395977020263672,
1648
+ "learning_rate": 0.00010273127753303964,
1649
+ "loss": 0.3531,
1650
+ "step": 2240
1651
+ },
1652
+ {
1653
+ "epoch": 9.911894273127754,
1654
+ "grad_norm": 1.0142395496368408,
1655
+ "learning_rate": 0.00010185022026431717,
1656
+ "loss": 0.3896,
1657
+ "step": 2250
1658
+ },
1659
+ {
1660
+ "epoch": 9.955947136563877,
1661
+ "grad_norm": 0.6916971802711487,
1662
+ "learning_rate": 0.00010096916299559471,
1663
+ "loss": 0.3667,
1664
+ "step": 2260
1665
+ },
1666
+ {
1667
+ "epoch": 10.0,
1668
+ "grad_norm": 0.7665943503379822,
1669
+ "learning_rate": 0.00010008810572687224,
1670
+ "loss": 0.3075,
1671
+ "step": 2270
1672
+ },
1673
+ {
1674
+ "epoch": 10.0,
1675
+ "eval_loss": 0.579430878162384,
1676
+ "eval_runtime": 4.7891,
1677
+ "eval_samples_per_second": 42.179,
1678
+ "eval_steps_per_second": 5.429,
1679
+ "step": 2270
1680
+ },
1681
+ {
1682
+ "epoch": 10.044052863436123,
1683
+ "grad_norm": 1.4675018787384033,
1684
+ "learning_rate": 9.920704845814978e-05,
1685
+ "loss": 0.3251,
1686
+ "step": 2280
1687
+ },
1688
+ {
1689
+ "epoch": 10.088105726872246,
1690
+ "grad_norm": 0.6954736709594727,
1691
+ "learning_rate": 9.83259911894273e-05,
1692
+ "loss": 0.3656,
1693
+ "step": 2290
1694
+ },
1695
+ {
1696
+ "epoch": 10.13215859030837,
1697
+ "grad_norm": 1.4188182353973389,
1698
+ "learning_rate": 9.744493392070483e-05,
1699
+ "loss": 0.335,
1700
+ "step": 2300
1701
+ },
1702
+ {
1703
+ "epoch": 10.176211453744493,
1704
+ "grad_norm": 0.9333553910255432,
1705
+ "learning_rate": 9.656387665198237e-05,
1706
+ "loss": 0.2888,
1707
+ "step": 2310
1708
+ },
1709
+ {
1710
+ "epoch": 10.220264317180616,
1711
+ "grad_norm": 0.886482834815979,
1712
+ "learning_rate": 9.56828193832599e-05,
1713
+ "loss": 0.3122,
1714
+ "step": 2320
1715
+ },
1716
+ {
1717
+ "epoch": 10.26431718061674,
1718
+ "grad_norm": 0.6795399188995361,
1719
+ "learning_rate": 9.480176211453744e-05,
1720
+ "loss": 0.3765,
1721
+ "step": 2330
1722
+ },
1723
+ {
1724
+ "epoch": 10.308370044052863,
1725
+ "grad_norm": 1.3046603202819824,
1726
+ "learning_rate": 9.392070484581497e-05,
1727
+ "loss": 0.3316,
1728
+ "step": 2340
1729
+ },
1730
+ {
1731
+ "epoch": 10.352422907488986,
1732
+ "grad_norm": 1.0006519556045532,
1733
+ "learning_rate": 9.30396475770925e-05,
1734
+ "loss": 0.3659,
1735
+ "step": 2350
1736
+ },
1737
+ {
1738
+ "epoch": 10.396475770925111,
1739
+ "grad_norm": 1.1640467643737793,
1740
+ "learning_rate": 9.215859030837004e-05,
1741
+ "loss": 0.346,
1742
+ "step": 2360
1743
+ },
1744
+ {
1745
+ "epoch": 10.440528634361234,
1746
+ "grad_norm": 0.9744365811347961,
1747
+ "learning_rate": 9.127753303964756e-05,
1748
+ "loss": 0.3317,
1749
+ "step": 2370
1750
+ },
1751
+ {
1752
+ "epoch": 10.484581497797357,
1753
+ "grad_norm": 1.039802074432373,
1754
+ "learning_rate": 9.03964757709251e-05,
1755
+ "loss": 0.3162,
1756
+ "step": 2380
1757
+ },
1758
+ {
1759
+ "epoch": 10.52863436123348,
1760
+ "grad_norm": 0.9926576614379883,
1761
+ "learning_rate": 8.951541850220263e-05,
1762
+ "loss": 0.3559,
1763
+ "step": 2390
1764
+ },
1765
+ {
1766
+ "epoch": 10.572687224669604,
1767
+ "grad_norm": 1.0141366720199585,
1768
+ "learning_rate": 8.863436123348016e-05,
1769
+ "loss": 0.3196,
1770
+ "step": 2400
1771
+ },
1772
+ {
1773
+ "epoch": 10.616740088105727,
1774
+ "grad_norm": 0.5856879353523254,
1775
+ "learning_rate": 8.77533039647577e-05,
1776
+ "loss": 0.2919,
1777
+ "step": 2410
1778
+ },
1779
+ {
1780
+ "epoch": 10.66079295154185,
1781
+ "grad_norm": 0.9484356045722961,
1782
+ "learning_rate": 8.687224669603523e-05,
1783
+ "loss": 0.339,
1784
+ "step": 2420
1785
+ },
1786
+ {
1787
+ "epoch": 10.704845814977974,
1788
+ "grad_norm": 0.9014990925788879,
1789
+ "learning_rate": 8.599118942731277e-05,
1790
+ "loss": 0.3089,
1791
+ "step": 2430
1792
+ },
1793
+ {
1794
+ "epoch": 10.748898678414097,
1795
+ "grad_norm": 0.9830072522163391,
1796
+ "learning_rate": 8.51101321585903e-05,
1797
+ "loss": 0.3461,
1798
+ "step": 2440
1799
+ },
1800
+ {
1801
+ "epoch": 10.79295154185022,
1802
+ "grad_norm": 1.051647424697876,
1803
+ "learning_rate": 8.422907488986782e-05,
1804
+ "loss": 0.292,
1805
+ "step": 2450
1806
+ },
1807
+ {
1808
+ "epoch": 10.837004405286343,
1809
+ "grad_norm": 1.0580625534057617,
1810
+ "learning_rate": 8.334801762114536e-05,
1811
+ "loss": 0.4052,
1812
+ "step": 2460
1813
+ },
1814
+ {
1815
+ "epoch": 10.881057268722467,
1816
+ "grad_norm": 1.01996648311615,
1817
+ "learning_rate": 8.246696035242289e-05,
1818
+ "loss": 0.3927,
1819
+ "step": 2470
1820
+ },
1821
+ {
1822
+ "epoch": 10.92511013215859,
1823
+ "grad_norm": 0.6538860201835632,
1824
+ "learning_rate": 8.158590308370044e-05,
1825
+ "loss": 0.3451,
1826
+ "step": 2480
1827
+ },
1828
+ {
1829
+ "epoch": 10.969162995594713,
1830
+ "grad_norm": 0.9368380308151245,
1831
+ "learning_rate": 8.070484581497796e-05,
1832
+ "loss": 0.3932,
1833
+ "step": 2490
1834
+ },
1835
+ {
1836
+ "epoch": 11.0,
1837
+ "eval_loss": 0.5825287103652954,
1838
+ "eval_runtime": 4.811,
1839
+ "eval_samples_per_second": 41.987,
1840
+ "eval_steps_per_second": 5.404,
1841
+ "step": 2497
1842
+ },
1843
+ {
1844
+ "epoch": 11.013215859030836,
1845
+ "grad_norm": 0.9590967893600464,
1846
+ "learning_rate": 7.982378854625551e-05,
1847
+ "loss": 0.32,
1848
+ "step": 2500
1849
+ },
1850
+ {
1851
+ "epoch": 11.05726872246696,
1852
+ "grad_norm": 0.9905742406845093,
1853
+ "learning_rate": 7.894273127753304e-05,
1854
+ "loss": 0.3029,
1855
+ "step": 2510
1856
+ },
1857
+ {
1858
+ "epoch": 11.101321585903083,
1859
+ "grad_norm": 1.2009577751159668,
1860
+ "learning_rate": 7.806167400881057e-05,
1861
+ "loss": 0.3626,
1862
+ "step": 2520
1863
+ },
1864
+ {
1865
+ "epoch": 11.145374449339206,
1866
+ "grad_norm": 1.0607908964157104,
1867
+ "learning_rate": 7.718061674008811e-05,
1868
+ "loss": 0.314,
1869
+ "step": 2530
1870
+ },
1871
+ {
1872
+ "epoch": 11.189427312775331,
1873
+ "grad_norm": 1.1098504066467285,
1874
+ "learning_rate": 7.629955947136563e-05,
1875
+ "loss": 0.3062,
1876
+ "step": 2540
1877
+ },
1878
+ {
1879
+ "epoch": 11.233480176211454,
1880
+ "grad_norm": 0.6961995959281921,
1881
+ "learning_rate": 7.541850220264317e-05,
1882
+ "loss": 0.3499,
1883
+ "step": 2550
1884
+ },
1885
+ {
1886
+ "epoch": 11.277533039647578,
1887
+ "grad_norm": 1.0727498531341553,
1888
+ "learning_rate": 7.45374449339207e-05,
1889
+ "loss": 0.2559,
1890
+ "step": 2560
1891
+ },
1892
+ {
1893
+ "epoch": 11.321585903083701,
1894
+ "grad_norm": 1.064344048500061,
1895
+ "learning_rate": 7.365638766519823e-05,
1896
+ "loss": 0.3011,
1897
+ "step": 2570
1898
+ },
1899
+ {
1900
+ "epoch": 11.365638766519824,
1901
+ "grad_norm": 1.1059036254882812,
1902
+ "learning_rate": 7.277533039647577e-05,
1903
+ "loss": 0.3415,
1904
+ "step": 2580
1905
+ },
1906
+ {
1907
+ "epoch": 11.409691629955947,
1908
+ "grad_norm": 0.8815020322799683,
1909
+ "learning_rate": 7.18942731277533e-05,
1910
+ "loss": 0.3164,
1911
+ "step": 2590
1912
+ },
1913
+ {
1914
+ "epoch": 11.45374449339207,
1915
+ "grad_norm": 0.9667496085166931,
1916
+ "learning_rate": 7.101321585903082e-05,
1917
+ "loss": 0.3642,
1918
+ "step": 2600
1919
+ },
1920
+ {
1921
+ "epoch": 11.497797356828194,
1922
+ "grad_norm": 0.942876935005188,
1923
+ "learning_rate": 7.013215859030836e-05,
1924
+ "loss": 0.3624,
1925
+ "step": 2610
1926
+ },
1927
+ {
1928
+ "epoch": 11.541850220264317,
1929
+ "grad_norm": 1.022675633430481,
1930
+ "learning_rate": 6.925110132158589e-05,
1931
+ "loss": 0.3351,
1932
+ "step": 2620
1933
+ },
1934
+ {
1935
+ "epoch": 11.58590308370044,
1936
+ "grad_norm": 0.9919267892837524,
1937
+ "learning_rate": 6.837004405286343e-05,
1938
+ "loss": 0.3335,
1939
+ "step": 2630
1940
+ },
1941
+ {
1942
+ "epoch": 11.629955947136564,
1943
+ "grad_norm": 0.9724282026290894,
1944
+ "learning_rate": 6.748898678414096e-05,
1945
+ "loss": 0.3154,
1946
+ "step": 2640
1947
+ },
1948
+ {
1949
+ "epoch": 11.674008810572687,
1950
+ "grad_norm": 1.3246617317199707,
1951
+ "learning_rate": 6.660792951541849e-05,
1952
+ "loss": 0.4366,
1953
+ "step": 2650
1954
+ },
1955
+ {
1956
+ "epoch": 11.71806167400881,
1957
+ "grad_norm": 1.0111949443817139,
1958
+ "learning_rate": 6.572687224669603e-05,
1959
+ "loss": 0.3324,
1960
+ "step": 2660
1961
+ },
1962
+ {
1963
+ "epoch": 11.762114537444933,
1964
+ "grad_norm": 0.8399791717529297,
1965
+ "learning_rate": 6.484581497797357e-05,
1966
+ "loss": 0.2669,
1967
+ "step": 2670
1968
+ },
1969
+ {
1970
+ "epoch": 11.806167400881057,
1971
+ "grad_norm": 0.917736828327179,
1972
+ "learning_rate": 6.39647577092511e-05,
1973
+ "loss": 0.324,
1974
+ "step": 2680
1975
+ },
1976
+ {
1977
+ "epoch": 11.85022026431718,
1978
+ "grad_norm": 0.9939138293266296,
1979
+ "learning_rate": 6.308370044052864e-05,
1980
+ "loss": 0.2888,
1981
+ "step": 2690
1982
+ },
1983
+ {
1984
+ "epoch": 11.894273127753303,
1985
+ "grad_norm": 0.9510142803192139,
1986
+ "learning_rate": 6.220264317180616e-05,
1987
+ "loss": 0.3428,
1988
+ "step": 2700
1989
+ },
1990
+ {
1991
+ "epoch": 11.938325991189426,
1992
+ "grad_norm": 1.3216148614883423,
1993
+ "learning_rate": 6.132158590308369e-05,
1994
+ "loss": 0.3254,
1995
+ "step": 2710
1996
+ },
1997
+ {
1998
+ "epoch": 11.982378854625551,
1999
+ "grad_norm": 1.2755056619644165,
2000
+ "learning_rate": 6.0440528634361224e-05,
2001
+ "loss": 0.3188,
2002
+ "step": 2720
2003
+ },
2004
+ {
2005
+ "epoch": 12.0,
2006
+ "eval_loss": 0.5862967371940613,
2007
+ "eval_runtime": 5.4591,
2008
+ "eval_samples_per_second": 37.002,
2009
+ "eval_steps_per_second": 4.763,
2010
+ "step": 2724
2011
+ },
2012
+ {
2013
+ "epoch": 12.026431718061675,
2014
+ "grad_norm": 0.8617092967033386,
2015
+ "learning_rate": 5.955947136563876e-05,
2016
+ "loss": 0.2953,
2017
+ "step": 2730
2018
+ },
2019
+ {
2020
+ "epoch": 12.070484581497798,
2021
+ "grad_norm": 0.7434670329093933,
2022
+ "learning_rate": 5.86784140969163e-05,
2023
+ "loss": 0.312,
2024
+ "step": 2740
2025
+ },
2026
+ {
2027
+ "epoch": 12.114537444933921,
2028
+ "grad_norm": 0.9274753332138062,
2029
+ "learning_rate": 5.779735682819383e-05,
2030
+ "loss": 0.2664,
2031
+ "step": 2750
2032
+ },
2033
+ {
2034
+ "epoch": 12.158590308370044,
2035
+ "grad_norm": 1.058923363685608,
2036
+ "learning_rate": 5.691629955947135e-05,
2037
+ "loss": 0.3236,
2038
+ "step": 2760
2039
+ },
2040
+ {
2041
+ "epoch": 12.202643171806168,
2042
+ "grad_norm": 0.7601414918899536,
2043
+ "learning_rate": 5.6035242290748894e-05,
2044
+ "loss": 0.3076,
2045
+ "step": 2770
2046
+ },
2047
+ {
2048
+ "epoch": 12.246696035242291,
2049
+ "grad_norm": 0.7787047624588013,
2050
+ "learning_rate": 5.515418502202643e-05,
2051
+ "loss": 0.2618,
2052
+ "step": 2780
2053
+ },
2054
+ {
2055
+ "epoch": 12.290748898678414,
2056
+ "grad_norm": 0.9064326882362366,
2057
+ "learning_rate": 5.427312775330396e-05,
2058
+ "loss": 0.3305,
2059
+ "step": 2790
2060
+ },
2061
+ {
2062
+ "epoch": 12.334801762114537,
2063
+ "grad_norm": 1.0712478160858154,
2064
+ "learning_rate": 5.3392070484581496e-05,
2065
+ "loss": 0.341,
2066
+ "step": 2800
2067
+ },
2068
+ {
2069
+ "epoch": 12.37885462555066,
2070
+ "grad_norm": 0.6585920453071594,
2071
+ "learning_rate": 5.251101321585903e-05,
2072
+ "loss": 0.3476,
2073
+ "step": 2810
2074
+ },
2075
+ {
2076
+ "epoch": 12.422907488986784,
2077
+ "grad_norm": 1.1152169704437256,
2078
+ "learning_rate": 5.162995594713656e-05,
2079
+ "loss": 0.3418,
2080
+ "step": 2820
2081
+ },
2082
+ {
2083
+ "epoch": 12.466960352422907,
2084
+ "grad_norm": 0.926008403301239,
2085
+ "learning_rate": 5.074889867841409e-05,
2086
+ "loss": 0.2543,
2087
+ "step": 2830
2088
+ },
2089
+ {
2090
+ "epoch": 12.51101321585903,
2091
+ "grad_norm": 1.1506083011627197,
2092
+ "learning_rate": 4.9867841409691625e-05,
2093
+ "loss": 0.2895,
2094
+ "step": 2840
2095
+ },
2096
+ {
2097
+ "epoch": 12.555066079295154,
2098
+ "grad_norm": 0.8726121783256531,
2099
+ "learning_rate": 4.898678414096916e-05,
2100
+ "loss": 0.2917,
2101
+ "step": 2850
2102
+ },
2103
+ {
2104
+ "epoch": 12.599118942731277,
2105
+ "grad_norm": 1.1620839834213257,
2106
+ "learning_rate": 4.810572687224669e-05,
2107
+ "loss": 0.3585,
2108
+ "step": 2860
2109
+ },
2110
+ {
2111
+ "epoch": 12.6431718061674,
2112
+ "grad_norm": 1.1911215782165527,
2113
+ "learning_rate": 4.7224669603524226e-05,
2114
+ "loss": 0.3177,
2115
+ "step": 2870
2116
+ },
2117
+ {
2118
+ "epoch": 12.687224669603523,
2119
+ "grad_norm": 0.9236161708831787,
2120
+ "learning_rate": 4.6343612334801754e-05,
2121
+ "loss": 0.3203,
2122
+ "step": 2880
2123
+ },
2124
+ {
2125
+ "epoch": 12.731277533039648,
2126
+ "grad_norm": 1.0384935140609741,
2127
+ "learning_rate": 4.546255506607929e-05,
2128
+ "loss": 0.3264,
2129
+ "step": 2890
2130
+ },
2131
+ {
2132
+ "epoch": 12.775330396475772,
2133
+ "grad_norm": 1.3048256635665894,
2134
+ "learning_rate": 4.458149779735682e-05,
2135
+ "loss": 0.3544,
2136
+ "step": 2900
2137
+ },
2138
+ {
2139
+ "epoch": 12.819383259911895,
2140
+ "grad_norm": 1.127678394317627,
2141
+ "learning_rate": 4.370044052863436e-05,
2142
+ "loss": 0.3768,
2143
+ "step": 2910
2144
+ },
2145
+ {
2146
+ "epoch": 12.863436123348018,
2147
+ "grad_norm": 0.9425409436225891,
2148
+ "learning_rate": 4.2819383259911896e-05,
2149
+ "loss": 0.2778,
2150
+ "step": 2920
2151
+ },
2152
+ {
2153
+ "epoch": 12.907488986784141,
2154
+ "grad_norm": 1.2469598054885864,
2155
+ "learning_rate": 4.1938325991189416e-05,
2156
+ "loss": 0.3532,
2157
+ "step": 2930
2158
+ },
2159
+ {
2160
+ "epoch": 12.951541850220265,
2161
+ "grad_norm": 0.7975876927375793,
2162
+ "learning_rate": 4.105726872246696e-05,
2163
+ "loss": 0.3189,
2164
+ "step": 2940
2165
+ },
2166
+ {
2167
+ "epoch": 12.995594713656388,
2168
+ "grad_norm": 0.8869457840919495,
2169
+ "learning_rate": 4.017621145374449e-05,
2170
+ "loss": 0.3282,
2171
+ "step": 2950
2172
+ },
2173
+ {
2174
+ "epoch": 13.0,
2175
+ "eval_loss": 0.5989018678665161,
2176
+ "eval_runtime": 4.9822,
2177
+ "eval_samples_per_second": 40.544,
2178
+ "eval_steps_per_second": 5.219,
2179
+ "step": 2951
2180
+ },
2181
+ {
2182
+ "epoch": 13.039647577092511,
2183
+ "grad_norm": 1.1934689283370972,
2184
+ "learning_rate": 3.9295154185022025e-05,
2185
+ "loss": 0.2919,
2186
+ "step": 2960
2187
+ },
2188
+ {
2189
+ "epoch": 13.083700440528634,
2190
+ "grad_norm": 1.1812618970870972,
2191
+ "learning_rate": 3.841409691629956e-05,
2192
+ "loss": 0.3219,
2193
+ "step": 2970
2194
+ },
2195
+ {
2196
+ "epoch": 13.127753303964758,
2197
+ "grad_norm": 1.2065187692642212,
2198
+ "learning_rate": 3.753303964757709e-05,
2199
+ "loss": 0.29,
2200
+ "step": 2980
2201
+ },
2202
+ {
2203
+ "epoch": 13.17180616740088,
2204
+ "grad_norm": 0.8890476822853088,
2205
+ "learning_rate": 3.665198237885462e-05,
2206
+ "loss": 0.2462,
2207
+ "step": 2990
2208
+ },
2209
+ {
2210
+ "epoch": 13.215859030837004,
2211
+ "grad_norm": 1.2433491945266724,
2212
+ "learning_rate": 3.5770925110132154e-05,
2213
+ "loss": 0.2614,
2214
+ "step": 3000
2215
+ },
2216
+ {
2217
+ "epoch": 13.259911894273127,
2218
+ "grad_norm": 0.7791047692298889,
2219
+ "learning_rate": 3.488986784140969e-05,
2220
+ "loss": 0.2824,
2221
+ "step": 3010
2222
+ },
2223
+ {
2224
+ "epoch": 13.30396475770925,
2225
+ "grad_norm": 1.1994709968566895,
2226
+ "learning_rate": 3.400881057268722e-05,
2227
+ "loss": 0.2873,
2228
+ "step": 3020
2229
+ },
2230
+ {
2231
+ "epoch": 13.348017621145374,
2232
+ "grad_norm": 1.082380771636963,
2233
+ "learning_rate": 3.3127753303964756e-05,
2234
+ "loss": 0.2571,
2235
+ "step": 3030
2236
+ },
2237
+ {
2238
+ "epoch": 13.392070484581497,
2239
+ "grad_norm": 0.9533681273460388,
2240
+ "learning_rate": 3.224669603524229e-05,
2241
+ "loss": 0.347,
2242
+ "step": 3040
2243
+ },
2244
+ {
2245
+ "epoch": 13.43612334801762,
2246
+ "grad_norm": 0.9235092997550964,
2247
+ "learning_rate": 3.1365638766519824e-05,
2248
+ "loss": 0.3015,
2249
+ "step": 3050
2250
+ },
2251
+ {
2252
+ "epoch": 13.480176211453745,
2253
+ "grad_norm": 0.7734026908874512,
2254
+ "learning_rate": 3.0484581497797354e-05,
2255
+ "loss": 0.3521,
2256
+ "step": 3060
2257
+ },
2258
+ {
2259
+ "epoch": 13.524229074889869,
2260
+ "grad_norm": 1.0365204811096191,
2261
+ "learning_rate": 2.9603524229074888e-05,
2262
+ "loss": 0.3765,
2263
+ "step": 3070
2264
+ },
2265
+ {
2266
+ "epoch": 13.568281938325992,
2267
+ "grad_norm": 0.7667551040649414,
2268
+ "learning_rate": 2.8722466960352422e-05,
2269
+ "loss": 0.2976,
2270
+ "step": 3080
2271
+ },
2272
+ {
2273
+ "epoch": 13.612334801762115,
2274
+ "grad_norm": 1.3401521444320679,
2275
+ "learning_rate": 2.7841409691629952e-05,
2276
+ "loss": 0.3571,
2277
+ "step": 3090
2278
+ },
2279
+ {
2280
+ "epoch": 13.656387665198238,
2281
+ "grad_norm": 1.1603132486343384,
2282
+ "learning_rate": 2.6960352422907486e-05,
2283
+ "loss": 0.3513,
2284
+ "step": 3100
2285
+ },
2286
+ {
2287
+ "epoch": 13.700440528634362,
2288
+ "grad_norm": 0.9537465572357178,
2289
+ "learning_rate": 2.607929515418502e-05,
2290
+ "loss": 0.3365,
2291
+ "step": 3110
2292
+ },
2293
+ {
2294
+ "epoch": 13.744493392070485,
2295
+ "grad_norm": 1.5475343465805054,
2296
+ "learning_rate": 2.519823788546255e-05,
2297
+ "loss": 0.2835,
2298
+ "step": 3120
2299
+ },
2300
+ {
2301
+ "epoch": 13.788546255506608,
2302
+ "grad_norm": 1.308053970336914,
2303
+ "learning_rate": 2.4317180616740088e-05,
2304
+ "loss": 0.367,
2305
+ "step": 3130
2306
+ },
2307
+ {
2308
+ "epoch": 13.832599118942731,
2309
+ "grad_norm": 0.9297582507133484,
2310
+ "learning_rate": 2.343612334801762e-05,
2311
+ "loss": 0.2539,
2312
+ "step": 3140
2313
+ },
2314
+ {
2315
+ "epoch": 13.876651982378855,
2316
+ "grad_norm": 0.8061990737915039,
2317
+ "learning_rate": 2.2555066079295153e-05,
2318
+ "loss": 0.3235,
2319
+ "step": 3150
2320
+ },
2321
+ {
2322
+ "epoch": 13.920704845814978,
2323
+ "grad_norm": 0.9529415369033813,
2324
+ "learning_rate": 2.1674008810572687e-05,
2325
+ "loss": 0.2427,
2326
+ "step": 3160
2327
+ },
2328
+ {
2329
+ "epoch": 13.964757709251101,
2330
+ "grad_norm": 1.0907659530639648,
2331
+ "learning_rate": 2.0792951541850217e-05,
2332
+ "loss": 0.3928,
2333
+ "step": 3170
2334
+ },
2335
+ {
2336
+ "epoch": 14.0,
2337
+ "eval_loss": 0.5977996587753296,
2338
+ "eval_runtime": 4.957,
2339
+ "eval_samples_per_second": 40.75,
2340
+ "eval_steps_per_second": 5.245,
2341
+ "step": 3178
2342
+ },
2343
+ {
2344
+ "epoch": 14.008810572687224,
2345
+ "grad_norm": 0.9147399663925171,
2346
+ "learning_rate": 1.991189427312775e-05,
2347
+ "loss": 0.3046,
2348
+ "step": 3180
2349
+ },
2350
+ {
2351
+ "epoch": 14.052863436123348,
2352
+ "grad_norm": 0.9036657214164734,
2353
+ "learning_rate": 1.9030837004405285e-05,
2354
+ "loss": 0.3805,
2355
+ "step": 3190
2356
+ },
2357
+ {
2358
+ "epoch": 14.09691629955947,
2359
+ "grad_norm": 0.8942423462867737,
2360
+ "learning_rate": 1.814977973568282e-05,
2361
+ "loss": 0.3361,
2362
+ "step": 3200
2363
+ },
2364
+ {
2365
+ "epoch": 14.140969162995594,
2366
+ "grad_norm": 0.8469783067703247,
2367
+ "learning_rate": 1.7268722466960353e-05,
2368
+ "loss": 0.2552,
2369
+ "step": 3210
2370
+ },
2371
+ {
2372
+ "epoch": 14.185022026431717,
2373
+ "grad_norm": 0.9594734311103821,
2374
+ "learning_rate": 1.6387665198237883e-05,
2375
+ "loss": 0.3417,
2376
+ "step": 3220
2377
+ },
2378
+ {
2379
+ "epoch": 14.229074889867842,
2380
+ "grad_norm": 0.7858956456184387,
2381
+ "learning_rate": 1.5506607929515417e-05,
2382
+ "loss": 0.3343,
2383
+ "step": 3230
2384
+ },
2385
+ {
2386
+ "epoch": 14.273127753303966,
2387
+ "grad_norm": 1.0662245750427246,
2388
+ "learning_rate": 1.4625550660792951e-05,
2389
+ "loss": 0.311,
2390
+ "step": 3240
2391
+ },
2392
+ {
2393
+ "epoch": 14.317180616740089,
2394
+ "grad_norm": 0.7350011467933655,
2395
+ "learning_rate": 1.3744493392070483e-05,
2396
+ "loss": 0.2985,
2397
+ "step": 3250
2398
+ },
2399
+ {
2400
+ "epoch": 14.361233480176212,
2401
+ "grad_norm": 1.1536651849746704,
2402
+ "learning_rate": 1.2863436123348016e-05,
2403
+ "loss": 0.2556,
2404
+ "step": 3260
2405
+ },
2406
+ {
2407
+ "epoch": 14.405286343612335,
2408
+ "grad_norm": 1.1384259462356567,
2409
+ "learning_rate": 1.198237885462555e-05,
2410
+ "loss": 0.3369,
2411
+ "step": 3270
2412
+ },
2413
+ {
2414
+ "epoch": 14.449339207048459,
2415
+ "grad_norm": 0.9650891423225403,
2416
+ "learning_rate": 1.1101321585903083e-05,
2417
+ "loss": 0.366,
2418
+ "step": 3280
2419
+ },
2420
+ {
2421
+ "epoch": 14.493392070484582,
2422
+ "grad_norm": 0.6376497149467468,
2423
+ "learning_rate": 1.0220264317180616e-05,
2424
+ "loss": 0.2398,
2425
+ "step": 3290
2426
+ },
2427
+ {
2428
+ "epoch": 14.537444933920705,
2429
+ "grad_norm": 0.9311153888702393,
2430
+ "learning_rate": 9.33920704845815e-06,
2431
+ "loss": 0.2665,
2432
+ "step": 3300
2433
+ },
2434
+ {
2435
+ "epoch": 14.581497797356828,
2436
+ "grad_norm": 0.8357282876968384,
2437
+ "learning_rate": 8.458149779735682e-06,
2438
+ "loss": 0.2894,
2439
+ "step": 3310
2440
+ },
2441
+ {
2442
+ "epoch": 14.625550660792952,
2443
+ "grad_norm": 0.8187097311019897,
2444
+ "learning_rate": 7.577092511013215e-06,
2445
+ "loss": 0.2763,
2446
+ "step": 3320
2447
+ },
2448
+ {
2449
+ "epoch": 14.669603524229075,
2450
+ "grad_norm": 0.7701286673545837,
2451
+ "learning_rate": 6.696035242290749e-06,
2452
+ "loss": 0.3006,
2453
+ "step": 3330
2454
+ },
2455
+ {
2456
+ "epoch": 14.713656387665198,
2457
+ "grad_norm": 0.8439558744430542,
2458
+ "learning_rate": 5.814977973568281e-06,
2459
+ "loss": 0.2882,
2460
+ "step": 3340
2461
+ },
2462
+ {
2463
+ "epoch": 14.757709251101321,
2464
+ "grad_norm": 1.196057677268982,
2465
+ "learning_rate": 4.933920704845815e-06,
2466
+ "loss": 0.2544,
2467
+ "step": 3350
2468
+ },
2469
+ {
2470
+ "epoch": 14.801762114537445,
2471
+ "grad_norm": 0.9785457253456116,
2472
+ "learning_rate": 4.052863436123348e-06,
2473
+ "loss": 0.3665,
2474
+ "step": 3360
2475
+ },
2476
+ {
2477
+ "epoch": 14.845814977973568,
2478
+ "grad_norm": 0.7792004346847534,
2479
+ "learning_rate": 3.1718061674008807e-06,
2480
+ "loss": 0.3404,
2481
+ "step": 3370
2482
+ },
2483
+ {
2484
+ "epoch": 14.889867841409691,
2485
+ "grad_norm": 0.6765570044517517,
2486
+ "learning_rate": 2.2907488986784137e-06,
2487
+ "loss": 0.2711,
2488
+ "step": 3380
2489
+ },
2490
+ {
2491
+ "epoch": 14.933920704845814,
2492
+ "grad_norm": 1.0028444528579712,
2493
+ "learning_rate": 1.409691629955947e-06,
2494
+ "loss": 0.2378,
2495
+ "step": 3390
2496
+ },
2497
+ {
2498
+ "epoch": 14.97797356828194,
2499
+ "grad_norm": 0.9199230074882507,
2500
+ "learning_rate": 5.286343612334801e-07,
2501
+ "loss": 0.3425,
2502
+ "step": 3400
2503
+ }
2504
+ ],
2505
+ "logging_steps": 10,
2506
+ "max_steps": 3405,
2507
+ "num_input_tokens_seen": 0,
2508
+ "num_train_epochs": 15,
2509
+ "save_steps": 500,
2510
+ "stateful_callbacks": {
2511
+ "TrainerControl": {
2512
+ "args": {
2513
+ "should_epoch_stop": false,
2514
+ "should_evaluate": false,
2515
+ "should_log": false,
2516
+ "should_save": true,
2517
+ "should_training_stop": true
2518
+ },
2519
+ "attributes": {}
2520
+ }
2521
+ },
2522
+ "total_flos": 5052503917854720.0,
2523
+ "train_batch_size": 8,
2524
+ "trial_name": null,
2525
+ "trial_params": null
2526
+ }
tmp_results/checkpoint-3405/training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1edef10a07a64822c4f208cd46f60e8c0cb7451bbb616f0e25deb26d71f518ec
3
+ size 5969
tmp_results/runs/Jan03_12-06-34_macbook-16.local/events.out.tfevents.1767438395.macbook-16.local.99236.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:37a1ba043541246b2d8b5f926e6c7c0b9fe4bfd972977811548ea4805e799a62
3
+ size 82151
verify_converted_to_tflite.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import tensorflow as tf
3
+ from transformers import AutoTokenizer
4
+ from pathlib import Path
5
+
6
+ # --- KONFIGURACJA ---
7
+ BASE_DIR = Path(__file__).resolve().parent.parent
8
+ MODEL_PATH = BASE_DIR / "summarizer" / "models" / "summarizer.tflite"
9
+ TOKENIZER_DIR = BASE_DIR / "summarizer" / "models" / "flan_t5_custom"
10
+
11
+ # Te wartości muszą być zgodne z tymi, które ustawiliśmy podczas konwersji (256)
12
+ MAX_LEN = 256
13
+
14
+
15
def generate_tflite(prompt, interpreter, tokenizer):
    """Greedy autoregressive generation with the converted TFLite seq2seq model.

    Args:
        prompt: Full input text, including the task prefix ("headline:" / "summarize:").
        interpreter: An allocated ``tf.lite.Interpreter`` for the exported model.
        tokenizer: The matching HF (SentencePiece / T5) tokenizer.

    Returns:
        The decoded generated string, stripped of surrounding whitespace.
    """
    # 1. Tokenize the encoder input, padded/truncated to the fixed length the
    #    TFLite graph was exported with (MAX_LEN).
    input_ids = tokenizer.encode(prompt, max_length=MAX_LEN, truncation=True, padding="max_length")
    input_ids = np.array([input_ids], dtype=np.int32)

    # 2. Decoder input starts from the PAD/START token (id 0 in T5).
    decoder_input_ids = np.zeros((1, MAX_LEN), dtype=np.int32)
    output_tokens = [0]

    input_details = interpreter.get_input_details()
    output_details = interpreter.get_output_details()

    print(f"⏳ Generowanie dla promptu: '{prompt[:30]}...'")

    for i in range(MAX_LEN - 1):
        # Refresh decoder_input_ids with every token generated so far.
        for j, token in enumerate(output_tokens):
            decoder_input_ids[0, j] = token

        # Input tensor order depends on how the model was exported, so match
        # the tensors by name instead of by position.
        for detail in input_details:
            if "input_ids" in detail['name'] and "decoder" not in detail['name']:
                interpreter.set_tensor(detail['index'], input_ids)
            elif "decoder_input_ids" in detail['name']:
                interpreter.set_tensor(detail['index'], decoder_input_ids)

        interpreter.invoke()

        # Logits come out as [1, MAX_LEN, vocab]; only the position of the
        # last token fed to the decoder matters.
        output_data = interpreter.get_tensor(output_details[0]['index'])
        next_token_logits = output_data[0, len(output_tokens) - 1, :]

        # Greedy search (argmax over the vocabulary).
        next_token = int(np.argmax(next_token_logits))

        # Stop condition: 1 is EOS (end-of-sequence) in T5.
        if next_token == 1:
            print("LOG: Otrzymano token EOS (1)")
            break

        output_tokens.append(next_token)

        # Per-step trace only; see the BUGFIX note below about final decoding.
        word = tokenizer.decode([next_token])
        print(f"   Step {i}: {next_token} -> '{word}'")

        if len(output_tokens) >= MAX_LEN:
            break

    # BUGFIX: decoding token-by-token and concatenating the pieces drops the
    # SentencePiece word-boundary markers ("▁"), gluing all words together.
    # Decoding the whole sequence at once restores correct spacing; the
    # leading START token (0) is skipped.
    return tokenizer.decode(output_tokens[1:], skip_special_tokens=True).strip()
73
+
74
+
75
def main():
    """Smoke-test the converted TFLite summarizer against a known sample text."""
    if not MODEL_PATH.exists():
        print(f"❌ Nie znaleziono pliku modelu w: {MODEL_PATH}")
        return

    print(f"🚀 Ładowanie modelu TFLite: {MODEL_PATH}")
    lite_model = tf.lite.Interpreter(model_path=str(MODEL_PATH))
    lite_model.allocate_tensors()

    print(f"🚀 Ładowanie tokenizera z: {TOKENIZER_DIR}")
    sp_tokenizer = AutoTokenizer.from_pretrained(TOKENIZER_DIR)

    # Sample text already exercised during verification of the PyTorch model.
    sample_text = "Matura 2005 przykład RZECZPOSPOLITA POLSKA ŚWIADECTWO DOJRZAŁOŚCI Janina Kosińska-Iksińska"

    # Task 1: title generation.
    title = generate_tflite(f"headline: {sample_text}", lite_model, sp_tokenizer)
    print(f"\n📌 FINALNY TYTUŁ TFLITE: {title}")

    # Task 2: summary generation.
    summary = generate_tflite(f"summarize: {sample_text}", lite_model, sp_tokenizer)
    print(f"\n📝 FINALNE PODSUMOWANIE TFLITE: {summary}")


if __name__ == "__main__":
    main()
verify_summarizer_before_converting_to_tflite.py ADDED
@@ -0,0 +1,124 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import torch
3
+ import pytesseract
4
+ import json
5
+ from pathlib import Path
6
+ from PIL import Image
7
+ from pdf2image import convert_from_path
8
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
9
+
10
+ # --- KONFIGURACJA ---
11
+ # Ścieżka do Tesseracta (zgodnie z Twoim systemem)
12
+ pytesseract.pytesseract.tesseract_cmd = r'/opt/homebrew/bin/tesseract'
13
+
14
+ # Ścieżki relatywne
15
+ SUMMARIZER_DIR = Path(__file__).resolve().parent
16
+ BASE_DIR = SUMMARIZER_DIR.parent
17
+ MODEL_PATH = SUMMARIZER_DIR / "models" / "flan_t5_custom"
18
+ VERIFY_DIR = SUMMARIZER_DIR / "scans_to_verify_summary"
19
+
20
+ # Urządzenie (wykryte mps w Twoich logach)
21
+ device = "mps" if torch.backends.mps.is_available() else "cpu"
22
+
23
+
24
def perform_ocr(file_path):
    """OCR a single image or PDF into plain text (Polish + English models).

    Returns an empty string when OCR fails; the error is reported but not
    raised, so one bad scan does not abort the whole verification run.
    """
    chunks = []
    try:
        if file_path.suffix.lower() == ".pdf":
            # Rasterize each PDF page first, then OCR every page image.
            for page_image in convert_from_path(file_path):
                chunks.append(pytesseract.image_to_string(page_image, lang='pol+eng'))
        else:
            chunks.append(pytesseract.image_to_string(Image.open(file_path), lang='pol+eng'))
    except Exception as e:
        print(f"  [!] Błąd OCR dla {file_path.name}: {e}")
    return "".join(chunks)
37
+
38
+
39
def load_model():
    """Load the fine-tuned FLAN-T5 checkpoint and tokenizer, with diagnostics.

    Returns:
        (tokenizer, model) — the model is already moved to ``device``.

    Raises:
        FileNotFoundError: when the checkpoint directory is missing.
    """
    print(f"🚀 Ładowanie modelu z: {MODEL_PATH}...")
    if not MODEL_PATH.exists():
        raise FileNotFoundError(f"❌ Nie znaleziono modelu w {MODEL_PATH}.")

    tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_PATH).to(device)

    # Tokenizer sanity check — token IDs must match what the Flutter client
    # produces for the same words.
    print("\n" + "=" * 40)
    print("🔍 TOKENIZER VERIFICATION (Dla porównania z Flutterem)")

    for probe in ["Janina", "Joanna"]:
        probe_ids = tokenizer.encode(probe, add_special_tokens=False)
        print(f"  ID dla słowa '{probe}': {probe_ids}")

    # Extra round-trip check on a fixed ID sequence.
    test_ids = [0, 2664, 15, 1]  # sample IDs
    decoded = tokenizer.decode(test_ids)
    print(f"  Test dekodowania {test_ids}: '{decoded}'")
    print("=" * 40 + "\n")

    return tokenizer, model
63
+
64
+
65
def generate_text(prompt, tokenizer, model):
    """Run beam-search generation for one prompt.

    Returns:
        (decoded_text, input_token_count) — the token count is logged by the
        caller to compare against the tokenizer used on-device.
    """
    # Tokenize and move to the compute device; also record the input length.
    encoded = tokenizer(prompt, return_tensors="pt", max_length=512, truncation=True).to(device)
    token_count = encoded['input_ids'].shape[1]

    generated = model.generate(
        **encoded,
        max_new_tokens=128,
        num_beams=4,
        early_stopping=True,
    )

    return tokenizer.decode(generated[0], skip_special_tokens=True), token_count
79
+
80
+
81
def main():
    """Run OCR + title/summary generation over every document in VERIFY_DIR."""
    tokenizer, model = load_model()

    if not VERIFY_DIR.exists():
        # pathlib idiom instead of os.makedirs; behavior is identical here.
        VERIFY_DIR.mkdir(parents=True)
        print(f"📁 Folder {VERIFY_DIR} był pusty. Wrzuć tam zdjęcia dokumentów i uruchom ponownie.")
        return

    # Collect image and PDF files to verify.
    extensions = [".jpg", ".jpeg", ".png", ".pdf"]
    files = [f for f in VERIFY_DIR.glob("*") if f.suffix.lower() in extensions]

    if not files:
        print(f"ℹ️ Brak obrazów lub plików PDF w {VERIFY_DIR}.")
        return

    print(f"🔍 Znaleziono {len(files)} dokumentów do weryfikacji.\n")

    for file_path in files:
        print(f"📄 PRZETWARZANIE: {file_path.name}")
        print("⏳ Wykonywanie OCR...")

        ocr_text = perform_ocr(file_path)

        if not ocr_text.strip():
            print(f"⚠️ Nie udało się odczytać tekstu z {file_path.name}. Pomijam.")
            continue

        print(f"📊 Długość tekstu OCR: {len(ocr_text)} znaków")
        # BUGFIX: a backslash inside an f-string expression is a SyntaxError on
        # Python < 3.12 — hoist the replace() call out of the f-string.
        preview = ocr_text[:100].replace('\n', ' ')
        print(f"📝 Pierwsze 100 znaków OCR: {preview}...")
        print("-" * 30)

        # Task 1: title.
        title, t_len = generate_text(f"headline: {ocr_text}", tokenizer, model)
        print(f"📌 TYTUŁ (Tokeny wejściowe: {t_len}):\n{title}\n")

        # Task 2: summary.
        summary, s_len = generate_text(f"summarize: {ocr_text}", tokenizer, model)
        print(f"📝 STRESZCZENIE (Tokeny wejściowe: {s_len}):\n{summary}")
        print("=" * 60 + "\n")


if __name__ == "__main__":
    main()