LamaDiab committed on
Commit
4e7b919
·
verified ·
1 Parent(s): 405af33

Final training metrics

Browse files
Files changed (1) hide show
  1. training_metrics.json +102 -0
training_metrics.json ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "metrics_history": [
3
+ {
4
+ "epoch": 0.18684603886397608,
5
+ "global_step": 2000,
6
+ "eval_loss": 0.49471989274024963,
7
+ "eval_cosine_accuracy": 0.8986994028091431
8
+ },
9
+ {
10
+ "epoch": 0.37369207772795215,
11
+ "global_step": 4000,
12
+ "eval_loss": 0.4586535394191742,
13
+ "eval_cosine_accuracy": 0.9260864853858948
14
+ },
15
+ {
16
+ "epoch": 0.5605381165919282,
17
+ "global_step": 6000,
18
+ "eval_loss": 0.41501784324645996,
19
+ "eval_cosine_accuracy": 0.9477635622024536
20
+ },
21
+ {
22
+ "epoch": 0.7473841554559043,
23
+ "global_step": 8000,
24
+ "eval_loss": 0.395673006772995,
25
+ "eval_cosine_accuracy": 0.9500898718833923
26
+ },
27
+ {
28
+ "epoch": 0.9342301943198804,
29
+ "global_step": 10000,
30
+ "eval_loss": 0.37747085094451904,
31
+ "eval_cosine_accuracy": 0.9434281587600708
32
+ },
33
+ {
34
+ "epoch": 1.1209406494960805,
35
+ "global_step": 12000,
36
+ "eval_loss": 0.36152419447898865,
37
+ "eval_cosine_accuracy": 0.958760678768158
38
+ },
39
+ {
40
+ "epoch": 1.3075774542739829,
41
+ "global_step": 14000,
42
+ "eval_loss": 0.3521560728549957,
43
+ "eval_cosine_accuracy": 0.960769772529602
44
+ },
45
+ {
46
+ "epoch": 1.494214259051885,
47
+ "global_step": 16000,
48
+ "eval_loss": 0.3508636951446533,
49
+ "eval_cosine_accuracy": 0.9578090310096741
50
+ },
51
+ {
52
+ "epoch": 1.6808510638297873,
53
+ "global_step": 18000,
54
+ "eval_loss": 0.34350287914276123,
55
+ "eval_cosine_accuracy": 0.9620386958122253
56
+ },
57
+ {
58
+ "epoch": 1.8674878686076894,
59
+ "global_step": 20000,
60
+ "eval_loss": 0.3393498957157135,
61
+ "eval_cosine_accuracy": 0.959395170211792
62
+ },
63
+ {
64
+ "epoch": 2.0541246733855916,
65
+ "global_step": 22000,
66
+ "eval_loss": 0.33905890583992004,
67
+ "eval_cosine_accuracy": 0.9651052355766296
68
+ },
69
+ {
70
+ "epoch": 2.2407614781634937,
71
+ "global_step": 24000,
72
+ "eval_loss": 0.3363184630870819,
73
+ "eval_cosine_accuracy": 0.9655281901359558
74
+ },
75
+ {
76
+ "epoch": 2.427398282941396,
77
+ "global_step": 26000,
78
+ "eval_loss": 0.33477866649627686,
79
+ "eval_cosine_accuracy": 0.9634133577346802
80
+ },
81
+ {
82
+ "epoch": 2.6140350877192984,
83
+ "global_step": 28000,
84
+ "eval_loss": 0.33503878116607666,
85
+ "eval_cosine_accuracy": 0.9642592668533325
86
+ },
87
+ {
88
+ "epoch": 2.8006718924972005,
89
+ "global_step": 30000,
90
+ "eval_loss": 0.33804062008857727,
91
+ "eval_cosine_accuracy": 0.9646822214126587
92
+ },
93
+ {
94
+ "epoch": 2.9873086972751026,
95
+ "global_step": 32000,
96
+ "eval_loss": 0.33818456530570984,
97
+ "eval_cosine_accuracy": 0.9647879600524902
98
+ }
99
+ ],
100
+ "total_epochs": 2.9977603583426653,
101
+ "total_steps": 32112
102
+ }