Check committed on
Commit
f8d937e
Β·
1 Parent(s): d995c83

"auto-commit"

Browse files
Files changed (23) hide show
  1. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/config.json +0 -0
  2. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/optimizer.pt +1 -1
  3. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/preprocessor_config.json +0 -0
  4. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/pytorch_model.bin +1 -1
  5. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/rng_state.pth +2 -2
  6. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/scaler.pt +1 -1
  7. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/scheduler.pt +1 -1
  8. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/trainer_state.json +951 -3
  9. model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/training_args.bin +0 -0
  10. model-bin/finetune/base/log/1629885731.4375427/events.out.tfevents.1629885731.7e498afd5545.905.103 +3 -0
  11. model-bin/finetune/base/log/1629886673.7174067/events.out.tfevents.1629886673.7e498afd5545.6884.1 +3 -0
  12. model-bin/finetune/base/log/1629887166.966957/events.out.tfevents.1629887166.7e498afd5545.6884.3 +3 -0
  13. model-bin/finetune/base/log/1629887754.1185539/events.out.tfevents.1629887754.7e498afd5545.6884.5 +3 -0
  14. model-bin/finetune/base/log/1629888265.905986/events.out.tfevents.1629888265.7e498afd5545.6884.7 +3 -0
  15. model-bin/finetune/base/log/1629888670.3141375/events.out.tfevents.1629888670.7e498afd5545.7645.1 +3 -0
  16. model-bin/finetune/base/log/1629889158.7460623/events.out.tfevents.1629889158.7e498afd5545.7645.3 +3 -0
  17. model-bin/finetune/base/log/events.out.tfevents.1629885731.7e498afd5545.905.102 +3 -0
  18. model-bin/finetune/base/log/events.out.tfevents.1629886673.7e498afd5545.6884.0 +3 -0
  19. model-bin/finetune/base/log/events.out.tfevents.1629887166.7e498afd5545.6884.2 +3 -0
  20. model-bin/finetune/base/log/events.out.tfevents.1629887754.7e498afd5545.6884.4 +3 -0
  21. model-bin/finetune/base/log/events.out.tfevents.1629888265.7e498afd5545.6884.6 +3 -0
  22. model-bin/finetune/base/log/events.out.tfevents.1629888670.7e498afd5545.7645.0 +3 -0
  23. model-bin/finetune/base/log/events.out.tfevents.1629889158.7e498afd5545.7645.2 +3 -0
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/config.json RENAMED
File without changes
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/optimizer.pt RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:241cf9a0d25f6dfe6c4b09f0250d75c7b8556e0708ad5a758a4ae728e99ed97f
3
  size 722165393
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3050cf20869e3a9448541918d2c5f60759d8b463489abec7d2351b162043b44
3
  size 722165393
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/preprocessor_config.json RENAMED
File without changes
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/pytorch_model.bin RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:07eddd7e3b2c639376f65debd5c6e71a40abb39191deddef03737b5becdd9cb4
3
  size 377909911
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d64d095173484c67aac73bbcda03020e34b97526f9fe227094721e85f2e05439
3
  size 377909911
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/rng_state.pth RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ff20bb0fbeb061a0d474c60bb2f68154a4d0944df8867df4e634564652ff6a60
3
- size 14503
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2f3897e9bd2592aac309ca2470aa4e1dbf7e9bfe7ea5bc705b40d20586c37fd3
3
+ size 14567
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/scaler.pt RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c6aaef4b9249c4bb80c33d2e747e6a2aee3a93d57119debf58fe887e8e98126a
3
  size 559
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ae3fdca3ff26c556338acdb54399668f539904fe5603e88e8af61c26dd7b7c6d
3
  size 559
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/scheduler.pt RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8d36509b0709cb1c6a514d0b59c68cd5a3a2275d7fb28dc5813d880c7d4a4f0e
3
  size 623
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5e90f7e3da290c47dbc970fd03b66691cf691563f4a2d2ab877317c750af9d81
3
  size 623
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/trainer_state.json RENAMED
@@ -1,8 +1,8 @@
1
  {
2
  "best_metric": 0.18412114350410416,
3
  "best_model_checkpoint": "./model-bin/finetune/base/checkpoint-69565",
4
- "epoch": 605.9960159362549,
5
- "global_step": 75787,
6
  "is_hyper_param_search": false,
7
  "is_local_process_zero": true,
8
  "is_world_process_zero": true,
@@ -195789,11 +195789,959 @@
195789
  "eval_steps_per_second": 0.662,
195790
  "eval_wer": 0.1924281241104469,
195791
  "step": 75787
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
195792
  }
195793
  ],
195794
  "max_steps": 625000,
195795
  "num_train_epochs": 5000,
195796
- "total_flos": 2.1327406074811505e+20,
195797
  "trial_name": null,
195798
  "trial_params": null
195799
  }
 
1
  {
2
  "best_metric": 0.18412114350410416,
3
  "best_model_checkpoint": "./model-bin/finetune/base/checkpoint-69565",
4
+ "epoch": 612.0,
5
+ "global_step": 76532,
6
  "is_hyper_param_search": false,
7
  "is_local_process_zero": true,
8
  "is_world_process_zero": true,
 
195789
  "eval_steps_per_second": 0.662,
195790
  "eval_wer": 0.1924281241104469,
195791
  "step": 75787
195792
+ },
195793
+ {
195794
+ "epoch": 611.02,
195795
+ "learning_rate": 8.801907051282052e-06,
195796
+ "loss": 0.3247,
195797
+ "step": 75790
195798
+ },
195799
+ {
195800
+ "epoch": 611.06,
195801
+ "learning_rate": 8.801826923076923e-06,
195802
+ "loss": 0.3314,
195803
+ "step": 75795
195804
+ },
195805
+ {
195806
+ "epoch": 611.1,
195807
+ "learning_rate": 8.801746794871796e-06,
195808
+ "loss": 0.3502,
195809
+ "step": 75800
195810
+ },
195811
+ {
195812
+ "epoch": 611.14,
195813
+ "learning_rate": 8.801666666666667e-06,
195814
+ "loss": 0.3793,
195815
+ "step": 75805
195816
+ },
195817
+ {
195818
+ "epoch": 611.18,
195819
+ "learning_rate": 8.801586538461539e-06,
195820
+ "loss": 0.7933,
195821
+ "step": 75810
195822
+ },
195823
+ {
195824
+ "epoch": 611.22,
195825
+ "learning_rate": 8.80150641025641e-06,
195826
+ "loss": 0.9573,
195827
+ "step": 75815
195828
+ },
195829
+ {
195830
+ "epoch": 611.27,
195831
+ "learning_rate": 8.801426282051283e-06,
195832
+ "loss": 0.2859,
195833
+ "step": 75820
195834
+ },
195835
+ {
195836
+ "epoch": 611.31,
195837
+ "learning_rate": 8.801346153846155e-06,
195838
+ "loss": 0.4085,
195839
+ "step": 75825
195840
+ },
195841
+ {
195842
+ "epoch": 611.35,
195843
+ "learning_rate": 8.801266025641026e-06,
195844
+ "loss": 0.3864,
195845
+ "step": 75830
195846
+ },
195847
+ {
195848
+ "epoch": 611.39,
195849
+ "learning_rate": 8.801185897435897e-06,
195850
+ "loss": 0.8069,
195851
+ "step": 75835
195852
+ },
195853
+ {
195854
+ "epoch": 611.43,
195855
+ "learning_rate": 8.80110576923077e-06,
195856
+ "loss": 0.7998,
195857
+ "step": 75840
195858
+ },
195859
+ {
195860
+ "epoch": 611.47,
195861
+ "learning_rate": 8.801025641025642e-06,
195862
+ "loss": 0.3127,
195863
+ "step": 75845
195864
+ },
195865
+ {
195866
+ "epoch": 611.51,
195867
+ "learning_rate": 8.800945512820513e-06,
195868
+ "loss": 0.4849,
195869
+ "step": 75850
195870
+ },
195871
+ {
195872
+ "epoch": 611.55,
195873
+ "learning_rate": 8.800865384615386e-06,
195874
+ "loss": 0.3793,
195875
+ "step": 75855
195876
+ },
195877
+ {
195878
+ "epoch": 611.59,
195879
+ "learning_rate": 8.800785256410257e-06,
195880
+ "loss": 0.7649,
195881
+ "step": 75860
195882
+ },
195883
+ {
195884
+ "epoch": 611.63,
195885
+ "learning_rate": 8.800705128205129e-06,
195886
+ "loss": 0.9171,
195887
+ "step": 75865
195888
+ },
195889
+ {
195890
+ "epoch": 611.67,
195891
+ "learning_rate": 8.800625e-06,
195892
+ "loss": 0.3034,
195893
+ "step": 75870
195894
+ },
195895
+ {
195896
+ "epoch": 611.71,
195897
+ "learning_rate": 8.800544871794873e-06,
195898
+ "loss": 0.3238,
195899
+ "step": 75875
195900
+ },
195901
+ {
195902
+ "epoch": 611.75,
195903
+ "learning_rate": 8.800464743589745e-06,
195904
+ "loss": 0.3996,
195905
+ "step": 75880
195906
+ },
195907
+ {
195908
+ "epoch": 611.79,
195909
+ "learning_rate": 8.800384615384616e-06,
195910
+ "loss": 0.8372,
195911
+ "step": 75885
195912
+ },
195913
+ {
195914
+ "epoch": 611.83,
195915
+ "learning_rate": 8.800304487179487e-06,
195916
+ "loss": 0.8874,
195917
+ "step": 75890
195918
+ },
195919
+ {
195920
+ "epoch": 611.87,
195921
+ "learning_rate": 8.80022435897436e-06,
195922
+ "loss": 0.3511,
195923
+ "step": 75895
195924
+ },
195925
+ {
195926
+ "epoch": 611.91,
195927
+ "learning_rate": 8.800144230769232e-06,
195928
+ "loss": 0.5591,
195929
+ "step": 75900
195930
+ },
195931
+ {
195932
+ "epoch": 611.95,
195933
+ "learning_rate": 8.800064102564103e-06,
195934
+ "loss": 0.411,
195935
+ "step": 75905
195936
+ },
195937
+ {
195938
+ "epoch": 611.99,
195939
+ "learning_rate": 8.799983974358976e-06,
195940
+ "loss": 0.8997,
195941
+ "step": 75910
195942
+ },
195943
+ {
195944
+ "epoch": 612.0,
195945
+ "eval_loss": 0.4298833906650543,
195946
+ "eval_runtime": 40.7832,
195947
+ "eval_samples_per_second": 20.597,
195948
+ "eval_steps_per_second": 0.662,
195949
+ "eval_wer": 0.18921908413833788,
195950
+ "step": 75911
195951
+ },
195952
+ {
195953
+ "epoch": 612.03,
195954
+ "learning_rate": 8.79021001615509e-06,
195955
+ "loss": 0.3468,
195956
+ "step": 75915
195957
+ },
195958
+ {
195959
+ "epoch": 612.07,
195960
+ "learning_rate": 8.790129240710824e-06,
195961
+ "loss": 0.2899,
195962
+ "step": 75920
195963
+ },
195964
+ {
195965
+ "epoch": 612.11,
195966
+ "learning_rate": 8.79004846526656e-06,
195967
+ "loss": 0.357,
195968
+ "step": 75925
195969
+ },
195970
+ {
195971
+ "epoch": 612.15,
195972
+ "learning_rate": 8.789967689822296e-06,
195973
+ "loss": 0.4627,
195974
+ "step": 75930
195975
+ },
195976
+ {
195977
+ "epoch": 612.19,
195978
+ "learning_rate": 8.78988691437803e-06,
195979
+ "loss": 0.8461,
195980
+ "step": 75935
195981
+ },
195982
+ {
195983
+ "epoch": 612.23,
195984
+ "learning_rate": 8.789806138933766e-06,
195985
+ "loss": 0.7147,
195986
+ "step": 75940
195987
+ },
195988
+ {
195989
+ "epoch": 612.27,
195990
+ "learning_rate": 8.7897253634895e-06,
195991
+ "loss": 0.2815,
195992
+ "step": 75945
195993
+ },
195994
+ {
195995
+ "epoch": 612.31,
195996
+ "learning_rate": 8.789644588045236e-06,
195997
+ "loss": 0.3392,
195998
+ "step": 75950
195999
+ },
196000
+ {
196001
+ "epoch": 612.35,
196002
+ "learning_rate": 8.78956381260097e-06,
196003
+ "loss": 0.4352,
196004
+ "step": 75955
196005
+ },
196006
+ {
196007
+ "epoch": 612.4,
196008
+ "learning_rate": 8.789483037156706e-06,
196009
+ "loss": 0.9716,
196010
+ "step": 75960
196011
+ },
196012
+ {
196013
+ "epoch": 612.44,
196014
+ "learning_rate": 8.78940226171244e-06,
196015
+ "loss": 0.6372,
196016
+ "step": 75965
196017
+ },
196018
+ {
196019
+ "epoch": 612.48,
196020
+ "learning_rate": 8.789321486268176e-06,
196021
+ "loss": 0.3296,
196022
+ "step": 75970
196023
+ },
196024
+ {
196025
+ "epoch": 612.52,
196026
+ "learning_rate": 8.78924071082391e-06,
196027
+ "loss": 0.3629,
196028
+ "step": 75975
196029
+ },
196030
+ {
196031
+ "epoch": 612.56,
196032
+ "learning_rate": 8.789159935379646e-06,
196033
+ "loss": 0.3986,
196034
+ "step": 75980
196035
+ },
196036
+ {
196037
+ "epoch": 612.6,
196038
+ "learning_rate": 8.789079159935382e-06,
196039
+ "loss": 0.9025,
196040
+ "step": 75985
196041
+ },
196042
+ {
196043
+ "epoch": 612.64,
196044
+ "learning_rate": 8.788998384491116e-06,
196045
+ "loss": 0.888,
196046
+ "step": 75990
196047
+ },
196048
+ {
196049
+ "epoch": 612.68,
196050
+ "learning_rate": 8.788917609046852e-06,
196051
+ "loss": 0.378,
196052
+ "step": 75995
196053
+ },
196054
+ {
196055
+ "epoch": 612.72,
196056
+ "learning_rate": 8.788836833602586e-06,
196057
+ "loss": 0.3348,
196058
+ "step": 76000
196059
+ },
196060
+ {
196061
+ "epoch": 612.76,
196062
+ "learning_rate": 8.788756058158322e-06,
196063
+ "loss": 0.4165,
196064
+ "step": 76005
196065
+ },
196066
+ {
196067
+ "epoch": 612.8,
196068
+ "learning_rate": 8.788675282714056e-06,
196069
+ "loss": 0.8769,
196070
+ "step": 76010
196071
+ },
196072
+ {
196073
+ "epoch": 612.84,
196074
+ "learning_rate": 8.788594507269792e-06,
196075
+ "loss": 0.6691,
196076
+ "step": 76015
196077
+ },
196078
+ {
196079
+ "epoch": 612.88,
196080
+ "learning_rate": 8.788513731825526e-06,
196081
+ "loss": 0.3324,
196082
+ "step": 76020
196083
+ },
196084
+ {
196085
+ "epoch": 612.92,
196086
+ "learning_rate": 8.788432956381262e-06,
196087
+ "loss": 0.3127,
196088
+ "step": 76025
196089
+ },
196090
+ {
196091
+ "epoch": 612.96,
196092
+ "learning_rate": 8.788352180936996e-06,
196093
+ "loss": 0.4538,
196094
+ "step": 76030
196095
+ },
196096
+ {
196097
+ "epoch": 613.0,
196098
+ "learning_rate": 8.788271405492731e-06,
196099
+ "loss": 1.2654,
196100
+ "step": 76035
196101
+ },
196102
+ {
196103
+ "epoch": 613.0,
196104
+ "eval_loss": 0.42023032903671265,
196105
+ "eval_runtime": 45.5765,
196106
+ "eval_samples_per_second": 18.452,
196107
+ "eval_steps_per_second": 0.592,
196108
+ "eval_wer": 0.18862385321100916,
196109
+ "step": 76035
196110
+ },
196111
+ {
196112
+ "epoch": 613.04,
196113
+ "learning_rate": 8.788190630048467e-06,
196114
+ "loss": 0.3605,
196115
+ "step": 76040
196116
+ },
196117
+ {
196118
+ "epoch": 613.08,
196119
+ "learning_rate": 8.788109854604201e-06,
196120
+ "loss": 0.3461,
196121
+ "step": 76045
196122
+ },
196123
+ {
196124
+ "epoch": 613.12,
196125
+ "learning_rate": 8.788029079159937e-06,
196126
+ "loss": 0.3285,
196127
+ "step": 76050
196128
+ },
196129
+ {
196130
+ "epoch": 613.16,
196131
+ "learning_rate": 8.787948303715671e-06,
196132
+ "loss": 0.4423,
196133
+ "step": 76055
196134
+ },
196135
+ {
196136
+ "epoch": 613.2,
196137
+ "learning_rate": 8.787867528271407e-06,
196138
+ "loss": 1.1246,
196139
+ "step": 76060
196140
+ },
196141
+ {
196142
+ "epoch": 613.24,
196143
+ "learning_rate": 8.787786752827141e-06,
196144
+ "loss": 0.4245,
196145
+ "step": 76065
196146
+ },
196147
+ {
196148
+ "epoch": 613.28,
196149
+ "learning_rate": 8.787705977382877e-06,
196150
+ "loss": 0.3375,
196151
+ "step": 76070
196152
+ },
196153
+ {
196154
+ "epoch": 613.32,
196155
+ "learning_rate": 8.787625201938611e-06,
196156
+ "loss": 0.3201,
196157
+ "step": 76075
196158
+ },
196159
+ {
196160
+ "epoch": 613.36,
196161
+ "learning_rate": 8.787544426494347e-06,
196162
+ "loss": 0.4722,
196163
+ "step": 76080
196164
+ },
196165
+ {
196166
+ "epoch": 613.4,
196167
+ "learning_rate": 8.787463651050081e-06,
196168
+ "loss": 1.5154,
196169
+ "step": 76085
196170
+ },
196171
+ {
196172
+ "epoch": 613.44,
196173
+ "learning_rate": 8.787382875605817e-06,
196174
+ "loss": 0.3286,
196175
+ "step": 76090
196176
+ },
196177
+ {
196178
+ "epoch": 613.48,
196179
+ "learning_rate": 8.787302100161551e-06,
196180
+ "loss": 0.3029,
196181
+ "step": 76095
196182
+ },
196183
+ {
196184
+ "epoch": 613.52,
196185
+ "learning_rate": 8.787221324717287e-06,
196186
+ "loss": 0.3839,
196187
+ "step": 76100
196188
+ },
196189
+ {
196190
+ "epoch": 613.56,
196191
+ "learning_rate": 8.787140549273023e-06,
196192
+ "loss": 0.4412,
196193
+ "step": 76105
196194
+ },
196195
+ {
196196
+ "epoch": 613.6,
196197
+ "learning_rate": 8.787059773828757e-06,
196198
+ "loss": 1.21,
196199
+ "step": 76110
196200
+ },
196201
+ {
196202
+ "epoch": 613.64,
196203
+ "learning_rate": 8.786978998384493e-06,
196204
+ "loss": 0.3518,
196205
+ "step": 76115
196206
+ },
196207
+ {
196208
+ "epoch": 613.68,
196209
+ "learning_rate": 8.786898222940227e-06,
196210
+ "loss": 0.3239,
196211
+ "step": 76120
196212
+ },
196213
+ {
196214
+ "epoch": 613.72,
196215
+ "learning_rate": 8.786817447495963e-06,
196216
+ "loss": 0.36,
196217
+ "step": 76125
196218
+ },
196219
+ {
196220
+ "epoch": 613.76,
196221
+ "learning_rate": 8.786736672051697e-06,
196222
+ "loss": 0.4635,
196223
+ "step": 76130
196224
+ },
196225
+ {
196226
+ "epoch": 613.8,
196227
+ "learning_rate": 8.786655896607433e-06,
196228
+ "loss": 1.2953,
196229
+ "step": 76135
196230
+ },
196231
+ {
196232
+ "epoch": 613.84,
196233
+ "learning_rate": 8.786575121163167e-06,
196234
+ "loss": 0.3409,
196235
+ "step": 76140
196236
+ },
196237
+ {
196238
+ "epoch": 613.88,
196239
+ "learning_rate": 8.786494345718903e-06,
196240
+ "loss": 0.3064,
196241
+ "step": 76145
196242
+ },
196243
+ {
196244
+ "epoch": 613.92,
196245
+ "learning_rate": 8.786413570274637e-06,
196246
+ "loss": 0.3543,
196247
+ "step": 76150
196248
+ },
196249
+ {
196250
+ "epoch": 613.96,
196251
+ "learning_rate": 8.786332794830373e-06,
196252
+ "loss": 0.6372,
196253
+ "step": 76155
196254
+ },
196255
+ {
196256
+ "epoch": 614.0,
196257
+ "eval_loss": 0.35451439023017883,
196258
+ "eval_runtime": 43.26,
196259
+ "eval_samples_per_second": 19.417,
196260
+ "eval_steps_per_second": 0.624,
196261
+ "eval_wer": 0.1899717514124294,
196262
+ "step": 76159
196263
+ },
196264
+ {
196265
+ "epoch": 614.01,
196266
+ "learning_rate": 8.786252019386107e-06,
196267
+ "loss": 0.3222,
196268
+ "step": 76160
196269
+ },
196270
+ {
196271
+ "epoch": 614.05,
196272
+ "learning_rate": 8.786171243941843e-06,
196273
+ "loss": 0.2906,
196274
+ "step": 76165
196275
+ },
196276
+ {
196277
+ "epoch": 614.09,
196278
+ "learning_rate": 8.786090468497579e-06,
196279
+ "loss": 0.3531,
196280
+ "step": 76170
196281
+ },
196282
+ {
196283
+ "epoch": 614.13,
196284
+ "learning_rate": 8.786009693053313e-06,
196285
+ "loss": 0.3645,
196286
+ "step": 76175
196287
+ },
196288
+ {
196289
+ "epoch": 614.17,
196290
+ "learning_rate": 8.785928917609049e-06,
196291
+ "loss": 0.5599,
196292
+ "step": 76180
196293
+ },
196294
+ {
196295
+ "epoch": 614.21,
196296
+ "learning_rate": 8.785848142164783e-06,
196297
+ "loss": 1.053,
196298
+ "step": 76185
196299
+ },
196300
+ {
196301
+ "epoch": 614.25,
196302
+ "learning_rate": 8.785767366720519e-06,
196303
+ "loss": 0.3648,
196304
+ "step": 76190
196305
+ },
196306
+ {
196307
+ "epoch": 614.29,
196308
+ "learning_rate": 8.785686591276253e-06,
196309
+ "loss": 0.3137,
196310
+ "step": 76195
196311
+ },
196312
+ {
196313
+ "epoch": 614.33,
196314
+ "learning_rate": 8.785605815831989e-06,
196315
+ "loss": 0.3806,
196316
+ "step": 76200
196317
+ },
196318
+ {
196319
+ "epoch": 614.37,
196320
+ "learning_rate": 8.785525040387723e-06,
196321
+ "loss": 0.6722,
196322
+ "step": 76205
196323
+ },
196324
+ {
196325
+ "epoch": 614.41,
196326
+ "learning_rate": 8.785444264943459e-06,
196327
+ "loss": 1.2406,
196328
+ "step": 76210
196329
+ },
196330
+ {
196331
+ "epoch": 614.45,
196332
+ "learning_rate": 8.785363489499193e-06,
196333
+ "loss": 0.3377,
196334
+ "step": 76215
196335
+ },
196336
+ {
196337
+ "epoch": 614.49,
196338
+ "learning_rate": 8.785282714054929e-06,
196339
+ "loss": 0.3204,
196340
+ "step": 76220
196341
+ },
196342
+ {
196343
+ "epoch": 614.53,
196344
+ "learning_rate": 8.785201938610663e-06,
196345
+ "loss": 0.3461,
196346
+ "step": 76225
196347
+ },
196348
+ {
196349
+ "epoch": 614.57,
196350
+ "learning_rate": 8.785121163166399e-06,
196351
+ "loss": 0.5858,
196352
+ "step": 76230
196353
+ },
196354
+ {
196355
+ "epoch": 614.61,
196356
+ "learning_rate": 8.785040387722134e-06,
196357
+ "loss": 1.2116,
196358
+ "step": 76235
196359
+ },
196360
+ {
196361
+ "epoch": 614.65,
196362
+ "learning_rate": 8.784959612277869e-06,
196363
+ "loss": 0.3268,
196364
+ "step": 76240
196365
+ },
196366
+ {
196367
+ "epoch": 614.69,
196368
+ "learning_rate": 8.784878836833604e-06,
196369
+ "loss": 0.2913,
196370
+ "step": 76245
196371
+ },
196372
+ {
196373
+ "epoch": 614.73,
196374
+ "learning_rate": 8.784798061389338e-06,
196375
+ "loss": 0.3399,
196376
+ "step": 76250
196377
+ },
196378
+ {
196379
+ "epoch": 614.77,
196380
+ "learning_rate": 8.784717285945074e-06,
196381
+ "loss": 0.6186,
196382
+ "step": 76255
196383
+ },
196384
+ {
196385
+ "epoch": 614.81,
196386
+ "learning_rate": 8.784636510500808e-06,
196387
+ "loss": 1.1712,
196388
+ "step": 76260
196389
+ },
196390
+ {
196391
+ "epoch": 614.85,
196392
+ "learning_rate": 8.784555735056544e-06,
196393
+ "loss": 0.2874,
196394
+ "step": 76265
196395
+ },
196396
+ {
196397
+ "epoch": 614.89,
196398
+ "learning_rate": 8.784474959612278e-06,
196399
+ "loss": 0.3038,
196400
+ "step": 76270
196401
+ },
196402
+ {
196403
+ "epoch": 614.93,
196404
+ "learning_rate": 8.784394184168014e-06,
196405
+ "loss": 0.3714,
196406
+ "step": 76275
196407
+ },
196408
+ {
196409
+ "epoch": 614.97,
196410
+ "learning_rate": 8.784313408723748e-06,
196411
+ "loss": 0.6097,
196412
+ "step": 76280
196413
+ },
196414
+ {
196415
+ "epoch": 615.0,
196416
+ "eval_loss": 0.36748769879341125,
196417
+ "eval_runtime": 45.6308,
196418
+ "eval_samples_per_second": 18.321,
196419
+ "eval_steps_per_second": 0.592,
196420
+ "eval_wer": 0.19178379944935517,
196421
+ "step": 76283
196422
+ },
196423
+ {
196424
+ "epoch": 615.02,
196425
+ "learning_rate": 8.784232633279484e-06,
196426
+ "loss": 0.4276,
196427
+ "step": 76285
196428
+ },
196429
+ {
196430
+ "epoch": 615.06,
196431
+ "learning_rate": 8.784151857835218e-06,
196432
+ "loss": 0.3103,
196433
+ "step": 76290
196434
+ },
196435
+ {
196436
+ "epoch": 615.1,
196437
+ "learning_rate": 8.784071082390954e-06,
196438
+ "loss": 0.345,
196439
+ "step": 76295
196440
+ },
196441
+ {
196442
+ "epoch": 615.14,
196443
+ "learning_rate": 8.783990306946688e-06,
196444
+ "loss": 0.4322,
196445
+ "step": 76300
196446
+ },
196447
+ {
196448
+ "epoch": 615.18,
196449
+ "learning_rate": 8.783909531502424e-06,
196450
+ "loss": 0.5953,
196451
+ "step": 76305
196452
+ },
196453
+ {
196454
+ "epoch": 615.22,
196455
+ "learning_rate": 8.78382875605816e-06,
196456
+ "loss": 1.0507,
196457
+ "step": 76310
196458
+ },
196459
+ {
196460
+ "epoch": 615.26,
196461
+ "learning_rate": 8.783747980613894e-06,
196462
+ "loss": 0.2948,
196463
+ "step": 76315
196464
+ },
196465
+ {
196466
+ "epoch": 615.3,
196467
+ "learning_rate": 8.78366720516963e-06,
196468
+ "loss": 0.3103,
196469
+ "step": 76320
196470
+ },
196471
+ {
196472
+ "epoch": 615.34,
196473
+ "learning_rate": 8.783586429725364e-06,
196474
+ "loss": 0.3602,
196475
+ "step": 76325
196476
+ },
196477
+ {
196478
+ "epoch": 615.38,
196479
+ "learning_rate": 8.7835056542811e-06,
196480
+ "loss": 0.6729,
196481
+ "step": 76330
196482
+ },
196483
+ {
196484
+ "epoch": 615.42,
196485
+ "learning_rate": 8.783424878836834e-06,
196486
+ "loss": 1.1124,
196487
+ "step": 76335
196488
+ },
196489
+ {
196490
+ "epoch": 615.46,
196491
+ "learning_rate": 8.78334410339257e-06,
196492
+ "loss": 0.3663,
196493
+ "step": 76340
196494
+ },
196495
+ {
196496
+ "epoch": 615.5,
196497
+ "learning_rate": 8.783263327948304e-06,
196498
+ "loss": 0.2851,
196499
+ "step": 76345
196500
+ },
196501
+ {
196502
+ "epoch": 615.54,
196503
+ "learning_rate": 8.78318255250404e-06,
196504
+ "loss": 0.3856,
196505
+ "step": 76350
196506
+ },
196507
+ {
196508
+ "epoch": 615.58,
196509
+ "learning_rate": 8.783101777059774e-06,
196510
+ "loss": 0.6889,
196511
+ "step": 76355
196512
+ },
196513
+ {
196514
+ "epoch": 615.62,
196515
+ "learning_rate": 8.78302100161551e-06,
196516
+ "loss": 1.1928,
196517
+ "step": 76360
196518
+ },
196519
+ {
196520
+ "epoch": 615.66,
196521
+ "learning_rate": 8.782940226171244e-06,
196522
+ "loss": 0.336,
196523
+ "step": 76365
196524
+ },
196525
+ {
196526
+ "epoch": 615.7,
196527
+ "learning_rate": 8.78285945072698e-06,
196528
+ "loss": 0.3681,
196529
+ "step": 76370
196530
+ },
196531
+ {
196532
+ "epoch": 615.74,
196533
+ "learning_rate": 8.782778675282716e-06,
196534
+ "loss": 0.3314,
196535
+ "step": 76375
196536
+ },
196537
+ {
196538
+ "epoch": 615.78,
196539
+ "learning_rate": 8.78269789983845e-06,
196540
+ "loss": 0.6444,
196541
+ "step": 76380
196542
+ },
196543
+ {
196544
+ "epoch": 615.82,
196545
+ "learning_rate": 8.782617124394186e-06,
196546
+ "loss": 0.9598,
196547
+ "step": 76385
196548
+ },
196549
+ {
196550
+ "epoch": 615.86,
196551
+ "learning_rate": 8.78253634894992e-06,
196552
+ "loss": 0.3099,
196553
+ "step": 76390
196554
+ },
196555
+ {
196556
+ "epoch": 615.9,
196557
+ "learning_rate": 8.782455573505656e-06,
196558
+ "loss": 0.3403,
196559
+ "step": 76395
196560
+ },
196561
+ {
196562
+ "epoch": 615.94,
196563
+ "learning_rate": 8.78237479806139e-06,
196564
+ "loss": 0.3402,
196565
+ "step": 76400
196566
+ },
196567
+ {
196568
+ "epoch": 615.98,
196569
+ "learning_rate": 8.782294022617126e-06,
196570
+ "loss": 0.7314,
196571
+ "step": 76405
196572
+ },
196573
+ {
196574
+ "epoch": 616.0,
196575
+ "eval_loss": 0.37296634912490845,
196576
+ "eval_runtime": 44.7239,
196577
+ "eval_samples_per_second": 18.692,
196578
+ "eval_steps_per_second": 0.604,
196579
+ "eval_wer": 0.1930858806404658,
196580
+ "step": 76407
196581
+ },
196582
+ {
196583
+ "epoch": 611.02,
196584
+ "learning_rate": 8.78221324717286e-06,
196585
+ "loss": 0.4121,
196586
+ "step": 76410
196587
+ },
196588
+ {
196589
+ "epoch": 611.06,
196590
+ "learning_rate": 8.782132471728596e-06,
196591
+ "loss": 0.3243,
196592
+ "step": 76415
196593
+ },
196594
+ {
196595
+ "epoch": 611.1,
196596
+ "learning_rate": 8.78205169628433e-06,
196597
+ "loss": 0.2929,
196598
+ "step": 76420
196599
+ },
196600
+ {
196601
+ "epoch": 611.14,
196602
+ "learning_rate": 8.781970920840066e-06,
196603
+ "loss": 0.4178,
196604
+ "step": 76425
196605
+ },
196606
+ {
196607
+ "epoch": 611.18,
196608
+ "learning_rate": 8.7818901453958e-06,
196609
+ "loss": 0.7328,
196610
+ "step": 76430
196611
+ },
196612
+ {
196613
+ "epoch": 611.22,
196614
+ "learning_rate": 8.781809369951536e-06,
196615
+ "loss": 0.9123,
196616
+ "step": 76435
196617
+ },
196618
+ {
196619
+ "epoch": 611.26,
196620
+ "learning_rate": 8.781728594507271e-06,
196621
+ "loss": 0.3589,
196622
+ "step": 76440
196623
+ },
196624
+ {
196625
+ "epoch": 611.3,
196626
+ "learning_rate": 8.781647819063006e-06,
196627
+ "loss": 0.328,
196628
+ "step": 76445
196629
+ },
196630
+ {
196631
+ "epoch": 611.34,
196632
+ "learning_rate": 8.781567043618741e-06,
196633
+ "loss": 0.3831,
196634
+ "step": 76450
196635
+ },
196636
+ {
196637
+ "epoch": 611.38,
196638
+ "learning_rate": 8.781486268174476e-06,
196639
+ "loss": 0.8885,
196640
+ "step": 76455
196641
+ },
196642
+ {
196643
+ "epoch": 611.42,
196644
+ "learning_rate": 8.781405492730211e-06,
196645
+ "loss": 0.8831,
196646
+ "step": 76460
196647
+ },
196648
+ {
196649
+ "epoch": 611.46,
196650
+ "learning_rate": 8.781324717285945e-06,
196651
+ "loss": 0.3094,
196652
+ "step": 76465
196653
+ },
196654
+ {
196655
+ "epoch": 611.5,
196656
+ "learning_rate": 8.781243941841681e-06,
196657
+ "loss": 0.349,
196658
+ "step": 76470
196659
+ },
196660
+ {
196661
+ "epoch": 611.54,
196662
+ "learning_rate": 8.781163166397415e-06,
196663
+ "loss": 0.3866,
196664
+ "step": 76475
196665
+ },
196666
+ {
196667
+ "epoch": 611.58,
196668
+ "learning_rate": 8.781082390953151e-06,
196669
+ "loss": 0.7114,
196670
+ "step": 76480
196671
+ },
196672
+ {
196673
+ "epoch": 611.62,
196674
+ "learning_rate": 8.781001615508885e-06,
196675
+ "loss": 0.92,
196676
+ "step": 76485
196677
+ },
196678
+ {
196679
+ "epoch": 611.66,
196680
+ "learning_rate": 8.780920840064621e-06,
196681
+ "loss": 0.3971,
196682
+ "step": 76490
196683
+ },
196684
+ {
196685
+ "epoch": 611.7,
196686
+ "learning_rate": 8.780840064620355e-06,
196687
+ "loss": 0.3134,
196688
+ "step": 76495
196689
+ },
196690
+ {
196691
+ "epoch": 611.74,
196692
+ "learning_rate": 8.780759289176091e-06,
196693
+ "loss": 0.3991,
196694
+ "step": 76500
196695
+ },
196696
+ {
196697
+ "epoch": 611.78,
196698
+ "learning_rate": 8.780678513731825e-06,
196699
+ "loss": 0.7837,
196700
+ "step": 76505
196701
+ },
196702
+ {
196703
+ "epoch": 611.82,
196704
+ "learning_rate": 8.780597738287561e-06,
196705
+ "loss": 0.8353,
196706
+ "step": 76510
196707
+ },
196708
+ {
196709
+ "epoch": 611.86,
196710
+ "learning_rate": 8.780516962843297e-06,
196711
+ "loss": 0.3332,
196712
+ "step": 76515
196713
+ },
196714
+ {
196715
+ "epoch": 611.9,
196716
+ "learning_rate": 8.780436187399031e-06,
196717
+ "loss": 0.3274,
196718
+ "step": 76520
196719
+ },
196720
+ {
196721
+ "epoch": 611.94,
196722
+ "learning_rate": 8.780355411954767e-06,
196723
+ "loss": 0.3416,
196724
+ "step": 76525
196725
+ },
196726
+ {
196727
+ "epoch": 611.98,
196728
+ "learning_rate": 8.780274636510501e-06,
196729
+ "loss": 0.8589,
196730
+ "step": 76530
196731
+ },
196732
+ {
196733
+ "epoch": 612.0,
196734
+ "eval_loss": 0.39867308735847473,
196735
+ "eval_runtime": 43.2092,
196736
+ "eval_samples_per_second": 19.348,
196737
+ "eval_steps_per_second": 0.625,
196738
+ "eval_wer": 0.18634209392503231,
196739
+ "step": 76532
196740
  }
196741
  ],
196742
  "max_steps": 625000,
196743
  "num_train_epochs": 5000,
196744
+ "total_flos": 2.1537393625642425e+20,
196745
  "trial_name": null,
196746
  "trial_params": null
196747
  }
model-bin/finetune/base/{checkpoint-75787 β†’ checkpoint-76532}/training_args.bin RENAMED
File without changes
model-bin/finetune/base/log/1629885731.4375427/events.out.tfevents.1629885731.7e498afd5545.905.103 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:36a6da31eefd7ffcaa0e8679cde1cd51f449d40931f5cab27833d9e7f49fbbf5
3
+ size 4194
model-bin/finetune/base/log/1629886673.7174067/events.out.tfevents.1629886673.7e498afd5545.6884.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0d74f46ca1c7fd0a37637440eb5e393c900b931ebffb6e9f79f89fe7b987f6d3
3
+ size 4194
model-bin/finetune/base/log/1629887166.966957/events.out.tfevents.1629887166.7e498afd5545.6884.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c73db601adf9bf691a8a648aea8ba46f4e54821cc80d4526a5a554e8065b4282
3
+ size 4194
model-bin/finetune/base/log/1629887754.1185539/events.out.tfevents.1629887754.7e498afd5545.6884.5 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e1e26f78794d7f6ffd2b00b6d9ef3e22821b21e2508f541e178364b277d0f026
3
+ size 4194
model-bin/finetune/base/log/1629888265.905986/events.out.tfevents.1629888265.7e498afd5545.6884.7 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f1bfa56c24bb168c6959bdfd406183d0478e7b3cea14e19c2fbddf4655dadf0c
3
+ size 4194
model-bin/finetune/base/log/1629888670.3141375/events.out.tfevents.1629888670.7e498afd5545.7645.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8f272ccac5f572ccb9d62044928df58ab0257203e84d8b6072f7892177516e05
3
+ size 4194
model-bin/finetune/base/log/1629889158.7460623/events.out.tfevents.1629889158.7e498afd5545.7645.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:42c46008eee645fc3b59ab7eeb013ba6fe959e277d904c666ac3ad4b1d50f3ff
3
+ size 4194
model-bin/finetune/base/log/events.out.tfevents.1629885731.7e498afd5545.905.102 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:32b48829287d7459553a281d2c7951e08c4d48da4bdde6fed52ea4c65723e7aa
3
+ size 8622
model-bin/finetune/base/log/events.out.tfevents.1629886673.7e498afd5545.6884.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7dc414e5d7be985ac5c250402f9dbbc24776b0afb23bb9c1366a0f9c13dff95f
3
+ size 8630
model-bin/finetune/base/log/events.out.tfevents.1629887166.7e498afd5545.6884.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b56762b9849e47c832a043579254ae91a2186a04633bcde57efd4ff10c002e1
3
+ size 8462
model-bin/finetune/base/log/events.out.tfevents.1629887754.7e498afd5545.6884.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2df9a0dd68ca38106b7d0d418ffb03edfa664c29f7748d8201978ad3bf080b65
3
+ size 8622
model-bin/finetune/base/log/events.out.tfevents.1629888265.7e498afd5545.6884.6 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc29f1b159e9ced388223b12cf9a654002917d554eb44d9fbe9f72abd5fba169
3
+ size 3938
model-bin/finetune/base/log/events.out.tfevents.1629888670.7e498afd5545.7645.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:32f35091b2934d85891ecde443d198745a0801cd4984e4c157d396ab96c25bf7
3
+ size 8630
model-bin/finetune/base/log/events.out.tfevents.1629889158.7e498afd5545.7645.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c76070cc3d2f3aace7014ca70e3572d106002661d639b2a15e83c9110009f334
3
+ size 8622