NingsenWang committed on
Commit 6912b28 · verified · 1 Parent(s): 09960df

Upload folder using huggingface_hub

best_model.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e4d2a44b08ee2bdb940aab4b61fbf8620dc85b61a0b4ce1a203ce7319c492d69
+ size 413477355
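best_model.pth is committed as a Git LFS pointer, so this diff records only the SHA-256 and byte size of the 413 MB checkpoint, not its contents. A minimal Python sketch of fetching and loading it with huggingface_hub (the repo_id is a placeholder, and the file is assumed to be a torch-serialized checkpoint):

import torch
from huggingface_hub import hf_hub_download

# Placeholder repo id -- substitute the repository this commit belongs to.
ckpt_path = hf_hub_download(repo_id="<user>/<repo>", filename="best_model.pth")

# The Hub resolves the LFS pointer server-side, so this downloads the full
# 413,477,355-byte file named in the pointer above.
state = torch.load(ckpt_path, map_location="cpu")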
extra_twobranch_large_strong_ls_lr5e4.csv ADDED
@@ -0,0 +1,21 @@
+ metric,value
+ num_samples,800
+ threshold,0.5
+ accuracy,0.82375
+ precision,0.8388746803069054
+ recall,0.8078817733990148
+ f1,0.823086574654956
+ tp,328
+ tn,331
+ fp,63
+ fn,78
+ pos_rate,0.48875
+ model_size,large
+ two_branch,True
+ channel_mode,rgb_grad
+ fft_highpass_only,False
+ fft_low_cut_ratio,0.1
+ use_tta,False
+ seed,42
+ val_ratio,0.1
+ checkpoint,./weights/extra_twobranch_large_strong_ls_lr5e4/best_model.pth
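The derived metrics in this CSV follow from the confusion-matrix counts in the same file. A minimal Python check of that arithmetic (note the reported pos_rate matches the predicted-positive rate (tp+fp)/num_samples rather than the label prevalence):

tp, tn, fp, fn = 328, 331, 63, 78
n = tp + tn + fp + fn                                       # 800 -> num_samples
accuracy  = (tp + tn) / n                                   # 0.82375
precision = tp / (tp + fp)                                  # 0.8388746803069054
recall    = tp / (tp + fn)                                  # 0.8078817733990148
f1        = 2 * precision * recall / (precision + recall)   # 0.823086574654956
pos_rate  = (tp + fp) / n                                   # 0.48875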
train.log ADDED
@@ -0,0 +1,51 @@
+ epoch,train_loss,train_acc,val_loss,val_acc,lr,elapsed_sec
+ 1,0.740547,0.5822,0.670832,0.6212,4.995067e-04,162.43
+ 2,0.673413,0.6293,0.664642,0.6325,4.980287e-04,161.60
+ 3,0.664199,0.6426,0.713126,0.6763,4.955718e-04,161.45
+ 4,0.661823,0.6432,0.631156,0.6713,4.921458e-04,161.44
+ 5,0.650840,0.6603,0.647439,0.6550,4.877641e-04,161.34
+ 6,0.652331,0.6571,0.631171,0.6913,4.824441e-04,161.29
+ 7,0.645701,0.6706,0.610008,0.7188,4.762068e-04,161.42
+ 8,0.638346,0.6776,0.608472,0.7288,4.690767e-04,161.47
+ 9,0.640464,0.6763,0.608383,0.7238,4.610820e-04,161.43
+ 10,0.635802,0.6842,0.623903,0.7025,4.522542e-04,161.23
+ 11,0.632505,0.6867,0.645290,0.7050,4.426283e-04,161.31
+ 12,0.631033,0.6867,0.612895,0.7250,4.322422e-04,161.38
+ 13,0.628589,0.6951,0.603211,0.7312,4.211368e-04,161.31
+ 14,0.624571,0.6972,0.632504,0.7375,4.093560e-04,161.31
+ 15,0.626181,0.6958,0.611804,0.7388,3.969463e-04,161.29
+ 16,0.621190,0.6982,0.589898,0.7662,3.839567e-04,161.32
+ 17,0.619069,0.7043,0.594120,0.7512,3.704384e-04,161.24
+ 18,0.619688,0.7078,0.582570,0.7725,3.564448e-04,161.17
+ 19,0.612435,0.7140,0.587118,0.7438,3.420311e-04,161.21
+ 20,0.614179,0.7125,0.583163,0.7662,3.272542e-04,161.37
+ 21,0.613633,0.7114,0.584706,0.7662,3.121725e-04,161.29
+ 22,0.608119,0.7194,0.587738,0.7488,2.968453e-04,161.30
+ 23,0.604900,0.7233,0.592553,0.7562,2.813333e-04,161.33
+ 24,0.608141,0.7161,0.575575,0.7788,2.656976e-04,161.14
+ 25,0.603821,0.7192,0.570351,0.7887,2.500000e-04,161.20
+ 26,0.597700,0.7374,0.575003,0.7812,2.343024e-04,161.12
+ 27,0.596823,0.7297,0.580783,0.7562,2.186667e-04,161.22
+ 28,0.595277,0.7392,0.576423,0.7738,2.031547e-04,161.34
+ 29,0.589638,0.7397,0.578443,0.7688,1.878275e-04,161.26
+ 30,0.590712,0.7428,0.576379,0.7900,1.727458e-04,161.33
+ 31,0.589526,0.7464,0.570384,0.7825,1.579689e-04,161.16
+ 32,0.588330,0.7442,0.565734,0.7863,1.435552e-04,161.30
+ 33,0.584769,0.7481,0.581504,0.7700,1.295616e-04,161.10
+ 34,0.582803,0.7453,0.564120,0.7925,1.160433e-04,161.17
+ 35,0.578925,0.7558,0.565455,0.7837,1.030537e-04,161.20
+ 36,0.573140,0.7632,0.569381,0.8025,9.064400e-05,161.22
+ 37,0.573065,0.7597,0.560922,0.8025,7.886322e-05,161.22
+ 38,0.573843,0.7662,0.562490,0.8063,6.775784e-05,161.13
+ 39,0.567062,0.7693,0.566152,0.8013,5.737169e-05,161.17
+ 40,0.566343,0.7696,0.562954,0.8200,4.774575e-05,161.20
+ 41,0.558574,0.7765,0.552812,0.8237,3.891802e-05,161.32
+ 42,0.561053,0.7786,0.560432,0.7963,3.092333e-05,161.13
+ 43,0.562574,0.7760,0.554364,0.8087,2.379324e-05,161.28
+ 44,0.558446,0.7810,0.560154,0.8037,1.755588e-05,161.30
+ 45,0.554647,0.7840,0.556155,0.8150,1.223587e-05,161.29
+ 46,0.553427,0.7860,0.557128,0.8163,7.854210e-06,161.26
+ 47,0.553275,0.7846,0.555610,0.8087,4.428187e-06,161.18
+ 48,0.552505,0.7851,0.553087,0.8163,1.971325e-06,161.40
+ 49,0.549808,0.7892,0.554638,0.8213,4.933179e-07,161.14
+ 50,0.547333,0.7903,0.555519,0.8175,0.000000e+00,161.11
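The lr column decays from roughly 5e-4 at epoch 1 to 0 at epoch 50, and the logged values line up with cosine annealing of a 5e-4 base rate over 50 epochs (exactly 2.5e-4 at epoch 25). A minimal sketch of that schedule; this is inferred from the logged values only and is not a confirmed description of the training script:

import math

base_lr, total_epochs = 5e-4, 50
for epoch in range(1, total_epochs + 1):
    # Cosine annealing from base_lr down to 0, evaluated at the end of each epoch.
    lr = 0.5 * base_lr * (1 + math.cos(math.pi * epoch / total_epochs))
    print(f"epoch {epoch:2d}: lr = {lr:.6e}")
# epoch  1 -> 4.995067e-04, epoch 25 -> 2.500000e-04, epoch 50 -> 0.000000e+00,
# matching the lr column in the log above.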