DanSarm committed on
Commit
44a4050
·
verified ·
1 Parent(s): e0f5db5

Fine-tuned Construction Receipt Model

Browse files
Files changed (4) hide show
  1. README.md +61 -86
  2. config.json +1 -1
  3. generation_config.json +1 -1
  4. model.safetensors +1 -1
README.md CHANGED
@@ -16,7 +16,7 @@ should probably proofread and complete it, then remove this comment. -->
16
 
17
  This model is a fine-tuned version of [DanSarm/receipt-core-model](https://huggingface.co/DanSarm/receipt-core-model) on an unknown dataset.
18
  It achieves the following results on the evaluation set:
19
- - Loss: 0.1836
20
 
21
  ## Model description
22
 
@@ -48,94 +48,69 @@ The following hyperparameters were used during training:
48
 
49
  | Training Loss | Epoch | Step | Validation Loss |
50
  |:-------------:|:-----:|:----:|:---------------:|
51
- | 0.8353 | 1.0 | 3 | 0.4787 |
52
- | 0.5752 | 2.0 | 6 | 0.3820 |
53
- | 0.44 | 3.0 | 9 | 0.3257 |
54
- | 0.343 | 4.0 | 12 | 0.3046 |
55
- | 0.2676 | 5.0 | 15 | 0.2868 |
56
- | 0.2263 | 6.0 | 18 | 0.2429 |
57
- | 0.2045 | 7.0 | 21 | 0.2125 |
58
- | 0.1808 | 8.0 | 24 | 0.1952 |
59
- | 0.1367 | 9.0 | 27 | 0.1801 |
60
- | 0.1209 | 10.0 | 30 | 0.1740 |
61
- | 0.1248 | 11.0 | 33 | 0.1752 |
62
- | 0.097 | 12.0 | 36 | 0.1704 |
63
- | 0.0811 | 13.0 | 39 | 0.1629 |
64
- | 0.063 | 14.0 | 42 | 0.1592 |
65
- | 0.0649 | 15.0 | 45 | 0.1565 |
66
- | 0.0505 | 16.0 | 48 | 0.1559 |
67
- | 0.0433 | 17.0 | 51 | 0.1588 |
68
- | 0.0428 | 18.0 | 54 | 0.1630 |
69
- | 0.0402 | 19.0 | 57 | 0.1624 |
70
- | 0.0243 | 20.0 | 60 | 0.1595 |
71
- | 0.0187 | 21.0 | 63 | 0.1589 |
72
- | 0.0242 | 22.0 | 66 | 0.1659 |
73
- | 0.0151 | 23.0 | 69 | 0.1704 |
74
- | 0.022 | 24.0 | 72 | 0.1632 |
75
- | 0.0141 | 25.0 | 75 | 0.1541 |
76
- | 0.0148 | 26.0 | 78 | 0.1487 |
77
- | 0.0165 | 27.0 | 81 | 0.1475 |
78
- | 0.0116 | 28.0 | 84 | 0.1488 |
79
- | 0.0072 | 29.0 | 87 | 0.1496 |
80
- | 0.0107 | 30.0 | 90 | 0.1495 |
81
- | 0.0078 | 31.0 | 93 | 0.1495 |
82
- | 0.0076 | 32.0 | 96 | 0.1483 |
83
- | 0.0093 | 33.0 | 99 | 0.1445 |
84
- | 0.0047 | 34.0 | 102 | 0.1445 |
85
- | 0.0074 | 35.0 | 105 | 0.1461 |
86
- | 0.0073 | 36.0 | 108 | 0.1468 |
87
- | 0.009 | 37.0 | 111 | 0.1482 |
88
- | 0.0052 | 38.0 | 114 | 0.1516 |
89
- | 0.0056 | 39.0 | 117 | 0.1539 |
90
- | 0.0102 | 40.0 | 120 | 0.1570 |
91
- | 0.007 | 41.0 | 123 | 0.1569 |
92
- | 0.0063 | 42.0 | 126 | 0.1560 |
93
- | 0.005 | 43.0 | 129 | 0.1568 |
94
- | 0.0041 | 44.0 | 132 | 0.1583 |
95
- | 0.0051 | 45.0 | 135 | 0.1603 |
96
- | 0.0037 | 46.0 | 138 | 0.1613 |
97
- | 0.0074 | 47.0 | 141 | 0.1615 |
98
- | 0.0092 | 48.0 | 144 | 0.1595 |
99
- | 0.0049 | 49.0 | 147 | 0.1569 |
100
- | 0.0065 | 50.0 | 150 | 0.1545 |
101
- | 0.0119 | 51.0 | 153 | 0.1520 |
102
- | 0.0046 | 52.0 | 156 | 0.1504 |
103
- | 0.0032 | 53.0 | 159 | 0.1500 |
104
- | 0.0059 | 54.0 | 162 | 0.1503 |
105
- | 0.0033 | 55.0 | 165 | 0.1492 |
106
- | 0.0059 | 56.0 | 168 | 0.1453 |
107
- | 0.0058 | 57.0 | 171 | 0.1474 |
108
- | 0.0023 | 58.0 | 174 | 0.1503 |
109
- | 0.0029 | 59.0 | 177 | 0.1545 |
110
- | 0.0027 | 60.0 | 180 | 0.1593 |
111
- | 0.0024 | 61.0 | 183 | 0.1630 |
112
- | 0.004 | 62.0 | 186 | 0.1649 |
113
- | 0.005 | 63.0 | 189 | 0.1657 |
114
- | 0.0022 | 64.0 | 192 | 0.1658 |
115
- | 0.0024 | 65.0 | 195 | 0.1652 |
116
- | 0.0035 | 66.0 | 198 | 0.1659 |
117
- | 0.0019 | 67.0 | 201 | 0.1674 |
118
- | 0.0019 | 68.0 | 204 | 0.1690 |
119
- | 0.0019 | 69.0 | 207 | 0.1706 |
120
- | 0.0028 | 70.0 | 210 | 0.1716 |
121
- | 0.0055 | 71.0 | 213 | 0.1728 |
122
- | 0.0022 | 72.0 | 216 | 0.1734 |
123
- | 0.0029 | 73.0 | 219 | 0.1742 |
124
- | 0.0013 | 74.0 | 222 | 0.1749 |
125
- | 0.0017 | 75.0 | 225 | 0.1750 |
126
- | 0.0015 | 76.0 | 228 | 0.1756 |
127
- | 0.0028 | 77.0 | 231 | 0.1780 |
128
- | 0.0012 | 78.0 | 234 | 0.1807 |
129
- | 0.0017 | 79.0 | 237 | 0.1827 |
130
- | 0.0045 | 80.0 | 240 | 0.1837 |
131
- | 0.0017 | 81.0 | 243 | 0.1840 |
132
- | 0.0022 | 82.0 | 246 | 0.1841 |
133
- | 0.0014 | 83.0 | 249 | 0.1836 |
134
 
135
 
136
  ### Framework versions
137
 
138
- - Transformers 4.48.1
139
  - Pytorch 2.6.0+cu124
140
- - Datasets 3.2.0
141
  - Tokenizers 0.21.0
 
16
 
17
  This model is a fine-tuned version of [DanSarm/receipt-core-model](https://huggingface.co/DanSarm/receipt-core-model) on an unknown dataset.
18
  It achieves the following results on the evaluation set:
19
+ - Loss: 0.1278
20
 
21
  ## Model description
22
 
 
48
 
49
  | Training Loss | Epoch | Step | Validation Loss |
50
  |:-------------:|:-----:|:----:|:---------------:|
51
+ | 0.4517 | 1.0 | 19 | 0.2472 |
52
+ | 0.1736 | 2.0 | 38 | 0.1857 |
53
+ | 0.1242 | 3.0 | 57 | 0.1488 |
54
+ | 0.0957 | 4.0 | 76 | 0.1314 |
55
+ | 0.0691 | 5.0 | 95 | 0.1292 |
56
+ | 0.0546 | 6.0 | 114 | 0.1221 |
57
+ | 0.0456 | 7.0 | 133 | 0.1136 |
58
+ | 0.0422 | 8.0 | 152 | 0.1116 |
59
+ | 0.0321 | 9.0 | 171 | 0.1234 |
60
+ | 0.0266 | 10.0 | 190 | 0.1123 |
61
+ | 0.0228 | 11.0 | 209 | 0.1139 |
62
+ | 0.0213 | 12.0 | 228 | 0.1122 |
63
+ | 0.0201 | 13.0 | 247 | 0.1166 |
64
+ | 0.0188 | 14.0 | 266 | 0.1238 |
65
+ | 0.0146 | 15.0 | 285 | 0.1256 |
66
+ | 0.0134 | 16.0 | 304 | 0.1186 |
67
+ | 0.0124 | 17.0 | 323 | 0.1193 |
68
+ | 0.0108 | 18.0 | 342 | 0.1150 |
69
+ | 0.0084 | 19.0 | 361 | 0.1171 |
70
+ | 0.0077 | 20.0 | 380 | 0.1216 |
71
+ | 0.0082 | 21.0 | 399 | 0.1225 |
72
+ | 0.0073 | 22.0 | 418 | 0.1210 |
73
+ | 0.0066 | 23.0 | 437 | 0.1199 |
74
+ | 0.0083 | 24.0 | 456 | 0.1170 |
75
+ | 0.0087 | 25.0 | 475 | 0.1172 |
76
+ | 0.0053 | 26.0 | 494 | 0.1160 |
77
+ | 0.0061 | 27.0 | 513 | 0.1178 |
78
+ | 0.0045 | 28.0 | 532 | 0.1169 |
79
+ | 0.0048 | 29.0 | 551 | 0.1192 |
80
+ | 0.0034 | 30.0 | 570 | 0.1219 |
81
+ | 0.0032 | 31.0 | 589 | 0.1194 |
82
+ | 0.0038 | 32.0 | 608 | 0.1230 |
83
+ | 0.0036 | 33.0 | 627 | 0.1241 |
84
+ | 0.0036 | 34.0 | 646 | 0.1235 |
85
+ | 0.0039 | 35.0 | 665 | 0.1178 |
86
+ | 0.0025 | 36.0 | 684 | 0.1174 |
87
+ | 0.004 | 37.0 | 703 | 0.1146 |
88
+ | 0.003 | 38.0 | 722 | 0.1148 |
89
+ | 0.002 | 39.0 | 741 | 0.1186 |
90
+ | 0.0026 | 40.0 | 760 | 0.1137 |
91
+ | 0.0019 | 41.0 | 779 | 0.1134 |
92
+ | 0.0018 | 42.0 | 798 | 0.1135 |
93
+ | 0.0014 | 43.0 | 817 | 0.1139 |
94
+ | 0.0019 | 44.0 | 836 | 0.1189 |
95
+ | 0.0012 | 45.0 | 855 | 0.1153 |
96
+ | 0.0017 | 46.0 | 874 | 0.1155 |
97
+ | 0.0019 | 47.0 | 893 | 0.1181 |
98
+ | 0.0013 | 48.0 | 912 | 0.1189 |
99
+ | 0.0012 | 49.0 | 931 | 0.1231 |
100
+ | 0.0011 | 50.0 | 950 | 0.1211 |
101
+ | 0.0021 | 51.0 | 969 | 0.1217 |
102
+ | 0.002 | 52.0 | 988 | 0.1235 |
103
+ | 0.0022 | 53.0 | 1007 | 0.1193 |
104
+ | 0.0022 | 54.0 | 1026 | 0.1185 |
105
+ | 0.002 | 55.0 | 1045 | 0.1230 |
106
+ | 0.0014 | 56.0 | 1064 | 0.1246 |
107
+ | 0.0012 | 57.0 | 1083 | 0.1249 |
108
+ | 0.0014 | 58.0 | 1102 | 0.1278 |
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
109
 
110
 
111
  ### Framework versions
112
 
113
+ - Transformers 4.49.0
114
  - Pytorch 2.6.0+cu124
115
+ - Datasets 3.3.1
116
  - Tokenizers 0.21.0
config.json CHANGED
@@ -55,7 +55,7 @@
55
  }
56
  },
57
  "torch_dtype": "float32",
58
- "transformers_version": "4.48.1",
59
  "use_cache": true,
60
  "vocab_size": 32128
61
  }
 
55
  }
56
  },
57
  "torch_dtype": "float32",
58
+ "transformers_version": "4.49.0",
59
  "use_cache": true,
60
  "vocab_size": 32128
61
  }
generation_config.json CHANGED
@@ -3,5 +3,5 @@
3
  "decoder_start_token_id": 0,
4
  "eos_token_id": 1,
5
  "pad_token_id": 0,
6
- "transformers_version": "4.48.1"
7
  }
 
3
  "decoder_start_token_id": 0,
4
  "eos_token_id": 1,
5
  "pad_token_id": 0,
6
+ "transformers_version": "4.49.0"
7
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c12b792d04c7aa4254583efaf091df72b32fb25b0445996bea5b891eeb47ca28
3
  size 891644712
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c911e4aa8deb946a9d2079d975a098ba3ad45b4213776f7a20065972a4efc351
3
  size 891644712