FineWiki-mds-tokenized / overall_tokenization_stats.json
QuangDuy's picture
Tokenized dataset from QuangDuy/FineWiki-mds
fcd3c37 verified
raw
history blame contribute delete
429 Bytes
{
  "total_tokens": 515516183,
  "total_samples": 1279087,
  "subset_stats": {
    "000_00000": {
      "num_tokens": 261945211,
      "num_samples": 644603,
      "skipped": false,
      "output_samples": 644603,
      "output_size": 269387889
    },
    "000_00001": {
      "num_tokens": 253570972,
      "num_samples": 634484,
      "skipped": false,
      "output_samples": 634484,
      "output_size": 254216062
    }
  }
}