FineWiki-mds-tokenized-v2 / overall_tokenization_stats.json
{
  "total_tokens": 444572178,
  "total_samples": 1279087,
  "subset_stats": {
    "000_00000": {
      "num_tokens": 226189224,
      "num_samples": 644603,
      "skipped": false,
      "resumed_from": 0,
      "output_samples": 644603,
      "output_size": 254760108
    },
    "000_00001": {
      "num_tokens": 218382954,
      "num_samples": 634484,
      "skipped": false,
      "resumed_from": 0,
      "output_samples": 634484,
      "output_size": 240509945
    }
  }
}
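
The two subset shards sum exactly to the top-level totals: 226,189,224 + 218,382,954 = 444,572,178 tokens and 644,603 + 634,484 = 1,279,087 samples. Below is a minimal Python sketch that re-checks this invariant and prints a per-shard summary; it assumes the file has been downloaded locally under the same name, and all field names are taken from the JSON above.

import json

# Load the stats file (assumed to be in the current directory).
with open("overall_tokenization_stats.json") as f:
    stats = json.load(f)

# Sum the per-shard counters and verify they match the top-level totals.
token_sum = sum(s["num_tokens"] for s in stats["subset_stats"].values())
sample_sum = sum(s["num_samples"] for s in stats["subset_stats"].values())

assert token_sum == stats["total_tokens"], (token_sum, stats["total_tokens"])
assert sample_sum == stats["total_samples"], (sample_sum, stats["total_samples"])

# Print one summary line per shard, including mean tokens per sample.
for name, s in stats["subset_stats"].items():
    avg = s["num_tokens"] / s["num_samples"]
    print(f"{name}: {s['num_samples']:,} samples, {s['num_tokens']:,} tokens "
          f"(~{avg:.1f} tokens/sample), output_size={s['output_size']:,} bytes")

The "skipped": false and "resumed_from": 0 fields suggest both shards were tokenized in a single uninterrupted pass, though their exact semantics depend on the tokenization script that produced this file.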