Dataset Viewer
Auto-converted to Parquet
Columns:

- `dataset` (string, lengths 4 to 115)
- `config` (string, lengths 1 to 121)
- `split` (string, lengths 1 to 228)
- `num_examples` (int64, values 3 to 341M)
- `statistics_string_text` (list, lengths 1 to 16.4k)
- `partial` (bool, 2 classes)
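Each `statistics_string_text` entry holds per-column statistics of the kind the Hugging Face dataset viewer computes. As a minimal sketch (assuming the public datasets-server `/statistics` endpoint and its documented response shape; the dataset/config/split values come from the first row of the table below), the same numbers can be fetched like this:

```python
# Minimal sketch, assuming the public Hugging Face datasets-server
# /statistics endpoint returns a JSON object with a "statistics" list
# (one entry per column).
import requests

API_URL = "https://datasets-server.huggingface.co/statistics"

def fetch_split_statistics(dataset: str, config: str, split: str) -> list:
    """Fetch the per-column statistics list for one dataset split."""
    response = requests.get(
        API_URL,
        params={"dataset": dataset, "config": config, "split": split},
        timeout=30,
    )
    response.raise_for_status()
    return response.json().get("statistics", [])

if __name__ == "__main__":
    # First row of the table below: dair-ai/emotion, config "split", split "train".
    for column in fetch_split_statistics("dair-ai/emotion", "split", "train"):
        print(column.get("column_name"), column.get("column_statistics"))
```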
| dataset | config | split | num_examples | statistics_string_text | partial |
|---|---|---|---|---|---|
| dair-ai/emotion | split | train | 16,000 | [ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 7, 37, 67, 97, 127, 157, 187, 217, 247, 277, 300 ], "hist": [ 1833, 3789, ... | false |
| dair-ai/emotion | split | validation | 2,000 | [ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 11, 40, 69, 98, 127, 156, 185, 214, 243, 272, 295 ], "hist": [ 293, 473, 4... | false |
| dair-ai/emotion | split | test | 2,000 | [ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 14, 43, 72, 101, 130, 159, 188, 217, 246, 275, 296 ], "hist": [ 346, 466, ... | false |
| dair-ai/emotion | unsplit | train | 416,809 | [ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 2, 85, 168, 251, 334, 417, 500, 583, 666, 749, 830 ], "hist": [ 204631, 162639, ... | false |
| allenai/qasper | qasper | train | 888 | [ { "name": "abstract", "statistics": { "histogram": { "bin_edges": [ 279, 454, 629, 804, 979, 1154, 1329, 1504, 1679, 1854, 2022 ], "hist": [ 16, 79,... | false |
| allenai/qasper | qasper | validation | 281 | [ { "name": "abstract", "statistics": { "histogram": { "bin_edges": [ 338, 492, 646, 800, 954, 1108, 1262, 1416, 1570, 1724, 1868 ], "hist": [ 9, 31, ... | false |
| allenai/qasper | qasper | test | 416 | [ { "name": "abstract", "statistics": { "histogram": { "bin_edges": [ 252, 418, 584, 750, 916, 1082, 1248, 1414, 1580, 1746, 1909 ], "hist": [ 16, 26,... | false |
| llm-book/wrime-sentiment | default | test | 1,781 | [ { "name": "datetime", "statistics": { "histogram": { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 82, 546, 856, 297 ] }, "max": 16, "mean": 14.76811, "med... | false |
| llm-book/wrime-sentiment | default | train | 20,149 | [ { "name": "datetime", "statistics": { "histogram": { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 1201, 6486, 9866, 2596 ] }, "max": 16, "mean": 14.68773, ... | false |
| llm-book/wrime-sentiment | default | validation | 1,608 | [ { "name": "datetime", "statistics": { "histogram": { "bin_edges": [ 13, 14, 15, 16, 16 ], "hist": [ 140, 546, 735, 187 ] }, "max": 16, "mean": 14.60261, "me... | false |
| ttxy/resume_ner | default | test | 477 | [ { "name": "label", "statistics": { "histogram": { "bin_edges": [ 9, 112, 215, 318, 421, 524, 627, 730, 833, 936, 1035 ], "hist": [ 222, 156, ... | false |
| ttxy/resume_ner | default | train | 3,821 | [ { "name": "label", "statistics": { "histogram": { "bin_edges": [ 7, 122, 237, 352, 467, 582, 697, 812, 927, 1042, 1149 ], "hist": [ 1940, 1173, ... | false |
| ttxy/resume_ner | default | validation | 463 | [ { "name": "label", "statistics": { "histogram": { "bin_edges": [ 9, 95, 181, 267, 353, 439, 525, 611, 697, 783, 859 ], "hist": [ 180, 184, ... | false |
| fedryanto/UnibQuADV2 | plain_text | train | 4,010 | [ { "name": "context", "statistics": { "histogram": { "bin_edges": [ 65, 444, 823, 1202, 1581, 1960, 2339, 2718, 3097, 3476, 3845 ], "hist": [ 797, 11... | false |
| fedryanto/UnibQuADV2 | plain_text | validation | 1,036 | [ { "name": "context", "statistics": { "histogram": { "bin_edges": [ 73, 485, 897, 1309, 1721, 2133, 2545, 2957, 3369, 3781, 4188 ], "hist": [ 382, 32... | false |
| redwoodresearch/generated_stories | default | train | 3,825 | [ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 3130, 3489, 3848, 4207, 4566, 4925, 5284, 5643, 6002, 6361, 6715 ], "hist": [ 32, 16... | false |
| redwoodresearch/generated_stories | default | validation | 675 | [ { "name": "text", "statistics": { "histogram": { "bin_edges": [ 2838, 3220, 3602, 3984, 4366, 4748, 5130, 5512, 5894, 6276, 6655 ], "hist": [ 1, 8, ... | false |
| ceval/ceval-exam | accountant | test | 443 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 110 ], "hist": [ 212, 96, 77, ... | false |
| ceval/ceval-exam | accountant | val | 49 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 78 ], "hist": [ 26, 5, 6, 5, ... | false |
| ceval/ceval-exam | advanced_mathematics | test | 173 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 23, 45, 67, 89, 111, 133, 155, 177, 199, 215 ], "hist": [ 109, 23, 17, ... | false |
| ceval/ceval-exam | art_studies | test | 298 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 204, 86, 6, 1, 0, ... | false |
| ceval/ceval-exam | art_studies | val | 33 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 2, 3, 4, 5, 6, 7, 8, 8 ], "hist": [ 1, 9, 7, 5, 4, 4, ... | false |
| ceval/ceval-exam | basic_medicine | test | 175 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 29 ], "hist": [ 36, 79, 23, 13... | false |
| ceval/ceval-exam | business_administration | test | 301 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 180, 66, 21, 17, ... | false |
| ceval/ceval-exam | business_administration | val | 33 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 46 ], "hist": [ 16, 9, 3, 2, ... | false |
| ceval/ceval-exam | chinese_language_and_literature | test | 209 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 24 ], "hist": [ 105, 55, 15, 11, 10, 8... | false |
| ceval/ceval-exam | civil_servant | test | 429 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 82, 85 ], "hist": [ 217, 102, 59, ... | false |
| ceval/ceval-exam | civil_servant | val | 47 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 11, 10, 10, 6, 5, 2, ... | false |
| ceval/ceval-exam | clinical_medicine | test | 200 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 62, 77, 28, 13, 10, 3,... | false |
| ceval/ceval-exam | college_economics | test | 497 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 46, 49 ], "hist": [ 156, 149, 99, ... | false |
| ceval/ceval-exam | college_economics | val | 55 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 6, 11, 16, 21, 26, 31, 36, 41, 44 ], "hist": [ 15, 20, 8, 4, 2, ... | false |
| ceval/ceval-exam | college_chemistry | test | 224 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 12, 23, 34, 45, 56, 67, 78, 89, 100, 107 ], "hist": [ 139, 53, 21, ... | false |
| ceval/ceval-exam | college_programming | test | 342 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 64 ], "hist": [ 161, 77, 45, ... | false |
| ceval/ceval-exam | college_programming | val | 37 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 57 ], "hist": [ 16, 10, 3, 4,... | false |
| ceval/ceval-exam | college_physics | test | 176 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 73, 76 ], "hist": [ 77, 56, 16, 1... | false |
| ceval/ceval-exam | computer_network | test | 171 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 81, 49, 20, 9, 4, ... | false |
| ceval/ceval-exam | computer_architecture | test | 193 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 35 ], "hist": [ 67, 51, 28, 19, 10... | false |
| ceval/ceval-exam | education_science | test | 270 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 8, 14, 20, 26, 32, 38, 44, 50, 52 ], "hist": [ 207, 36, 18, 7, 0... | false |
| ceval/ceval-exam | discrete_mathematics | test | 153 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 13, 25, 37, 49, 61, 73, 85, 97, 109, 112 ], "hist": [ 88, 19, 21, ... | false |
| ceval/ceval-exam | electrical_engineer | test | 339 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 236, 68, 12, 9, 6... | false |
| ceval/ceval-exam | electrical_engineer | val | 37 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 25 ], "hist": [ 10, 16, 5, 2, 2, ... | false |
| ceval/ceval-exam | fire_engineer | test | 282 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 66 ], "hist": [ 170, 28, 30, ... | false |
| ceval/ceval-exam | fire_engineer | val | 31 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 9, 17, 25, 33, 41, 49, 57, 65, 71 ], "hist": [ 17, 5, 7, 0, 1, ... | false |
| ceval/ceval-exam | high_school_chinese | test | 178 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 13, 24, 35, 46, 57, 68, 79, 90, 101, 110 ], "hist": [ 28, 14, 19, ... | false |
| ceval/ceval-exam | high_school_biology | test | 175 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 7, 12, 17, 22, 27, 32, 37, 42, 47, 49 ], "hist": [ 60, 31, 27, 1... | false |
| ceval/ceval-exam | environmental_impact_assessment_engineer | test | 281 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 55 ], "hist": [ 150, 62, 34, ... | false |
| ceval/ceval-exam | environmental_impact_assessment_engineer | val | 31 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 8, 10, 6, 0, ... | false |
| ceval/ceval-exam | high_school_geography | test | 178 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 31 ], "hist": [ 54, 48, 31, 22, 15, 2,... | false |
| ceval/ceval-exam | high_school_history | test | 182 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 50, 55, 57, 15, 3, 0,... | false |
| ceval/ceval-exam | high_school_chemistry | test | 172 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 62 ], "hist": [ 63, 40, 33, 19, 5... | false |
| ceval/ceval-exam | high_school_mathematics | test | 166 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 60 ], "hist": [ 72, 27, 27, 1... | false |
| ceval/ceval-exam | high_school_physics | test | 175 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 64, 67 ], "hist": [ 48, 44, 40, 2... | false |
| ceval/ceval-exam | high_school_politics | test | 176 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 116, 30, 18, 8, 3... | false |
| ceval/ceval-exam | ideological_and_moral_cultivation | test | 172 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 30 ], "hist": [ 21, 63, 35, 20... | false |
| ceval/ceval-exam | law | test | 221 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 66, 131, 196, 261, 326, 391, 456, 521, 586, 643 ], "hist": [ 219, 1, 0, ... | false |
| ceval/ceval-exam | legal_professional | test | 215 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 15, 28, 41, 54, 67, 80, 93, 106, 119, 130 ], "hist": [ 46, 87, 47, ... | false |
| ceval/ceval-exam | logic | test | 204 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 3, 12, 21, 30, 39, 48, 57, 66, 75, 84, 90 ], "hist": [ 36, 61, 55, ... | false |
| ceval/ceval-exam | mao_zedong_thought | test | 219 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 53, 66, 41, 28, 19... | false |
| ceval/ceval-exam | marxism | test | 179 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 27 ], "hist": [ 19, 35, 62, 31, 15... | false |
| ceval/ceval-exam | metrology_engineer | test | 219 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 59 ], "hist": [ 132, 49, 16, ... | false |
| ceval/ceval-exam | middle_school_biology | test | 192 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 32 ], "hist": [ 70, 37, 21, 19, 25, 12... | false |
| ceval/ceval-exam | middle_school_chemistry | test | 185 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 59 ], "hist": [ 71, 52, 33, 1... | false |
| ceval/ceval-exam | middle_school_geography | test | 108 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 29, 31 ], "hist": [ 44, 19, 13, 14... | false |
| ceval/ceval-exam | middle_school_history | test | 207 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 26 ], "hist": [ 68, 67, 37, 14, 13... | false |
| ceval/ceval-exam | middle_school_mathematics | test | 177 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 61 ], "hist": [ 95, 42, 21, 5, 11... | false |
| ceval/ceval-exam | middle_school_politics | test | 193 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 26 ], "hist": [ 101, 16, 18, 25, 1... | false |
| ceval/ceval-exam | middle_school_physics | test | 178 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 36 ], "hist": [ 44, 20, 33, 33, 28... | false |
| ceval/ceval-exam | modern_chinese_history | test | 212 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 34, 38, 38 ], "hist": [ 57, 64, 48, 2... | false |
| ceval/ceval-exam | operating_system | test | 179 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 33 ], "hist": [ 88, 51, 16, 16, 3,... | false |
| ceval/ceval-exam | physician | test | 443 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 53 ], "hist": [ 259, 120, 51, 7, ... | false |
| ceval/ceval-exam | physician | val | 49 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 4, 6, 8, 10, 12, 14, 16, 18, 19 ], "hist": [ 10, 14, 9, 3, 2, ... | false |
| ceval/ceval-exam | plant_protection | test | 199 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 22, 25, 27 ], "hist": [ 86, 74, 19, 12, 6,... | false |
| ceval/ceval-exam | probability_and_statistics | test | 166 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 22, 43, 64, 85, 106, 127, 148, 169, 190, 206 ], "hist": [ 101, 34, 10, ... | false |
| ceval/ceval-exam | sports_science | test | 180 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 5, 8, 11, 14, 17, 20, 23, 26, 28 ], "hist": [ 69, 56, 26, 12, 7,... | false |
| ceval/ceval-exam | professional_tour_guide | test | 266 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 4, 7, 10, 13, 16, 19, 21 ], "hist": [ 139, 95, 14, 10, 6, 0, 2 ... | false |
| ceval/ceval-exam | tax_accountant | test | 443 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 10, 19, 28, 37, 46, 55, 64, 73, 81 ], "hist": [ 219, 92, 57, 36, ... | false |
| ceval/ceval-exam | tax_accountant | val | 49 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 58 ], "hist": [ 13, 14, 7, 2,... | false |
| ceval/ceval-exam | teacher_qualification | test | 399 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 38 ], "hist": [ 206, 101, 35, ... | false |
| ceval/ceval-exam | teacher_qualification | val | 44 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 2, 6, 10, 14, 18, 22, 26, 30, 32 ], "hist": [ 25, 7, 5, 3, 3, 0, ... | false |
| ceval/ceval-exam | urban_and_rural_planner | test | 418 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 56 ], "hist": [ 194, 109, 44, ... | false |
| ceval/ceval-exam | urban_and_rural_planner | val | 46 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 8, 15, 22, 29, 36, 43, 50, 57, 63 ], "hist": [ 21, 14, 4, 0, 2, ... | false |
| ceval/ceval-exam | veterinary_medicine | test | 210 | [ { "name": "A", "statistics": { "histogram": { "bin_edges": [ 1, 5, 9, 13, 17, 21, 25, 29, 33, 37, 39 ], "hist": [ 92, 53, 26, 23... | false |
| THUDM/LongBench | 2wikimqa | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | dureader | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | gov_report | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | hotpotqa | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | lcc | test | 500 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 500 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | lsht | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | multifieldqa_en | test | 150 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 150 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | multifieldqa_zh | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | musique | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | narrativeqa | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | passage_count | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | passage_retrieval_en | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | passage_retrieval_zh | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | qasper | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | qmsum | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | repobench-p | test | 500 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 500 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | trec | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
| THUDM/LongBench | triviaqa | test | 200 | [ { "name": "_id", "statistics": { "histogram": { "bin_edges": [ 48, 48 ], "hist": [ 200 ] }, "max": 48, "mean": 48, "median": 48, "min": 48, "nan_count": 0, "nan_proportion": 0, "std": 0 } ... | false |
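The THUDM/LongBench rows are the only ones whose statistics entry is shown untruncated, and they describe a constant-length column: every `_id` value is 48 characters, so the histogram collapses to a single bin. A small sketch (assuming `bin_edges` are bin boundaries, one more edge than there are bins) of sanity-checking such an entry against its reported summary values:

```python
# Sanity-checking one statistics entry; the values below are copied
# verbatim from the THUDM/LongBench "_id" rows above.
import numpy as np

entry = {
    "name": "_id",
    "statistics": {
        "histogram": {"bin_edges": [48, 48], "hist": [200]},
        "max": 48, "mean": 48, "median": 48, "min": 48,
        "nan_count": 0, "nan_proportion": 0, "std": 0,
    },
}

stats = entry["statistics"]
edges = np.asarray(stats["histogram"]["bin_edges"], dtype=float)
counts = np.asarray(stats["histogram"]["hist"], dtype=float)

# bin_edges has one more element than hist; bin midpoints give a
# coarse per-bin value for approximating the mean from the histogram.
midpoints = (edges[:-1] + edges[1:]) / 2.0
approx_mean = float((midpoints * counts).sum() / counts.sum())

assert int(counts.sum()) == 200            # matches num_examples for the split
assert abs(approx_mean - stats["mean"]) < 1e-9
print("examples:", int(counts.sum()), "approx. mean:", approx_mean)
```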
Downloads last month: 5