Dataset Viewer (auto-converted to Parquet)
The preview exposes five columns per example:

  tokens           sequence of strings, length 6 to 502
  labels           sequence of integers, length 6 to 502
  mentions         list of mention records, length 0 to 84
  input_ids        sequence of integers, length 6 to 502
  attention_mask   sequence of integers, length 6 to 502
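Assuming the dataset is published on the Hugging Face Hub (the repository owner is not shown on this page, so the repo id below is a placeholder, as is the split name), a minimal sketch of loading it and inspecting these columns with the datasets library might look like this:

```python
from datasets import load_dataset

# Placeholder repo id: replace "<owner>" with the actual namespace that hosts
# "mention-detection-deberta-v3-base" on the Hugging Face Hub.
ds = load_dataset("<owner>/mention-detection-deberta-v3-base")

example = ds["train"][0]                 # assumes a "train" split exists
print(example["tokens"][:12])            # SentencePiece tokens, starting with "[CLS]"
print(example["labels"][:12])            # per-token labels (0 / 1 / 2)
print(example["mentions"][0])            # first mention record with its /wiki/ identifier
print(len(example["input_ids"]), len(example["attention_mask"]))
```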
[ "[CLS]", "▁Wikipedia", "[", "c", "]", "▁is", "▁a", "▁free", "▁content", "▁online", "▁encyclopedia", "▁written", "▁and", "▁maintained", "▁by", "▁a", "▁community", "▁of", "▁volunteers", "▁,", "▁known", "▁as", "▁Wikipedia", "ns", "▁,", "▁through", "▁open", "▁collab...
[ 0, 0, 0, 0, 0, 0, 0, 1, 2, 1, 2, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 2, 0, 0, 1, 2, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 7, "end_token_idx": 9, "identifier": "/wiki/Free_content", "tokens": [ "▁free", "▁content" ] }, { "start_token_idx": 9, "end_token_idx": 11, "identifier": "/wiki/Online_encyclopedia", "tokens": [ "▁online", "▁encyclopedia" ] ...
[ 1, 10320, 2550, 1207, 592, 269, 266, 484, 822, 535, 27881, 1223, 263, 4370, 293, 266, 648, 265, 4417, 366, 756, 283, 10320, 7565, 366, 390, 615, 3642, 263, 262, 14314, 1036, 103315, 323, 10320, 269, 262, 1705, 263, 370, 271, 8523, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁Documentation", "▁License", "▁at", "▁the", "▁urging", "▁of", "▁Richard", "▁Stall", "man", "▁.", "[", "W", "▁2", "]", "▁Wales", "▁is", "▁credited", "▁with", "▁defining", "▁the", "▁goal", "▁of", "▁making", "▁a", "▁publicly", "▁editable", "▁encyclopedia"...
[ 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 7, "end_token_idx": 10, "identifier": "/wiki/Richard_Stallman", "tokens": [ "▁Richard", "▁Stall", "man" ] }, { "start_token_idx": 43, "end_token_idx": 44, "identifier": "/wiki/Wiki", "tokens": [ "▁wiki" ] }, { "start_to...
[ 1, 28509, 10042, 288, 262, 14193, 265, 3155, 46570, 1246, 323, 2550, 1975, 392, 592, 5583, 269, 11653, 275, 9849, 262, 1238, 265, 570, 266, 5434, 39348, 27881, 261, 2550, 1975, 404, 592, 438, 62082, 269, 11653, 275, 262, 1808, 265, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁exclusivity", "▁and", "▁resistance", "▁to", "▁change", ".", "▁Others", "▁suggest", "▁that", "▁the", "▁growth", "▁is", "▁flattening", "▁naturally", "▁because", "▁articles", "▁that", "▁could", "▁be", "▁called", "▁\"", "low", "-", "hanging", "▁fruit", "\""...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 0, 1, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 52, "end_token_idx": 56, "identifier": "/wiki/Rey_Juan_Carlos_University", "tokens": [ "▁Rey", "▁Juan", "▁Carlos", "▁University" ] }, { "start_token_idx": 57, "end_token_idx": 58, "identifier": "/wiki/Madrid", "tokens": [ "▁M...
[ 1, 37663, 263, 3486, 264, 575, 260, 7321, 2379, 272, 262, 1074, 269, 62477, 4064, 401, 2502, 272, 387, 282, 650, 307, 7764, 271, 50462, 2879, 309, 644, 74625, 272, 2117, 11328, 299, 1030, 644, 8796, 637, 331, 994, 263, 1119, 322, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁participated", "▁in", "▁a", "▁series", "▁of", "▁coordinated", "▁protests", "▁against", "▁two", "▁proposed", "▁laws", "▁in", "▁the", "▁United", "▁States", "▁Congress", "▁—", "the", "▁Stop", "▁Online", "▁Piracy", "▁Act", "▁(", "S", "OPA", ")", "▁and", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 0, 0, 1, 2, 2, 2, 0, 0, 0, 0, 0, 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 14, "end_token_idx": 17, "identifier": "/wiki/United_States_Congress", "tokens": [ "▁United", "▁States", "▁Congress" ] }, { "start_token_idx": 19, "end_token_idx": 23, "identifier": "/wiki/Stop_Online_Piracy_Act", "tokens": [ "▁Sto...
[ 1, 6258, 267, 266, 813, 265, 12089, 8056, 532, 375, 2640, 2326, 267, 262, 780, 1017, 2556, 533, 724, 6191, 2300, 99605, 1878, 287, 430, 59017, 285, 263, 262, 75082, 3989, 1878, 287, 80381, 558, 285, 644, 2319, 937, 510, 321, 359, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁17", "]", "▁The", "▁update", "▁initially", "▁received", "▁backlash", ",", "▁most", "▁notably", "▁when", "▁editors", "▁of", "▁the", "▁Swahili", "▁Wikipedia", "▁unanimously", "▁voted", "▁to", "▁revert", "▁the", "▁changes", ".", "▁Unlike", "▁traditional", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 15, "end_token_idx": 17, "identifier": "/wiki/Swahili_Wikipedia", "tokens": [ "▁Swahili", "▁Wikipedia" ] }, { "start_token_idx": 32, "end_token_idx": 33, "identifier": "/wiki/Procrastination", "tokens": [ "▁procrastination" ] }, ...
[ 1, 1154, 592, 279, 1981, 4114, 1018, 18905, 261, 370, 9962, 335, 9628, 265, 262, 72946, 10320, 19517, 4943, 264, 23024, 262, 1028, 260, 5492, 1471, 27881, 268, 261, 10320, 3832, 262, 44442, 4964, 1712, 262, 971, 265, 359, 822, 261, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁The", "▁most", "▁common", "▁and", "▁obvious", "▁types", "▁of", "▁vandalism", "▁include", "▁additions", "▁of", "▁obscenities", "▁and", "▁crude", "▁humor", ";", "▁it", "▁can", "▁also", "▁include", "▁advertising", "▁and", "▁other", "▁types", "▁of", "▁spam"...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 143, "end_token_idx": 147, "identifier": "/wiki/Seigenthaler_biography_incident", "tokens": [ "▁Seig", "enthaler", "▁biography", "▁incident" ] }, { "start_token_idx": 161, "end_token_idx": 164, "identifier": "/wiki/John_Seigenthaler", ...
[ 1, 279, 370, 1019, 263, 2991, 1361, 265, 29779, 680, 13219, 265, 103423, 263, 8976, 7155, 346, 278, 295, 327, 680, 2882, 263, 340, 1361, 265, 8962, 260, 2887, 9628, 5889, 29779, 293, 5290, 822, 289, 2945, 7368, 510, 266, 744, 664, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁Content", "▁in", "▁Wikipedia", "▁is", "▁subject", "▁to", "▁the", "▁laws", "▁(", "in", "▁particular", ",", "▁copyright", "▁laws", ")", "▁of", "▁the", "▁United", "▁States", "▁and", "▁of", "▁the", "▁US", "▁state", "▁of", "▁Virginia", "▁,", "▁where", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 0, 1, 2...
[ { "start_token_idx": 13, "end_token_idx": 14, "identifier": "/wiki/Copyright", "tokens": [ "▁copyright" ] }, { "start_token_idx": 26, "end_token_idx": 27, "identifier": "/wiki/Virginia", "tokens": [ "▁Virginia" ] }, { "start_token_idx": 58, "end_to...
[ 1, 5825, 267, 10320, 269, 1284, 264, 262, 2326, 287, 547, 1070, 261, 5060, 2326, 285, 265, 262, 780, 1017, 263, 265, 262, 846, 565, 265, 3217, 366, 399, 262, 2045, 265, 10320, 280, 268, 5239, 281, 1137, 260, 2550, 1975, 1760, 592, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁not", "▁truth", "\"", "▁to", "▁express", "▁the", "▁idea", "▁that", "▁the", "▁readers", ",", "▁not", "▁the", "▁encyclopedia", ",", "▁are", "▁ultimately", "▁responsible", "▁for", "▁checking", "▁the", "▁truthfulness", "▁of", "▁the", "▁articles", "▁and", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 72, "end_token_idx": 73, "identifier": "/wiki/Anarchy", "tokens": [ "▁anarchy" ] }, { "start_token_idx": 74, "end_token_idx": 75, "identifier": "/wiki/Democracy", "tokens": [ "▁democratic" ] }, { "start_token_idx": 76, "end_tok...
[ 1, 298, 1985, 309, 264, 3614, 262, 781, 272, 262, 2611, 261, 298, 262, 27881, 261, 281, 3453, 1744, 270, 4155, 262, 75008, 265, 262, 2502, 263, 570, 308, 451, 17162, 260, 2550, 1975, 4249, 592, 329, 295, 288, 631, 917, 264, 262, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁to", "▁resolve", "▁disputes", "▁and", "▁make", "▁peace", "▁between", "▁conflicting", "▁editors", ",", "▁but", "▁to", "▁weed", "▁out", "▁problematic", "▁editors", "▁while", "▁allowing", "▁potentially", "▁productive", "▁editors", "▁back", "▁in", "▁to", "▁pa...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 120, "end_token_idx": 121, "identifier": "/wiki/Impersonator", "tokens": [ "▁impersonation" ] }, { "start_token_idx": 122, "end_token_idx": 126, "identifier": "/wiki/Anti-social_behavior", "tokens": [ "▁anti", "-", "social", ...
[ 1, 264, 5148, 10336, 263, 365, 2293, 457, 18480, 9628, 261, 304, 264, 11825, 321, 11057, 9628, 438, 2409, 3695, 5769, 9628, 396, 267, 264, 3000, 260, 3089, 261, 262, 3066, 490, 298, 18634, 262, 822, 265, 2502, 261, 1616, 278, 1359, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁codes", ",", "▁submit", "▁to", "▁a", "▁sometimes", "▁convoluted", "▁dispute", "▁resolution", "▁process", ",", "▁and", "▁learn", "▁a", "▁\"", "b", "aff", "ling", "▁culture", "▁rich", "▁with", "▁in", "-", "joke", "s", "▁and", "▁insider", "▁references"...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 42, "end_token_idx": 46, "identifier": "/wiki/Second-class_citizen", "tokens": [ "▁second", "-", "class", "▁citizens" ] }, { "start_token_idx": 98, "end_token_idx": 100, "identifier": "/wiki/IP_address", "tokens": [ "▁IP", ...
[ 1, 4321, 261, 3361, 264, 266, 1359, 38476, 6544, 2946, 568, 261, 263, 799, 266, 307, 1285, 24947, 4118, 1551, 2241, 275, 267, 271, 67204, 268, 263, 14209, 5754, 309, 260, 30297, 328, 333, 298, 3458, 267, 281, 267, 347, 1100, 307, 56...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁2024", ")", "[", "W", "▁44", "]", "▁There", "▁are", "▁currently", "▁330", "▁language", "▁editions", "▁of", "▁Wikipedia", "▁(", "also", "▁called", "▁language", "▁versions", ",", "▁or", "▁simply", "▁Wikipedia", "s", ")", ".", "▁As", "▁of", "▁June", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 2, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 44, "end_token_idx": 45, "identifier": "/wiki/English_Wikipedia", "tokens": [ "▁English" ] }, { "start_token_idx": 46, "end_token_idx": 48, "identifier": "/wiki/Cebuano_Wikipedia", "tokens": [ "▁Cebu", "ano" ] }, { "start_tok...
[ 1, 29811, 285, 2550, 1975, 4808, 592, 443, 281, 1049, 18061, 1402, 14299, 265, 10320, 287, 8898, 650, 1402, 3687, 261, 289, 891, 10320, 268, 285, 260, 463, 265, 1172, 29811, 261, 262, 1073, 1705, 261, 267, 556, 265, 1030, 2795, 261, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", ",", "▁Meta", "-", "Wiki", "▁provides", "▁important", "▁statistics", "▁on", "▁all", "▁language", "▁editions", "▁of", "▁Wikipedia", ",", "[", "W", "▁53", "]", "▁and", "▁it", "▁maintains", "▁a", "▁list", "▁of", "▁articles", "▁every", "▁Wikipedia", "▁sho...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 180, "end_token_idx": 182, "identifier": "/wiki/PLOS_One", "tokens": [ "▁PLOS", "▁One" ] }, { "start_token_idx": 227, "end_token_idx": 230, "identifier": "/wiki/Simple_English_Wikipedia", "tokens": [ "▁Simple", "▁English", "▁...
[ 1, 261, 14468, 271, 44768, 888, 539, 4644, 277, 305, 1402, 14299, 265, 10320, 261, 2550, 1975, 6433, 592, 263, 278, 9087, 266, 686, 265, 2502, 469, 10320, 403, 286, 260, 2550, 1975, 6070, 592, 279, 686, 2372, 1671, 822, 293, 1284, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁which", "▁includes", "▁more", "▁than", "▁fifty", "▁policies", "▁and", "▁nearly", "▁150", ",", "000", "▁words", "▁as", "▁of", "▁2014", ".", "[", "update", "]", "▁Critics", "▁have", "▁stated", "▁that", "▁Wikipedia", "▁exhibits", "▁systemic", "▁bias", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 0...
[ { "start_token_idx": 26, "end_token_idx": 28, "identifier": "/wiki/Systemic_bias", "tokens": [ "▁systemic", "▁bias" ] }, { "start_token_idx": 35, "end_token_idx": 37, "identifier": "/wiki/Edwin_Black", "tokens": [ "▁Edwin", "▁Black" ] }, { ...
[ 1, 319, 1006, 310, 354, 8644, 2294, 263, 1533, 3732, 261, 528, 1023, 283, 265, 1151, 260, 2550, 28860, 592, 20021, 286, 2715, 272, 10320, 10787, 13488, 7958, 323, 344, 1524, 261, 18246, 263, 7228, 22053, 1552, 1897, 10320, 283, 411, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁entries", "▁on", "▁both", "▁Wikipedia", "▁and", "▁En", "cyclo", "p", "æ", "dia", "▁Britannica", "▁by", "▁the", "▁science", "▁journal", "▁Nature", "▁found", "▁few", "▁differences", "▁in", "▁accuracy", ",", "▁and", "▁concluded", "▁that", "▁\"", "the", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 190, "end_token_idx": 192, "identifier": "/wiki/Confidence_interval", "tokens": [ "▁confidence", "▁intervals" ] }, { "start_token_idx": 212, "end_token_idx": 214, "identifier": "/wiki/Sample_size_determination", "tokens": [ "▁sample", ...
[ 1, 6022, 277, 462, 10320, 263, 6058, 45374, 1492, 21986, 13526, 56673, 293, 262, 1693, 4881, 5447, 505, 477, 3409, 267, 4815, 261, 263, 5988, 272, 307, 724, 1210, 1693, 1649, 267, 10320, 4196, 441, 654, 38816, 346, 56673, 261, 314, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁to", "▁be", "▁accurate", "▁.", ".", ".", "▁And", "▁yet", "▁it", "▁[", "is", "]", ".", "\"", "▁Man", "nix", "▁further", "▁discussed", "▁the", "▁multiple", "▁studies", "▁that", "▁have", "▁proved", "▁Wikipedia", "▁to", "▁be", "▁generally", "▁as", "...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 108, "end_token_idx": 110, "identifier": "/wiki/Reference_work", "tokens": [ "▁reference", "▁works" ] }, { "start_token_idx": 125, "end_token_idx": 126, "identifier": "/wiki/Utility", "tokens": [ "▁utility" ] }, { "start_toke...
[ 1, 264, 282, 3027, 323, 260, 260, 414, 729, 278, 647, 1890, 592, 260, 309, 2347, 37456, 839, 3232, 262, 1337, 1703, 272, 286, 4776, 10320, 264, 282, 1861, 283, 2899, 283, 21023, 56673, 261, 48650, 272, 261, 307, 260, 260, 260, 41922...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "s", "▁sake", ",", "▁you", "'", "re", "▁in", "▁college", ";", "▁don", "'", "t", "▁cite", "▁the", "▁encyclopedia", "\"", ",", "▁he", "▁said", ".", "▁In", "▁February", "▁2007", ",", "▁an", "▁article", "▁in", "▁The", "▁Harvard", "▁Crimson", "▁newsp...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2...
[ { "start_token_idx": 28, "end_token_idx": 31, "identifier": "/wiki/The_Harvard_Crimson", "tokens": [ "▁The", "▁Harvard", "▁Crimson" ] }, { "start_token_idx": 40, "end_token_idx": 42, "identifier": "/wiki/Harvard_University", "tokens": [ "▁Harvard", ...
[ 1, 268, 6798, 261, 274, 280, 368, 267, 1575, 346, 418, 280, 297, 17316, 262, 27881, 309, 261, 313, 357, 260, 344, 1555, 2097, 261, 299, 1030, 267, 279, 7231, 25766, 4308, 1411, 272, 266, 477, 265, 262, 12997, 288, 7231, 689, 332, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
[ "[CLS]", "▁'", "featured", "'", ".", "▁Heil", "man", ",", "▁who", "▁has", "▁participated", "▁in", "▁that", "▁process", "▁before", ",", "▁says", "▁'", "less", "▁than", "▁one", "▁percent", "'", "▁of", "▁Wikipedia", "'", "s", "▁medical", "▁articles", "▁have", ...
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0...
[ { "start_token_idx": 64, "end_token_idx": 65, "identifier": "/wiki/Byte#Multiple-byte_units", "tokens": [ "▁terabytes" ] }, { "start_token_idx": 66, "end_token_idx": 68, "identifier": "/wiki/Disk_space", "tokens": [ "▁disk", "▁space" ] }, { "star...
[ 1, 382, 42570, 280, 260, 49513, 1246, 261, 328, 303, 6258, 267, 272, 568, 416, 261, 652, 382, 2691, 354, 311, 864, 280, 265, 10320, 280, 268, 1159, 2502, 286, 1833, 260, 309, 10320, 7017, 264, 676, 266, 5310, 265, 305, 857, 1118, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
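Because the labels and the mentions column encode the same spans, a short sketch (assuming the 0/1/2 convention read off the preview above) can recover mention spans directly from the labels and check them against start_token_idx/end_token_idx:

```python
def spans_from_labels(labels):
    """Recover [start, end) token spans from per-token labels.

    Assumes the convention visible in the preview: 1 starts a mention,
    2 continues it, 0 is outside any mention.
    """
    spans, start = [], None
    for i, lab in enumerate(labels):
        if lab == 1:                      # a new mention begins here
            if start is not None:
                spans.append((start, i))  # close the previous mention
            start = i
        elif lab == 2:                    # continuation of the open mention
            continue
        else:                             # outside: close any open mention
            if start is not None:
                spans.append((start, i))
                start = None
    if start is not None:
        spans.append((start, len(labels)))
    return spans

# Prefix of the first previewed row: "▁free ▁content" and "▁online ▁encyclopedia"
labels = [0, 0, 0, 0, 0, 0, 0, 1, 2, 1, 2, 0]
print(spans_from_labels(labels))  # [(7, 9), (9, 11)], matching the mentions column
```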

Dataset Card for "mention-detection-deberta-v3-base"

More Information needed
