sileod committed
Commit 1372f06 · 1 Parent(s): 05057af

Create linguisticprobing.py

Files changed (1)
  1. linguisticprobing.py +1248 -0
linguisticprobing.py ADDED
@@ -0,0 +1,1248 @@
+ # coding=utf-8
+ # Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Lint as: python3
+ """Linguistic Probing Benchmark from SentEval."""
+
+ from __future__ import absolute_import, division, print_function
+
+ import csv
+ import os
+ import textwrap
+
+ import six
+
+ import datasets
+
+
+ _Linguisticprobing_CITATION = r"""@inproceedings{conneau-etal-2018-cram,
+     title = "What you can cram into a single {\$}{\&}!{\#}* vector: Probing sentence embeddings for linguistic properties",
+     author = {Conneau, Alexis and
+       Kruszewski, German and
+       Lample, Guillaume and
+       Barrault, Lo{\"\i}c and
+       Baroni, Marco},
+     booktitle = "Proceedings of the 56th Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)",
+     month = jul,
+     year = "2018",
+     address = "Melbourne, Australia",
+     publisher = "Association for Computational Linguistics",
+     url = "https://www.aclweb.org/anthology/P18-1198",
+     doi = "10.18653/v1/P18-1198",
+     pages = "2126--2136",
+     abstract = "Although much effort has recently been devoted to training high-quality sentence embeddings, we still have a poor understanding of what they are capturing. {``}Downstream{''} tasks, often based on sentence classification, are commonly used to evaluate the quality of sentence representations. The complexity of the tasks makes it however difficult to infer what kind of information is present in the representations. We introduce here 10 probing tasks designed to capture simple linguistic features of sentences, and we use them to study embeddings generated by three different encoders trained in eight distinct ways, uncovering intriguing properties of both encoders and training methods.",
+ }
+ """
+
+ _Linguisticprobing_DESCRIPTION = """\
+ Ten probing tasks from SentEval, designed to capture simple linguistic features of sentences.
+ """
+
+ DATA_URL = "https://www.dropbox.com/s/djsk4kbu8in66gp/linguisticprobing.zip?dl=1"
+
+ TASK_TO_LABELS = {
+     "subj_number": ["NN", "NNS"],
+     "word_content": [
58
+ "abandoned",
59
+ "abruptly",
60
+ "accent",
61
+ "access",
62
+ "according",
63
+ "account",
64
+ "ache",
65
+ "ached",
66
+ "acted",
67
+ "acting",
68
+ "actions",
69
+ "actual",
70
+ "address",
71
+ "advantage",
72
+ "advice",
73
+ "afford",
74
+ "agent",
75
+ "agreement",
76
+ "aiden",
77
+ "airport",
78
+ "alarm",
79
+ "albert",
80
+ "alert",
81
+ "alexander",
82
+ "alice",
83
+ "alley",
84
+ "allowing",
85
+ "aloud",
86
+ "amanda",
87
+ "amber",
88
+ "american",
89
+ "amused",
90
+ "amusement",
91
+ "ancient",
92
+ "andy",
93
+ "annie",
94
+ "announced",
95
+ "annoyed",
96
+ "answering",
97
+ "anticipation",
98
+ "anxious",
99
+ "anybody",
100
+ "apologize",
101
+ "appearance",
102
+ "appears",
103
+ "approach",
104
+ "approaching",
105
+ "arched",
106
+ "argue",
107
+ "argument",
108
+ "aria",
109
+ "arrive",
110
+ "ashe",
111
+ "assume",
112
+ "assured",
113
+ "attached",
114
+ "attacked",
115
+ "attempted",
116
+ "audience",
117
+ "available",
118
+ "awful",
119
+ "awkward",
120
+ "background",
121
+ "backpack",
122
+ "backward",
123
+ "backwards",
124
+ "bags",
125
+ "balance",
126
+ "bank",
127
+ "barn",
128
+ "bars",
129
+ "bastard",
130
+ "bath",
131
+ "beating",
132
+ "begged",
133
+ "begins",
134
+ "begun",
135
+ "behavior",
136
+ "bell",
137
+ "belly",
138
+ "belong",
139
+ "belonged",
140
+ "bench",
141
+ "biggest",
142
+ "bike",
143
+ "billy",
144
+ "bird",
145
+ "birds",
146
+ "birth",
147
+ "birthday",
148
+ "bitch",
149
+ "bitter",
150
+ "blank",
151
+ "blast",
152
+ "bleeding",
153
+ "blind",
154
+ "blocked",
155
+ "blond",
156
+ "blonde",
157
+ "bond",
158
+ "bone",
159
+ "bored",
160
+ "bothered",
161
+ "bound",
162
+ "bowed",
163
+ "bowl",
164
+ "boxes",
165
+ "branches",
166
+ "brave",
167
+ "bread",
168
+ "breast",
169
+ "breaths",
170
+ "brian",
171
+ "brick",
172
+ "brilliant",
173
+ "broad",
174
+ "brows",
175
+ "brush",
176
+ "brushing",
177
+ "buddy",
178
+ "build",
179
+ "buildings",
180
+ "bullet",
181
+ "bunch",
182
+ "butt",
183
+ "cage",
184
+ "cain",
185
+ "caleb",
186
+ "callum",
187
+ "calmly",
188
+ "cameron",
189
+ "capable",
190
+ "cards",
191
+ "career",
192
+ "carol",
193
+ "caroline",
194
+ "carriage",
195
+ "cash",
196
+ "cassidy",
197
+ "casual",
198
+ "catching",
199
+ "causing",
200
+ "centuries",
201
+ "chain",
202
+ "chairs",
203
+ "challenge",
204
+ "chamber",
205
+ "chances",
206
+ "changes",
207
+ "changing",
208
+ "charged",
209
+ "checking",
210
+ "chicken",
211
+ "chief",
212
+ "chill",
213
+ "chloe",
214
+ "chocolate",
215
+ "choked",
216
+ "chosen",
217
+ "christian",
218
+ "chuckle",
219
+ "cigarette",
220
+ "circles",
221
+ "circumstances",
222
+ "claim",
223
+ "claimed",
224
+ "clary",
225
+ "classes",
226
+ "claws",
227
+ "clay",
228
+ "cleaned",
229
+ "cleaning",
230
+ "clearing",
231
+ "clicked",
232
+ "cliff",
233
+ "climb",
234
+ "climbing",
235
+ "closely",
236
+ "closest",
237
+ "cloth",
238
+ "clothing",
239
+ "cloud",
240
+ "clue",
241
+ "clung",
242
+ "clutched",
243
+ "coach",
244
+ "cocked",
245
+ "code",
246
+ "colin",
247
+ "collapsed",
248
+ "collar",
249
+ "colors",
250
+ "commander",
251
+ "comment",
252
+ "complicated",
253
+ "concrete",
254
+ "condition",
255
+ "confidence",
256
+ "confident",
257
+ "confirmed",
258
+ "connected",
259
+ "connor",
260
+ "convince",
261
+ "convinced",
262
+ "cook",
263
+ "cops",
264
+ "copy",
265
+ "corners",
266
+ "correct",
267
+ "cost",
268
+ "courage",
269
+ "cousin",
270
+ "covering",
271
+ "covers",
272
+ "crack",
273
+ "cracked",
274
+ "crap",
275
+ "crash",
276
+ "crashed",
277
+ "crawled",
278
+ "cream",
279
+ "create",
280
+ "credit",
281
+ "crept",
282
+ "crime",
283
+ "crossing",
284
+ "crowded",
285
+ "cruel",
286
+ "crystal",
287
+ "cupped",
288
+ "curiosity",
289
+ "current",
290
+ "curse",
291
+ "cursed",
292
+ "cute",
293
+ "cutting",
294
+ "daemon",
295
+ "dagger",
296
+ "damned",
297
+ "damp",
298
+ "danced",
299
+ "danny",
300
+ "dante",
301
+ "darkened",
302
+ "darling",
303
+ "darted",
304
+ "dating",
305
+ "dave",
306
+ "dawn",
307
+ "dealing",
308
+ "delicate",
309
+ "delicious",
310
+ "della",
311
+ "demons",
312
+ "deny",
313
+ "department",
314
+ "desert",
315
+ "deserved",
316
+ "destroy",
317
+ "destroyed",
318
+ "detail",
319
+ "detective",
320
+ "devil",
321
+ "devon",
322
+ "digging",
323
+ "dining",
324
+ "direct",
325
+ "directions",
326
+ "disappear",
327
+ "disappointed",
328
+ "disappointment",
329
+ "disbelief",
330
+ "discuss",
331
+ "disgust",
332
+ "display",
333
+ "distant",
334
+ "distracted",
335
+ "dogs",
336
+ "dollars",
337
+ "double",
338
+ "doubted",
339
+ "dozen",
340
+ "drag",
341
+ "dragging",
342
+ "drake",
343
+ "drank",
344
+ "drawer",
345
+ "drinks",
346
+ "driveway",
347
+ "dropping",
348
+ "drops",
349
+ "drug",
350
+ "drugs",
351
+ "ducked",
352
+ "duke",
353
+ "dull",
354
+ "duncan",
355
+ "duty",
356
+ "dylan",
357
+ "eager",
358
+ "eased",
359
+ "east",
360
+ "eaten",
361
+ "echo",
362
+ "echoed",
363
+ "eddie",
364
+ "edges",
365
+ "elbow",
366
+ "elena",
367
+ "ellie",
368
+ "embarrassed",
369
+ "embrace",
370
+ "emerged",
371
+ "emergency",
372
+ "emotional",
373
+ "ends",
374
+ "enemies",
375
+ "enormous",
376
+ "envelope",
377
+ "equipment",
378
+ "evan",
379
+ "event",
380
+ "everybody",
381
+ "exact",
382
+ "exchange",
383
+ "exchanged",
384
+ "exclaimed",
385
+ "exhausted",
386
+ "exist",
387
+ "existed",
388
+ "existence",
389
+ "exit",
390
+ "expensive",
391
+ "experienced",
392
+ "explanation",
393
+ "exploded",
394
+ "exposed",
395
+ "extended",
396
+ "extremely",
397
+ "eyed",
398
+ "fabric",
399
+ "fade",
400
+ "fairly",
401
+ "faith",
402
+ "fake",
403
+ "falls",
404
+ "families",
405
+ "fangs",
406
+ "farm",
407
+ "farther",
408
+ "favor",
409
+ "feared",
410
+ "feed",
411
+ "fellow",
412
+ "fence",
413
+ "fierce",
414
+ "fifty",
415
+ "file",
416
+ "filling",
417
+ "fingertips",
418
+ "finn",
419
+ "fired",
420
+ "fixed",
421
+ "flame",
422
+ "flashlight",
423
+ "flicked",
424
+ "floating",
425
+ "flushed",
426
+ "foolish",
427
+ "football",
428
+ "footsteps",
429
+ "forcing",
430
+ "formed",
431
+ "frame",
432
+ "freedom",
433
+ "french",
434
+ "friday",
435
+ "friendly",
436
+ "frightened",
437
+ "frown",
438
+ "frowning",
439
+ "froze",
440
+ "frozen",
441
+ "frustrated",
442
+ "fuck",
443
+ "fucking",
444
+ "furious",
445
+ "furniture",
446
+ "fury",
447
+ "gabe",
448
+ "games",
449
+ "garage",
450
+ "garrett",
451
+ "gary",
452
+ "gasp",
453
+ "gates",
454
+ "gather",
455
+ "gathering",
456
+ "gavin",
457
+ "gear",
458
+ "gesture",
459
+ "gestured",
460
+ "ghost",
461
+ "girlfriend",
462
+ "glances",
463
+ "glare",
464
+ "glowing",
465
+ "goodbye",
466
+ "gorgeous",
467
+ "government",
468
+ "gown",
469
+ "grabbing",
470
+ "graham",
471
+ "grand",
472
+ "grandfather",
473
+ "grandma",
474
+ "grant",
475
+ "grasp",
476
+ "grave",
477
+ "greeted",
478
+ "grief",
479
+ "grinning",
480
+ "groan",
481
+ "growl",
482
+ "guessed",
483
+ "guest",
484
+ "guests",
485
+ "guns",
486
+ "gwen",
487
+ "halfway",
488
+ "hank",
489
+ "hanna",
490
+ "happily",
491
+ "happiness",
492
+ "harry",
493
+ "harsh",
494
+ "heal",
495
+ "heartbeat",
496
+ "heaven",
497
+ "height",
498
+ "hero",
499
+ "hesitation",
500
+ "hissed",
501
+ "hitting",
502
+ "holds",
503
+ "holiday",
504
+ "holly",
505
+ "holy",
506
+ "honor",
507
+ "hopefully",
508
+ "horizon",
509
+ "houses",
510
+ "humor",
511
+ "hundreds",
512
+ "hunger",
513
+ "hunt",
514
+ "hunting",
515
+ "hurting",
516
+ "ideas",
517
+ "idiot",
518
+ "ignoring",
519
+ "images",
520
+ "imagination",
521
+ "impressed",
522
+ "impression",
523
+ "inches",
524
+ "including",
525
+ "incredible",
526
+ "incredibly",
527
+ "informed",
528
+ "injured",
529
+ "inner",
530
+ "insane",
531
+ "intended",
532
+ "intensity",
533
+ "invisible",
534
+ "invited",
535
+ "iron",
536
+ "isaac",
537
+ "issue",
538
+ "items",
539
+ "jamie",
540
+ "jared",
541
+ "jealous",
542
+ "jeremy",
543
+ "jerk",
544
+ "jess",
545
+ "jesus",
546
+ "jimmy",
547
+ "joey",
548
+ "johnny",
549
+ "jonas",
550
+ "jordan",
551
+ "joseph",
552
+ "joshua",
553
+ "josie",
554
+ "journey",
555
+ "judge",
556
+ "jumping",
557
+ "kane",
558
+ "karen",
559
+ "katherine",
560
+ "katie",
561
+ "keeps",
562
+ "kevin",
563
+ "kicking",
564
+ "kidding",
565
+ "killer",
566
+ "kisses",
567
+ "knelt",
568
+ "knocking",
569
+ "ladies",
570
+ "landing",
571
+ "language",
572
+ "larger",
573
+ "lately",
574
+ "laura",
575
+ "lauren",
576
+ "lawyer",
577
+ "lean",
578
+ "leapt",
579
+ "learning",
580
+ "lesson",
581
+ "letters",
582
+ "licked",
583
+ "lies",
584
+ "lifting",
585
+ "lightning",
586
+ "likes",
587
+ "lined",
588
+ "liquid",
589
+ "lobby",
590
+ "location",
591
+ "london",
592
+ "lonely",
593
+ "louder",
594
+ "lover",
595
+ "loves",
596
+ "loving",
597
+ "lucien",
598
+ "lucy",
599
+ "major",
600
+ "mama",
601
+ "manner",
602
+ "marks",
603
+ "martin",
604
+ "mask",
605
+ "mason",
606
+ "mass",
607
+ "massive",
608
+ "mate",
609
+ "material",
610
+ "mattered",
611
+ "matters",
612
+ "meaning",
613
+ "meat",
614
+ "medical",
615
+ "melissa",
616
+ "member",
617
+ "members",
618
+ "mental",
619
+ "mere",
620
+ "merely",
621
+ "midnight",
622
+ "military",
623
+ "minds",
624
+ "miranda",
625
+ "mist",
626
+ "mixed",
627
+ "moaned",
628
+ "monday",
629
+ "moonlight",
630
+ "morgan",
631
+ "mortal",
632
+ "motioned",
633
+ "mountains",
634
+ "movements",
635
+ "moves",
636
+ "movies",
637
+ "mumbled",
638
+ "muscle",
639
+ "mystery",
640
+ "nails",
641
+ "named",
642
+ "nathan",
643
+ "nearest",
644
+ "necklace",
645
+ "needing",
646
+ "nerves",
647
+ "nervously",
648
+ "nicolas",
649
+ "nicole",
650
+ "nightmare",
651
+ "nights",
652
+ "nina",
653
+ "nodding",
654
+ "nods",
655
+ "nora",
656
+ "noted",
657
+ "notes",
658
+ "numbers",
659
+ "object",
660
+ "occurred",
661
+ "offering",
662
+ "officers",
663
+ "oliver",
664
+ "opens",
665
+ "opinion",
666
+ "option",
667
+ "orange",
668
+ "ordinary",
669
+ "original",
670
+ "ourselves",
671
+ "overhead",
672
+ "owner",
673
+ "packed",
674
+ "pages",
675
+ "painful",
676
+ "paint",
677
+ "painted",
678
+ "painting",
679
+ "palace",
680
+ "palms",
681
+ "paris",
682
+ "parted",
683
+ "particularly",
684
+ "partner",
685
+ "parts",
686
+ "passenger",
687
+ "passion",
688
+ "patch",
689
+ "patience",
690
+ "patient",
691
+ "patrick",
692
+ "patted",
693
+ "paying",
694
+ "period",
695
+ "permission",
696
+ "pete",
697
+ "photo",
698
+ "pile",
699
+ "pillow",
700
+ "pissed",
701
+ "pity",
702
+ "placing",
703
+ "plain",
704
+ "planet",
705
+ "plastic",
706
+ "pleasant",
707
+ "plus",
708
+ "pockets",
709
+ "points",
710
+ "polite",
711
+ "popped",
712
+ "positive",
713
+ "possibility",
714
+ "potential",
715
+ "pounded",
716
+ "powers",
717
+ "precious",
718
+ "prefer",
719
+ "prepare",
720
+ "president",
721
+ "preston",
722
+ "pretend",
723
+ "pretending",
724
+ "previous",
725
+ "price",
726
+ "pride",
727
+ "princess",
728
+ "prison",
729
+ "professor",
730
+ "program",
731
+ "progress",
732
+ "project",
733
+ "proof",
734
+ "proper",
735
+ "property",
736
+ "protection",
737
+ "protest",
738
+ "provide",
739
+ "provided",
740
+ "pulse",
741
+ "punch",
742
+ "punched",
743
+ "pure",
744
+ "purple",
745
+ "puts",
746
+ "quinn",
747
+ "quit",
748
+ "racing",
749
+ "radio",
750
+ "rafe",
751
+ "raise",
752
+ "raising",
753
+ "rapidly",
754
+ "rare",
755
+ "rarely",
756
+ "reaches",
757
+ "react",
758
+ "realizing",
759
+ "rear",
760
+ "rebecca",
761
+ "recall",
762
+ "received",
763
+ "recently",
764
+ "record",
765
+ "reflection",
766
+ "refuse",
767
+ "regular",
768
+ "reluctantly",
769
+ "remaining",
770
+ "remains",
771
+ "remembering",
772
+ "remind",
773
+ "remove",
774
+ "replaced",
775
+ "request",
776
+ "required",
777
+ "rescue",
778
+ "research",
779
+ "resist",
780
+ "responsibility",
781
+ "responsible",
782
+ "result",
783
+ "returning",
784
+ "reveal",
785
+ "revealed",
786
+ "revealing",
787
+ "ribs",
788
+ "riding",
789
+ "rifle",
790
+ "roar",
791
+ "robe",
792
+ "robert",
793
+ "rode",
794
+ "rope",
795
+ "rubbing",
796
+ "ruin",
797
+ "ruined",
798
+ "rule",
799
+ "runs",
800
+ "rushing",
801
+ "sadly",
802
+ "sadness",
803
+ "sake",
804
+ "sally",
805
+ "satisfaction",
806
+ "satisfied",
807
+ "saturday",
808
+ "savannah",
809
+ "saving",
810
+ "scanned",
811
+ "scar",
812
+ "scare",
813
+ "scattered",
814
+ "scott",
815
+ "scrambled",
816
+ "screams",
817
+ "seated",
818
+ "seats",
819
+ "secrets",
820
+ "section",
821
+ "sees",
822
+ "self",
823
+ "sell",
824
+ "sensation",
825
+ "sensed",
826
+ "senses",
827
+ "sentence",
828
+ "separate",
829
+ "serena",
830
+ "series",
831
+ "serve",
832
+ "served",
833
+ "settle",
834
+ "sexy",
835
+ "shade",
836
+ "shakes",
837
+ "shame",
838
+ "sharply",
839
+ "shattered",
840
+ "sheet",
841
+ "sheets",
842
+ "sheriff",
843
+ "shield",
844
+ "shining",
845
+ "shiver",
846
+ "shivered",
847
+ "shooting",
848
+ "shopping",
849
+ "shore",
850
+ "shortly",
851
+ "shorts",
852
+ "shout",
853
+ "shouting",
854
+ "shown",
855
+ "shrug",
856
+ "shuddered",
857
+ "sidewalk",
858
+ "sideways",
859
+ "signal",
860
+ "signs",
861
+ "silk",
862
+ "silly",
863
+ "similar",
864
+ "sisters",
865
+ "site",
866
+ "sits",
867
+ "skills",
868
+ "skirt",
869
+ "skull",
870
+ "slapped",
871
+ "sleeve",
872
+ "slide",
873
+ "slipping",
874
+ "slowed",
875
+ "slumped",
876
+ "smaller",
877
+ "smith",
878
+ "snap",
879
+ "social",
880
+ "society",
881
+ "sofa",
882
+ "somewhat",
883
+ "sooner",
884
+ "sophia",
885
+ "sophie",
886
+ "souls",
887
+ "sounding",
888
+ "source",
889
+ "spare",
890
+ "speech",
891
+ "spencer",
892
+ "spending",
893
+ "spinning",
894
+ "split",
895
+ "spring",
896
+ "squeeze",
897
+ "stands",
898
+ "stated",
899
+ "statement",
900
+ "states",
901
+ "steal",
902
+ "stepping",
903
+ "stiff",
904
+ "stirred",
905
+ "stole",
906
+ "stones",
907
+ "stops",
908
+ "straightened",
909
+ "stream",
910
+ "stretch",
911
+ "strike",
912
+ "strode",
913
+ "stroked",
914
+ "struggle",
915
+ "struggling",
916
+ "student",
917
+ "students",
918
+ "studying",
919
+ "stuffed",
920
+ "style",
921
+ "success",
922
+ "suggest",
923
+ "sunday",
924
+ "sunlight",
925
+ "supplies",
926
+ "surprisingly",
927
+ "surrounding",
928
+ "survived",
929
+ "suspect",
930
+ "suspected",
931
+ "suspicious",
932
+ "swallow",
933
+ "sweetheart",
934
+ "swiftly",
935
+ "swing",
936
+ "switch",
937
+ "t-shirt",
938
+ "tables",
939
+ "tail",
940
+ "tapped",
941
+ "tara",
942
+ "target",
943
+ "task",
944
+ "tasted",
945
+ "taught",
946
+ "taylor",
947
+ "teach",
948
+ "teacher",
949
+ "teased",
950
+ "teasing",
951
+ "television",
952
+ "tells",
953
+ "temper",
954
+ "temple",
955
+ "tense",
956
+ "tent",
957
+ "terms",
958
+ "terrified",
959
+ "terror",
960
+ "tessa",
961
+ "text",
962
+ "thigh",
963
+ "thighs",
964
+ "thousands",
965
+ "threatened",
966
+ "threatening",
967
+ "throughout",
968
+ "thrown",
969
+ "thrust",
970
+ "tighter",
971
+ "till",
972
+ "tipped",
973
+ "toby",
974
+ "toes",
975
+ "tony",
976
+ "tore",
977
+ "torn",
978
+ "total",
979
+ "tough",
980
+ "towel",
981
+ "tower",
982
+ "trace",
983
+ "tracks",
984
+ "traffic",
985
+ "trailed",
986
+ "trained",
987
+ "trap",
988
+ "trapped",
989
+ "traveled",
990
+ "travis",
991
+ "tray",
992
+ "treat",
993
+ "treated",
994
+ "trembled",
995
+ "trevor",
996
+ "trick",
997
+ "tristan",
998
+ "trunk",
999
+ "trusted",
1000
+ "twelve",
1001
+ "ugly",
1002
+ "unconscious",
1003
+ "unexpected",
1004
+ "uniform",
1005
+ "universe",
1006
+ "unknown",
1007
+ "unlike",
1008
+ "unsure",
1009
+ "unusual",
1010
+ "upper",
1011
+ "upright",
1012
+ "upward",
1013
+ "useful",
1014
+ "useless",
1015
+ "valley",
1016
+ "vanished",
1017
+ "various",
1018
+ "vehicle",
1019
+ "veins",
1020
+ "victor",
1021
+ "video",
1022
+ "vincent",
1023
+ "violet",
1024
+ "visible",
1025
+ "vulnerable",
1026
+ "walks",
1027
+ "wandered",
1028
+ "warrior",
1029
+ "wash",
1030
+ "washed",
1031
+ "waving",
1032
+ "weather",
1033
+ "werewolf",
1034
+ "whipped",
1035
+ "whispers",
1036
+ "whom",
1037
+ "willow",
1038
+ "winced",
1039
+ "wings",
1040
+ "winked",
1041
+ "wise",
1042
+ "wishing",
1043
+ "witch",
1044
+ "wolves",
1045
+ "worn",
1046
+ "wounded",
1047
+ "wounds",
1048
+ "wrap",
1049
+ "wrapping",
1050
+ "wrists",
1051
+ "wrote",
1052
+ "xavier",
1053
+ "yanked",
1054
+ "yard",
1055
+ "yards",
1056
+ "yelling",
1057
+ "zach",
1058
+ ],
+     "obj_number": ["NN", "NNS"],
+     "past_present": ["PAST", "PRES"],
+     "sentence_length": [0, 1, 2, 3, 4, 5],
+     "top_constituents": [
+         "ADVP_NP_VP_.",
+         "CC_ADVP_NP_VP_.",
+         "CC_NP_VP_.",
+         "IN_NP_VP_.",
+         "NP_ADVP_VP_.",
+         "NP_NP_VP_.",
+         "NP_PP_.",
+         "NP_VP_.",
+         "OTHER",
+         "PP_NP_VP_.",
+         "RB_NP_VP_.",
+         "SBAR_NP_VP_.",
+         "SBAR_VP_.",
+         "S_CC_S_.",
+         "S_NP_VP_.",
+         "S_VP_.",
+         "VBD_NP_VP_.",
+         "VP_.",
+         "WHADVP_SQ_.",
+         "WHNP_SQ_.",
+     ],
+     "tree_depth": ["depth_5", "depth_6", "depth_7", "depth_8", "depth_9", "depth_10", "depth_11"],
+     "coordination_inversion": ["I", "O"],
+     "odd_man_out": ["C", "O"],
+     "bigram_shift": ["I", "O"],
+ }
+
+
+ def get_labels(task):
+     return TASK_TO_LABELS[task]
+
+
+ class LinguisticprobingConfig(datasets.BuilderConfig):
+     """BuilderConfig for Linguisticprobing."""
+
+     def __init__(
+         self,
+         text_features,
+         label_classes=None,
+         process_label=lambda x: x,
+         **kwargs,
+     ):
+         """BuilderConfig for Linguisticprobing.
+
+         Args:
+           text_features: `dict[string, string]`, map from the name of the feature
+             dict for each text field to the name of the column in the tsv file
+           label_classes: `list[string]`, the list of classes if the label is
+             categorical. For this benchmark the classes are looked up from
+             `TASK_TO_LABELS` using the config name; if no classes are available,
+             the label will be of type `datasets.Value('float32')`.
+           process_label: `Function[string, any]`, function taking in the raw value
+             of the label and processing it to the form required by the label feature
+           **kwargs: keyword arguments forwarded to super.
+         """
+
+         super(LinguisticprobingConfig, self).__init__(version=datasets.Version("1.0.0", ""), **kwargs)
+
+         self.text_features = text_features
+         self.label_column = "label"
+         self.label_classes = get_labels(self.name)
+         self.data_url = DATA_URL
+         self.data_dir = os.path.join("linguisticprobing", self.name)
+         self.citation = textwrap.dedent(_Linguisticprobing_CITATION)
+         self.process_label = process_label
+         self.description = ""
+         self.url = ""
+
+
+ class Linguisticprobing(datasets.GeneratorBasedBuilder):
+
+     """The SentEval linguistic probing benchmark (Conneau et al., 2018)."""
+
+     BUILDER_CONFIG_CLASS = LinguisticprobingConfig
+
+     BUILDER_CONFIGS = [
+         LinguisticprobingConfig(
+             name="subj_number",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="word_content",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="obj_number",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="past_present",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="sentence_length",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="top_constituents",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="tree_depth",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="coordination_inversion",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="odd_man_out",
+             text_features={"sentence": "sentence"},
+         ),
+         LinguisticprobingConfig(
+             name="bigram_shift",
+             text_features={"sentence": "sentence"},
+         ),
+     ]
+
+     def _info(self):
+         features = {text_feature: datasets.Value("string") for text_feature in six.iterkeys(self.config.text_features)}
+         if self.config.label_classes:
+             features["label"] = datasets.features.ClassLabel(names=self.config.label_classes)
+         else:
+             features["label"] = datasets.Value("float32")
+         features["idx"] = datasets.Value("int32")
+         return datasets.DatasetInfo(
+             description=_Linguisticprobing_DESCRIPTION,
+             features=datasets.Features(features),
+             homepage=self.config.url,
+             citation=self.config.citation + "\n" + _Linguisticprobing_CITATION,
+         )
+
+     def _split_generators(self, dl_manager):
+         dl_dir = dl_manager.download_and_extract(self.config.data_url)
+         data_dir = os.path.join(dl_dir, self.config.data_dir)
+
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN,
+                 gen_kwargs={
+                     "data_file": os.path.join(data_dir or "", "train.tsv"),
+                     "split": "train",
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.VALIDATION,
+                 gen_kwargs={
+                     "data_file": os.path.join(data_dir or "", "dev.tsv"),
+                     "split": "dev",
+                 },
+             ),
+             datasets.SplitGenerator(
+                 name=datasets.Split.TEST,
+                 gen_kwargs={
+                     "data_file": os.path.join(data_dir or "", "test.tsv"),
+                     "split": "test",
+                 },
+             ),
+         ]
+
+     def _generate_examples(self, data_file, split):
+
+         process_label = self.config.process_label
+         label_classes = self.config.label_classes
+
+         with open(data_file, encoding="utf8") as f:
+             reader = csv.DictReader(f, delimiter="\t", quoting=csv.QUOTE_NONE)
+
+             for n, row in enumerate(reader):
+
+                 example = {feat: row[col] for feat, col in six.iteritems(self.config.text_features)}
+                 example["idx"] = n
+
+                 if self.config.label_column in row:
+                     label = row[self.config.label_column]
+                     if label_classes and label not in label_classes:
+                         label = int(label) if label else None
+                     example["label"] = process_label(label)
+                 else:
+                     example["label"] = process_label(-1)
+                 yield example["idx"], example
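
For reference, a minimal usage sketch: it assumes the script above is saved locally as `linguisticprobing.py`, and uses `bigram_shift` purely as an example of one of the ten configurations defined in `BUILDER_CONFIGS`.

```python
# Minimal sketch: load one probing task through the local loading script.
# Assumes this file is saved as "linguisticprobing.py" in the working directory;
# "bigram_shift" is just one of the ten config names defined above.
from datasets import load_dataset

dataset = load_dataset("linguisticprobing.py", "bigram_shift")

# Splits follow _split_generators: train / validation / test.
print(dataset["train"][0])  # {'sentence': ..., 'label': ..., 'idx': 0}

# The label is a ClassLabel whose names come from TASK_TO_LABELS.
print(dataset["train"].features["label"].names)  # ['I', 'O']
```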