File size: 84,385 Bytes
6fa4bc9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
1319
1320
1321
1322
1323
1324
1325
1326
1327
1328
1329
1330
1331
1332
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347
1348
1349
1350
1351
1352
1353
1354
1355
1356
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
1372
1373
1374
1375
1376
1377
1378
1379
1380
1381
1382
1383
1384
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423
1424
1425
1426
1427
1428
1429
1430
1431
1432
1433
1434
1435
1436
1437
1438
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
1473
1474
1475
1476
1477
1478
1479
1480
1481
1482
1483
1484
1485
1486
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510
1511
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
1554
1555
1556
1557
1558
1559
1560
1561
1562
1563
1564
1565
1566
1567
1568
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
1587
1588
1589
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
{
    "paper_id": "P01-1028",
    "header": {
        "generated_with": "S2ORC 1.0.0",
        "date_generated": "2023-01-19T09:29:54.795270Z"
    },
    "title": "Generating with a Grammar Based on Tree Descriptions: a Constraint-Based Approach",
    "authors": [
        {
            "first": "Claire",
            "middle": [],
            "last": "Gardent",
            "suffix": "",
            "affiliation": {
                "laboratory": "",
                "institution": "CNRS LORIA",
                "location": {
                    "addrLine": "BP 239 Campus Scientifique",
                    "postCode": "54506",
                    "settlement": "Vandoeuvre-les-Nancy",
                    "country": "France"
                }
            },
            "email": "claire.gardent@loria.fr"
        },
        {
            "first": "Stefan",
            "middle": [],
            "last": "Thater",
            "suffix": "",
            "affiliation": {},
            "email": ""
        }
    ],
    "year": "",
    "venue": null,
    "identifiers": {},
    "abstract": "While the generative view of language processing builds bigger units out of smaller ones by means of rewriting steps, the axiomatic view eliminates invalid linguistic structures out of a set of possible structures by means of wellformedness principles. We present a generator based on the axiomatic view and argue that when combined with a TAG-like grammar and a flat semantics, this axiomatic view permits avoiding drawbacks known to hold either of top-down or of bottom-up generators.",
    "pdf_parse": {
        "paper_id": "P01-1028",
        "_pdf_hash": "",
        "abstract": [
            {
                "text": "While the generative view of language processing builds bigger units out of smaller ones by means of rewriting steps, the axiomatic view eliminates invalid linguistic structures out of a set of possible structures by means of wellformedness principles. We present a generator based on the axiomatic view and argue that when combined with a TAG-like grammar and a flat semantics, this axiomatic view permits avoiding drawbacks known to hold either of top-down or of bottom-up generators.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Abstract",
                "sec_num": null
            }
        ],
        "body_text": [
            {
                "text": "We take the axiomatic view of language and show that it yields an interestingly new perspective on the tactical generation task i.e. the task of producing from a given semantics a string with semantics .",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Introduction",
                "sec_num": "1"
            },
            {
                "text": "As (Cornell and Rogers, To appear) clearly shows, there has recently been a surge of interest in logic based grammars for natural language. In this branch of research sometimes referred to as \"Model Theoretic Syntax\", a grammar is viewed as a set of axioms defining the well-formed structures of natural language. The motivation for model theoretic grammars is initially theoretical: the use of logic should support both a more precise formulation of grammars and a different perspective on the mathematical and computational properties of natural language.",
                "cite_spans": [
                    {
                        "start": 3,
                        "end": 15,
                        "text": "(Cornell and",
                        "ref_id": null
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Introduction",
                "sec_num": "1"
            },
            {
                "text": "But eventually the question must also be addressed of how such grammars could be put to work. One obvious answer is to use a model generator. Given a logical formula , a model genera-tor is a program which builds some of the models satisfying this formula. Thus for parsing, a model generator can be used to enumerate the (minimal) model(s), that is, the parse trees, satisfying the conjunction of the lexical categories selected on the basis of the input string plus any additional constraints which might be encoded in the grammar. And similarly for generation, a model generator can be used to enumerate the models satisfying the bag of lexical items selected by the lexical look up phase on the basis of the input semantics.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Introduction",
                "sec_num": "1"
            },
            {
                "text": "How can we design model generators which work efficiently on natural language input i.e. on the type of information delivered by logic based grammars? (Duchier and Gardent, 1999) shows that constraint programming can be used to implement a model generator for tree logic (Backofen et al., 1995) . Further, (Duchier and Thater, 1999) shows that this model generator can be used to parse with descriptions based grammars (Rambow et al., 1995; Kallmeyer, 1999) that is, on logic based grammars where lexical entries are descriptions of trees expressed in some tree logic.",
                "cite_spans": [
                    {
                        "start": 151,
                        "end": 178,
                        "text": "(Duchier and Gardent, 1999)",
                        "ref_id": "BIBREF6"
                    },
                    {
                        "start": 271,
                        "end": 294,
                        "text": "(Backofen et al., 1995)",
                        "ref_id": "BIBREF1"
                    },
                    {
                        "start": 306,
                        "end": 332,
                        "text": "(Duchier and Thater, 1999)",
                        "ref_id": "BIBREF8"
                    },
                    {
                        "start": 419,
                        "end": 440,
                        "text": "(Rambow et al., 1995;",
                        "ref_id": "BIBREF16"
                    },
                    {
                        "start": 441,
                        "end": 457,
                        "text": "Kallmeyer, 1999)",
                        "ref_id": "BIBREF11"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Introduction",
                "sec_num": "1"
            },
            {
                "text": "In this paper, we build on (Duchier and Thater, 1999) and show that modulo some minor modifications, the same model generator can be used to generate with description based grammars. We describe the workings of the algorithm and compare it with standard existing top-down and bottom-up generation algorithms. In specific, we argue that the change of perspective offered by the constraint-based, axiomatic approach to processing presents some interesting differences with the more traditional generative approach usually pursued in tactical generation and further, that the combination of this static view with a TAG-like grammar and a flat semantics results in a system which combines the positive aspects of both top-down and bottom-up generators.",
                "cite_spans": [
                    {
                        "start": 27,
                        "end": 53,
                        "text": "(Duchier and Thater, 1999)",
                        "ref_id": "BIBREF8"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Introduction",
                "sec_num": "1"
            },
            {
                "text": "The paper is structured as follows. Section 2 presents the grammars we are working with namely, Description Grammars (DG), Section 3 summarises the parsing model presented in (Duchier and Thater, 1999) and Section 4 shows that this model can be extended to generate with DGs. In Section 5, we compare our generator with top-down and bottom-up generators, Section 6 reports on a proof-of-concept implementation and Section 7 concludes with pointers for further research.",
                "cite_spans": [
                    {
                        "start": 175,
                        "end": 201,
                        "text": "(Duchier and Thater, 1999)",
                        "ref_id": "BIBREF8"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Introduction",
                "sec_num": "1"
            },
            {
                "text": "There is a range of grammar formalisms which depart from Tree Adjoining Grammar (TAG) by taking as basic building blocks tree descriptions rather than trees. D-Tree Grammar (DTG) is proposed in (Rambow et al., 1995) to remedy some empirical and theoretical shortcomings of TAG; Tree Description Grammar (TDG) is introduced in (Kallmeyer, 1999) to support syntactic and semantic underspecification and Interaction Grammar is presented in (Perrier, 2000) as an alternative way of formulating linear logic grammars.",
                "cite_spans": [
                    {
                        "start": 194,
                        "end": 215,
                        "text": "(Rambow et al., 1995)",
                        "ref_id": "BIBREF16"
                    },
                    {
                        "start": 326,
                        "end": 343,
                        "text": "(Kallmeyer, 1999)",
                        "ref_id": "BIBREF11"
                    },
                    {
                        "start": 437,
                        "end": 452,
                        "text": "(Perrier, 2000)",
                        "ref_id": "BIBREF14"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "Like all these frameworks, DG uses tree descriptions and thereby benefits first, from the extended domain of locality which makes TAG particularly suitable for generation (cf. (Joshi, 1987) ) and second, from the monotonicity which differentiates descriptions from trees with respect to adjunction (cf. (Vijay-Shanker, 1992) ).",
                "cite_spans": [
                    {
                        "start": 176,
                        "end": 189,
                        "text": "(Joshi, 1987)",
                        "ref_id": "BIBREF10"
                    },
                    {
                        "start": 298,
                        "end": 324,
                        "text": "(cf. (Vijay-Shanker, 1992)",
                        "ref_id": null
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "DG differs from DTG and TDG however in that it adopts an axiomatic rather than a generative view of grammar: whereas in DTG and TDG, derived trees are constructed through a sequence of rewriting steps, in DG derived trees are models satisfying a conjunction of elementary tree descriptions. Moreover, DG differs from Interaction Grammars in that it uses a flat rather than a Montague style recursive semantics thereby permitting a simple syntax/semantics interface (see below).",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "A Description Grammar is a set of lexical entries of the form",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "\u00a1 \u00a3 \u00a2 \u00a5 \u00a4 \u00a7 \u00a6 \u00a9",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "where \u00a2 is a tree description and \u00a6 is the semantic representation associated with \u00a2 . Tree descriptions. A tree description is a conjunction of literals that specify either the label of a node or the position of a node relative to NP: Figure 2 :",
                "cite_spans": [],
                "ref_spans": [
                    {
                        "start": 236,
                        "end": 244,
                        "text": "Figure 2",
                        "ref_id": null
                    }
                ],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "John ! \u00a5 \" $ # & % ' NP:( Mary ) 1 0 3 2 5 4 7 6 9 8 A @ & B D C \u00a7 E 0 G F H ) 1 0 3 2 5 4 7 6 1 8 ( I @ 4 P 2 5 Q S R T F H \u00a7 U V W V $ X Y S:\u00e0 b NP:c d e VP:f VP:g h V sees i p NP:q \u00a7 U V W V",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "n o p S \u00a2 ! \u00a2 \u00a7 ! D 1 \u00a2 9 \u00a7 D",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "fragments (fully specified subtrees) are always positive; except for the anchor, all leaves of fragments are negative, and internal node variables are neutral. This guarantees that in a saturated model, tree fragments that belong to the denotation of distinct tree descriptions do not overlap. Second, we require that every lexical tree description has a single minimal free model, which essentially means that the lexical descriptions must be tree shaped. Semantic representation. Following (Stone and Doran, 1997) , we represent meaning using a flat semantic representation, i.e. as multisets, or conjunctions, of non-recursive propositions. This treatment offers a simple syntax-semantics interface in that the meaning of a tree is just the conjunction of meanings of the lexical tree descriptions used to derive it once the free variables occurring in the propositions are instantiated. A free variable is instantiated as follows: each free variable labels a syntactic node variable and is unified with the label of any node variable identified with . For the purpose of this paper, a simple semantic representation language is adopted which in particular, does not include \"handles\" i.e. labels on propositions. For a wider empirical coverage including e.g. quantifiers, a more sophisticated version of flat semantics can be used such as Minimal Recursion Semantics .",
                "cite_spans": [
                    {
                        "start": 492,
                        "end": 515,
                        "text": "(Stone and Doran, 1997)",
                        "ref_id": "BIBREF20"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Description Grammars",
                "sec_num": "2"
            },
            {
                "text": "Parsing with DG can be formulated as a model generation problem, the task of finding models satisfying a give logical formula. If we restrict our attention to grammars where every lexical tree description has exactly one anchor and (unrealistically) assuming that each word is associated with \u00a1 the tree description associated with the word by the grammar. Figure 2 illustrates this idea for the sentence \"John loves Mary\". The tree on the right hand side represents the saturated model satisfying the conjunction of the descriptions given on the left and obtained from parsing the sentence \"John sees Mary\" (the isolated negative node variable, the \"ROOT description\", is postulated during parsing to cancel out the negative polarity of the top-most S-node in the parse tree). The dashed lines between the left and the right part of the figure schematise the interpretation function: it indicates which node variables gets mapped to which node in the model.",
                "cite_spans": [],
                "ref_spans": [
                    {
                        "start": 357,
                        "end": 365,
                        "text": "Figure 2",
                        "ref_id": null
                    }
                ],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "- ! # $ Q S 6 John & Q S 6 saw U V W V $ X U V V & Mary ! \u00a5 \" $ # & % Q S 6",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "As (Duchier and Thater, 1999) shows however, lexical ambiguity means that the parsing problem is in fact more complex as it in effect requires that models be searched for that satisfy a conjunction of disjunctions (rather than simply a conjunction) of lexical tree descriptions.",
                "cite_spans": [
                    {
                        "start": 3,
                        "end": 29,
                        "text": "(Duchier and Thater, 1999)",
                        "ref_id": "BIBREF8"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "The constraint based encoding of this problem presented in (Duchier and Thater, 1999) can be sketched as follows 1 . To start with, the conjunction of disjunctions of descriptions obtained on the basis of the lexical lookup is represented as a matrix, where each row corresponds to a word from the input (except for the first row which is filled with the above mentioned ROOT description) and columns give the lexical entries associated by the grammar with these words. Any matrix entry which is empty is filled with the formula",
                "cite_spans": [
                    {
                        "start": 59,
                        "end": 85,
                        "text": "(Duchier and Thater, 1999)",
                        "ref_id": "BIBREF8"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "\u00a3 \u00a2 5 \u00a3 \u00a1 \u00a4",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "which is true in all models. Figure 3 shows an example parsing matrix for the string \"John saw Mary\" given the grammar in Figure 1 . 2 Given such a matrix, the task of parsing con-sists in:",
                "cite_spans": [
                    {
                        "start": 133,
                        "end": 134,
                        "text": "2",
                        "ref_id": null
                    }
                ],
                "ref_spans": [
                    {
                        "start": 29,
                        "end": 37,
                        "text": "Figure 3",
                        "ref_id": null
                    },
                    {
                        "start": 122,
                        "end": 130,
                        "text": "Figure 1",
                        "ref_id": null
                    }
                ],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "1. selecting exactly one entry per row thereby producing a conjunction of selected lexical entries, 2. building a saturated model for this conjunction of selected entries such that the yield of that model is equal to the input string and 3. building a free model for each of the remaining (non selected) entries.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "The important point about this way of formulating the problem is that it requires all constraints imposed by the lexical tree descriptions occurring in the matrix to be satisfied (though not necessarily in the same model). This ensures strong constraint propagation and thereby reduces nondeterminism. In particular, it avoids the combinatorial explosion that would result from first generating the possible conjunctions of lexical descriptions out of the CNF obtained by lexical lookup and second, testing their satisfiability.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Parsing with DG",
                "sec_num": "3"
            },
            {
                "text": "We now show how the parsing model just described can be adapted to generate from some semantic representation , one or more sentence(s) with semantics .",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Generating with DG",
                "sec_num": "4"
            },
            {
                "text": "The parsing model outlined in the previous section can directly be adapted for generation as follows. First, the lexical lookup is modified such that propositions instead of words are used to determine the relevant lexical tree descriptions: a lexical tree description is selected if its semantics subsumes part of the input semantics. Second, the constraint that the yield of the saturated model matches the input string is replaced by a constraint that the sum of the cardinalities of the multisets of propositions associated with the lexical tree descriptions composing the solution tree equals the cardinality of the input semantics. Together with the above requirement that only lexical entries be selected whose semantics subsumes part of the goal semantics, this ensures that the semantics of the solution trees is identical with the input semantics.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "The following simple example illustrates this idea.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "Suppose the input semantics is",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "l \u00a5 \u00a6 \u00a4 \u00a5 \u00a7 $ \u00a4 \u00a9 9 m 3 k G \u00e4 5 \u00a4 1 l 3 \u00a5 \u00ab \u00a4 \u00a5 \u00a7 $ \u00ac \u00a4 1 \u00a5 \u00ae \u00a2 5 \u00a1 \u00aa 5 \u00a4 9\u00b0 1 \u00a4 5 \u00a4 \u00a5 \u00a7 \u00b1 \u00a4 \u00b2 \u00a4 \u00b2 \u00ac \u00b3 \u00aa ! i",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "and the grammar is as given in Figure 1 . The generating matrix then is:",
                "cite_spans": [],
                "ref_spans": [
                    {
                        "start": 31,
                        "end": 39,
                        "text": "Figure 1",
                        "ref_id": null
                    }
                ],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "0 3 2 5 4 7 6 9 8 c @ B \u00b2 C \u00a7 E 0 F & Q S 3 6 6 6 1 8 1 @ $ c @ q F U V W V $ X U V V r 0 3 2 5 4 7 6 9 8 q @ \u00a3 \u00b6 \u00b5 \u2022 5 q F 2 \u00b9 \u00b2 \u00ba \u00bb Q S 3 6",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "Given this generating matrix, two matrix models will be generated, one with a saturated model",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "n \u00bd \u00bc satisfying \u00a2 \u00b2 \u00be S \u00bf \u00c0 5 \u00c1 \u00c2 \u00c3 \u00a2 \u00c4 \u00c5 $ \u00c5 5 AE \u00a2 \u00c7 d \u00c8 \u00b2 \u00c9 \u00ca",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "and a free model satisfying",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "\u00a2 \u00c4 \u00a3 \u00c5 \u00a3 \u00c5 \u00b2 \u00cb",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "and the other with the saturated model",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "n \u00cd \u00cc satisfying \u00a2 \u00b2 \u00be S \u00bf \u00c0 5 \u00c1 \u00c2 \u00a2 \u00c4 \u00c5 \u00a3 \u00c5 \u00b2 \u00cb \u00c2 \u00a2 \u00c7 d \u00c8 \u00b2 \u00c9 \u00ca",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "and a free model satisfying",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "\u00a2 \u00c4 \u00c5 \u00a3 \u00c5",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": ". The first solution yields the sentence \"John sees Mary\" whereas the second yields the topicalised sentence \"Mary, John sees.\"",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Basic Idea",
                "sec_num": "4.1"
            },
            {
                "text": "The problem with the simple method outlined above is that it severely restricts the class of grammars that can be used by the generator. Recall that in (Duchier and Thater, 1999 )'s parsing model, the assumption is made that each lexical entry has exactly one anchor. In practice this means that the parser can deal neither with a grammar assigning trees with multiple anchors to idioms (as is argued for in e.g. (Abeill\u00e9 and Schabes, 1989 )) nor with a grammar allowing for trace anchored lexical entries. The mirror restriction for generation is that each lexical entry must be associated with exactly one semantic proposition. The resulting shortcomings are that the generator can deal neither with a lexical entry having an empty semantics nor with a lexical entry having a multipropositional semantics. We first show that these restrictions are too strong. We then show how to adapt the generator so as to lift them. Empty Semantics. Arguably there are words such as \"that\" or infinitival \"to\" whose semantic contribution is void. As (Shieber, 1988) showed, the problem with such words is that they cannot be selected on the basis of the input semantics. To circumvent this problem, we take advantage of the TAG extended domain of locality to avoid having such entries in the grammar. For instance, complementizer \"that\" does not anchor a tree description by itself but occurs in all lexical tree descriptions providing an appropriate syntactic context for it, e.g. in the tree description for \"say\". Multiple Propositions. Lexical entries with a multi-propositional semantics are also very common. For instance, a neo-Davidsonian semantics would associate e.g.",
                "cite_spans": [
                    {
                        "start": 152,
                        "end": 177,
                        "text": "(Duchier and Thater, 1999",
                        "ref_id": "BIBREF8"
                    },
                    {
                        "start": 413,
                        "end": 439,
                        "text": "(Abeill\u00e9 and Schabes, 1989",
                        "ref_id": "BIBREF0"
                    },
                    {
                        "start": 1039,
                        "end": 1054,
                        "text": "(Shieber, 1988)",
                        "ref_id": "BIBREF18"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "\u00a2 5 \u00a3 \u00a5 \u00ce \u00a7 \u00b1 A \u00aa 5 \u00a4 T \u00cf \u00a4 9 \u00a1 A \u00a7 \u00b1 \u00a4 \u00b2 \u00d0 \u00aa with the verb \"run\" or \u00a2 \u00d1 \u00a3 \u00a5 \u00ce \u00a7 \u00b1 \u00a4 \u00b2 \u00d0 \u00aa 5 \u00a4 \u00a5 3\u00b0 \u00d1 A \u00a7 \u00b1 \u00aa",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "with the past tensed \"ran\". Similarly, agentless passive \"be\" might be represented by an overt quantification over the missing agent position (such as",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "\u00d2 \u00b1 \u00d2 \u00a9 \u00d4 \u00d3 \u00a6 \u00a7 \u00b1 A \u00aa d \u00d5 \u00cf \u00a4 9 \u00a1 1 \u00a7 \u00b1 \u00a4 \u00b2 \u00aa",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "with \u00d3 a variable over the complement verb semantics). And a grammar with a rich lexical semantics might for instance associate the semantics",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "\u00d6 \u00d7 3 \u00a1 1 \u00a7 \u00b1 3 T \u00a4 \u00b2 \u00a4 ! \u00b1 \u00d8 T \u00aa , k 3 \u00d9 \u00a4 \u00a7 \u00b1 T \u00d8 3 \u00a4 \u00b2 \u00d0 \u00aa",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "with \"want\" (cf. (McCawley, 1979) which argues for such a semantics to account for examples such as \"Reuters wants the report tomorrow\" where \"tomorrow\" modifies the \"having\" not the \"wanting\").",
                "cite_spans": [
                    {
                        "start": 17,
                        "end": 33,
                        "text": "(McCawley, 1979)",
                        "ref_id": "BIBREF13"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "Because it assumes that each lexical entry is associated with exactly one semantic proposition, such cases cannot be dealt with the generator sketched in the previous section. A simple method for fixing this problem would be to first partition the input semantics in as many ways as are possible and to then use the resulting partitions as the basis for lexical lookup.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "The problems with this method are both theoretical and computational. On the theoretical side, the problem is that the partitioning is made independent of grammatical knowledge. It would be better for the decomposition of the input semantics to be specified by the lexical lookup phase, rather than by means of a language independent partitioning procedure. Computationally, this method is unsatisfactory in that it implements a generate-and-test procedure (first, a partition is created and second, model generation is applied to the resulting matrices) which could rapidly lead to combinatorial explosion and is contrary in spirit to (Duchier and Thater, 1999) constraint-based approach.",
                "cite_spans": [
                    {
                        "start": 636,
                        "end": 662,
                        "text": "(Duchier and Thater, 1999)",
                        "ref_id": "BIBREF8"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "We therefore propose the following alternative procedure. We start by marking in each lexical entry, one proposition in the associated semantics as being the head of this semantic representation. The marking is arbitrary: it does not matter which proposition is the head as long as each semantic representation has exactly one head. We then use this head for lexical lookup. Instead of selecting lexical entries on the basis \u00da NP: Figure 4 : Example grammar of their whole semantics, we select them on the basis of their index. That is, a lexical entry is selected iff its head unifies with a proposition in the input semantics. To preserve coherence, we further maintain the additional constraint that the total semantics of each selected entries subsumes (part of) the input semantics. For instance, given the grammar in Figure 4 (where semantic heads are underlined) and the input semantics",
                "cite_spans": [],
                "ref_spans": [
                    {
                        "start": 431,
                        "end": 439,
                        "text": "Figure 4",
                        "ref_id": null
                    },
                    {
                        "start": 823,
                        "end": 831,
                        "text": "Figure 4",
                        "ref_id": null
                    }
                ],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "John \u00d1 \u00db $ \u00dc \u00dd \u00db \u00de VP:\u00df \u00e0 V did \u00e1 \u00e2 VP:) 1 0 3 2 5 4 7 6 8 A @ & B D C \u00a7 E 0 G F H ) \u00b2 \u00e3 T 2 ! 8 F H # & \u00e4 \u00e5 S:ae \u00e7 NP:c \u00e8 \u00e9 VP:\u1ec1 VP:\u00eb \u00ec V run # \" \u00ed S:\u00ee \u00ef NP:c \u00f0 \u00f1 VP:\u00f2 VP:\u00f3 \u00f4 V ran ) 9 Q 0 8 9 @ c F H ) 1 Q S 0 8 9 @ c F @ \u00e3 T 2 ! 8 F H",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "\u00a2 \u00d1 \u00a3 \u00a5 d \u00a7 \u00b1 \u00a4 \u00b2 \u00d0 \u00aa 5 \u00a4 1 l 3 \u00a5 \u00ab \u00a4 \u00b3 \u00a7 $ \u00a4 S \u00f5 \u00f6 \u00f7 \u00b3 \u00e4 5 \u00a4 \u00b3 \u00b0 \u00d1 3 \u00a7 \u00b1 A \u00aa",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": ", the generating matrix will be:",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "0 2 5 4 P 6 1 8 c @ W B \u00b2 C \u00a7 E 0 G F & Q 6 Q 0 \u00b3 8 9 @ c F # & \u00e4 # W \" \u00e3 T 2 5 8 F \u00d1 \u00db $ \u00dc \u00db Q 6",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "Given this matrix, two solutions will be found: the saturated tree for \"John ran\" satisfying the conjunction",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "\u00a2 \u00b2 \u00be \u00bf D \u00c0 5 \u00c1 \u00f8 \u00d5 \u00a2 \u00c9 s \u00c8 \u00c1",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "and that for \"John did run\" satisfying",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "\u00a2 \u00b2 \u00be \u00bf D \u00c0 5 \u00c1 \u00f9 \u00fa \u00a2 \u00c9 \u00fb \u00c1 \u00fc \u00a2 3 \u00fd \u00b2 \u00fe \u00fd",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": ". No other solution is found as for any other conjunction of descriptions made available by the matrix, no saturated model exists.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Going Further",
                "sec_num": "4.2"
            },
            {
                "text": "Our generator presents three main characteristics: (i) It is based on an axiomatic rather than a generative view of grammar, (ii) it uses a TAG-like grammar in which the basic linguistic units are trees rather than categories and (iii) it assumes a flat semantics.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Comparison with related work",
                "sec_num": "5"
            },
            {
                "text": "In what follows we show that this combination of features results in a generator which integrates the positive aspects of both top-down and bottom-up generators. In this sense, it is not unlike (Shieber et al., 1990) 's semantic-head-driven generation. As will become clear in the following section however, it differs from it in that it integrates stronger lexicalist (i.e. bottom-up) information.",
                "cite_spans": [
                    {
                        "start": 194,
                        "end": 216,
                        "text": "(Shieber et al., 1990)",
                        "ref_id": "BIBREF17"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Comparison with related work",
                "sec_num": "5"
            },
            {
                "text": "Bottom-up or \"lexically-driven\" generators (e.g., (Shieber, 1988; Whitelock, 1992; Kay, 1996; Carroll et al., 1999) ) start from a bag of lexical items with instantiated semantics and generates a syntactic tree by applying grammar rules whose right hand side matches a sequence of phrases in the current input.",
                "cite_spans": [
                    {
                        "start": 50,
                        "end": 65,
                        "text": "(Shieber, 1988;",
                        "ref_id": "BIBREF18"
                    },
                    {
                        "start": 66,
                        "end": 82,
                        "text": "Whitelock, 1992;",
                        "ref_id": "BIBREF22"
                    },
                    {
                        "start": 83,
                        "end": 93,
                        "text": "Kay, 1996;",
                        "ref_id": "BIBREF12"
                    },
                    {
                        "start": 94,
                        "end": 115,
                        "text": "Carroll et al., 1999)",
                        "ref_id": "BIBREF3"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "There are two known disadvantages to bottomup generators. On the one hand, they require that the grammar be semantically monotonic that is, that the semantics of each daughter in a rule subsumes some portion of the mother semantics. On the other hand, they are often overly nondeterministic (though see (Carroll et al., 1999) for an exception). We now show how these problems are dealt with in the present algorithm. Non-determinism. Two main sources of nondeterminism affect the performance of bottom-up generators: the lack of an indexing scheme and the presence of intersective modifiers.",
                "cite_spans": [
                    {
                        "start": 303,
                        "end": 325,
                        "text": "(Carroll et al., 1999)",
                        "ref_id": "BIBREF3"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "In (Shieber, 1988) , a chart-based bottom-up generator is presented which is devoid of an indexing scheme: all word edges leave and enter the same vertex and as a result, interactions must be considered explicitly between new edges and all edges currently in the chart. The standard solution to this problem (cf. (Kay, 1996) ) is to index edges with semantic indices (for instance, the edge with category N/x:dog(x) will be indexed with x) and to restrict edge combination to these edges which have compatible indices. Specifically, an active edge with category A(...)/C(c ...) (with c the semantics index of the missing component) is restricted to combine with inactive edges with category C(c ...), and vice versa.",
                "cite_spans": [
                    {
                        "start": 3,
                        "end": 18,
                        "text": "(Shieber, 1988)",
                        "ref_id": "BIBREF18"
                    },
                    {
                        "start": 313,
                        "end": 324,
                        "text": "(Kay, 1996)",
                        "ref_id": "BIBREF12"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "Although our generator does not make use of a chart, the constraint-based processing model described in (Duchier and Thater, 1999 ) imposes a similar restriction on possible combinations as it in essence requires that only these nodes pairs be tried for identification which (i) have opposite polarity and (ii) are labeled with the same semantic index.",
                "cite_spans": [
                    {
                        "start": 104,
                        "end": 129,
                        "text": "(Duchier and Thater, 1999",
                        "ref_id": "BIBREF8"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "Let us now turn to the second known source of non-determinism for bottom-up generators namely, intersective modifiers. Within a constructive approach to lexicalist generation, the number of structures (edges or phrases) built when generating a phrase with intersective modifiers is \u00ff in the case where the grammar imposes a single linear ordering of these modifiers. For instance, when generating \"The fierce little black cat\", a naive constructive approach will also build the subphrases (1) only to find that these cannot be part of the output as they do not exhaust the input semantics.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "(1) The fierce black cat, The fierce little cat, The little black cat, The black cat, The fierce cat, The little cat, The cat.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "To remedy this shortcoming, various heuristics and parsing strategies have been proposed. (Brew, 1992) combines a constraint-propagation mechanism with a shift-reduce generator, propagating constraints after every reduction step. (Carroll et al., 1999 ) advocate a two-step generation algorithm in which first, the basic structure of the sentence is generated and second, intersective modifiers are adjoined in. And (Poznanski et al., 1995 ) make use of a tree reconstruction method which incrementally improves the syntactic tree until it is accepted by the grammar. In effect, the constraint-based encoding of the axiomatic view of generation proposed here takes advantage of Brew's observation that constraint propagation can be very effective in pruning the search space involved in the generation process. In constraint programming, the solutions to a constraint satisfaction problem (CSP) are found by alternating propagation with distribution steps. Propagation is a process of deterministic inference which fills out the consequences of a given choice by removing all the variable values which can be inferred to be inconsistent with the problem constraint while distribution is a search process which enumerates possible values for the problem variables. By specifying global properties of the output and letting constraint propagation fill out the consequences of a choice, situations in which no suitable trees can be built can be detected early. Specifically, the global constraint stating that the semantics of a solution tree must be identical with the goal semantics rules out the generation of the phrases in (1b). In practice, we observe that constraint propagation is indeed very efficient at pruning the search space. As table 5 shows, the number of choice points (for these specific examples) augments very slowly with the size of the input. Semantic monotonicity. 
Lexical lookup only returns these categories in the grammar whose semantics subsumes some portion of the input semantics. Therefore if some grammar rule involves a daughter category whose semantics is not part of the mother semantics i.e. if the grammar is semantically non-monotonic, this rule will never be applied even though it might need to be. Here is an example. Suppose the grammar contains the following rule (where X/Y abbreviates a category with part-of-speech X and semantics Y):",
                "cite_spans": [
                    {
                        "start": 90,
                        "end": 102,
                        "text": "(Brew, 1992)",
                        "ref_id": "BIBREF2"
                    },
                    {
                        "start": 230,
                        "end": 251,
                        "text": "(Carroll et al., 1999",
                        "ref_id": "BIBREF3"
                    },
                    {
                        "start": 416,
                        "end": 439,
                        "text": "(Poznanski et al., 1995",
                        "ref_id": "BIBREF15"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "vp/call up(X,Y) v/call up(X,Y), np/Y, pp/up",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "And suppose the input semantics is !",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "\u00a1 \u00a2 \u00a1 \u00a3 A \u00ce \u00a7 \u00a9 1 m k G d \u00a4 1 \u00a5 \u00a6 3 \u00a2 \u00d1 \u00a1 \u00aa .",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "On the basis of this input, lexical lookup will return the categories V/call up(john,mary), NP/john and NP/mary (because their semantics subsumes some portion of the input semantics) but not the category PP/up. Hence the sentence \"John called Mary up\" will fail to be generated.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "In short, the semantic monotonicity constraint makes the generation of collocations and idioms problematic. Here again the extended domain of locality provided by TAG is useful as it means that the basic units are trees rather than categories. Furthermore, as argued in (Abeill\u00e9 and Schabes, 1989) , these trees can have multiple lexical anchors. As in the case of vestigial semantics discussed in Section 4 above, this means that phonological material can be generated without its semantics necessarily being part of the input.",
                "cite_spans": [
                    {
                        "start": 270,
                        "end": 297,
                        "text": "(Abeill\u00e9 and Schabes, 1989)",
                        "ref_id": "BIBREF0"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Bottom-Up Generation",
                "sec_num": "5.1"
            },
            {
                "text": "As shown in detail in (Shieber et al., 1990) , topdown generators can fail to terminate on certain grammars because they lack the lexical information necessary for their well-foundedness. A simple example involves the following grammar fragment: Given a top-down regime proceeding depth-first, left-to-right through the search space defined by the grammar rules, termination may fail to occur as the intermediate goal semantics NP (in the second rule) is uninstantiated and permits an infinite loop by iterative applications of rules r2 and r3. Such non-termination problems do not arise for the present algorithm as it is lexically driven. So for instance given the corresponding DG fragment for the above grammar and the input semantics",
                "cite_spans": [
                    {
                        "start": 22,
                        "end": 44,
                        "text": "(Shieber et al., 1990)",
                        "ref_id": "BIBREF17"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Top-Down Generation",
                "sec_num": "5.2"
            },
            {
                "text": "\u00a1 \u00a4 \u00a4 \u00a3 9 9 \u00a7 \u00b1 \u00a4 \u00b2 \u00d0 \u00aa 5 \u00a4 \u00a5 \u00a3 1 $ k \u00a5 \u00a4 1 \u00a2 l \u00a7 $ \u00a9 \u00a4 \u00b2 \u00ac \u00aa 5 \u00a4 1 l 3 \u00a5 \u00ab \u00a4 \u00a5 \u00a7 $ \u00ac \u00a4 S \u00f5 \u00f6 \u00f7 \u00b3 \u00e4 ! i",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Top-Down Generation",
                "sec_num": "5.2"
            },
            {
                "text": ", the generator will simply select the tree descriptions for \"left\", \"John\", \"s\" and \"father\" and generate the saturated model satisfying the conjunction of these descriptions.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Top-Down Generation",
                "sec_num": "5.2"
            },
            {
                "text": "The ideas presented here have been implemented using the concurrent constraint programming language Oz (Smolka, 1995) . The implementation includes a model generator for the tree logic presented in section 2, two lexical lookup modules (one for parsing, one for generation) and a small DG fragment for English which has been tested in parsing and generation mode on a small set of English sentences.",
                "cite_spans": [
                    {
                        "start": 103,
                        "end": 117,
                        "text": "(Smolka, 1995)",
                        "ref_id": "BIBREF19"
                    }
                ],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Implementation",
                "sec_num": "6"
            },
            {
                "text": "This implementation can be seen as a proof of concept for the ideas presented in this paper: it shows how a constraint-based encoding of the type of global constraints suggested by an axiomatic view of grammar can help reduce nondeterminism (few choice points cf. table 5) but performance decreases rapidly with the length of the input and it remains a matter for further research how efficiency can be improved to scale up to bigger sentences and larger grammars.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "Implementation",
                "sec_num": "6"
            },
            {
                "text": "We have shown that modulo some minor changes, the constraint-based approach to parsing presented in (Duchier and Thater, 1999) could also be used for generation. Furthermore, we have argued that the resulting generator, when combined with a TAG-like grammar and a flat semantics, had some interesting features: it exhibits the lexicalist aspects of bottom-up approaches thereby avoiding the non-termination problems connected with top-down approaches; it includes enough Example CP Time The cat likes a fox 1 1.2s The little brown cat likes a yellow fox 2 1.8s The fierce little brown cat likes a yellow fox 2 5.5s The fierce little brown cat likes a tame yellow fox 3 8.0s Figure 5 : Examples top-down guidance from the TAG trees to avoid typical bottom-up shortcomings such as the requirement for grammar semantic monotonicity and by implementing an axiomatic view of grammar, it supports a near-deterministic treatment of intersective modifiers. It would be interesting to see whether other axiomatic constraint-based treatments of grammar could be use to support both parsing and generation. In particular, we intend to investigate whether the dependency grammar presented in (Duchier, 1999) , once equipped with a semantics, could be used not only for parsing but also for generating. And similarly, whether the description based treatment of discourse parsing sketched in (Duchier and Gardent, 2001 ) could be used to generate discourse.",
                "cite_spans": [
                    {
                        "start": 100,
                        "end": 126,
                        "text": "(Duchier and Thater, 1999)",
                        "ref_id": "BIBREF8"
                    },
                    {
                        "start": 1180,
                        "end": 1195,
                        "text": "(Duchier, 1999)",
                        "ref_id": "BIBREF9"
                    },
                    {
                        "start": 1378,
                        "end": 1404,
                        "text": "(Duchier and Gardent, 2001",
                        "ref_id": "BIBREF7"
                    }
                ],
                "ref_spans": [
                    {
                        "start": 674,
                        "end": 682,
                        "text": "Figure 5",
                        "ref_id": null
                    }
                ],
                "eq_spans": [],
                "section": "Conclusion",
                "sec_num": "7"
            },
            {
                "text": "For a detailed presentation of this constraint based encoding, see the paper itself.2 For lack of space in the remainder of the paper, we omit the ROOT description in the matrices.",
                "cite_spans": [],
                "ref_spans": [],
                "eq_spans": [],
                "section": "",
                "sec_num": null
            }
        ],
        "back_matter": [],
        "bib_entries": {
            "BIBREF0": {
                "ref_id": "b0",
                "title": "Parsing idioms in lexicalised TAGs",
                "authors": [
                    {
                        "first": "A",
                        "middle": [],
                        "last": "Abeill\u00e9",
                        "suffix": ""
                    },
                    {
                        "first": "Y",
                        "middle": [],
                        "last": "Schabes",
                        "suffix": ""
                    }
                ],
                "year": 1989,
                "venue": "Proceedings of EACL '89",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "A. Abeill\u00e9 and Y. Schabes. 1989. Parsing idioms in lexicalised TAGs. In Proceedings of EACL '89, Manchester, UK.",
                "links": null
            },
            "BIBREF1": {
                "ref_id": "b1",
                "title": "A first-order axiomatization of the theory of finite trees",
                "authors": [
                    {
                        "first": "R",
                        "middle": [],
                        "last": "Backofen",
                        "suffix": ""
                    },
                    {
                        "first": "J",
                        "middle": [],
                        "last": "Rogers",
                        "suffix": ""
                    },
                    {
                        "first": "K",
                        "middle": [],
                        "last": "Vijay-Shanker",
                        "suffix": ""
                    }
                ],
                "year": 1995,
                "venue": "Journal of Logic, Language and Information",
                "volume": "4",
                "issue": "1",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "R. Backofen, J. Rogers, and K. Vijay-Shanker. 1995. A first-order axiomatization of the theory of finite trees. Journal of Logic, Language and Information, 4(1).",
                "links": null
            },
            "BIBREF2": {
                "ref_id": "b2",
                "title": "Letting the cat out of the bag: Generation for shake-and-bake MT",
                "authors": [
                    {
                        "first": "C",
                        "middle": [],
                        "last": "Brew",
                        "suffix": ""
                    }
                ],
                "year": 1992,
                "venue": "Proceedings of COLING '92",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "C. Brew. 1992. Letting the cat out of the bag: Gen- eration for shake-and-bake MT. In Proceedings of COLING '92, Nantes, France.",
                "links": null
            },
            "BIBREF3": {
                "ref_id": "b3",
                "title": "An efficient chart generator for (semi-)lexicalist grammars",
                "authors": [
                    {
                        "first": "J",
                        "middle": [],
                        "last": "Carroll",
                        "suffix": ""
                    },
                    {
                        "first": "A",
                        "middle": [],
                        "last": "Copestake",
                        "suffix": ""
                    },
                    {
                        "first": "D",
                        "middle": [],
                        "last": "Flickinger",
                        "suffix": ""
                    },
                    {
                        "first": "V",
                        "middle": [],
                        "last": "Pazna\u0144ski",
                        "suffix": ""
                    }
                ],
                "year": 1999,
                "venue": "Proceedings of EWNLG '99",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "J. Carroll, A. Copestake, D. Flickinger, and V. Pazna\u0144ski. 1999. An efficient chart generator for (semi-)lexicalist grammars. In Proceedings of EWNLG '99.",
                "links": null
            },
            "BIBREF4": {
                "ref_id": "b4",
                "title": "Minimal Recursion Semantics: An introduction",
                "authors": [
                    {
                        "first": "A",
                        "middle": [],
                        "last": "Copestake",
                        "suffix": ""
                    },
                    {
                        "first": "D",
                        "middle": [],
                        "last": "Flickinger",
                        "suffix": ""
                    },
                    {
                        "first": "I",
                        "middle": [],
                        "last": "Sag",
                        "suffix": ""
                    },
                    {
                        "first": "C",
                        "middle": [],
                        "last": "Pollard",
                        "suffix": ""
                    }
                ],
                "year": 1999,
                "venue": "",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "A. Copestake, D. Flickinger, I. Sag, and C. Pol- lard. 1999. Minimal Recursion Seman- tics: An introduction. URL: http://www- csli.stanford.edu/\u00a6 aac/papers.html, September.",
                "links": null
            },
            "BIBREF5": {
                "ref_id": "b5",
                "title": "To appear. Model theoretic syntax",
                "authors": [
                    {
                        "first": "T",
                        "middle": [],
                        "last": "Cornell",
                        "suffix": ""
                    },
                    {
                        "first": "J",
                        "middle": [],
                        "last": "Rogers",
                        "suffix": ""
                    }
                ],
                "year": null,
                "venue": "The GLOT International State of the Article Book 1. Holland Academic Graphics",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "T. Cornell and J. Rogers. To appear. Model theo- retic syntax. In L. Cheng and R. Sybesma, editors, The GLOT International State of the Article Book 1. Holland Academic Graphics, The Hague.",
                "links": null
            },
            "BIBREF6": {
                "ref_id": "b6",
                "title": "A constraint-based treatment of descriptions",
                "authors": [
                    {
                        "first": "D",
                        "middle": [],
                        "last": "Duchier",
                        "suffix": ""
                    },
                    {
                        "first": "C",
                        "middle": [],
                        "last": "Gardent",
                        "suffix": ""
                    }
                ],
                "year": 1999,
                "venue": "Proceedings of IWCS-3",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "D. Duchier and C. Gardent. 1999. A constraint-based treatment of descriptions. In H.C. Bunt and E.G.C. Thijsse, editors, Proceedings of IWCS-3, Tilburg.",
                "links": null
            },
            "BIBREF7": {
                "ref_id": "b7",
                "title": "Tree descriptions, constraints and incrementality",
                "authors": [
                    {
                        "first": "D",
                        "middle": [],
                        "last": "Duchier",
                        "suffix": ""
                    },
                    {
                        "first": "C",
                        "middle": [],
                        "last": "Gardent",
                        "suffix": ""
                    }
                ],
                "year": 2001,
                "venue": "Computing Meaning",
                "volume": "2",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "D. Duchier and C. Gardent. 2001. Tree descrip- tions, constraints and incrementality. In Comput- ing Meaning, volume 2 of Studies in Linguistics and Philosophy Series. Kluwer Academic Publishers.",
                "links": null
            },
            "BIBREF8": {
                "ref_id": "b8",
                "title": "Parsing with tree descriptions: a constraint-based approach",
                "authors": [
                    {
                        "first": "D",
                        "middle": [],
                        "last": "Duchier",
                        "suffix": ""
                    },
                    {
                        "first": "S",
                        "middle": [],
                        "last": "Thater",
                        "suffix": ""
                    }
                ],
                "year": 1999,
                "venue": "NLULP'99",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "D. Duchier and S. Thater. 1999. Parsing with tree descriptions: a constraint-based approach. In NLULP'99, Las Cruces, New Mexico.",
                "links": null
            },
            "BIBREF9": {
                "ref_id": "b9",
                "title": "Axiomatizing dependency parsing using set constraints",
                "authors": [
                    {
                        "first": "D",
                        "middle": [],
                        "last": "Duchier",
                        "suffix": ""
                    }
                ],
                "year": 1999,
                "venue": "Sixth Meeting on Mathematics of Language",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "D. Duchier. 1999. Axiomatizing dependency parsing using set constraints. In Sixth Meeting on Mathe- matics of Language, Orlando, Florida.",
                "links": null
            },
            "BIBREF10": {
                "ref_id": "b10",
                "title": "The relevance of Tree Adjoining Grammar to generation",
                "authors": [
                    {
                        "first": "A",
                        "middle": [],
                        "last": "Joshi",
                        "suffix": ""
                    }
                ],
                "year": 1987,
                "venue": "Natural Language Generation, chapter 16",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "A. Joshi. 1987. The relevance of Tree Adjoining Grammar to generation. In Natural Language Gen- eration, chapter 16. Martinus Nijhoff Publishers, Dordrecht, Holland.",
                "links": null
            },
            "BIBREF11": {
                "ref_id": "b11",
                "title": "Tree Description Grammars and Underspecified Representations",
                "authors": [
                    {
                        "first": "L",
                        "middle": [],
                        "last": "Kallmeyer",
                        "suffix": ""
                    }
                ],
                "year": 1999,
                "venue": "",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "L. Kallmeyer. 1999. Tree Description Grammars and Underspecified Representations. Ph.D. thesis, Uni- versit\u00e4t T\u00fcbingen.",
                "links": null
            },
            "BIBREF12": {
                "ref_id": "b12",
                "title": "Chart generation",
                "authors": [
                    {
                        "first": "M",
                        "middle": [],
                        "last": "Kay",
                        "suffix": ""
                    }
                ],
                "year": 1996,
                "venue": "Proceedings of ACL'96",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "M. Kay. 1996. Chart generation. In Proceedings of ACL'96, Santa Cruz, USA.",
                "links": null
            },
            "BIBREF13": {
                "ref_id": "b13",
                "title": "Adverbs, Vowels, and other objects of Wonder",
                "authors": [
                    {
                        "first": "J",
                        "middle": [
                            "D"
                        ],
                        "last": "Mccawley",
                        "suffix": ""
                    }
                ],
                "year": 1979,
                "venue": "",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "J. D. McCawley. 1979. Adverbs, Vowels, and other objects of Wonder. University of Chicago Press, Chicago, Illinois.",
                "links": null
            },
            "BIBREF14": {
                "ref_id": "b14",
                "title": "Interaction grammars",
                "authors": [
                    {
                        "first": "G",
                        "middle": [],
                        "last": "Perrier",
                        "suffix": ""
                    }
                ],
                "year": 2000,
                "venue": "Proceedings of 18th International Conference on Computational Linguistics",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "G. Perrier. 2000. Interaction grammars. In Pro- ceedings of 18th International Conference on Com- putational Linguistics (COLING 2000).",
                "links": null
            },
            "BIBREF15": {
                "ref_id": "b15",
                "title": "An efficient generation algorithm for lexicalist MT",
                "authors": [
                    {
                        "first": "V",
                        "middle": [],
                        "last": "Poznanski",
                        "suffix": ""
                    },
                    {
                        "first": "J",
                        "middle": [
                            "L"
                        ],
                        "last": "Beaven",
                        "suffix": ""
                    },
                    {
                        "first": "P",
                        "middle": [],
                        "last": "Whitelock",
                        "suffix": ""
                    }
                ],
                "year": 1995,
                "venue": "Proceedings of ACL '95",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "V. Poznanski, J. L. Beaven, and P. Whitelock. 1995. An efficient generation algorithm for lexicalist MT. In Proceedings of ACL '95.",
                "links": null
            },
            "BIBREF16": {
                "ref_id": "b16",
                "title": "D-tree Grammars",
                "authors": [
                    {
                        "first": "O",
                        "middle": [],
                        "last": "Rambow",
                        "suffix": ""
                    },
                    {
                        "first": "K",
                        "middle": [],
                        "last": "Vijay-Shanker",
                        "suffix": ""
                    },
                    {
                        "first": "D",
                        "middle": [],
                        "last": "Weir",
                        "suffix": ""
                    }
                ],
                "year": 1995,
                "venue": "Proceedings of ACL '95",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "O. Rambow, K. Vijay-Shanker, and D. Weir. 1995. D-tree Grammars. In Proceedings of ACL '95.",
                "links": null
            },
            "BIBREF17": {
                "ref_id": "b17",
                "title": "Semantic-head-driven generation",
                "authors": [
                    {
                        "first": "S",
                        "middle": [],
                        "last": "Shieber",
                        "suffix": ""
                    },
                    {
                        "first": "F",
                        "middle": [],
                        "last": "Pereira",
                        "suffix": ""
                    },
                    {
                        "first": "G",
                        "middle": [],
                        "last": "Van Noord",
                        "suffix": ""
                    },
                    {
                        "first": "R",
                        "middle": [],
                        "last": "Moore",
                        "suffix": ""
                    }
                ],
                "year": 1990,
                "venue": "Computational Linguistics",
                "volume": "",
                "issue": "1",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "S. Shieber, F. Pereira, G. van Noord, and R. Moore. 1990. Semantic-head-driven generation. Computa- tional Linguistics, 16(1).",
                "links": null
            },
            "BIBREF18": {
                "ref_id": "b18",
                "title": "A Uniform Architecture for Parsing and Generation",
                "authors": [
                    {
                        "first": "S",
                        "middle": [],
                        "last": "Shieber",
                        "suffix": ""
                    }
                ],
                "year": 1988,
                "venue": "Proceedings of ACL '88",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "S. Shieber. 1988. A Uniform Architecture for Parsing and Generation. In Proceedings of ACL '88.",
                "links": null
            },
            "BIBREF19": {
                "ref_id": "b19",
                "title": "The Oz Programming Model",
                "authors": [
                    {
                        "first": "G",
                        "middle": [],
                        "last": "Smolka",
                        "suffix": ""
                    }
                ],
                "year": 1995,
                "venue": "Computer Science Today",
                "volume": "1000",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "G. Smolka. 1995. The Oz Programming Model. In Computer Science Today, volume 1000 of LNCS.",
                "links": null
            },
            "BIBREF20": {
                "ref_id": "b20",
                "title": "Sentence planning as description using Tree-Adjoining Grammar",
                "authors": [
                    {
                        "first": "M",
                        "middle": [],
                        "last": "Stone",
                        "suffix": ""
                    },
                    {
                        "first": "C",
                        "middle": [],
                        "last": "Doran",
                        "suffix": ""
                    }
                ],
                "year": 1997,
                "venue": "Proceedings of ACL '97",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "M. Stone and C. Doran. 1997. Sentence planning as description using Tree-Adjoining Grammar. In Proceedings of ACL '97.",
                "links": null
            },
            "BIBREF21": {
                "ref_id": "b21",
                "title": "Using descriptions of trees in Tree Adjoining Grammars",
                "authors": [
                    {
                        "first": "K",
                        "middle": [],
                        "last": "Vijay-Shanker",
                        "suffix": ""
                    }
                ],
                "year": 1992,
                "venue": "Computational Linguistics",
                "volume": "18",
                "issue": "4",
                "pages": "481--518",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "K. Vijay-Shanker. 1992. Using descriptions of trees in Tree Adjoining Grammars. Computational Lin- guistics, 18(4):481-518.",
                "links": null
            },
            "BIBREF22": {
                "ref_id": "b22",
                "title": "Shake-and-bake translation",
                "authors": [
                    {
                        "first": "P",
                        "middle": [],
                        "last": "Whitelock",
                        "suffix": ""
                    }
                ],
                "year": 1992,
                "venue": "Proceedings of COLING '92",
                "volume": "",
                "issue": "",
                "pages": "",
                "other_ids": {},
                "num": null,
                "urls": [],
                "raw_text": "P. Whitelock. 1992. Shake-and-bake translation. In Proceedings of COLING '92, Nantes, France.",
                "links": null
            }
        },
        "ref_entries": {
            "FIGREF0": {
                "type_str": "figure",
                "text": "(NP0,NP)/mod(N,NP0) s r6. n/father father r7. vp(NP)/left(NP) left",
                "uris": null,
                "num": null
            }
        }
    }
}