% Dataset-card metadata (kept as comments so the file compiles; any text
% before \documentclass is a LaTeX error):
% Datasets:
% Tasks: Text Classification
% Formats: json
% Sub-tasks: natural-language-inference
% Languages: English
% Size: 1K - 10K
% ArXiv:
% License:
\documentclass[varwidth=598pt]{standalone}
\usepackage{booktabs}        % professional-quality tables (\toprule, \midrule, \bottomrule)
\usepackage{multirow}        % multi-row cells in tables (\multirow)
\usepackage{colortbl}        % color for tables (\cellcolor, \rowcolor)
\usepackage[table]{xcolor}   % enhanced colors for tables (the [table] option also pulls in colortbl)
\usepackage{array}           % more flexible column formats
\usepackage{tabularx}        % tables with auto-stretch columns
\usepackage{graphicx}        % images in tables or figure/table floats
\usepackage{amssymb}         % math symbols
\usepackage{amsmath}         % math environments
\usepackage{soul}            % highlighting (used for colored text/cells)
\usepackage[normalem]{ulem}  % underlining, strikethroughs ([normalem] keeps \emph italic)
\usepackage[T1]{fontenc}     % font encoding
\usepackage[utf8]{inputenc}  % input encoding (legacy, fine for pdflatex)
\usepackage{microtype}       % better text appearance
\usepackage{textcomp}        % extra text symbols
\usepackage{enumitem}        % compact lists (if used in table notes)
\usepackage{adjustbox}       % \begin{adjustbox}{max width=...} used around the tabular below
\usepackage{tabu}            % NOTE(review): tabu is unmaintained and unused below; kept for compatibility
\usepackage{pifont}          % dingbat symbols, http://ctan.org/pkg/pifont
\usepackage{bbding}          % \XSolidBrush and similar marks
\usepackage{makecell}        % multi-line table cells
\usepackage{hyperref}        % hyperlinks; loaded last per hyperref's load-order requirement
\begingroup
\makeatletter
% Suppress the automatic "Table N" float label: the caption text in the
% document body hardcodes its own "Table 7:" prefix instead.
\renewcommand{\fnum@table}{}%
% Re-typeset captions as just the centered caption text (#2); the float
% label argument (#1) is deliberately ignored, matching the line above.
\long\def\@makecaption#1#2{%
  \vskip\abovecaptionskip
  \centering #2\par
  \vskip\belowcaptionskip
}
\makeatother
\begin{document}
\begin{table}
\centering
% adjustbox shrinks the tabular only when it would exceed \textwidth.
\begin{adjustbox}{max width=\textwidth}
\begin{tabular}{c|cc|cc|cc|cc}
\toprule
\multirow{2}{*}{Methods} & \multicolumn{2}{c|}{CoLA} & \multicolumn{2}{c|}{STS-B$^\ddagger$} & \multicolumn{2}{c|}{MRPC} & \multicolumn{2}{c}{RTE} \\
& Mcc & Time & Pcc & Time & Acc & Time & Acc & Time \\
\midrule
RD & +1.8 & $\times$1.42 & +0.3 & $\times$1.38 & +2.7 & $\times$1.31 & +3.9 & $\times$1.42 \\
AA & +3.3 & $\times$1.42 & +0.1 & $\times$1.48 & +2.9 & $\times$1.94 & +3.9 & $\times$1.58 \\
GA & +4.3 & $\times$3.58 & +0.5 & $\times$1.95 & +3.4 & $\times$4.13 & +4.3 & $\times$4.50 \\
IGA & +3.5 & $\times$99.61 & +0.8 & $\times$15.00 & +3.4 & $\times$110.12 & +3.6 & $\times$125.67 \\
\bottomrule
\end{tabular}% stray "}" removed here: it had no matching open brace and broke compilation
\end{adjustbox}
\caption{Table 7: Results of performance and computational cost of \textsc{AD-Drop} with different masking strategies (GA, IGA, AA, and RD) relative to the original fine-tuning. The symbol $\ddagger$ means \textsc{AD-Drop} is only applied in the first layer. BERT is chosen as the base model.}
\end{table}
\endgroup % closes the \begingroup opened before \begin{document}
\end{document}