Datasets:
Tasks:
Text Classification
Formats:
json
Sub-tasks:
natural-language-inference
Languages:
English
Size:
1K - 10K
ArXiv:
License:
\documentclass[varwidth=598pt]{standalone}
% --- Encoding & text quality -------------------------------------------------
\usepackage[T1]{fontenc}    % font encoding: proper hyphenation, copyable PDF text
\usepackage[utf8]{inputenc} % input encoding (legacy, fine for pdflatex)
\usepackage{microtype}      % better text appearance (protrusion/expansion)
% --- Tables ------------------------------------------------------------------
\usepackage{booktabs}       % professional-quality rules (\toprule, \midrule, ...)
\usepackage{multirow}       % multi-row cells in tables
\usepackage{colortbl}       % color for tables (\cellcolor, \rowcolor)
\usepackage[table]{xcolor}  % enhanced colors for tables (re-uses colortbl above)
\usepackage{array}          % more flexible column formats
\usepackage{tabularx}       % tables with auto-stretch columns (if used)
\usepackage{tabu}           % NOTE(review): legacy package, unused by this table;
                            % kept for compatibility, consider removing
\usepackage{makecell}       % line breaks / formatting inside cells
\usepackage{adjustbox}      % scale-to-width wrapper around the tabular
% --- Math & symbols ----------------------------------------------------------
\usepackage{amsmath}        % math environments
\usepackage{amssymb}        % math symbols
\usepackage{pifont}         % dingbat symbols, http://ctan.org/pkg/pifont
\usepackage{bbding}         % \XSolidBrush
\usepackage{textcomp}       % extra text symbols
% --- Misc text helpers -------------------------------------------------------
\usepackage{soul}           % highlighting (colored text/cells)
\usepackage[normalem]{ulem} % underlining, strikethroughs
\usepackage{enumitem}       % compact lists (if used in table notes)
\usepackage{graphicx}       % images in tables or floats
\usepackage{hyperref}       % hyperlinks; loaded last per hyperref convention
% Suppress the automatic "Table N:" prefix and center the caption text:
% \fnum@table is blanked and \@makecaption is redefined because the
% \caption below carries its own hard-coded "Table 1:" label.
% (The original wrapped this in \begingroup with no matching \endgroup,
% which triggers an "(\end occurred inside a group)" warning; the group
% is dropped -- the redefinitions must stay active at \caption time anyway.)
\makeatletter
\renewcommand{\fnum@table}{}%
\long\def\@makecaption#1#2{%
  \vskip\abovecaptionskip
  \centering #2\par
  \vskip\belowcaptionskip
}
\makeatother
\begin{document}
\begin{table}
\centering
% adjustbox shrinks the tabular only if it exceeds \textwidth (no upscaling).
\begin{adjustbox}{max width=\textwidth}
% NOTE(review): bare keys such as "miao2021simple" appear to be citation keys
% stripped of their \cite{...} wrappers during extraction -- restore \cite if a
% bibliography is added.
\begin{tabular}{lccccccccc}
\toprule
Methods & SST-2 & MNLI & QNLI & QQP & CoLA & STS-B & MRPC & RTE & Average\\
\midrule
\multicolumn{10}{c}{\textit{Development}} \\
BERT$_{\mathrm{base}}$ & 92.3 & 84.6 & 91.5 & 91.3 & 60.3 & 89.9 & 85.1 & 70.8 & 83.23 \\
\ \ +\textit{SCAL}$^\dagger$ miao2021simple & 92.8 & 84.1 & 90.9 & 91.4 & 61.7 & - & - & 69.7 & - \\
\ \ +\textit{SuperT}$^\dagger$ liang2021super & 93.4 & 84.5 & 91.3 & 91.3 & 58.8 & 89.8 & 87.5 & 72.5 & 83.64 \\
\ \ +\textit{R-Drop}$^\dagger$ wu2021r & 93.0 & 85.5 & 92.0 & 91.4 & 62.6 & 89.6 & 87.3 & 71.1 & 84.06 \\
\ \ +\textsc{AD-Drop} & 93.9 & 85.1 & 92.3 & 91.8 & 64.6 & 90.4 & 88.5 & 75.1 & \textbf{85.21} \\
\midrule
RoBERTa$_{\mathrm{base}}$ & 95.3 & 87.6 & 92.9 & 91.9 & 64.8 & 90.9 & 90.7 & 79.4 & 86.69 \\
\ \ +\textit{R-Drop} wu2021r & 95.2 & 87.8 & 93.2 & 91.7 & 64.7 & 91.2 & 90.5 & 80.5 & 86.85 \\
\ \ +\textit{HiddenCut}$^\dagger$ chen2021hiddencut & 95.8 & 88.2 & 93.7 & 92.0 & 66.2 & 91.3 & 92.0 & 83.4 & 87.83 \\
\ \ +\textsc{AD-Drop} & 95.8 & 88.0 & 93.5 & 92.0 & 66.8 & 91.4 & 92.2 & 84.1 & \textbf{87.98}\\
\midrule
\midrule
\multicolumn{10}{c}{\textit{Test}} \\
BERT$_{\mathrm{base}}$ & 93.6 & 84.7 & 90.4 & 89.3 & 52.8 & 85.6 & 81.4 & 68.4 & 80.78 \\
\ \ +\textsc{AD-Drop} & 94.3 & 85.2 & 91.6 & 89.4 & 53.3 & 86.6 & 84.1 & 68.7 & \textbf{81.65} \\
\midrule
RoBERTa$_{\mathrm{base}}$ & 94.8 & 87.5 & 92.8 & 89.6 & 58.3 & 88.7 & 86.3 & 75.1 & 84.14 \\
\ \ +\textsc{AD-Drop} & 95.9 & 87.6 & 93.4 & 89.5 & 58.5 & 89.3 & 87.9 & 76.0 & \textbf{84.76} \\
\bottomrule
% Stray "}" after \end{tabular} removed: it was a leftover (likely from a
% deleted \resizebox{...}{ ... }) that prematurely closed the adjustbox
% content group and broke compilation.
\end{tabular}
\end{adjustbox}
\caption{Table 1: Overall results of fine-tuned models on the GLUE benchmark. The symbol $\dagger$ denotes results directly taken from the original papers. The best average results are shown in bold.}
\end{table}
\end{document}