Add paper link, GitHub repository, and model metadata

#1
by nielsr HF Staff - opened
Files changed (1) hide show
  1. README.md +25 -3
README.md CHANGED
@@ -1,5 +1,17 @@
1
  ---
2
  license: apache-2.0
 
 
 
 
 
 
 
 
 
 
 
 
3
  ---
4
 
5
  <!doctype html>
@@ -198,7 +210,7 @@ license: apache-2.0
198
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJGbb5cc10122964720bc8da76c5' style='background-color:#72c69d;text-align: right;vertical-align: bottom;' class="extra-columns">73.7
199
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG0d0b379bcb6f462185bacedaa' style='background-color:#b2e0ca;text-align: right;vertical-align: bottom;' class="extra-columns">14.3
200
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG750f96b656904402b0685a9dc' style='background-color:#a5dbc1;text-align: right;vertical-align: bottom;' class="extra-columns">20.9
201
- <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG090381cf461f4ec1be545a3b6' style='background-color:#8ed1b0;text-align: right;vertical-align: bottom;' class="extra-columns">18.7
202
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJGdbb0b3f228a24416873a958a9' style='background-color:#bde4d1;text-align: right;vertical-align: bottom;' class="extra-columns">5.9
203
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG6886327be27f435c846f58863' style='background-color:#e3f4ec;text-align: right;vertical-align: bottom;' class="extra-columns">0.1
204
  <br/></td></tr><tr id='temp:C:GJG49cecd182f02483f9e1a67399'><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG35c2ff53e6ca4ff6a432d6e10' style=''><a href="https://huggingface.co/MichiganNLP/TAMA-vA">TAMA-vA</a>
@@ -212,7 +224,7 @@ license: apache-2.0
212
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJGbb5cc10122964720bc8da76c5' style='background-color:#afdfc7;text-align: right;vertical-align: bottom;' class="extra-columns">41.8
213
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG0d0b379bcb6f462185bacedaa' style='background-color:#bbe4d0;text-align: right;vertical-align: bottom;' class="extra-columns">12.7
214
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG750f96b656904402b0685a9dc' style='background-color:#a4dbc0;text-align: right;vertical-align: bottom;' class="extra-columns">21.1
215
- <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG090381cf461f4ec1be545a3b6' style='background-color:#97d5b7;text-align: right;vertical-align: bottom;' class="extra-columns">17.1
216
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJGdbb0b3f228a24416873a958a9' style='background-color:#b8e3ce;text-align: right;vertical-align: bottom;' class="extra-columns">6.3
217
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG6886327be27f435c846f58863' style='background-color:#e3f4ec;text-align: right;vertical-align: bottom;' class="extra-columns">0.1
218
  <br/></td></tr><tr id='temp:C:GJGb7a26b478b8a4d66bce379049'><td id='temp:s:temp:C:GJGb7a26b478b8a4d66bce379049;temp:C:GJG35c2ff53e6ca4ff6a432d6e10' style=''><a href="https://huggingface.co/Qwen/Qwen2.5-7B-Instruct">Qwen2.5-7B</a>
@@ -282,7 +294,7 @@ license: apache-2.0
282
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJGbb5cc10122964720bc8da76c5' style='background-color:#5fbf90;text-align: right;vertical-align: bottom;' class="extra-columns">83.2
283
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG0d0b379bcb6f462185bacedaa' style='background-color:#63c093;text-align: right;vertical-align: bottom;' class="extra-columns">28.9
284
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG750f96b656904402b0685a9dc' style='background-color:#57bb8a;text-align: right;vertical-align: bottom;' class="extra-columns">38.9
285
- <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG090381cf461f4ec1be545a3b6' style='background-color:#57bb8a;text-align: right;vertical-align: bottom;' class="extra-columns">27.6
286
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJGdbb0b3f228a24416873a958a9' style='background-color:#5ebe8f;text-align: right;vertical-align: bottom;' class="extra-columns">14.2
287
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG6886327be27f435c846f58863' style='background-color:#57bb8a;text-align: right;vertical-align: bottom;' class="extra-columns">0.6
288
  <br/></td></tr><tr id='temp:C:GJG3cb5f57e8e3748a9b13fe5018'><td id='temp:s:temp:C:GJG3cb5f57e8e3748a9b13fe5018;temp:C:GJG35c2ff53e6ca4ff6a432d6e10' style=''><a href="https://huggingface.co/MichiganNLP/TAMA-QWen3">TAMA-QWen3</a>
@@ -321,3 +333,13 @@ license: apache-2.0
321
  </body>
322
  </html>
323
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
  license: apache-2.0
3
+ library_name: transformers
4
+ pipeline_tag: table-question-answering
5
+ base_model: Qwen/Qwen3-8B
6
+ ---
7
+
8
+ # TAMA-QWen3
9
+
10
+ This repository contains **TAMA-QWen3**, an instruction-tuned model based on [Qwen3-8B](https://huggingface.co/Qwen/Qwen3-8B) optimized for table understanding and reasoning. It was introduced and evaluated as part of the work: **[MMTU: A Massive Multi-Task Table Understanding and Reasoning Benchmark](https://huggingface.co/papers/2506.05587)**.
11
+
12
+ - **GitHub Repository:** [MMTU-Benchmark/MMTU](https://github.com/mmtu-benchmark/mmtu)
13
+ - **Paper:** [arXiv:2506.05587](https://arxiv.org/abs/2506.05587)
14
+
15
  ---
16
 
17
  <!doctype html>
 
210
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJGbb5cc10122964720bc8da76c5' style='background-color:#72c69d;text-align: right;vertical-align: bottom;' class="extra-columns">73.7
211
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG0d0b379bcb6f462185bacedaa' style='background-color:#b2e0ca;text-align: right;vertical-align: bottom;' class="extra-columns">14.3
212
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG750f96b656904402b0685a9dc' style='background-color:#a5dbc1;text-align: right;vertical-align: bottom;' class="extra-columns">20.9
213
+ <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG090381cf461f4ec1be545a3b6' style='background-color:#8ed1b0;text-align: right;vertical-align: bottom;' class="extra-columns">18.7
214
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJGdbb0b3f228a24416873a958a9' style='background-color:#bde4d1;text-align: right;vertical-align: bottom;' class="extra-columns">5.9
215
  <br/></td><td id='temp:s:temp:C:GJGa47d78fca9d44923919996e67;temp:C:GJG6886327be27f435c846f58863' style='background-color:#e3f4ec;text-align: right;vertical-align: bottom;' class="extra-columns">0.1
216
  <br/></td></tr><tr id='temp:C:GJG49cecd182f02483f9e1a67399'><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG35c2ff53e6ca4ff6a432d6e10' style=''><a href="https://huggingface.co/MichiganNLP/TAMA-vA">TAMA-vA</a>
 
224
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJGbb5cc10122964720bc8da76c5' style='background-color:#afdfc7;text-align: right;vertical-align: bottom;' class="extra-columns">41.8
225
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG0d0b379bcb6f462185bacedaa' style='background-color:#bbe4d0;text-align: right;vertical-align: bottom;' class="extra-columns">12.7
226
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG750f96b656904402b0685a9dc' style='background-color:#a4dbc0;text-align: right;vertical-align: bottom;' class="extra-columns">21.1
227
+ <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG090381cf461f4ec1be545a3b6' style='background-color:#97d5b7;text-align: right;vertical-align: bottom;' class="extra-columns">17.1
228
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJGdbb0b3f228a24416873a958a9' style='background-color:#b8e3ce;text-align: right;vertical-align: bottom;' class="extra-columns">6.3
229
  <br/></td><td id='temp:s:temp:C:GJG49cecd182f02483f9e1a67399;temp:C:GJG6886327be27f435c846f58863' style='background-color:#e3f4ec;text-align: right;vertical-align: bottom;' class="extra-columns">0.1
230
  <br/></td></tr><tr id='temp:C:GJGb7a26b478b8a4d66bce379049'><td id='temp:s:temp:C:GJGb7a26b478b8a4d66bce379049;temp:C:GJG35c2ff53e6ca4ff6a432d6e10' style=''><a href="https://huggingface.co/Qwen/Qwen2.5-7B-Instruct">Qwen2.5-7B</a>
 
294
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJGbb5cc10122964720bc8da76c5' style='background-color:#5fbf90;text-align: right;vertical-align: bottom;' class="extra-columns">83.2
295
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG0d0b379bcb6f462185bacedaa' style='background-color:#63c093;text-align: right;vertical-align: bottom;' class="extra-columns">28.9
296
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG750f96b656904402b0685a9dc' style='background-color:#57bb8a;text-align: right;vertical-align: bottom;' class="extra-columns">38.9
297
+ <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG090381cf461f4ec1be545a3b6' style='background-color:#57bb8a;text-align: right;vertical-align: bottom;' class="extra-columns">27.6
298
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJGdbb0b3f228a24416873a958a9' style='background-color:#5ebe8f;text-align: right;vertical-align: bottom;' class="extra-columns">14.2
299
  <br/></td><td id='temp:s:temp:C:GJG0085c5b822744c81b434cc6cd;temp:C:GJG6886327be27f435c846f58863' style='background-color:#57bb8a;text-align: right;vertical-align: bottom;' class="extra-columns">0.6
300
  <br/></td></tr><tr id='temp:C:GJG3cb5f57e8e3748a9b13fe5018'><td id='temp:s:temp:C:GJG3cb5f57e8e3748a9b13fe5018;temp:C:GJG35c2ff53e6ca4ff6a432d6e10' style=''><a href="https://huggingface.co/MichiganNLP/TAMA-QWen3">TAMA-QWen3</a>
 
333
  </body>
334
  </html>
335
 
336
+ ## Citation
337
+
338
+ ```bibtex
339
+ @article{mmtu,
340
+ title={{MMTU}: A Massive Multi-Task Table Understanding and Reasoning Benchmark},
341
+ author={Junjie Xing and Yeye He and Mengyu Zhou and Haoyu Dong and Shi Han and Lingjiao Chen and Dongmei Zhang and Surajit Chaudhuri and H. V. Jagadish},
342
+ journal={arXiv preprint arXiv:2506.05587},
343
+ year={2025}
344
+ }
345
+ ```