wruisi committed on
Commit
a038abb
·
verified ·
1 Parent(s): fb265c6

Update src/about.py

Browse files
Files changed (1) hide show
  1. src/about.py +22 -3
src/about.py CHANGED
@@ -25,6 +25,24 @@ TITLE = """<h1 align="center" id="space-title">VBVR-Bench Leaderboard</h1>"""
25
 
26
  # What does your leaderboard evaluate?
27
  INTRODUCTION_TEXT = """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  **VBVR-Bench** is a comprehensive benchmark for evaluating **video reasoning capabilities**.
29
 
30
  To systematically assess model reasoning capabilities, VBVR-Bench employs a **dual-split evaluation strategy** across **100 diverse tasks**:
@@ -115,8 +133,9 @@ We will review your submission and add it to the leaderboard within 1-2 weeks.
115
  CITATION_BUTTON_LABEL = "Copy the following snippet to cite these results"
116
  CITATION_BUTTON_TEXT = r"""
117
  @article{vbvr2026,
118
- title={A Very Big Video Reasoning Suite},
119
- author={Wang, Maijunxian and Wang, Ruisi and Lin, Juyi and Ji, Ran and Wiedemer, Thaddäus and Gao, Qingying and Luo, Dezhi and Qian, Yaoyao and Huang, Lianyu and Hong, Zelong and Ge, Jiahui and Ma, Qianli and He, Hang and Zhou, Yifan and Guo, Lingzi and Mei, Lantao and Li, Jiachen and Xing, Hanwen and Zhao, Tianqi and Yu, Fengyuan and Xiao, Weihang and Jiao, Yizheng and Hou, Jianheng and Zhang, Danyang and Xu, Pengcheng and Zhong, Boyang and Zhao, Zehong and Fang, Gaoyun and Kitaoka, John and Xu, Yile and Xu, Hua and Blacutt, Kenton and Nguyen, Tin and Song, Siyuan and Sun, Haoran and Wen, Shaoyue and He, Linyang and Wang, Runming and Wang, Yanzhi and Yang, Mengyue and Ma, Ziqiao and Millière, Raphaël and Shi, Freda and Vasconcelos, Nuno and Khashabi, Daniel and Yuille, Alan and Du, Yilun and Liu, Ziming and Lin, Dahua and Liu, Ziwei and Kumar, Vikash and Li, Yijiang and Yang, Lei and Cai, Zhongang and Deng, Hokin},
120
- year={2026}
 
121
  }
122
  """
 
25
 
26
  # What does your leaderboard evaluate?
27
  INTRODUCTION_TEXT = """
28
+ <a href="https://video-reason.com" target="_blank">
29
+ <img alt="Code" src="https://img.shields.io/badge/Project%20-%20Homepage-4285F4" height="20" />
30
+ </a>
31
+ <a href="https://github.com/orgs/Video-Reason/repositories" target="_blank">
32
+ <img alt="Code" src="https://img.shields.io/badge/VBVR-Code-100000?style=flat-square&logo=github&logoColor=white" height="20" />
33
+ </a>
34
+ <a href="https://arxiv.org/abs/2602.20159" target="_blank">
35
+ <img alt="arXiv" src="https://img.shields.io/badge/arXiv-VBVR-red?logo=arxiv" height="20" />
36
+ </a>
37
+ <a href="https://huggingface.co/Video-Reason/VBVR-Dataset" target="_blank">
38
+ <img alt="Leaderboard" src="https://img.shields.io/badge/%F0%9F%A4%97%20_VBVR_Dataset-Data-ffc107?color=ffc107&logoColor=white" height="20" />
39
+ </a>
40
+ <a href="https://huggingface.co/Video-Reason/VBVR-Bench-Data" target="_blank">
41
+ <img alt="Leaderboard" src="https://img.shields.io/badge/%F0%9F%A4%97%20_VBVR_Bench-Data-ffc107?color=ffc107&logoColor=white" height="20" />
42
+ </a>
43
+ <a href="https://huggingface.co/Video-Reason/VBVR-Bench-Leaderboard" target="_blank">
44
+ <img alt="Leaderboard" src="https://img.shields.io/badge/%F0%9F%A4%97%20_VBVR_Bench-Leaderboard-ffc107?color=ffc107&logoColor=white" height="20" />
45
+ </a>
46
  **VBVR-Bench** is a comprehensive benchmark for evaluating **video reasoning capabilities**.
47
 
48
  To systematically assess model reasoning capabilities, VBVR-Bench employs a **dual-split evaluation strategy** across **100 diverse tasks**:
 
133
  CITATION_BUTTON_LABEL = "Copy the following snippet to cite these results"
134
  CITATION_BUTTON_TEXT = r"""
135
  @article{vbvr2026,
136
+ title={A Very Big Video Reasoning Suite},
137
+ author={Maijunxian Wang and Ruisi Wang and Juyi Lin and Ran Ji and Thaddäus Wiedemer and Qingying Gao and Dezhi Luo and Yaoyao Qian and Lianyu Huang and Zelong Hong and Jiahui Ge and Qianli Ma and Hang He and Yifan Zhou and Lingzi Guo and Lantao Mei and Jiachen Li and Hanwen Xing and Tianqi Zhao and Fengyuan Yu and Weihang Xiao and Yizheng Jiao and Jianheng Hou and Danyang Zhang and Pengcheng Xu and Boyang Zhong and Zehong Zhao and Gaoyun Fang and John Kitaoka and Yile Xu and Hua Xu and Kenton Blacutt and Tin Nguyen and Siyuan Song and Haoran Sun and Shaoyue Wen and Linyang He and Runming Wang and Yanzhi Wang and Mengyue Yang and Ziqiao Ma and Raphaël Millière and Freda Shi and Nuno Vasconcelos and Daniel Khashabi and Alan Yuille and Yilun Du and Ziming Liu and Bo Li and Dahua Lin and Ziwei Liu and Vikash Kumar and Yijiang Li and Lei Yang and Zhongang Cai and Hokin Deng},
138
+ journal = {arXiv preprint arXiv:2602.20159},
139
+ year = {2026}
140
  }
141
  """