| | __all__ = ['block', 'make_clickable_model', 'make_clickable_user', 'get_submissions'] |
import csv
import datetime
import io
import os
import zipfile

import gradio as gr
import numpy as np
import pandas as pd

from constants import *
from draw_sub_dimension import *
from huggingface_hub import Repository
| |
|
| | HF_TOKEN = os.environ.get("HF_TOKEN") |
| |
|
| | global data_component, filter_component |
| |
|
| |
|
def add_new_eval(
    input_file,
    model_name_textbox: str,
    revision_name_textbox: str,
    access_type: str,
    model_link: str,
    team_name: str,
    contact_email: str,
    model_publish: str,
    model_resolution: str,
    model_frame: str,
    model_fps: str,
    model_video_length: str,
    model_checkpoint: str,
    model_commit_id: str,
    model_video_format: str
):
    """Validate and ingest one leaderboard submission.

    ``input_file`` is the raw bytes of an uploaded ZIP containing per-category
    score CSVs.  The function archives the ZIP in the submissions dataset
    repo, extracts it, parses every recognized score file, appends/overwrites
    a row in the results CSV, logs the submitter metadata, and pushes the
    repo back to the Hub.

    Returns a 3-tuple of ``gr.update(...)`` objects for the registered
    outputs ``(submit_button, submit_succ_button, fail_textbox)``.
    """
    # BUG FIX: this branch used to return a bare string, which does not match
    # the 3 outputs registered on submit_button.click; return the same
    # "failure" visibility updates as the missing-field branch instead.
    if input_file is None:
        return gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)

    if model_link == '' or model_name_textbox == '' or contact_email == '':
        # Show the fail note, keep the submit button, hide the success note.
        return gr.update(visible=True), gr.update(visible=False), gr.update(visible=True)

    # Sync the dataset repo that stores all submissions plus the results CSV.
    submission_repo = Repository(local_dir=SUBMISSION_NAME, clone_from=SUBMISSION_URL,
                                 use_auth_token=HF_TOKEN, repo_type="dataset")
    submission_repo.git_pull()

    now = datetime.datetime.now()
    upload_date = now.strftime("%Y-%m-%d")
    upload_time = now.strftime("%Y-%m-%d_%H-%M-%S")
    # Unique per-submission name, used for both the archived ZIP and the
    # extraction folder.  BUG FIX: this value was computed but never used —
    # the code below wrote to a hard-coded path, so concurrent/successive
    # submissions overwrote each other.
    filename = f"{model_name_textbox}_{upload_time}"

    with open(f'{SUBMISSION_NAME}/{filename}.zip', 'wb') as f:
        f.write(input_file)

    csv_data = pd.read_csv(CSV_PATH)

    # Decide which leaderboard row to write: append a new row, or — when the
    # revision name matches an existing entry — overwrite that row in place.
    if revision_name_textbox == '':
        col = csv_data.shape[0]
        model_name = model_name_textbox.replace(',', ' ')
    else:
        model_name = revision_name_textbox.replace(',', ' ')
        model_name_list = csv_data['Model Name (clickable)']
        # Entries are stored as markdown links "[name](url)"; recover "name".
        name_list = [name.split(']')[0][1:] for name in model_name_list]
        if revision_name_textbox not in name_list:
            col = csv_data.shape[0]
        else:
            col = name_list.index(revision_name_textbox)

    model_name = '[' + model_name + '](' + model_link + ')'

    # Extract the upload next to its archived ZIP copy.
    folder = f'{SUBMISSION_NAME}/{filename}'
    os.makedirs(folder, exist_ok=True)
    with zipfile.ZipFile(io.BytesIO(input_file), 'r') as zip_ref:
        zip_ref.extractall(folder)

    # Expected per-category score files, matched by filename suffix; the
    # list index i selects which scores each file populates below.
    required_files = [
        "_consistent_attr_score.csv",
        "_dynamic_attr_score.csv",
        "_spatial_score.csv",
        "_motion_score.csv",
        "_motion_back_fore.csv",
        "_action_binding_score.csv",
        "_object_interactions_score.csv",
        "_numeracy_video.csv",
    ]

    # Default every score to "N/A" so missing files still yield a full row.
    score_1 = score_2 = score_3 = score_4 = score_5 = score_6 = score_7 = "N/A"
    color_score = shape_score = texture_score = coexist = acc = acc_score = "N/A"
    motion_level = motion_acc = common_score = uncommon_score = physical_score = social_score = "N/A"

    for i, suffix in enumerate(required_files):
        for sub_folder in os.listdir(folder):
            # Skip hidden/metadata entries such as .DS_Store or __MACOSX.
            if sub_folder.startswith('.') or sub_folder.startswith('__'):
                print(f"Skip the file: {sub_folder}")
                continue

            cur_sub_folder = os.path.join(folder, sub_folder)
            if os.path.isdir(cur_sub_folder):
                for file in os.listdir(cur_sub_folder):
                    if file.endswith(suffix):
                        print("FILE exist", file)
                        filepath = os.path.join(cur_sub_folder, file)
                        if i == 0:
                            score_1 = read_score(filepath)
                            color_score, shape_score, texture_score = sub_consist_attr(filepath)
                        elif i == 1:
                            score_2 = read_score(filepath)
                        elif i == 2:
                            score_3 = read_score(filepath)
                            coexist, acc, acc_score = sub_spatial(filepath)
                        elif i == 3:
                            score_4 = read_score(filepath)
                        elif i == 4:
                            motion_level, motion_acc = sub_motion(filepath)
                        elif i == 5:
                            score_5 = read_score(filepath)
                            common_score, uncommon_score = sub_action(filepath)
                        elif i == 6:
                            score_6 = read_score(filepath)
                            physical_score, social_score = sub_interaction(filepath)
                        elif i == 7:
                            score_7 = read_score(filepath)

    # Blank or benchmark-impersonating team names are displayed generically.
    if team_name == '' or 'compbench' in team_name.lower():
        evaluate_team = "User Upload"
    else:
        evaluate_team = team_name

    new_data = [model_name, evaluate_team, upload_date,
                score_1, score_2, score_3, score_4, score_5, score_6, score_7,
                color_score, shape_score, texture_score,
                coexist, acc, acc_score,
                motion_level, motion_acc,
                common_score, uncommon_score,
                physical_score, social_score]
    print(new_data)

    csv_data.loc[col] = new_data
    # BUG FIX: the None returned by to_csv() used to be re-bound to csv_data.
    csv_data.to_csv(CSV_PATH, index=False)

    # Append the (non-public) submitter metadata to the info CSV.
    new_info = [model_name, upload_time, team_name, model_publish,
                model_resolution, model_frame, model_fps, model_video_length,
                model_checkpoint, model_commit_id, model_video_format,
                access_type, contact_email, model_link]
    with open(INFO_PATH, mode='a', newline='') as csvfile:
        writer = csv.writer(csvfile)
        writer.writerow(new_info)

    submission_repo.push_to_hub()

    print("success update", model_name)
    # Hide the submit button, show the success note, hide the fail note.
    return gr.update(visible=False), gr.update(visible=True), gr.update(visible=False)
| |
|
| |
|
def calculate_selected_score(df, selected_columns):
    """Return the per-row mean over the selected task columns.

    Only columns that are genuine tasks (present in TASK_INFO) contribute.
    Missing values are skipped by the mean; rows whose selected cells are
    all NaN are normalized to 0.0.
    """
    selected_task = [c for c in selected_columns if c in TASK_INFO]
    # BUG FIX (cleanup): the original branched on isna().any().any() but both
    # branches returned the identical expression — the conditional was dead.
    return df[selected_task].mean(axis=1, skipna=True).fillna(0.0)
| |
|
def get_final_score(df, selected_columns):
    """Attach 'Total Avg. Score' and 'Selected Avg. Score' columns to df.

    Mutates and returns df: task cells are coerced to numeric ("N/A" becomes
    NaN), the total average spans all TASK_INFO columns, and the selected
    average spans only the currently chosen categories.
    """
    # Normalize task columns so mean() can skip missing entries.
    df[TASK_INFO] = df[TASK_INFO].replace("N/A", np.nan)
    df[TASK_INFO] = df[TASK_INFO].apply(pd.to_numeric, errors='coerce')

    total = round(df[TASK_INFO].mean(axis=1, skipna=True), 4)
    if 'Total Avg. Score' in df:
        df['Total Avg. Score'] = total
    else:
        df.insert(1, 'Total Avg. Score', total)

    selected = round(calculate_selected_score(df, selected_columns), 4)
    if 'Selected Avg. Score' in df:
        df['Selected Avg. Score'] = selected
    else:
        # Inserted at position 1 as well, so it lands before the total column.
        df.insert(1, 'Selected Avg. Score', selected)
    return df
| |
|
| |
|
def get_baseline_df():
    """Pull the latest results and return the team-verified leaderboard view."""
    repo = Repository(local_dir=SUBMISSION_NAME, clone_from=SUBMISSION_URL,
                      use_auth_token=HF_TOKEN, repo_type="dataset")
    repo.git_pull()

    table = pd.read_csv(CSV_PATH)
    table = get_final_score(table, checkbox_group.value)
    table = table.sort_values(by="Selected Avg. Score", ascending=False)

    # Restrict to the model-info columns plus the currently selected
    # categories, then keep only rows evaluated by the benchmark team.
    table = table[MODEL_INFO + checkbox_group.value]
    return table[table['Evaluated by'] == 'T2V-CompBench Team']
| |
|
def get_baseline_df_sub():
    """Pull the latest results and return the sub-dimension leaderboard view."""
    repo = Repository(local_dir=SUBMISSION_NAME, clone_from=SUBMISSION_URL,
                      use_auth_token=HF_TOKEN, repo_type="dataset")
    repo.git_pull()

    table = pd.read_csv(CSV_PATH)
    table = get_final_score(table, checkbox_group.value)
    table = table.sort_values(by="Selected Avg. Score", ascending=False)

    # Sub-dimension columns are spliced between the leading model-info
    # columns and the trailing two model-info columns.
    present_columns = MODEL_INFO[:-2] + SUB_TASK_INFO + MODEL_INFO[-2:]
    print(present_columns)

    table = table[present_columns]
    return table[table['Evaluated by'] == 'T2V-CompBench Team']
| |
|
| |
|
| |
|
def get_all_df(selected_columns, csv=CSV_PATH):
    """Load the full results table, attach average-score columns, and rank it."""
    ranked = get_final_score(pd.read_csv(csv), selected_columns)
    return ranked.sort_values(by="Selected Avg. Score", ascending=False)
| |
|
| |
|
| | |
def category_checkbox_change(selected_columns, only_compbench_team):
    """Rebuild the main leaderboard table for the chosen categories/filter."""
    table = get_all_df(selected_columns, CSV_PATH)
    if only_compbench_team:
        table = table[table['Evaluated by'] == 'T2V-CompBench Team']

    # Keep TASK_INFO's canonical column order regardless of click order.
    chosen = [task for task in TASK_INFO if task in selected_columns]
    headers = MODEL_INFO + chosen
    table = table[headers].sort_values(by="Selected Avg. Score", ascending=False)

    dtypes = [DATA_TITLE_TYPE[COLUMN_NAMES.index(h)] for h in headers]

    return gr.components.Dataframe(
        value=table,
        headers=headers,
        type="pandas",
        datatype=dtypes,
        interactive=False,
        visible=True,
    )
| |
|
def category_checkbox_change_sub(selected_columns, selected_columns_sub, only_compbench_team):
    """Rebuild the sub-dimension leaderboard table for the chosen filters."""
    table = get_all_df(selected_columns, CSV_PATH)
    if only_compbench_team:
        table = table[table['Evaluated by'] == 'T2V-CompBench Team']

    # Keep SUB_TASK_INFO's canonical column order regardless of click order;
    # the selected sub-dimensions sit between the leading model-info columns
    # and the trailing two.
    chosen = [sub for sub in SUB_TASK_INFO if sub in selected_columns_sub]
    headers = MODEL_INFO[:-2] + chosen + MODEL_INFO[-2:]
    table = table[headers].sort_values(by="Selected Avg. Score", ascending=False)

    dtypes = [SUB_DATA_TITLE_TYPE[SUB_COLUMN_NAMES.index(h)] for h in headers]

    return gr.components.Dataframe(
        value=table,
        headers=headers,
        type="pandas",
        datatype=dtypes,
        interactive=False,
        visible=True,
    )
| |
|
| |
|
# ---------------------------------------------------------------------------
# Gradio UI: four tabs (main leaderboard, sub-dimension leaderboard, about,
# submission form) plus a refresh button, then the app launch.
# ---------------------------------------------------------------------------
block = gr.Blocks()

with block:
    # Page header: intro markdown, an HTML banner, then a second intro block.
    gr.Markdown(
        LEADERBOARD_INTRODUCTION
    )
    gr.HTML(
        LEADERBOARD_INTRODUCTION_HTML
    )
    gr.Markdown(
        LEADERBOARD_INTRODUCTION_2
    )
    with gr.Tabs(elem_classes="tab-buttons") as tabs:

        # Tab 1: main leaderboard over the top-level task categories.
        with gr.TabItem("📊 T2V-CompBench", elem_id="compbench-tab-table", id=1):
            with gr.Row():
                with gr.Accordion("Citation", open=False):
                    citation_button = gr.Textbox(
                        value=CITATION_BUTTON_TEXT,
                        label=CITATION_BUTTON_LABEL,
                        elem_id="citation-button",
                        lines=14,
                    )

            with gr.Row():
                # Checked by default: show only team-verified results.
                compbench_team_filter = gr.Checkbox(
                    label="Evaluated by T2V-CompBench Team (Uncheck to view all submissions)",
                    value=True,
                    interactive=True
                )

            with gr.Row():
                checkbox_group = gr.CheckboxGroup(
                    choices=TASK_INFO,
                    value=TASK_INFO,
                    label="Evaluation Category",
                    interactive=True,
                )

            data_component = gr.components.Dataframe(
                value=get_baseline_df,
                headers=COLUMN_NAMES,
                type="pandas",
                datatype=DATA_TITLE_TYPE,
                interactive=False,
                visible=True,
            )

            # Re-render the table whenever the category selection or the
            # team-only filter changes.
            checkbox_group.change(fn=category_checkbox_change, inputs=[checkbox_group, compbench_team_filter], outputs=data_component)
            compbench_team_filter.change(fn=category_checkbox_change, inputs=[checkbox_group, compbench_team_filter], outputs=data_component)

        # Tab 2: sub-dimension breakdown of the same submissions.
        with gr.TabItem("🗂️ Sub-Dimension", elem_id="compbench-tab-table", id=2):
            with gr.Row():
                with gr.Accordion("Citation", open=False):
                    citation_button = gr.Textbox(
                        value=CITATION_BUTTON_TEXT,
                        label=CITATION_BUTTON_LABEL,
                        elem_id="citation-button",
                        lines=14,
                    )
            with gr.Row():
                compbench_team_filter_sub = gr.Checkbox(
                    label="Evaluated by T2V-CompBench Team (Uncheck to view all submissions)",
                    value=True,
                    interactive=True
                )
            with gr.Row():
                checkbox_group_sub = gr.CheckboxGroup(
                    choices=SUB_TASK_INFO,
                    value=SUB_TASK_INFO,
                    label="Evaluation Sub-Dimensions",
                    interactive=True,
                )

            data_component_sub = gr.components.Dataframe(
                value=get_baseline_df_sub,
                headers=SUB_COLUMN_NAMES,
                type="pandas",
                datatype=SUB_DATA_TITLE_TYPE,
                interactive=False,
                visible=True,
            )

            # Note the handler also takes the main tab's checkbox_group as
            # its first input (used for the average-score computation).
            checkbox_group_sub.change(fn=category_checkbox_change_sub, inputs=[checkbox_group, checkbox_group_sub, compbench_team_filter_sub], outputs=data_component_sub)
            compbench_team_filter_sub.change(fn=category_checkbox_change_sub, inputs=[checkbox_group, checkbox_group_sub, compbench_team_filter_sub], outputs=data_component_sub)

        # Tab 3: static information page.
        with gr.TabItem("📝 About", elem_id="compbench-tab-table", id=3):
            gr.Markdown(LEADERBOARD_INFO, elem_classes="markdown-text")

        # Tab 4: submission form wired to add_new_eval.
        with gr.TabItem("🚀 Submit here! ", elem_id="compbench-tab-table", id=4):

            with gr.Row():
                gr.Markdown(SUBMIT_INTRODUCTION, elem_classes="markdown-text")

            with gr.Row():
                gr.Markdown("# ✉️✨ Submit your model evaluation CSV files here!", elem_classes="markdown-text")

            with gr.Row():
                gr.Markdown("Here is a required field", elem_classes="markdown-text")
            with gr.Row():
                with gr.Column():
                    model_name_textbox = gr.Textbox(
                        label="Model Name", placeholder="Required field"
                    )
                    revision_name_textbox = gr.Textbox(
                        label="Revision Model Name(Optional)", placeholder="If you need to update the previous results, please fill in this line"
                    )
                    access_type = gr.Dropdown(choices=["Open Source", "Ready to Open Source", "API", "Close"], value=None, label="Please select the way user can access your model. You can update the content by revision_name, or contact the T2V-CompBench Team.")

                with gr.Column():
                    model_link = gr.Textbox(
                        label="Project Page/Paper Link/Github/HuggingFace Repo", placeholder="Required field. If filling in the wrong information, your results may be removed."
                    )
                    team_name = gr.Textbox(
                        label="Your Team Name(If left blank, it will be user upload)", placeholder="User Upload"
                    )
                    contact_email = gr.Textbox(
                        label="E-Mail(Will not be displayed)", placeholder="Required field"
                    )

            # Optional model metadata, recorded to the info CSV (not shown).
            with gr.Row():
                model_publish = gr.Textbox(label="Time of Publish", placeholder="1970-01-01")
                model_resolution = gr.Textbox(label="Resolution", placeholder="width x height")
                model_frame = gr.Textbox(label="Frame Count", placeholder="int")
                model_fps = gr.Textbox(label="FPS", placeholder="int")
                model_video_length = gr.Textbox(label="Video Duration(s)", placeholder="float(2.0)")
                model_checkpoint = gr.Textbox(label="Model Checkpoint", placeholder="optional")
                model_commit_id = gr.Textbox(label="Github commit id", placeholder='optional')
                model_video_format = gr.Textbox(label="Video Format", placeholder='mp4/gif')
                with gr.Column():
                    # type='binary' makes the upload arrive as raw bytes,
                    # matching what add_new_eval expects.
                    input_file = gr.components.File(label="Click to Upload a ZIP File", file_count="single", type='binary')
                    submit_button = gr.Button("Submit Eval!")
                    submit_succ_button = gr.Markdown("Submit Success! Please press refresh and return to LeaderBoard!", visible=False)
                    fail_textbox = gr.Markdown('❗️Please ensure that the `Model Name`, `Project Page`, and `Email` are filled in correctly.', visible=False)

            submission_result = gr.Markdown()
            # add_new_eval returns visibility updates for these 3 outputs.
            submit_button.click(
                add_new_eval,
                inputs = [
                    input_file,
                    model_name_textbox,
                    revision_name_textbox,
                    access_type,
                    model_link,
                    team_name,
                    contact_email,

                    model_publish,
                    model_resolution,
                    model_frame,
                    model_fps,
                    model_video_length,
                    model_checkpoint,
                    model_commit_id,
                    model_video_format
                ],
                outputs=[submit_button, submit_succ_button, fail_textbox]
            )

    # NOTE(review): refresh_data is defined but never wired to any event —
    # the Refresh button below uses category_checkbox_change instead.
    def refresh_data():
        value1 = get_baseline_df()
        return value1

    with gr.Row():
        data_run = gr.Button("Refresh")
        data_run.click(category_checkbox_change, inputs=[checkbox_group, compbench_team_filter], outputs=data_component)

block.launch()
| |
|