Spaces:
Runtime error
yinanhe
committed on
Commit · df9540e
1 Parent(s): 098ebb3
[update] fix typo
Browse files
- app.py +8 -5
- constants.py +2 -2
app.py
CHANGED
@@ -81,9 +81,15 @@ def calculate_selected_score(df, selected_columns):
 def get_final_score(df, selected_columns):
     normalize_df = get_normalized_df(df)
     final_score = df.drop('name', axis=1).sum(axis=1)
-
+    if 'Overall Score' in df:
+        df['Overall Score'] = final_score
+    else:
+        df.insert(1, 'Overall Score', final_score)
     selected_score = calculate_selected_score(normalize_df, selected_columns)
-
+    if 'Selected Score' in df:
+        df['Selected Score'] = selected_score
+    else:
+        df.insert(1, 'Selected Score', selected_score)
     return df
 
 def get_baseline_df():
@@ -91,7 +97,6 @@ def get_baseline_df():
     submission_repo.git_pull()
     df = pd.read_csv(CSV_DIR)
     df = get_final_score(df, checkbox_group.value)
-    # calculate the Overall Score
     df = df.sort_values(by="Overall Score", ascending=False)
     present_columns = MODEL_INFO + checkbox_group.value
     df = df[present_columns]
@@ -111,7 +116,6 @@ def on_filter_model_size_method_change(selected_columns):
     # columns:
     selected_columns = [item for item in TASK_INFO if item in selected_columns]
     present_columns = MODEL_INFO + selected_columns
-    # print("selected_columns",'|'.join(selected_columns))
     updated_data = updated_data[present_columns]
     updated_data = updated_data.sort_values(by=selected_columns[0], ascending=False)
     updated_headers = present_columns
@@ -125,7 +129,6 @@ def on_filter_model_size_method_change(selected_columns):
         interactive=False,
         visible=True,
     )
-
     return filter_component#.value
 
 block = gr.Blocks()
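The point of the new branches is that get_final_score can now be called repeatedly on the same DataFrame (for example on every leaderboard refresh): the score columns are inserted once and simply overwritten afterwards, whereas an unconditional df.insert would raise ValueError on a second call because the column already exists. A minimal sketch of that behaviour with a toy DataFrame (the column names below are illustrative, not the real leaderboard schema):

import pandas as pd

def add_overall_score(df, final_score):
    # Same pattern as the patched get_final_score: overwrite if the
    # column already exists, otherwise insert it right after 'name'.
    if 'Overall Score' in df:
        df['Overall Score'] = final_score
    else:
        df.insert(1, 'Overall Score', final_score)
    return df

# Toy data standing in for the leaderboard CSV (illustrative only).
df = pd.DataFrame({'name': ['model-a', 'model-b'],
                   'task1': [0.8, 0.6],
                   'task2': [0.7, 0.9]})

scores = df.drop('name', axis=1).sum(axis=1)
add_overall_score(df, scores)   # first call: inserts the column
add_overall_score(df, scores)   # second call: overwrites, no ValueError
print(df)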
constants.py
CHANGED
@@ -46,11 +46,11 @@ CSV_DIR = "./vbench_leaderboard_submission/results.csv"
 
 COLUMN_NAMES = MODEL_INFO + TASK_INFO
 
-LEADERBORAD_INTRODUCTION = """#
+LEADERBORAD_INTRODUCTION = """# VBench Leaderboard
 
 🏆 Welcome to the leaderboard of the VBench! 🎦
 
-Please follow the instructions in [
+Please follow the instructions in [VBench](https://github.com/Vchitect/VBench?tab=readme-ov-file#usage) to upload the generated `result.json` file here. After clicking the `Submit Eval` button, click the `Refresh` button.
 """
 
 SUBMIT_INTRODUCTION = """# Submit on VBench Benchmark Introduction
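These introduction strings are Markdown that the Space renders at the top of the leaderboard page. A hedged sketch of how such a constant is typically wired into a Gradio Blocks layout (the exact component layout used by this Space's app.py is not shown in this diff and is assumed here):

import gradio as gr

# Stand-in for the constant above; in the Space it is imported from constants.py.
LEADERBORAD_INTRODUCTION = """# VBench Leaderboard

🏆 Welcome to the leaderboard of the VBench! 🎦
"""

with gr.Blocks() as block:
    # gr.Markdown renders the introduction string as Markdown;
    # the leaderboard table and submission tab would follow it.
    gr.Markdown(LEADERBORAD_INTRODUCTION)

block.launch()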