New format of model data: renamed "metadata" to "submission_metadata"
server.py
CHANGED
@@ -227,7 +227,7 @@ class LeaderboardServer:
 submission_id: {
     category: self._dataframe_to_csv(
         self._get_model_tournament_table(submission_id, category, to_csv=True),
-        f"Tournament table - {self.submission_id_to_data[submission_id]['metadata']['model_name'][:self.MAX_LENGTH_OF_MODEL_TITLE].replace('/', '_')} - {category}.csv",
+        f"Tournament table - {self.submission_id_to_data[submission_id]['submission_metadata']['model_name'][:self.MAX_LENGTH_OF_MODEL_TITLE].replace('/', '_')} - {category}.csv",
     )
     for category in sorted(self.TASKS_CATEGORIES)
 }
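The changed line builds a per-category CSV filename from the submitted model name, truncating it to MAX_LENGTH_OF_MODEL_TITLE characters and replacing "/" so the title is filesystem-safe. A minimal standalone sketch of the same sanitization, with an assumed limit of 50 characters (the real constant is defined elsewhere in server.py):

MAX_LENGTH_OF_MODEL_TITLE = 50  # assumed value, for illustration only

def tournament_csv_filename(model_name, category):
    # Truncate the model name and replace "/" so the title is safe in a filename.
    safe_name = model_name[:MAX_LENGTH_OF_MODEL_TITLE].replace("/", "_")
    return f"Tournament table - {safe_name} - {category}.csv"

print(tournament_csv_filename("example-team/example-model", "some-category"))
# -> Tournament table - example-team_example-model - some-category.csv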
@@ -285,7 +285,7 @@ class LeaderboardServer:
 with self.results_dataset_local_snapshot_lock.ro:
     for submission_file in glob.glob(os.path.join(self.results_dataset_local_snapshot, "data") + "/*.json"):
         data = json.load(open(submission_file))
-        metadata = data.get("metadata")
+        metadata = data.get("submission_metadata")
         if metadata is None:
             continue
         submission_id = metadata["submission_id"]
@@ -293,7 +293,7 @@ class LeaderboardServer:
 submission_ids.add(submission_id)
 submission_id_to_file[submission_id] = submission_file
 submission_id_to_model_title[submission_id] = metadata["team_name"] + "/" + metadata["model_name"]
-submission_id_to_data[submission_id] = {"results": data["results"], "metadata": metadata}
+submission_id_to_data[submission_id] = {"results": data["results"], "submission_metadata": metadata}

 with self.var_lock.rw:
     self.submission_ids = submission_ids
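For context, after this change a results file under data/ is expected to carry its metadata in a top-level "submission_metadata" object, and files without that key are skipped by the loader above. A hedged sketch of the shape implied by the fields read in this diff (all values are placeholders; the real schema may contain more keys):

import json

# Illustrative new-format submission file; only keys referenced in this diff are shown.
example_submission = {
    "submission_metadata": {
        "submission_id": "example-submission-id",
        "team_name": "example-team",
        "model_name": "example-model",
        "link_to_model": "https://example.org/example-model",
        "model_type": "example-type",
        "parameters": "example-parameters",
        "submission_timestamp": 1700000000,
    },
    "results": {},
    "predictions": {},
}

print(json.dumps(example_submission, indent=2))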
@@ -406,14 +406,14 @@ class LeaderboardServer:
 match_task_result_details = "\n".join(f"{k}: {v}" for k, v in match_task_result_details.items())
 match_results[task] = f'<abbr title={xmlQuoteAttr(match_task_result_details)}>{match_task_result_significant}</abbr>'

-model_link = data["metadata"]["link_to_model"]
-model_title = data["metadata"]["team_name"] + "/" + data["metadata"]["model_name"]
+model_link = data["submission_metadata"]["link_to_model"]
+model_title = data["submission_metadata"]["team_name"] + "/" + data["submission_metadata"]["model_name"]
 if to_csv:
     match_results["model"] = model_title
     match_results["link_to_model"] = model_link
 else:
-    model_title_abbr_team_name = self.abbreviate(data["metadata"]["team_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
-    model_title_abbr_model_name = self.abbreviate(data["metadata"]["model_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
+    model_title_abbr_team_name = self.abbreviate(data["submission_metadata"]["team_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
+    model_title_abbr_model_name = self.abbreviate(data["submission_metadata"]["model_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
     model_title_abbr_html = f'<div style="font-size: 10px;">{xmlAndMarkdownEscape(model_title_abbr_team_name)}</div>{xmlAndMarkdownEscape(model_title_abbr_model_name)}'
     match_results["model"] = f'<a href={xmlQuoteAttr(model_link)} title={xmlQuoteAttr(model_title)}>{model_title_abbr_html}</a>'

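self.abbreviate shortens the team and model names before they are rendered into the leaderboard cell; its implementation is not part of this diff. One plausible, purely hypothetical version that truncates to a maximum length and appends an ellipsis:

def abbreviate(text, max_length):
    # Hypothetical helper: keep short strings as-is, otherwise truncate and append an ellipsis.
    return text if len(text) <= max_length else text[:max_length - 1] + "…"

print(abbreviate("a-very-long-model-name-for-demo", 16))  # a-very-long-mod…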
@@ -492,7 +492,7 @@ class LeaderboardServer:
 else:
     data = self.submission_id_to_data[submission_id]

-if submission_id != data["metadata"]["submission_id"]:
+if submission_id != data["submission_metadata"]["submission_id"]:
     raise gr.Error(f"Proper submission [{submission_id}] not found")

 local_results = {}
@@ -534,22 +534,22 @@ class LeaderboardServer:
 else:
     local_results["average_score"] = win_score[category]

-model_link = data["metadata"]["link_to_model"]
-model_title = data["metadata"]["team_name"] + "/" + data["metadata"]["model_name"]
+model_link = data["submission_metadata"]["link_to_model"]
+model_title = data["submission_metadata"]["team_name"] + "/" + data["submission_metadata"]["model_name"]
 if to_csv:
     local_results["model"] = model_title
     local_results["link_to_model"] = model_link
 else:
-    model_title_abbr_team_name = self.abbreviate(data["metadata"]["team_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
-    model_title_abbr_model_name = self.abbreviate(data["metadata"]["model_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
+    model_title_abbr_team_name = self.abbreviate(data["submission_metadata"]["team_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
+    model_title_abbr_model_name = self.abbreviate(data["submission_metadata"]["model_name"], self.MAX_LENGTH_OF_MODEL_TITLE)
     model_title_abbr_html = f'<div style="font-size: 10px;">{xmlAndMarkdownEscape(model_title_abbr_team_name)}</div>{xmlAndMarkdownEscape(model_title_abbr_model_name)}'
     local_results["model"] = f'<a href={xmlQuoteAttr(model_link)} title={xmlQuoteAttr(model_title)}>{model_title_abbr_html}</a>'

-release = data["metadata"].get("submission_timestamp")
+release = data["submission_metadata"].get("submission_timestamp")
 release = time.strftime("%Y-%m-%d", time.gmtime(release)) if release else "N/A"
 local_results["release"] = release
-local_results["model_type"] = data["metadata"]["model_type"]
-local_results["parameters"] = data["metadata"]["parameters"]
+local_results["model_type"] = data["submission_metadata"]["model_type"]
+local_results["parameters"] = data["submission_metadata"]["parameters"]

 if pre_submit and submission_id == pre_submit.submission_id:
     processed_results.insert(0, local_results)
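The "release" value read above is a Unix timestamp stored under submission_metadata["submission_timestamp"], formatted as a date with a fallback of "N/A" when missing. A minimal standalone sketch of that formatting:

import time

def format_release(timestamp):
    # Unix timestamp (seconds, UTC) -> "YYYY-MM-DD", or "N/A" when missing.
    return time.strftime("%Y-%m-%d", time.gmtime(timestamp)) if timestamp else "N/A"

print(format_release(1700000000))  # 2023-11-14
print(format_release(None))        # N/A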
@@ -688,7 +688,7 @@ class LeaderboardServer:
 with open(file, "r") as f:
     data = json.load(f)

-data["metadata"] = metadata
+data["submission_metadata"] = metadata

 metadata["model_predictions_sha256"] = self.get_sha256_hexdigest(data["predictions"])
 metadata["model_results_sha256"] = self.get_sha256_hexdigest(data["results"])
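get_sha256_hexdigest fingerprints the predictions and results blocks of a submission; its implementation is not shown in this diff. A hedged sketch of one way such a helper could hash a JSON-serializable object deterministically:

import hashlib
import json

def get_sha256_hexdigest(obj):
    # Hypothetical helper: serialize deterministically, then hash the UTF-8 bytes.
    payload = json.dumps(obj, sort_keys=True, ensure_ascii=False).encode("utf-8")
    return hashlib.sha256(payload).hexdigest()

print(get_sha256_hexdigest({"results": {"task": 0.5}}))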
@@ -755,4 +755,4 @@ class LeaderboardServer:
     raise gr.Error(f"Submission [{submission_id}] not found")
 else:
     data = self.submission_id_to_data[submission_id]
-    return data["metadata"]
+    return data["submission_metadata"]
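Older result files that still use the top-level "metadata" key are simply skipped by the new loader. A one-off migration, not part of this commit and shown only as a hedged sketch (assuming the files live under data/*.json as in the loader above), could rename the key in place:

import glob
import json
import os

# Hypothetical one-off migration: rename the old "metadata" key to "submission_metadata"
# in existing result files under data/.
for path in glob.glob(os.path.join("data", "*.json")):
    with open(path) as f:
        data = json.load(f)
    if "metadata" in data and "submission_metadata" not in data:
        data["submission_metadata"] = data.pop("metadata")
        with open(path, "w") as f:
            json.dump(data, f, ensure_ascii=False, indent=2)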