This page previews a task-index dataset with one row per lighteval task registration. Its columns and their dtype statistics:

| column | dtype | observed values |
|---|---|---|
| task_id | string | lengths 8–69 |
| name | string | lengths 3–64 |
| suite | string | 6 classes |
| hf_repo | string | 125 classes |
| hf_subset | string | lengths 0–55 |
| file_path | string | 6 classes |
| line_number | int64 | 36–22.8k |
| variable_name | string | 1 class |
| is_subtask | bool | 2 classes |
| main_task | string | 122 classes |
| subtask_count | int64 | 1–1 |
| suites | list | lengths 1–1 |
| subtasks | list | lengths 1–1 |
| is_standalone | bool | 2 classes |
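To work with the index programmatically rather than through the viewer, a table like this can be loaded with the `datasets` library. A minimal sketch, assuming the index lives in a Hub repo; the repo id below is a placeholder, since the actual dataset name is not shown in this preview:

```python
from datasets import load_dataset

# Placeholder repo id -- substitute the actual dataset name for this page.
ds = load_dataset("user/lighteval-task-index", split="train")

# Keep only the HELM-suite rows, then peek at a few task ids.
helm_tasks = ds.filter(lambda row: row["suite"] == "helm")
for row in helm_tasks.select(range(3)):
    print(row["task_id"], "->", f"{row['hf_repo']}/{row['hf_subset']}")
```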
Every row in this preview shares the same values for the remaining columns: file_path = `/Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py`, variable_name = `direct_call`, is_subtask = `true`, main_task = `mmlu`, subtask_count = `1`, and is_standalone = `false`. Likewise, each row's name is its task_id without the suite prefix (`mmlu:<hf_subset>`), suites is `["<suite>"]`, and subtasks is `["<task_id>"]`. The table below therefore lists only the columns that vary:

| task_id | suite | hf_repo | hf_subset | line_number |
|---|---|---|---|---|
| original:mmlu:business_ethics | original | cais/mmlu | business_ethics | 11,435 |
| leaderboard:mmlu:business_ethics | leaderboard | lighteval/mmlu | business_ethics | 11,450 |
| helm:mmlu:business_ethics | helm | lighteval/mmlu | business_ethics | 11,465 |
| original:mmlu:clinical_knowledge | original | cais/mmlu | clinical_knowledge | 11,491 |
| leaderboard:mmlu:clinical_knowledge | leaderboard | lighteval/mmlu | clinical_knowledge | 11,506 |
| helm:mmlu:clinical_knowledge | helm | lighteval/mmlu | clinical_knowledge | 11,521 |
| original:mmlu:college_biology | original | cais/mmlu | college_biology | 11,547 |
| leaderboard:mmlu:college_biology | leaderboard | lighteval/mmlu | college_biology | 11,562 |
| helm:mmlu:college_biology | helm | lighteval/mmlu | college_biology | 11,577 |
| original:mmlu:college_chemistry | original | cais/mmlu | college_chemistry | 11,603 |
| leaderboard:mmlu:college_chemistry | leaderboard | lighteval/mmlu | college_chemistry | 11,618 |
| helm:mmlu:college_chemistry | helm | lighteval/mmlu | college_chemistry | 11,633 |
| original:mmlu:college_computer_science | original | cais/mmlu | college_computer_science | 11,659 |
| leaderboard:mmlu:college_computer_science | leaderboard | lighteval/mmlu | college_computer_science | 11,674 |
| helm:mmlu:college_computer_science | helm | lighteval/mmlu | college_computer_science | 11,689 |
| original:mmlu:college_mathematics | original | cais/mmlu | college_mathematics | 11,715 |
| leaderboard:mmlu:college_mathematics | leaderboard | lighteval/mmlu | college_mathematics | 11,730 |
| helm:mmlu:college_mathematics | helm | lighteval/mmlu | college_mathematics | 11,745 |
| original:mmlu:college_medicine | original | cais/mmlu | college_medicine | 11,771 |
| leaderboard:mmlu:college_medicine | leaderboard | lighteval/mmlu | college_medicine | 11,786 |
| helm:mmlu:college_medicine | helm | lighteval/mmlu | college_medicine | 11,801 |
| original:mmlu:college_physics | original | cais/mmlu | college_physics | 11,827 |
| leaderboard:mmlu:college_physics | leaderboard | lighteval/mmlu | college_physics | 11,842 |
| helm:mmlu:college_physics | helm | lighteval/mmlu | college_physics | 11,857 |
| original:mmlu:computer_security | original | cais/mmlu | computer_security | 11,883 |
| leaderboard:mmlu:computer_security | leaderboard | lighteval/mmlu | computer_security | 11,898 |
| helm:mmlu:computer_security | helm | lighteval/mmlu | computer_security | 11,913 |
| original:mmlu:conceptual_physics | original | cais/mmlu | conceptual_physics | 11,939 |
| leaderboard:mmlu:conceptual_physics | leaderboard | lighteval/mmlu | conceptual_physics | 11,954 |
| helm:mmlu:conceptual_physics | helm | lighteval/mmlu | conceptual_physics | 11,969 |
| original:mmlu:econometrics | original | cais/mmlu | econometrics | 11,995 |
| leaderboard:mmlu:econometrics | leaderboard | lighteval/mmlu | econometrics | 12,010 |
| helm:mmlu:econometrics | helm | lighteval/mmlu | econometrics | 12,025 |
| original:mmlu:electrical_engineering | original | cais/mmlu | electrical_engineering | 12,051 |
| leaderboard:mmlu:electrical_engineering | leaderboard | lighteval/mmlu | electrical_engineering | 12,066 |
| helm:mmlu:electrical_engineering | helm | lighteval/mmlu | electrical_engineering | 12,081 |
| original:mmlu:elementary_mathematics | original | cais/mmlu | elementary_mathematics | 12,107 |
| leaderboard:mmlu:elementary_mathematics | leaderboard | lighteval/mmlu | elementary_mathematics | 12,122 |
| helm:mmlu:elementary_mathematics | helm | lighteval/mmlu | elementary_mathematics | 12,137 |
| original:mmlu:formal_logic | original | cais/mmlu | formal_logic | 12,163 |
| leaderboard:mmlu:formal_logic | leaderboard | lighteval/mmlu | formal_logic | 12,178 |
| helm:mmlu:formal_logic | helm | lighteval/mmlu | formal_logic | 12,193 |
| original:mmlu:global_facts | original | cais/mmlu | global_facts | 12,219 |
| leaderboard:mmlu:global_facts | leaderboard | lighteval/mmlu | global_facts | 12,234 |
| helm:mmlu:global_facts | helm | lighteval/mmlu | global_facts | 12,249 |
| original:mmlu:high_school_biology | original | cais/mmlu | high_school_biology | 12,275 |
| leaderboard:mmlu:high_school_biology | leaderboard | lighteval/mmlu | high_school_biology | 12,290 |
| helm:mmlu:high_school_biology | helm | lighteval/mmlu | high_school_biology | 12,305 |
| original:mmlu:high_school_chemistry | original | cais/mmlu | high_school_chemistry | 12,331 |
| leaderboard:mmlu:high_school_chemistry | leaderboard | lighteval/mmlu | high_school_chemistry | 12,346 |
| helm:mmlu:high_school_chemistry | helm | lighteval/mmlu | high_school_chemistry | 12,361 |
| original:mmlu:high_school_computer_science | original | cais/mmlu | high_school_computer_science | 12,387 |
| leaderboard:mmlu:high_school_computer_science | leaderboard | lighteval/mmlu | high_school_computer_science | 12,402 |
| helm:mmlu:high_school_computer_science | helm | lighteval/mmlu | high_school_computer_science | 12,417 |
| original:mmlu:high_school_european_history | original | cais/mmlu | high_school_european_history | 12,443 |
| leaderboard:mmlu:high_school_european_history | leaderboard | lighteval/mmlu | high_school_european_history | 12,458 |
| helm:mmlu:high_school_european_history | helm | lighteval/mmlu | high_school_european_history | 12,473 |
| original:mmlu:high_school_geography | original | cais/mmlu | high_school_geography | 12,499 |
| leaderboard:mmlu:high_school_geography | leaderboard | lighteval/mmlu | high_school_geography | 12,514 |
| helm:mmlu:high_school_geography | helm | lighteval/mmlu | high_school_geography | 12,529 |
| original:mmlu:high_school_government_and_politics | original | cais/mmlu | high_school_government_and_politics | 12,555 |
| leaderboard:mmlu:high_school_government_and_politics | leaderboard | lighteval/mmlu | high_school_government_and_politics | 12,570 |
| helm:mmlu:high_school_government_and_politics | helm | lighteval/mmlu | high_school_government_and_politics | 12,585 |
| original:mmlu:high_school_macroeconomics | original | cais/mmlu | high_school_macroeconomics | 12,611 |
| leaderboard:mmlu:high_school_macroeconomics | leaderboard | lighteval/mmlu | high_school_macroeconomics | 12,626 |
| helm:mmlu:high_school_macroeconomics | helm | lighteval/mmlu | high_school_macroeconomics | 12,641 |
| original:mmlu:high_school_mathematics | original | cais/mmlu | high_school_mathematics | 12,667 |
| leaderboard:mmlu:high_school_mathematics | leaderboard | lighteval/mmlu | high_school_mathematics | 12,682 |
| helm:mmlu:high_school_mathematics | helm | lighteval/mmlu | high_school_mathematics | 12,697 |
| original:mmlu:high_school_microeconomics | original | cais/mmlu | high_school_microeconomics | 12,723 |
| leaderboard:mmlu:high_school_microeconomics | leaderboard | lighteval/mmlu | high_school_microeconomics | 12,738 |
| helm:mmlu:high_school_microeconomics | helm | lighteval/mmlu | high_school_microeconomics | 12,753 |
| original:mmlu:high_school_physics | original | cais/mmlu | high_school_physics | 12,779 |
| leaderboard:mmlu:high_school_physics | leaderboard | lighteval/mmlu | high_school_physics | 12,794 |
| helm:mmlu:high_school_physics | helm | lighteval/mmlu | high_school_physics | 12,809 |
| original:mmlu:high_school_psychology | original | cais/mmlu | high_school_psychology | 12,835 |
| leaderboard:mmlu:high_school_psychology | leaderboard | lighteval/mmlu | high_school_psychology | 12,850 |
| helm:mmlu:high_school_psychology | helm | lighteval/mmlu | high_school_psychology | 12,865 |
| original:mmlu:high_school_statistics | original | cais/mmlu | high_school_statistics | 12,891 |
| leaderboard:mmlu:high_school_statistics | leaderboard | lighteval/mmlu | high_school_statistics | 12,906 |
| helm:mmlu:high_school_statistics | helm | lighteval/mmlu | high_school_statistics | 12,921 |
| original:mmlu:high_school_us_history | original | cais/mmlu | high_school_us_history | 12,947 |
| leaderboard:mmlu:high_school_us_history | leaderboard | lighteval/mmlu | high_school_us_history | 12,962 |
| helm:mmlu:high_school_us_history | helm | lighteval/mmlu | high_school_us_history | 12,977 |
| original:mmlu:high_school_world_history | original | cais/mmlu | high_school_world_history | 13,003 |
| leaderboard:mmlu:high_school_world_history | leaderboard | lighteval/mmlu | high_school_world_history | 13,018 |
| helm:mmlu:high_school_world_history | helm | lighteval/mmlu | high_school_world_history | 13,033 |
| original:mmlu:human_aging | original | cais/mmlu | human_aging | 13,059 |
| leaderboard:mmlu:human_aging | leaderboard | lighteval/mmlu | human_aging | 13,074 |
| helm:mmlu:human_aging | helm | lighteval/mmlu | human_aging | 13,089 |
| original:mmlu:human_sexuality | original | cais/mmlu | human_sexuality | 13,115 |
| leaderboard:mmlu:human_sexuality | leaderboard | lighteval/mmlu | human_sexuality | 13,130 |
| helm:mmlu:human_sexuality | helm | lighteval/mmlu | human_sexuality | 13,145 |
| original:mmlu:international_law | original | cais/mmlu | international_law | 13,171 |
| leaderboard:mmlu:international_law | leaderboard | lighteval/mmlu | international_law | 13,186 |
| helm:mmlu:international_law | helm | lighteval/mmlu | international_law | 13,201 |
| original:mmlu:jurisprudence | original | cais/mmlu | jurisprudence | 13,227 |
| leaderboard:mmlu:jurisprudence | leaderboard | lighteval/mmlu | jurisprudence | 13,242 |
| helm:mmlu:jurisprudence | helm | lighteval/mmlu | jurisprudence | 13,257 |
| original:mmlu:logical_fallacies | original | cais/mmlu | logical_fallacies | 13,283 |

*(The preview is truncated after the `original:mmlu:logical_fallacies` row.)*
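Because each task_id follows lighteval's `suite:task` naming, rows from this index can be mapped to `--tasks` arguments. A minimal sketch, assuming the historical `suite|task|few_shot|truncate_few_shots` spec format; newer lighteval releases may use a different syntax, so check the docs for your installed version:

```python
def to_task_spec(row: dict, few_shot: int = 5) -> str:
    """Build a lighteval --tasks entry from one index row.

    Assumes the "suite|task|few_shot|truncate_few_shots" format,
    which is an assumption about the installed lighteval version.
    """
    return f"{row['suite']}|{row['name']}|{few_shot}|0"

row = {"suite": "helm", "name": "mmlu:business_ethics"}
print(to_task_spec(row))  # -> helm|mmlu:business_ethics|5|0
```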