| task_id | name | suite | hf_repo | hf_subset | file_path | line_number | variable_name | is_subtask | main_task | subtask_count | suites | subtasks | is_standalone |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| lighteval:bigbench:english_proverbs | bigbench:english_proverbs | lighteval | tasksource/bigbench | english_proverbs | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,622 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:english_proverbs"] | false |
| lighteval:bigbench:english_russian_proverbs | bigbench:english_russian_proverbs | lighteval | tasksource/bigbench | english_russian_proverbs | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,637 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:english_russian_proverbs"] | false |
| lighteval:bigbench:entailed_polarity | bigbench:entailed_polarity | lighteval | tasksource/bigbench | entailed_polarity | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,652 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:entailed_polarity"] | false |
| lighteval:bigbench:entailed_polarity_hindi | bigbench:entailed_polarity_hindi | lighteval | tasksource/bigbench | entailed_polarity_hindi | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,667 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:entailed_polarity_hindi"] | false |
| helm:entity_data_imputation:Buy | entity_data_imputation:Buy | helm | lighteval/Buy | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,682 | direct_call | true | entity_data_imputation | 1 | ["helm"] | ["helm:entity_data_imputation:Buy"] | false |
| helm:entity_data_imputation:Restaurant | entity_data_imputation:Restaurant | helm | lighteval/Restaurant | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,708 | direct_call | true | entity_data_imputation | 1 | ["helm"] | ["helm:entity_data_imputation:Restaurant"] | false |
| helm:entity_matching:Abt_Buy | entity_matching:Abt_Buy | helm | lighteval/EntityMatching | Abt_Buy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,734 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Abt_Buy"] | false |
| helm:entity_matching:Amazon_Google | entity_matching:Amazon_Google | helm | lighteval/EntityMatching | Amazon_Google | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,760 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Amazon_Google"] | false |
| helm:entity_matching:Beer | entity_matching:Beer | helm | lighteval/EntityMatching | Beer | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,786 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Beer"] | false |
| helm:entity_matching:Company | entity_matching:Company | helm | lighteval/EntityMatching | Company | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,812 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Company"] | false |
| helm:entity_matching:DBLP_ACM | entity_matching:DBLP_ACM | helm | lighteval/EntityMatching | DBLP_ACM | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,838 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:DBLP_ACM"] | false |
| helm:entity_matching:DBLP_GoogleScholar | entity_matching:DBLP_GoogleScholar | helm | lighteval/EntityMatching | DBLP_GoogleScholar | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,864 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:DBLP_GoogleScholar"] | false |
| helm:entity_matching:Dirty_DBLP_ACM | entity_matching:Dirty_DBLP_ACM | helm | lighteval/EntityMatching | Dirty_DBLP_ACM | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,890 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Dirty_DBLP_ACM"] | false |
| helm:entity_matching:Dirty_DBLP_GoogleScholar | entity_matching:Dirty_DBLP_GoogleScholar | helm | lighteval/EntityMatching | Dirty_DBLP_GoogleScholar | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,916 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Dirty_DBLP_GoogleScholar"] | false |
| helm:entity_matching:Dirty_Walmart_Amazon | entity_matching:Dirty_Walmart_Amazon | helm | lighteval/EntityMatching | Dirty_Walmart_Amazon | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,942 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Dirty_Walmart_Amazon"] | false |
| helm:entity_matching:Dirty_iTunes_Amazon | entity_matching:Dirty_iTunes_Amazon | helm | lighteval/EntityMatching | Dirty_iTunes_Amazon | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,968 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Dirty_iTunes_Amazon"] | false |
| helm:entity_matching=Fodors_Zagats | entity_matching=Fodors_Zagats | helm | lighteval/EntityMatching | Fodors_Zagats | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 7,994 | direct_call | false | entity_matching=Fodors_Zagats | 1 | ["helm"] | ["helm:entity_matching=Fodors_Zagats"] | true |
| helm:entity_matching:Walmart_Amazon | entity_matching:Walmart_Amazon | helm | lighteval/EntityMatching | Walmart_Amazon | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,020 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:Walmart_Amazon"] | false |
| helm:entity_matching:iTunes_Amazon | entity_matching:iTunes_Amazon | helm | lighteval/EntityMatching | iTunes_Amazon | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,046 | direct_call | true | entity_matching | 1 | ["helm"] | ["helm:entity_matching:iTunes_Amazon"] | false |
| lighteval:bigbench:epistemic_reasoning | bigbench:epistemic_reasoning | lighteval | tasksource/bigbench | epistemic_reasoning | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,072 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:epistemic_reasoning"] | false |
| lighteval:ethics:commonsense | ethics:commonsense | lighteval | lighteval/hendrycks_ethics | commonsense | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,087 | direct_call | true | ethics | 1 | ["lighteval"] | ["lighteval:ethics:commonsense"] | false |
| lighteval:ethics:deontology | ethics:deontology | lighteval | lighteval/hendrycks_ethics | deontology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,102 | direct_call | true | ethics | 1 | ["lighteval"] | ["lighteval:ethics:deontology"] | false |
| lighteval:ethics:justice | ethics:justice | lighteval | lighteval/hendrycks_ethics | justice | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,117 | direct_call | true | ethics | 1 | ["lighteval"] | ["lighteval:ethics:justice"] | false |
| lighteval:ethics:utilitarianism | ethics:utilitarianism | lighteval | lighteval/hendrycks_ethics | utilitarianism | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,132 | direct_call | true | ethics | 1 | ["lighteval"] | ["lighteval:ethics:utilitarianism"] | false |
| lighteval:ethics:virtue | ethics:virtue | lighteval | lighteval/hendrycks_ethics | virtue | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,147 | direct_call | true | ethics | 1 | ["lighteval"] | ["lighteval:ethics:virtue"] | false |
| lighteval:bigbench:evaluating_information_essentiality | bigbench:evaluating_information_essentiality | lighteval | tasksource/bigbench | evaluating_information_essentiality | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,162 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:evaluating_information_essentiality"] | false |
| lighteval:bigbench:fact_checker | bigbench:fact_checker | lighteval | tasksource/bigbench | fact_checker | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,177 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:fact_checker"] | false |
| lighteval:bigbench:fantasy_reasoning | bigbench:fantasy_reasoning | lighteval | tasksource/bigbench | fantasy_reasoning | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,192 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:fantasy_reasoning"] | false |
| lighteval:bigbench:few_shot_nlg | bigbench:few_shot_nlg | lighteval | tasksource/bigbench | few_shot_nlg | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,207 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:few_shot_nlg"] | false |
| lighteval:bigbench:figure_of_speech_detection | bigbench:figure_of_speech_detection | lighteval | tasksource/bigbench | figure_of_speech_detection | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,222 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:figure_of_speech_detection"] | false |
| lighteval:bigbench_lite:formal_fallacies_syllogisms_negation | bigbench_lite:formal_fallacies_syllogisms_negation | lighteval | tasksource/bigbench | formal_fallacies_syllogisms_negation | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,237 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:formal_fallacies_syllogisms_negation"] | false |
| lighteval:bigbench:gem | bigbench:gem | lighteval | tasksource/bigbench | gem | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,252 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:gem"] | false |
| lighteval:bigbench:gender_inclusive_sentences_german | bigbench:gender_inclusive_sentences_german | lighteval | tasksource/bigbench | gender_inclusive_sentences_german | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,267 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:gender_inclusive_sentences_german"] | false |
| lighteval:bigbench:general_knowledge | bigbench:general_knowledge | lighteval | tasksource/bigbench | general_knowledge | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,282 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:general_knowledge"] | false |
| lighteval:glue:cola | glue:cola | lighteval | glue | cola | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,317 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:cola"] | false |
| lighteval:glue:mnli | glue:mnli | lighteval | glue | mnli_matched | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,332 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:mnli"] | false |
| lighteval:glue:mnli_mismatched | glue:mnli_mismatched | lighteval | glue | mnli_mismatched | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,347 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:mnli_mismatched"] | false |
| lighteval:glue:mrpc | glue:mrpc | lighteval | glue | mrpc | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,362 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:mrpc"] | false |
| lighteval:glue:qnli | glue:qnli | lighteval | glue | qnli | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,377 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:qnli"] | false |
| lighteval:glue:qqp | glue:qqp | lighteval | glue | qqp | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,392 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:qqp"] | false |
| lighteval:glue:rte | glue:rte | lighteval | glue | rte | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,407 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:rte"] | false |
| lighteval:glue:sst2 | glue:sst2 | lighteval | glue | sst2 | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,422 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:sst2"] | false |
| lighteval:glue:stsb | glue:stsb | lighteval | glue | stsb | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,437 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:stsb"] | false |
| lighteval:glue:wnli | glue:wnli | lighteval | glue | wnli | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,452 | direct_call | true | glue | 1 | ["lighteval"] | ["lighteval:glue:wnli"] | false |
| lighteval:bigbench:goal_step_wikihow | bigbench:goal_step_wikihow | lighteval | tasksource/bigbench | goal_step_wikihow | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,467 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:goal_step_wikihow"] | false |
| lighteval:gpqa:mc | gpqa:mc | lighteval | Idavidrein/gpqa | gpqa_main | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,482 | direct_call | true | gpqa | 1 | ["lighteval"] | ["lighteval:gpqa:mc"] | false |
| lighteval:gpqa:diamond | gpqa:diamond | lighteval | Idavidrein/gpqa | gpqa_diamond | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,497 | direct_call | true | gpqa | 1 | ["lighteval"] | ["lighteval:gpqa:diamond"] | false |
| lighteval:gpqa:extended | gpqa:extended | lighteval | Idavidrein/gpqa | gpqa_extended | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,512 | direct_call | true | gpqa | 1 | ["lighteval"] | ["lighteval:gpqa:extended"] | false |
| lighteval:gpqa:main | gpqa:main | lighteval | Idavidrein/gpqa | gpqa_main | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,527 | direct_call | true | gpqa | 1 | ["lighteval"] | ["lighteval:gpqa:main"] | false |
| lighteval:bigbench:gre_reading_comprehension | bigbench:gre_reading_comprehension | lighteval | tasksource/bigbench | gre_reading_comprehension | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,542 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:gre_reading_comprehension"] | false |
| lighteval:gsm_plus | gsm_plus | lighteval | qintongli/GSM-Plus | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,557 | direct_call | false | gsm_plus | 1 | ["lighteval"] | ["lighteval:gsm_plus"] | true |
| leaderboard:gsm8k | gsm8k | leaderboard | gsm8k | main | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,572 | direct_call | false | gsm8k | 1 | ["leaderboard"] | ["leaderboard:gsm8k"] | true |
| lighteval:gsm8k | gsm8k | lighteval | lighteval/openai/gsm8k | main | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,589 | direct_call | false | gsm8k | 1 | ["lighteval"] | ["lighteval:gsm8k"] | true |
| lighteval:headqa:en | headqa:en | lighteval | lighteval/headqa_harness | en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,606 | direct_call | true | headqa | 1 | ["lighteval"] | ["lighteval:headqa:en"] | false |
| lighteval:headqa:es | headqa:es | lighteval | lighteval/headqa_harness | es | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,624 | direct_call | true | headqa | 1 | ["lighteval"] | ["lighteval:headqa:es"] | false |
| leaderboard:hellaswag | hellaswag | leaderboard | hellaswag | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,642 | direct_call | false | hellaswag | 1 | ["leaderboard"] | ["leaderboard:hellaswag"] | true |
| helm:hellaswag | hellaswag | helm | hellaswag | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,660 | direct_call | false | hellaswag | 1 | ["helm"] | ["helm:hellaswag"] | true |
| lighteval:bigbench:hhh_alignment | bigbench:hhh_alignment | lighteval | tasksource/bigbench | hhh_alignment | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,686 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:hhh_alignment"] | false |
| lighteval:bigbench:hindi_question_answering | bigbench:hindi_question_answering | lighteval | tasksource/bigbench | hindi_question_answering | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,701 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:hindi_question_answering"] | false |
| lighteval:bigbench_lite:hindu_knowledge | bigbench_lite:hindu_knowledge | lighteval | tasksource/bigbench | hindu_knowledge | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,716 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:hindu_knowledge"] | false |
| lighteval:bigbench:hinglish_toxicity | bigbench:hinglish_toxicity | lighteval | tasksource/bigbench | hinglish_toxicity | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,731 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:hinglish_toxicity"] | false |
| lighteval:bigbench:human_organs_senses | bigbench:human_organs_senses | lighteval | tasksource/bigbench | human_organs_senses | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,746 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:human_organs_senses"] | false |
| lighteval:bigbench:hyperbaton | bigbench:hyperbaton | lighteval | tasksource/bigbench | hyperbaton | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,761 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:hyperbaton"] | false |
| lighteval:bigbench:identify_math_theorems | bigbench:identify_math_theorems | lighteval | tasksource/bigbench | identify_math_theorems | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,776 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:identify_math_theorems"] | false |
| lighteval:bigbench:identify_odd_metaphor | bigbench:identify_odd_metaphor | lighteval | tasksource/bigbench | identify_odd_metaphor | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,791 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:identify_odd_metaphor"] | false |
| helm:imdb | imdb | helm | lighteval/IMDB_helm | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,806 | direct_call | false | imdb | 1 | ["helm"] | ["helm:imdb"] | false |
| helm:imdb:contrastset | imdb:contrastset | helm | lighteval/IMDB_helm | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,834 | direct_call | true | imdb | 1 | ["helm"] | ["helm:imdb:contrastset"] | false |
| lighteval:bigbench:implicatures | bigbench:implicatures | lighteval | tasksource/bigbench | implicatures | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,862 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:implicatures"] | false |
| lighteval:bigbench:implicit_relations | bigbench:implicit_relations | lighteval | tasksource/bigbench | implicit_relations | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,877 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:implicit_relations"] | false |
| lighteval:bigbench:intent_recognition | bigbench:intent_recognition | lighteval | tasksource/bigbench | intent_recognition | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,892 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:intent_recognition"] | false |
| helm:interactive_qa_mmlu:abstract_algebra | interactive_qa_mmlu:abstract_algebra | helm | lighteval/mmlu | abstract_algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,907 | direct_call | true | interactive_qa_mmlu | 1 | ["helm"] | ["helm:interactive_qa_mmlu:abstract_algebra"] | false |
| helm:interactive_qa_mmlu:college_chemistry | interactive_qa_mmlu:college_chemistry | helm | lighteval/mmlu | college_chemistry | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,933 | direct_call | true | interactive_qa_mmlu | 1 | ["helm"] | ["helm:interactive_qa_mmlu:college_chemistry"] | false |
| helm:interactive_qa_mmlu:global_facts | interactive_qa_mmlu:global_facts | helm | lighteval/mmlu | global_facts | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,959 | direct_call | true | interactive_qa_mmlu | 1 | ["helm"] | ["helm:interactive_qa_mmlu:global_facts"] | false |
| helm:interactive_qa_mmlu:miscellaneous | interactive_qa_mmlu:miscellaneous | helm | lighteval/mmlu | miscellaneous | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 8,985 | direct_call | true | interactive_qa_mmlu | 1 | ["helm"] | ["helm:interactive_qa_mmlu:miscellaneous"] | false |
| helm:interactive_qa_mmlu:nutrition | interactive_qa_mmlu:nutrition | helm | lighteval/mmlu | nutrition | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,011 | direct_call | true | interactive_qa_mmlu | 1 | ["helm"] | ["helm:interactive_qa_mmlu:nutrition"] | false |
| helm:interactive_qa_mmlu:us_foreign_policy | interactive_qa_mmlu:us_foreign_policy | helm | lighteval/mmlu | us_foreign_policy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,037 | direct_call | true | interactive_qa_mmlu | 1 | ["helm"] | ["helm:interactive_qa_mmlu:us_foreign_policy"] | false |
| lighteval:bigbench:international_phonetic_alphabet_nli | bigbench:international_phonetic_alphabet_nli | lighteval | tasksource/bigbench | international_phonetic_alphabet_nli | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,063 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:international_phonetic_alphabet_nli"] | false |
| lighteval:bigbench:international_phonetic_alphabet_transliterate | bigbench:international_phonetic_alphabet_transliterate | lighteval | tasksource/bigbench | international_phonetic_alphabet_transliterate | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,078 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:international_phonetic_alphabet_transliterate"] | false |
| lighteval:bigbench:intersect_geometry | bigbench:intersect_geometry | lighteval | tasksource/bigbench | intersect_geometry | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,093 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:intersect_geometry"] | false |
| lighteval:bigbench:irony_identification | bigbench:irony_identification | lighteval | tasksource/bigbench | irony_identification | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,108 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:irony_identification"] | false |
| lighteval:iwslt17:ar-en | iwslt17:ar-en | lighteval | lighteval/sacrebleu_manual | iwslt17_ar-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,123 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:ar-en"] | false |
| lighteval:iwslt17:de-en | iwslt17:de-en | lighteval | lighteval/sacrebleu_manual | iwslt17_de-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,138 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:de-en"] | false |
| lighteval:iwslt17:en-ar | iwslt17:en-ar | lighteval | lighteval/sacrebleu_manual | iwslt17_ar-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,153 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:en-ar"] | false |
| lighteval:iwslt17:en-de | iwslt17:en-de | lighteval | lighteval/sacrebleu_manual | iwslt17_en-de | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,168 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:en-de"] | false |
| lighteval:iwslt17:en-fr | iwslt17:en-fr | lighteval | lighteval/sacrebleu_manual | iwslt17_en-fr | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,183 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:en-fr"] | false |
| lighteval:iwslt17:en-ja | iwslt17:en-ja | lighteval | lighteval/sacrebleu_manual | iwslt17_en-ja | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,198 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:en-ja"] | false |
| lighteval:iwslt17:en-ko | iwslt17:en-ko | lighteval | lighteval/sacrebleu_manual | iwslt17_en-ko | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,213 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:en-ko"] | false |
| lighteval:iwslt17:en-zh | iwslt17:en-zh | lighteval | lighteval/sacrebleu_manual | iwslt17_en-zh | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,228 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:en-zh"] | false |
| lighteval:iwslt17:fr-en | iwslt17:fr-en | lighteval | lighteval/sacrebleu_manual | iwslt17_fr-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,243 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:fr-en"] | false |
| lighteval:iwslt17:ja-en | iwslt17:ja-en | lighteval | lighteval/sacrebleu_manual | iwslt17_ja-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,258 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:ja-en"] | false |
| lighteval:iwslt17:ko-en | iwslt17:ko-en | lighteval | lighteval/sacrebleu_manual | iwslt17_ko-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,273 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:ko-en"] | false |
| lighteval:iwslt17:zh-en | iwslt17:zh-en | lighteval | lighteval/sacrebleu_manual | iwslt17_zh-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,288 | direct_call | true | iwslt17 | 1 | ["lighteval"] | ["lighteval:iwslt17:zh-en"] | false |
| lighteval:jeopardy | jeopardy | lighteval | openaccess-ai-collective/jeopardy | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,303 | direct_call | false | jeopardy | 1 | ["lighteval"] | ["lighteval:jeopardy"] | true |
| lighteval:bigbench:kanji_ascii | bigbench:kanji_ascii | lighteval | tasksource/bigbench | kanji_ascii | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,330 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:kanji_ascii"] | false |
| lighteval:bigbench:kannada | bigbench:kannada | lighteval | tasksource/bigbench | kannada | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,345 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:kannada"] | false |
| lighteval:bigbench:key_value_maps | bigbench:key_value_maps | lighteval | tasksource/bigbench | key_value_maps | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,360 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:key_value_maps"] | false |
| lighteval:bigbench_lite:known_unknowns | bigbench_lite:known_unknowns | lighteval | tasksource/bigbench | known_unknowns | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,375 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:known_unknowns"] | false |
| lighteval:lambada:standard | lambada:standard | lighteval | lambada | plain_text | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,390 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:standard"] | false |
| lighteval:lambada:standard_cloze | lambada:standard_cloze | lighteval | lambada | plain_text | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,405 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:standard_cloze"] | false |
| lighteval:lambada:openai | lambada:openai | lighteval | EleutherAI/lambada_openai | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,420 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:openai"] | false |