| task_id | name | suite | hf_repo | hf_subset | file_path | line_number | variable_name | is_subtask | main_task | subtask_count | suites | subtasks | is_standalone |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| lighteval:lambada:openai:de | lambada:openai:de | lighteval | EleutherAI/lambada_openai | de | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,435 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:openai:de"] | false |
| lighteval:lambada:openai:en | lambada:openai:en | lighteval | EleutherAI/lambada_openai | en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,450 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:openai:en"] | false |
| lighteval:lambada:openai:es | lambada:openai:es | lighteval | EleutherAI/lambada_openai | es | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,465 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:openai:es"] | false |
| lighteval:lambada:openai:fr | lambada:openai:fr | lighteval | EleutherAI/lambada_openai | fr | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,480 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:openai:fr"] | false |
| lighteval:lambada:openai:it | lambada:openai:it | lighteval | EleutherAI/lambada_openai | it | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,495 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:openai:it"] | false |
| lighteval:lambada:openai_cloze | lambada:openai_cloze | lighteval | EleutherAI/lambada_openai | en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,510 | direct_call | true | lambada | 1 | ["lighteval"] | ["lighteval:lambada:openai_cloze"] | false |
| lighteval:bigbench:language_games | bigbench:language_games | lighteval | tasksource/bigbench | language_games | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,525 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:language_games"] | false |
| lighteval:bigbench_lite:language_identification | bigbench_lite:language_identification | lighteval | tasksource/bigbench | language_identification | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,540 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:language_identification"] | false |
| helm:legal_summarization:billsum | legal_summarization:billsum | helm | lighteval/legal_summarization | BillSum | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,555 | direct_call | true | legal_summarization | 1 | ["helm"] | ["helm:legal_summarization:billsum"] | false |
| helm:legal_summarization:eurlexsum | legal_summarization:eurlexsum | helm | lighteval/legal_summarization | EurLexSum | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,577 | direct_call | true | legal_summarization | 1 | ["helm"] | ["helm:legal_summarization:eurlexsum"] | false |
| helm:legal_summarization:multilexsum | legal_summarization:multilexsum | helm | lighteval/legal_summarization | MultiLexSum | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,599 | direct_call | true | legal_summarization | 1 | ["helm"] | ["helm:legal_summarization:multilexsum"] | false |
| helm:legalsupport | legalsupport | helm | lighteval/LegalSupport | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,621 | direct_call | false | legalsupport | 1 | ["helm"] | ["helm:legalsupport"] | true |
| helm:lexglue:case_hold | lexglue:case_hold | helm | lighteval/lexglue | case_hold | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,648 | direct_call | true | lexglue | 1 | ["helm"] | ["helm:lexglue:case_hold"] | false |
| helm:lexglue:ecthr_a | lexglue:ecthr_a | helm | lighteval/lexglue | ecthr_a | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,669 | direct_call | true | lexglue | 1 | ["helm"] | ["helm:lexglue:ecthr_a"] | false |
| helm:lexglue:ecthr_b | lexglue:ecthr_b | helm | lighteval/lexglue | ecthr_b | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,690 | direct_call | true | lexglue | 1 | ["helm"] | ["helm:lexglue:ecthr_b"] | false |
| helm:lexglue:eurlex | lexglue:eurlex | helm | lighteval/lexglue | eurlex | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,711 | direct_call | true | lexglue | 1 | ["helm"] | ["helm:lexglue:eurlex"] | false |
| helm:lexglue:ledgar | lexglue:ledgar | helm | lighteval/lexglue | ledgar | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,732 | direct_call | true | lexglue | 1 | ["helm"] | ["helm:lexglue:ledgar"] | false |
| helm:lexglue:scotus | lexglue:scotus | helm | lighteval/lexglue | scotus | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,753 | direct_call | true | lexglue | 1 | ["helm"] | ["helm:lexglue:scotus"] | false |
| helm:lexglue:unfair_tos | lexglue:unfair_tos | helm | lighteval/lexglue | unfair_tos | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,774 | direct_call | true | lexglue | 1 | ["helm"] | ["helm:lexglue:unfair_tos"] | false |
| helm:lextreme:brazilian_court_decisions_judgment | lextreme:brazilian_court_decisions_judgment | helm | lighteval/lextreme | brazilian_court_decisions_judgment | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,795 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:brazilian_court_decisions_judgment"] | false |
| helm:lextreme:brazilian_court_decisions_unanimity | lextreme:brazilian_court_decisions_unanimity | helm | lighteval/lextreme | brazilian_court_decisions_unanimity | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,816 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:brazilian_court_decisions_unanimity"] | false |
| helm:lextreme:covid19_emergency_event | lextreme:covid19_emergency_event | helm | lighteval/lextreme | covid19_emergency_event | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,837 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:covid19_emergency_event"] | false |
| helm:lextreme:german_argument_mining | lextreme:german_argument_mining | helm | lighteval/lextreme | german_argument_mining | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,858 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:german_argument_mining"] | false |
| helm:lextreme:greek_legal_code_chapter | lextreme:greek_legal_code_chapter | helm | lighteval/lextreme | greek_legal_code_chapter | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,879 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:greek_legal_code_chapter"] | false |
| helm:lextreme:greek_legal_code_subject | lextreme:greek_legal_code_subject | helm | lighteval/lextreme | greek_legal_code_subject | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,900 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:greek_legal_code_subject"] | false |
| helm:lextreme:greek_legal_code_volume | lextreme:greek_legal_code_volume | helm | lighteval/lextreme | greek_legal_code_volume | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,921 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:greek_legal_code_volume"] | false |
| helm:lextreme:greek_legal_ner | lextreme:greek_legal_ner | helm | lighteval/lextreme | greek_legal_ner | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,940 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:greek_legal_ner"] | false |
| helm:lextreme:legalnero | lextreme:legalnero | helm | lighteval/lextreme | legalnero | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,961 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:legalnero"] | false |
| helm:lextreme:lener_br | lextreme:lener_br | helm | lighteval/lextreme | lener_br | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 9,982 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:lener_br"] | false |
| helm:lextreme:mapa_coarse | lextreme:mapa_coarse | helm | lighteval/lextreme | mapa_coarse | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,003 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:mapa_coarse"] | false |
| helm:lextreme:mapa_fine | lextreme:mapa_fine | helm | lighteval/lextreme | mapa_fine | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,024 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:mapa_fine"] | false |
| helm:lextreme:multi_eurlex_level_1 | lextreme:multi_eurlex_level_1 | helm | lighteval/lextreme | multi_eurlex_level_1 | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,045 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:multi_eurlex_level_1"] | false |
| helm:lextreme:multi_eurlex_level_2 | lextreme:multi_eurlex_level_2 | helm | lighteval/lextreme | multi_eurlex_level_2 | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,066 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:multi_eurlex_level_2"] | false |
| helm:lextreme:multi_eurlex_level_3 | lextreme:multi_eurlex_level_3 | helm | lighteval/lextreme | multi_eurlex_level_3 | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,087 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:multi_eurlex_level_3"] | false |
| helm:lextreme:online_terms_of_service_clause_topics | lextreme:online_terms_of_service_clause_topics | helm | lighteval/lextreme | online_terms_of_service_clause_topics | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,108 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:online_terms_of_service_clause_topics"] | false |
| helm:lextreme:online_terms_of_service_unfairness_levels | lextreme:online_terms_of_service_unfairness_levels | helm | lighteval/lextreme | online_terms_of_service_unfairness_levels | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,129 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:online_terms_of_service_unfairness_levels"] | false |
| helm:lextreme:swiss_judgment_prediction | lextreme:swiss_judgment_prediction | helm | lighteval/lextreme | swiss_judgment_prediction | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,150 | direct_call | true | lextreme | 1 | ["helm"] | ["helm:lextreme:swiss_judgment_prediction"] | false |
| lighteval:bigbench:linguistic_mappings | bigbench:linguistic_mappings | lighteval | tasksource/bigbench | linguistic_mappings | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,171 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:linguistic_mappings"] | false |
| lighteval:bigbench_lite:linguistics_puzzles | bigbench_lite:linguistics_puzzles | lighteval | tasksource/bigbench | linguistics_puzzles | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,186 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:linguistics_puzzles"] | false |
| lighteval:bigbench_lite:logic_grid_puzzle | bigbench_lite:logic_grid_puzzle | lighteval | tasksource/bigbench | logic_grid_puzzle | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,201 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:logic_grid_puzzle"] | false |
| lighteval:bigbench:logical_args | bigbench:logical_args | lighteval | tasksource/bigbench | logical_args | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,216 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:logical_args"] | false |
| lighteval:bigbench_lite:logical_deduction | bigbench_lite:logical_deduction | lighteval | tasksource/bigbench | logical_deduction | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,231 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:logical_deduction"] | false |
| lighteval:bigbench:logical_fallacy_detection | bigbench:logical_fallacy_detection | lighteval | tasksource/bigbench | logical_fallacy_detection | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,246 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:logical_fallacy_detection"] | false |
| lighteval:bigbench:logical_sequence | bigbench:logical_sequence | lighteval | tasksource/bigbench | logical_sequence | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,261 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:logical_sequence"] | false |
| lighteval:logiqa | logiqa | lighteval | lighteval/logiqa_harness | logiqa | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,276 | direct_call | false | logiqa | 1 | ["lighteval"] | ["lighteval:logiqa"] | true |
| helm:lsat_qa | lsat_qa | helm | lighteval/lsat_qa | all | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,294 | direct_call | false | lsat_qa | 1 | ["helm"] | ["helm:lsat_qa"] | false |
| helm:lsat_qa:assignment | lsat_qa:assignment | helm | lighteval/lsat_qa | assignment | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,320 | direct_call | true | lsat_qa | 1 | ["helm"] | ["helm:lsat_qa:assignment"] | false |
| helm:lsat_qa:grouping | lsat_qa:grouping | helm | lighteval/lsat_qa | grouping | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,346 | direct_call | true | lsat_qa | 1 | ["helm"] | ["helm:lsat_qa:grouping"] | false |
| helm:lsat_qa:miscellaneous | lsat_qa:miscellaneous | helm | lighteval/lsat_qa | miscellaneous | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,372 | direct_call | true | lsat_qa | 1 | ["helm"] | ["helm:lsat_qa:miscellaneous"] | false |
| helm:lsat_qa:ordering | lsat_qa:ordering | helm | lighteval/lsat_qa | ordering | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,398 | direct_call | true | lsat_qa | 1 | ["helm"] | ["helm:lsat_qa:ordering"] | false |
| lighteval:math_500 | math_500 | lighteval | HuggingFaceH4/MATH-500 | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,424 | direct_call | false | math_500 | 1 | ["lighteval"] | ["lighteval:math_500"] | true |
| lighteval:math_500_gpassk | math_500_gpassk | lighteval | HuggingFaceH4/MATH-500 | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,440 | direct_call | false | math_500_gpassk | 1 | ["lighteval"] | ["lighteval:math_500_gpassk"] | true |
| lighteval:math:algebra | math:algebra | lighteval | DigitalLearningGmbH/MATH-lighteval | algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,454 | direct_call | true | math | 1 | ["lighteval"] | ["lighteval:math:algebra"] | false |
| lighteval:math:counting_and_probability | math:counting_and_probability | lighteval | DigitalLearningGmbH/MATH-lighteval | counting_and_probability | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,479 | direct_call | true | math | 1 | ["lighteval"] | ["lighteval:math:counting_and_probability"] | false |
| lighteval:math:geometry | math:geometry | lighteval | DigitalLearningGmbH/MATH-lighteval | geometry | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,504 | direct_call | true | math | 1 | ["lighteval"] | ["lighteval:math:geometry"] | false |
| lighteval:math:intermediate_algebra | math:intermediate_algebra | lighteval | DigitalLearningGmbH/MATH-lighteval | intermediate_algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,529 | direct_call | true | math | 1 | ["lighteval"] | ["lighteval:math:intermediate_algebra"] | false |
| lighteval:math:number_theory | math:number_theory | lighteval | DigitalLearningGmbH/MATH-lighteval | number_theory | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,554 | direct_call | true | math | 1 | ["lighteval"] | ["lighteval:math:number_theory"] | false |
| lighteval:math:prealgebra | math:prealgebra | lighteval | DigitalLearningGmbH/MATH-lighteval | prealgebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,579 | direct_call | true | math | 1 | ["lighteval"] | ["lighteval:math:prealgebra"] | false |
| lighteval:math:precalculus | math:precalculus | lighteval | DigitalLearningGmbH/MATH-lighteval | precalculus | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,604 | direct_call | true | math | 1 | ["lighteval"] | ["lighteval:math:precalculus"] | false |
| lighteval:math_cot:algebra | math_cot:algebra | lighteval | DigitalLearningGmbH/MATH-lighteval | algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,629 | direct_call | true | math_cot | 1 | ["lighteval"] | ["lighteval:math_cot:algebra"] | false |
| lighteval:math_cot:counting_and_probability | math_cot:counting_and_probability | lighteval | DigitalLearningGmbH/MATH-lighteval | counting_and_probability | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,654 | direct_call | true | math_cot | 1 | ["lighteval"] | ["lighteval:math_cot:counting_and_probability"] | false |
| lighteval:math_cot:geometry | math_cot:geometry | lighteval | DigitalLearningGmbH/MATH-lighteval | geometry | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,679 | direct_call | true | math_cot | 1 | ["lighteval"] | ["lighteval:math_cot:geometry"] | false |
| lighteval:math_cot:intermediate_algebra | math_cot:intermediate_algebra | lighteval | DigitalLearningGmbH/MATH-lighteval | intermediate_algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,704 | direct_call | true | math_cot | 1 | ["lighteval"] | ["lighteval:math_cot:intermediate_algebra"] | false |
| lighteval:math_cot:number_theory | math_cot:number_theory | lighteval | DigitalLearningGmbH/MATH-lighteval | number_theory | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,729 | direct_call | true | math_cot | 1 | ["lighteval"] | ["lighteval:math_cot:number_theory"] | false |
| lighteval:math_cot:prealgebra | math_cot:prealgebra | lighteval | DigitalLearningGmbH/MATH-lighteval | prealgebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,754 | direct_call | true | math_cot | 1 | ["lighteval"] | ["lighteval:math_cot:prealgebra"] | false |
| lighteval:math_cot:precalculus | math_cot:precalculus | lighteval | DigitalLearningGmbH/MATH-lighteval | precalculus | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,779 | direct_call | true | math_cot | 1 | ["lighteval"] | ["lighteval:math_cot:precalculus"] | false |
| lighteval:bigbench:mathematical_induction | bigbench:mathematical_induction | lighteval | tasksource/bigbench | mathematical_induction | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,804 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:mathematical_induction"] | false |
| lighteval:mathqa | mathqa | lighteval | allenai/math_qa | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,819 | direct_call | false | mathqa | 1 | ["lighteval"] | ["lighteval:mathqa"] | true |
| lighteval:bigbench:matrixshapes | bigbench:matrixshapes | lighteval | tasksource/bigbench | matrixshapes | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,837 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:matrixshapes"] | false |
| helm:me_q_sum | me_q_sum | helm | lighteval/me_q_sum | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,852 | direct_call | false | me_q_sum | 1 | ["helm"] | ["helm:me_q_sum"] | true |
| helm:med_dialog:healthcaremagic | med_dialog:healthcaremagic | helm | lighteval/med_dialog | healthcaremagic | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,874 | direct_call | true | med_dialog | 1 | ["helm"] | ["helm:med_dialog:healthcaremagic"] | false |
| helm:med_dialog:icliniq | med_dialog:icliniq | helm | lighteval/med_dialog | icliniq | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,896 | direct_call | true | med_dialog | 1 | ["helm"] | ["helm:med_dialog:icliniq"] | false |
| helm:med_mcqa | med_mcqa | helm | lighteval/med_mcqa | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,918 | direct_call | false | med_mcqa | 1 | ["helm"] | ["helm:med_mcqa"] | true |
| helm:med_paragraph_simplification | med_paragraph_simplification | helm | lighteval/med_paragraph_simplification | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,945 | direct_call | false | med_paragraph_simplification | 1 | ["helm"] | ["helm:med_paragraph_simplification"] | true |
| helm:med_qa | med_qa | helm | bigbio/med_qa | med_qa_en_source | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,967 | direct_call | false | med_qa | 1 | ["helm"] | ["helm:med_qa"] | true |
| lighteval:bigbench:metaphor_boolean | bigbench:metaphor_boolean | lighteval | tasksource/bigbench | metaphor_boolean | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 10,994 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:metaphor_boolean"] | false |
| lighteval:bigbench:metaphor_understanding | bigbench:metaphor_understanding | lighteval | tasksource/bigbench | metaphor_understanding | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,009 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:metaphor_understanding"] | false |
| lighteval:mgsm:en | mgsm:en | lighteval | juletxara/mgsm | en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,024 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:en"] | false |
| lighteval:mgsm:es | mgsm:es | lighteval | juletxara/mgsm | es | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,042 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:es"] | false |
| lighteval:mgsm:fr | mgsm:fr | lighteval | juletxara/mgsm | fr | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,060 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:fr"] | false |
| lighteval:mgsm:de | mgsm:de | lighteval | juletxara/mgsm | de | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,078 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:de"] | false |
| lighteval:mgsm:ru | mgsm:ru | lighteval | juletxara/mgsm | ru | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,096 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:ru"] | false |
| lighteval:mgsm:zh | mgsm:zh | lighteval | juletxara/mgsm | zh | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,114 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:zh"] | false |
| lighteval:mgsm:ja | mgsm:ja | lighteval | juletxara/mgsm | ja | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,132 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:ja"] | false |
| lighteval:mgsm:th | mgsm:th | lighteval | juletxara/mgsm | th | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,150 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:th"] | false |
| lighteval:mgsm:sw | mgsm:sw | lighteval | juletxara/mgsm | sw | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,168 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:sw"] | false |
| lighteval:mgsm:bn | mgsm:bn | lighteval | juletxara/mgsm | bn | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,186 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:bn"] | false |
| lighteval:mgsm:te | mgsm:te | lighteval | juletxara/mgsm | te | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,204 | direct_call | true | mgsm | 1 | ["lighteval"] | ["lighteval:mgsm:te"] | false |
| lighteval:bigbench:minute_mysteries_qa | bigbench:minute_mysteries_qa | lighteval | tasksource/bigbench | minute_mysteries_qa | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,222 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:minute_mysteries_qa"] | false |
| lighteval:bigbench:misconceptions | bigbench:misconceptions | lighteval | tasksource/bigbench | misconceptions | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,237 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:misconceptions"] | false |
| lighteval:bigbench_lite:misconceptions_russian | bigbench_lite:misconceptions_russian | lighteval | tasksource/bigbench | misconceptions_russian | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,252 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:misconceptions_russian"] | false |
| original:mmlu:abstract_algebra | mmlu:abstract_algebra | original | cais/mmlu | abstract_algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,267 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:abstract_algebra"] | false |
| leaderboard:mmlu:abstract_algebra | mmlu:abstract_algebra | leaderboard | lighteval/mmlu | abstract_algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,282 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:abstract_algebra"] | false |
| helm:mmlu:abstract_algebra | mmlu:abstract_algebra | helm | lighteval/mmlu | abstract_algebra | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,297 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:abstract_algebra"] | false |
| original:mmlu:anatomy | mmlu:anatomy | original | cais/mmlu | anatomy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,323 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:anatomy"] | false |
| leaderboard:mmlu:anatomy | mmlu:anatomy | leaderboard | lighteval/mmlu | anatomy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,338 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:anatomy"] | false |
| helm:mmlu:anatomy | mmlu:anatomy | helm | lighteval/mmlu | anatomy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,353 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:anatomy"] | false |
| original:mmlu:astronomy | mmlu:astronomy | original | cais/mmlu | astronomy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,379 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:astronomy"] | false |
| leaderboard:mmlu:astronomy | mmlu:astronomy | leaderboard | lighteval/mmlu | astronomy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,394 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:astronomy"] | false |
| helm:mmlu:astronomy | mmlu:astronomy | helm | lighteval/mmlu | astronomy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 11,409 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:astronomy"] | false |
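To slice this metadata programmatically, a minimal sketch follows. It assumes the table above has been exported to a CSV file with the same column names; the filename `lighteval_task_metadata.csv` is hypothetical, and the snippet uses plain pandas rather than any lighteval API.

```python
# Minimal sketch: filtering the task-metadata table with pandas.
# Assumption: the table was exported as "lighteval_task_metadata.csv"
# (hypothetical filename) with the column names shown in the header.
import pandas as pd

df = pd.read_csv("lighteval_task_metadata.csv")

# All HELM-suite rows whose main task is "lextreme".
lextreme = df[(df["suite"] == "helm") & (df["main_task"] == "lextreme")]
print(lextreme[["task_id", "hf_subset", "line_number"]])

# Standalone entries (boolean columns are stored as the strings "true"/"false").
standalone = df[df["is_standalone"].astype(str).str.lower() == "true"]
print(standalone["task_id"].tolist())
```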