Unverified Commit d5f39bf8 authored by SuperCat, committed by GitHub

Add new dataset MMLU-SR tasks (#2032)



* add mmlusr tasks

* renamed all task names in mmlusr

* edit format and readme

* added mmlu_sr

* mmlu_sr -> mmlusr

* update

---------
Co-authored-by: lintangsutawika <lintang@eleuther.ai>
parent cdd954f9
"dataset_name": "question_and_answer_virology"
"description": "The following are multiple choice questions (with answers) about virology.\n\
\n"
"tag": "mmlusr_question_and_answer_other_tasks"
"include": "_mmlusr_qna_yml"
"task": "mmlusr_question_and_answer_virology"
"task_alias": "virology"
"dataset_name": "question_and_answer_world_religions"
"description": "The following are multiple choice questions (with answers) about world\
\ religions.\n\n"
"tag": "mmlusr_question_and_answer_humanities_tasks"
"include": "_mmlusr_qna_yml"
"task": "mmlusr_question_and_answer_world_religions"
"task_alias": "world religions"
import datasets


def process_docs(dataset: datasets.Dataset) -> datasets.Dataset:
    def _helper(doc):
        # Assuming that the 'answer' field in the dataset now contains numbers 0-3 instead of 'A', 'B', 'C', 'D'
        answer_list = ["A", "B", "C", "D"]
        # Convert numeric index to corresponding letter
        answer_index = int(doc["answer"])  # Make sure the answer is an integer
        answer_letter = answer_list[answer_index]
        out_doc = {
            "questions": doc["question"],
            "choices": [doc["choice1"], doc["choice2"], doc["choice3"], doc["choice4"]],
            "answer": answer_letter,  # Include the letter for clarity
        }
        return out_doc

    return dataset.map(_helper)
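
For reference, a quick sanity check of what process_docs produces. The record below is made up for illustration and is not taken from the MMLU-SR data; it only mirrors the fields the function reads (question, choice1..choice4, numeric answer).

from datasets import Dataset

# Hypothetical single-row dataset with the same schema process_docs expects.
toy = Dataset.from_list([{
    "question": "Which of the following is a DNA virus?",
    "choice1": "HIV",
    "choice2": "Hepatitis B",
    "choice3": "Influenza",
    "choice4": "Measles",
    "answer": 1,  # numeric index into the four choices
}])

processed = process_docs(toy)
print(processed[0]["choices"])  # ['HIV', 'Hepatitis B', 'Influenza', 'Measles']
print(processed[0]["answer"])   # 'B' (index 1 mapped to its letter)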
dataset_path: NiniCat/MMLU-SR
test_split: test
fewshot_split: train
fewshot_config:
  sampler: first_n
output_type: multiple_choice
process_docs: !function utils.process_docs
doc_to_text: "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:"
doc_to_choice: ["A", "B", "C", "D"]
doc_to_target: answer
metric_list:
  - metric: acc
    aggregation: mean
    higher_is_better: true
metadata:
  version: 0.0
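
The harness expands doc_to_text as a Jinja2-style template over each processed document. The snippet below roughly reproduces that expansion for a single made-up document; it is an illustration, not the harness's actual rendering code.

from jinja2 import Template

# Hypothetical processed document (fields produced by process_docs).
doc = {
    "question": "What is 2 + 2?",
    "choices": ["3", "4", "5", "6"],
    "answer": "B",
}

doc_to_text = (
    "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\n"
    "C. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:"
)
print(Template(doc_to_text).render(**doc))
# What is 2 + 2?
# A. 3
# B. 4
# C. 5
# D. 6
# Answer: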
group: mmlusr_question_only
group_alias: MMLU-SR (Question Only)
task:
  - group: mmlusr_qo_stem
    group_alias: STEM (Question Only)
    task:
      - mmlusr_question_only_stem_tasks
    aggregate_metric_list:
      - metric: acc
        weight_by_size: True
    metadata:
      version: 1
  - group: mmlusr_qo_other
    group_alias: Other (Question Only)
    task:
      - mmlusr_question_only_other_tasks
    aggregate_metric_list:
      - metric: acc
        weight_by_size: True
    metadata:
      version: 1
  - group: mmlusr_qo_social_sciences
    group_alias: Social Sciences (Question Only)
    task:
      - mmlusr_question_only_social_sciences_tasks
    aggregate_metric_list:
      - metric: acc
        weight_by_size: True
    metadata:
      version: 1
  - group: mmlusr_qo_humanities
    group_alias: Humanities (Question Only)
    task:
      - mmlusr_question_only_humanities_tasks
    aggregate_metric_list:
      - metric: acc
        weight_by_size: True
    metadata:
      version: 1
aggregate_metric_list:
  - metric: acc
    weight_by_size: True
metadata:
  version: 1
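
With weight_by_size: True, the group accuracy is the subtask accuracies averaged in proportion to each subtask's document count rather than a plain mean. A small sketch of that aggregation follows; the function name and inputs are illustrative and do not mirror the harness's internal API.

# Illustrative size-weighted accuracy aggregate, i.e. what
# `weight_by_size: True` requests for a group of subtasks.
def weighted_accuracy(subtask_results):
    """subtask_results: list of (accuracy, num_docs) pairs."""
    total_docs = sum(n for _, n in subtask_results)
    return sum(acc * n for acc, n in subtask_results) / total_docs

# Example with three hypothetical subtasks of different sizes.
print(weighted_accuracy([(0.80, 100), (0.50, 300), (0.90, 50)]))  # ~0.611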
"dataset_name": "question_only_abstract_algebra"
"description": "The following are multiple choice questions (with answers) about abstract\
\ algebra.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_abstract_algebra"
"task_alias": "abstract algebra"
"dataset_name": "question_only_anatomy"
"description": "The following are multiple choice questions (with answers) about anatomy.\n\
\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_anatomy"
"task_alias": "anatomy"
"dataset_name": "question_only_astronomy"
"description": "The following are multiple choice questions (with answers) about astronomy.\n\
\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_astronomy"
"task_alias": "astronomy"
"dataset_name": "question_only_business_ethics"
"description": "The following are multiple choice questions (with answers) about business\
\ ethics.\n\n"
"tag": "mmlusr_question_only_other_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_business_ethics"
"task_alias": "business ethics"
"dataset_name": "question_only_clinical_knowledge"
"description": "The following are multiple choice questions (with answers) about clinical\
\ knowledge.\n\n"
"tag": "mmlusr_question_only_other_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_clinical_knowledge"
"task_alias": "clinical knowledge"
"dataset_name": "question_only_college_biology"
"description": "The following are multiple choice questions (with answers) about college\
\ biology.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_college_biology"
"task_alias": "college biology"
"dataset_name": "question_only_college_chemistry"
"description": "The following are multiple choice questions (with answers) about college\
\ chemistry.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_college_chemistry"
"task_alias": "college chemistry"
"dataset_name": "question_only_college_computer_science"
"description": "The following are multiple choice questions (with answers) about college\
\ computer science.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_college_computer_science"
"task_alias": "college computer science"
"dataset_name": "question_only_college_mathematics"
"description": "The following are multiple choice questions (with answers) about college\
\ mathematics.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_college_mathematics"
"task_alias": "college mathematics"
"dataset_name": "question_only_college_medicine"
"description": "The following are multiple choice questions (with answers) about college\
\ medicine.\n\n"
"tag": "mmlusr_question_only_other_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_college_medicine"
"task_alias": "college medicine"
"dataset_name": "question_only_college_physics"
"description": "The following are multiple choice questions (with answers) about college\
\ physics.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_college_physics"
"task_alias": "college physics"
"dataset_name": "question_only_computer_security"
"description": "The following are multiple choice questions (with answers) about computer\
\ security.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_computer_security"
"task_alias": "computer security"
"dataset_name": "question_only_conceptual_physics"
"description": "The following are multiple choice questions (with answers) about conceptual\
\ physics.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_conceptual_physics"
"task_alias": "conceptual physics"
"dataset_name": "question_only_econometrics"
"description": "The following are multiple choice questions (with answers) about econometrics.\n\
\n"
"tag": "mmlusr_question_only_social_sciences_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_econometrics"
"task_alias": "econometrics"
"dataset_name": "question_only_electrical_engineering"
"description": "The following are multiple choice questions (with answers) about electrical\
\ engineering.\n\n"
"tag": "mmlusr_question_only_stem_tasks"
"include": "_mmlusr_q_yml"
"task": "mmlusr_question_only_electrical_engineering"
"task_alias": "electrical engineering"