Commit c0d5a660 authored by lintangsutawika
Browse files

resolved conflict

parents f7b81bd4 0d1ef037
# BBH zero-shot task "snarks": per-task overrides layered on the shared
# defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_02_zeroshot_snarks
dataset_name: snarks
description: "Determine which of two sentences is sarcastic.\n\nAccording to Cambridge University Dictionary, sarcasm is \"the use of remarks that clearly mean the opposite of what they say, made in order to hurt someone's feelings or to criticize something in a humorous way.\" Sarcastic sentences often contain satirical or ironic utterances, hyperboles, ambivalent or witty remarks.\n\n"
# BBH zero-shot task "sports_understanding": per-task overrides layered on
# the shared defaults pulled in via `include`.
"dataset_name": "sports_understanding"
"description": "Determine whether an artificially constructed sentence relating to sports is plausible or not.\n\n"
"include": "_zeroshot_template_yaml"
"task": "bbh_alt_pv_02_zeroshot_sports_understanding"
# Quoted for consistency with the stanza's all-quoted scalar style; it was
# the only bare plain scalar here (parses to the same string either way).
"doc_to_target": "target"
# Keep these quoted: bare yes/no resolve to YAML 1.1 booleans.
"doc_to_choice": ["yes", "no"]
# BBH zero-shot task "temporal_sequences": per-task overrides layered on
# the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_02_zeroshot_temporal_sequences
dataset_name: temporal_sequences
description: "Task description: Answer questions about which times certain events could have occurred.\n\n"
# BBH zero-shot task "tracking_shuffled_objects_five_objects": per-task
# overrides layered on the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_02_zeroshot_tracking_shuffled_objects_five_objects
dataset_name: tracking_shuffled_objects_five_objects
description: "A task requiring determining the final positions of a set of objects given their initial positions and a description of a sequence of swaps.\n\n"
# BBH zero-shot task "tracking_shuffled_objects_seven_objects": per-task
# overrides layered on the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_02_zeroshot_tracking_shuffled_objects_seven_objects
dataset_name: tracking_shuffled_objects_seven_objects
description: "A task requiring determining the final positions of a set of objects given their initial positions and a description of a sequence of swaps.\n\n"
# BBH zero-shot task "tracking_shuffled_objects_three_objects": per-task
# overrides layered on the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_02_zeroshot_tracking_shuffled_objects_three_objects
dataset_name: tracking_shuffled_objects_three_objects
description: "A task requiring determining the final positions of a set of objects given their initial positions and a description of a sequence of swaps.\n\n"
# BBH zero-shot task "web_of_lies": per-task overrides layered on the
# shared defaults pulled in via `include`.
"dataset_name": "web_of_lies"
"description": "Evaluate a random boolean function expressed as a word problem.\n\n"
"include": "_zeroshot_template_yaml"
"task": "bbh_alt_pv_02_zeroshot_web_of_lies"
# Quoted for consistency with the stanza's all-quoted scalar style; it was
# the only bare plain scalar here (parses to the same string either way).
"doc_to_target": "target"
# Keep these quoted: bare Yes/No resolve to YAML 1.1 booleans.
"doc_to_choice": ["Yes", "No"]
# Shared zero-shot template for the bbh_alt_pv_03 task group; per-task
# files layer their overrides on top of this via `include`.
group: bbh_alt_pv_03_zeroshot
# Hugging Face Hub dataset repo; each task file selects a subset through
# its own `dataset_name`.
dataset_path: lukaemon/bbh
output_type: multiple_choice
test_split: test
# `!function` tags resolve to Python callables — presumably loaded from
# ../../styles.py relative to this file by the harness's custom YAML
# loader; TODO confirm the path-resolution rule.
doc_to_text: !function ../../styles.styles_03
doc_to_target: !function ../../styles.doc_to_target
doc_to_choice: !function ../../styles.doc_to_choice
# Zero-shot: no in-context examples are prepended.
num_fewshot: 0
metric_list:
- metric: acc
- metric: acc_norm
- metric: brier_score
# BBH zero-shot task "boolean_expressions": per-task overrides layered on
# the shared defaults pulled in via `include`.
"dataset_name": "boolean_expressions"
"description": "Evaluate the result of a random Boolean expression.\n\n"
"include": "_zeroshot_template_yaml"
"task": "bbh_alt_pv_03_zeroshot_boolean_expressions"
# Quoted for consistency with the stanza's all-quoted scalar style; it was
# the only bare plain scalar here (parses to the same string either way).
"doc_to_target": "target"
# Keep these quoted: bare True/False resolve to YAML booleans, not strings.
"doc_to_choice": ["True", "False"]
# BBH zero-shot task "causal_judgement": per-task overrides layered on the
# shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_causal_judgement
dataset_name: causal_judgement
description: "Answer questions about causal attribution.\n\n"
# BBH zero-shot task "date_understanding": per-task overrides layered on
# the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_date_understanding
dataset_name: date_understanding
description: "Infer the date from context.\n\n"
# BBH zero-shot task "disambiguation_qa": per-task overrides layered on
# the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_disambiguation_qa
dataset_name: disambiguation_qa
description: "Clarify the meaning of sentences with ambiguous pronouns.\n\n"
# BBH zero-shot task "formal_fallacies": per-task overrides layered on the
# shared defaults pulled in via `include`.
"dataset_name": "formal_fallacies"
"description": "Distinguish deductively valid arguments from formal fallacies.\n\n"
"include": "_zeroshot_template_yaml"
"task": "bbh_alt_pv_03_zeroshot_formal_fallacies"
# Quoted for consistency with the stanza's all-quoted scalar style; it was
# the only bare plain scalar here (parses to the same string either way).
"doc_to_target": "target"
"doc_to_choice": ["valid", "invalid"]
# BBH zero-shot task "geometric_shapes": per-task overrides layered on the
# shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_geometric_shapes
dataset_name: geometric_shapes
description: "Name geometric shapes from their SVG paths.\n\n"
# BBH zero-shot task "hyperbaton": per-task overrides layered on the
# shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_hyperbaton
dataset_name: hyperbaton
description: "Order adjectives correctly in English sentences.\n\n"
# BBH zero-shot task "logical_deduction_five_objects": per-task overrides
# layered on the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_logical_deduction_five_objects
dataset_name: logical_deduction_five_objects
description: "A logical deduction task which requires deducing the order of a sequence of objects.\n\n"
# BBH zero-shot task "logical_deduction_seven_objects": per-task overrides
# layered on the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_logical_deduction_seven_objects
dataset_name: logical_deduction_seven_objects
description: "A logical deduction task which requires deducing the order of a sequence of objects.\n\n"
# BBH zero-shot task "logical_deduction_three_objects": per-task overrides
# layered on the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_logical_deduction_three_objects
dataset_name: logical_deduction_three_objects
description: "A logical deduction task which requires deducing the order of a sequence of objects.\n\n"
# BBH zero-shot task "movie_recommendation": per-task overrides layered on
# the shared defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_movie_recommendation
dataset_name: movie_recommendation
description: "Recommend movies similar to the given list of movies.\n\n"
# `!function` tag resolves to a Python callable that rewrites the raw docs
# before use — presumably repairing a known issue in this subset; verify
# against ../../utils.py.
process_docs: !function ../../utils.fix_movie_recommendation
# BBH zero-shot task "navigate": per-task overrides layered on the shared
# defaults pulled in via `include`.
include: _zeroshot_template_yaml
task: bbh_alt_pv_03_zeroshot_navigate
dataset_name: navigate
description: "Given a series of navigation instructions, determine whether one would end up back at the starting point.\n\n"
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.