sciq.py
"""
Crowdsourcing Multiple Choice Science Questions
https://aclanthology.org/W17-4413.pdf

The SciQ dataset contains 13,679 crowdsourced science exam questions about Physics,
Chemistry and Biology, among others. The questions are in multiple-choice format
with 4 answer options each. For the majority of the questions, an additional paragraph
with supporting evidence for the correct answer is provided.

Homepage: https://allenai.org/data/sciq
"""
from lm_eval.base import MultipleChoiceTask


_CITATION = """
@inproceedings{Welbl2017CrowdsourcingMC,
    title={Crowdsourcing Multiple Choice Science Questions},
    author={Johannes Welbl and Nelson F. Liu and Matt Gardner},
    booktitle={NUT@EMNLP},
    year={2017}
}
"""

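# An illustrative raw record showing the fields that _process_doc consumes
# below. The values are made up for documentation purposes and are not used by
# the task itself.
_EXAMPLE_RAW_DOC = {
    "question": "What is the smallest unit of an element?",
    "distractor1": "molecule",
    "distractor2": "proton",
    "distractor3": "electron",
    "correct_answer": "atom",
    "support": "An atom is the smallest unit of an element that retains the "
    "chemical properties of that element.",
}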

class SciQ(MultipleChoiceTask):
    VERSION = 0
    DATASET_PATH = "sciq"
    DATASET_NAME = None

    def has_training_docs(self):
        return True

    def has_validation_docs(self):
        return True

    def has_test_docs(self):
        return True

    def training_docs(self):
        if self._training_docs is None:
            self._training_docs = list(map(self._process_doc, self.dataset["train"]))
        return self._training_docs

    def validation_docs(self):
        return map(self._process_doc, self.dataset["validation"])

    def test_docs(self):
        return map(self._process_doc, self.dataset["test"])

    def _process_doc(self, doc):
        # The correct answer is always placed last, so the gold index is 3.
        choices = [
            doc["distractor1"],
            doc["distractor2"],
            doc["distractor3"],
            doc["correct_answer"],
        ]
        src = doc["support"]
        out_doc = {
            "source": src,
            "query": doc["question"],
            "choices": choices,
            "gold": 3,
        }
        return out_doc

    def doc_to_text(self, doc):
        return "{}\nQuestion: {}\nAnswer:".format(doc["source"], doc["query"]).strip()

    def should_decontaminate(self):
        return True

    def doc_to_decontamination_query(self, doc):
        return doc["source"] + " " + doc["query"]