# coding=utf-8
# Copyright 2019-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Preprocessing script before training the distilled model.
"""
import argparse
import logging
import pickle
from collections import Counter


logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s -   %(message)s", datefmt="%m/%d/%Y %H:%M:%S", level=logging.INFO
)
logger = logging.getLogger(__name__)

if __name__ == "__main__":
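    # Command-line arguments: the binarized dataset to read, the file to dump the counts to,
    # and the tokenizer vocabulary size (defaults match bert-base-uncased).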
    parser = argparse.ArgumentParser(
        description="Token Counts for smoothing the masking probabilities in MLM (cf XLM/word2vec)"
    )
    parser.add_argument(
        "--data_file", type=str, default="data/dump.bert-base-uncased.pickle", help="The binarized dataset."
    )
    parser.add_argument(
        "--token_counts_dump", type=str, default="data/token_counts.bert-base-uncased.pickle", help="The dump file."
    )
    parser.add_argument("--vocab_size", default=30522, type=int)
    args = parser.parse_args()

    logger.info(f"Loading data from {args.data_file}")
    with open(args.data_file, "rb") as fp:
        data = pickle.load(fp)

    logger.info("Counting occurences for MLM.")
    counter = Counter()
    for tk_ids in data:
        counter.update(tk_ids)
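    # Expand the sparse Counter into a dense list indexed by token id; ids that never occur keep a count of 0.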
    counts = [0] * args.vocab_size
    for k, v in counter.items():
        counts[k] = v

    logger.info(f"Dump to {args.token_counts_dump}")
    with open(args.token_counts_dump, "wb") as handle:
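        # The dumped counts can later be used to smooth the MLM masking probabilities (cf. the parser description).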
        pickle.dump(counts, handle, protocol=pickle.HIGHEST_PROTOCOL)