#!/usr/bin/env python3
# coding=utf-8
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Blenderbot Tokenizers, including common tests for BlenderbotSmallTokenizer."""
import unittest

from transformers.file_utils import cached_property
from transformers.models.blenderbot.tokenization_blenderbot import BlenderbotTokenizer


class Blenderbot3BTokenizerTests(unittest.TestCase):
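    # Lazily build the tokenizer and cache it on the test instance so repeated
    # accesses within a test do not reload the checkpoint.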
    @cached_property
    def tokenizer_3b(self):
        return BlenderbotTokenizer.from_pretrained("facebook/blenderbot-3B")

    def test_encode_decode_cycle(self):
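        # Round-trip check: encoding then decoding should reproduce the source
        # text exactly (including the leading space) when cleanup is disabled
        # and special tokens are skipped.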
        tok = self.tokenizer_3b
        src_text = " I am a small frog."
        encoded = tok([src_text], padding=False, truncation=False)["input_ids"]
        decoded = tok.batch_decode(encoded, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        assert src_text == decoded

    def test_3B_tokenization_same_as_parlai(self):
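        # With add_prefix_space enabled, " Sam" and "Sam" tokenize identically,
        # matching ParlAI's behavior; the trailing 2 is the EOS token id.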
        assert self.tokenizer_3b.add_prefix_space
        assert self.tokenizer_3b([" Sam", "Sam"]).input_ids == [[5502, 2], [5502, 2]]