"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "d59b872c9e77e23acbc8e504e5720cf59313b40b"
Unverified Commit 9a345384 authored by Yih-Dar's avatar Yih-Dar Committed by GitHub
Browse files

fix (#17890)


Co-authored-by: default avatarydshieh <ydshieh@users.noreply.github.com>
parent 3ec7d4cf
......@@ -20,7 +20,7 @@ import random
import tempfile
import unittest
from transformers.testing_utils import require_detectron2, require_torch, slow, torch_device
from transformers.testing_utils import require_detectron2, require_torch, require_torch_multi_gpu, slow, torch_device
from transformers.utils import is_detectron2_available, is_torch_available
from ...test_configuration_common import ConfigTester
......@@ -285,6 +285,16 @@ class LayoutLMv2ModelTest(ModelTesterMixin, unittest.TestCase):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
@require_torch_multi_gpu
@unittest.skip(
    reason="LayoutLMV2 and its dependency `detectron2` have some layers using `add_module` which doesn't work well with `nn.DataParallel`"
)
def test_multi_gpu_data_parallel_forward(self):
    """Intentionally skipped: multi-GPU DataParallel forward is incompatible with this model's layer registration."""
def test_model_various_embeddings(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
for type in ["absolute", "relative_key", "relative_key_query"]:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment