"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "a14b055b651e47ee93763dee233f2c41efa29c75"
Unverified Commit 9a345384 authored by Yih-Dar, committed by GitHub

fix (#17890)


Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 3ec7d4cf
@@ -20,7 +20,7 @@ import random
 import tempfile
 import unittest
 
-from transformers.testing_utils import require_detectron2, require_torch, slow, torch_device
+from transformers.testing_utils import require_detectron2, require_torch, require_torch_multi_gpu, slow, torch_device
 from transformers.utils import is_detectron2_available, is_torch_available
 
 from ...test_configuration_common import ConfigTester
@@ -285,6 +285,16 @@ class LayoutLMv2ModelTest(ModelTesterMixin, unittest.TestCase):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_model(*config_and_inputs)
 
+    @require_torch_multi_gpu
+    @unittest.skip(
+        reason=(
+            "LayoutLMV2 and its dependency `detectron2` have some layers using `add_module` which doesn't work well"
+            " with `nn.DataParallel`"
+        )
+    )
+    def test_multi_gpu_data_parallel_forward(self):
+        pass
+
     def test_model_various_embeddings(self):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         for type in ["absolute", "relative_key", "relative_key_query"]:
...
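For context, here is a minimal, self-contained sketch of the skip pattern this diff introduces: stacking `@require_torch_multi_gpu` on top of `@unittest.skip` presumably keeps the test associated with multi-GPU runs while unconditionally skipping it, since the `add_module`-registered layers in LayoutLMv2/detectron2 don't work well with `nn.DataParallel`. The `require_torch_multi_gpu` helper and `ExampleModelTest` class below are simplified stand-ins, not the actual `transformers.testing_utils` code.

# Sketch of the decorator pattern added in this diff (names are hypothetical).
import unittest


def require_torch_multi_gpu(test_case):
    # Simplified stand-in: skip unless more than one CUDA device is visible.
    try:
        import torch

        has_multi_gpu = torch.cuda.device_count() > 1
    except ImportError:
        has_multi_gpu = False
    return unittest.skipUnless(has_multi_gpu, "test requires multiple GPUs")(test_case)


class ExampleModelTest(unittest.TestCase):
    @require_torch_multi_gpu
    @unittest.skip(reason="layers registered via `add_module` don't work well with `nn.DataParallel`")
    def test_multi_gpu_data_parallel_forward(self):
        # Never executed: unittest reports this test as skipped.
        pass

    def test_something_else(self):
        # Other tests in the same class still run normally.
        self.assertTrue(True)


if __name__ == "__main__":
    unittest.main()

Whichever decorator fires, the test body is never run; the unconditional `unittest.skip` simply records the reason shown in the diff in the test report.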