Unverified commit 667939a2, authored by Fanli Lin, committed by GitHub
Browse files

[tests] add the missing `require_torch_multi_gpu` flag (#30250)

add gpu flag
parent 440bd3c3
@@ -17,7 +17,7 @@
 import copy
 import unittest
-from transformers.testing_utils import require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, require_torch_multi_gpu, slow, torch_device
 from transformers.utils import is_torch_available
 from ...test_configuration_common import ConfigTester
@@ -344,6 +344,7 @@ class BrosModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_model(*config_and_inputs)
+    @require_torch_multi_gpu
     def test_multi_gpu_data_parallel_forward(self):
         super().test_multi_gpu_data_parallel_forward()
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment