"examples/vscode:/vscode.git/clone" did not exist on "8733ffcb5e660e81a353b00897f350399d7d5fb1"
Unverified commit 02bdfc02, authored by Stas Bekman, committed by GitHub

using multi_gpu consistently (#8446)

* s|multiple_gpu|multi_gpu|g; s|multigpu|multi_gpu|g

* doc
parent b9356945
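
For context, `require_torch_multi_gpu` (previously `require_torch_multigpu`) is a skip decorator in `transformers.testing_utils`. Below is a minimal sketch of what such a decorator does; the body is an illustration of the skip pattern, not the library's exact implementation, which this commit only renames:

```python
import unittest

import torch


def require_torch_multi_gpu(test_case):
    """Skip `test_case` unless at least two CUDA devices are visible.

    Illustrative sketch; the real decorator lives in
    transformers.testing_utils.
    """
    if torch.cuda.device_count() < 2:
        return unittest.skip("test requires multiple GPUs")(test_case)
    return test_case
```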
@@ -17,7 +17,7 @@ import random
 import unittest
 
 from transformers import is_torch_available
-from transformers.testing_utils import require_torch, require_torch_multigpu, slow, torch_device
+from transformers.testing_utils import require_torch, require_torch_multi_gpu, slow, torch_device
 
 from .test_configuration_common import ConfigTester
 from .test_generation_utils import GenerationTesterMixin
@@ -205,8 +205,8 @@ class TransfoXLModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestC
         output_result = self.model_tester.create_transfo_xl_lm_head(*config_and_inputs)
         self.model_tester.check_transfo_xl_lm_head_output(output_result)
 
-    @require_torch_multigpu
-    def test_multigpu_data_parallel_forward(self):
+    @require_torch_multi_gpu
+    def test_multi_gpu_data_parallel_forward(self):
         # Opt-out of this test.
         pass
...
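
The override in the hunk above is the usual opt-out pattern for inherited mixin tests: the model's test class shadows the mixin's multi-GPU test with a no-op. A hypothetical condensed example (the mixin internals here are assumptions, not from this diff):

```python
import unittest


class ModelTesterMixinSketch:
    # Stand-in for transformers' ModelTesterMixin, which gives every
    # model suite a default multi-GPU DataParallel forward test.
    def test_multi_gpu_data_parallel_forward(self):
        raise NotImplementedError("wraps the model in torch.nn.DataParallel")


class TransfoXLLikeModelTest(ModelTesterMixinSketch, unittest.TestCase):
    def test_multi_gpu_data_parallel_forward(self):
        # Overriding the inherited test with a no-op opts this model out.
        pass
```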
@@ -2,7 +2,7 @@ import sys
 from typing import Dict
 
 from transformers import EvalPrediction, HfArgumentParser, TrainingArguments, is_torch_available
-from transformers.testing_utils import TestCasePlus, execute_subprocess_async, require_torch_multigpu
+from transformers.testing_utils import TestCasePlus, execute_subprocess_async, require_torch_multi_gpu
 from transformers.utils import logging
@@ -44,7 +44,7 @@ if is_torch_available():
 class TestTrainerDistributed(TestCasePlus):
-    @require_torch_multigpu
+    @require_torch_multi_gpu
     def test_trainer(self):
         distributed_args = f"""
...
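
The second file exercises the renamed decorator in a distributed setting. A hypothetical sketch of the launch pattern visible in the hunk, with the argument values assumed for illustration (the commit's actual `distributed_args` body is truncated above):

```python
import sys

from transformers.testing_utils import TestCasePlus, execute_subprocess_async, require_torch_multi_gpu


class TestTrainerDistributedSketch(TestCasePlus):
    @require_torch_multi_gpu
    def test_trainer(self):
        # Launch the distributed test script under torch.distributed.launch
        # with one process per GPU; execute_subprocess_async raises on a
        # non-zero exit code, which fails the test.
        distributed_args = f"""
            -m torch.distributed.launch
            --nproc_per_node=2
            {self.test_file_dir}/test_trainer_distributed.py
        """.split()
        cmd = [sys.executable] + distributed_args
        execute_subprocess_async(cmd, env=self.get_env())
```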