Unverified Commit 2d6e663a authored by Philip Meier's avatar Philip Meier Committed by GitHub
Browse files

make transforms v2 get_params a staticmethod (#7177)


Co-authored-by: Nicolas Hug <contact@nicolas-hug.com>
parent bac678c8
......@@ -649,18 +649,15 @@ def test_call_consistency(config, args_kwargs):
)
@pytest.mark.parametrize(
"config",
[config for config in CONSISTENCY_CONFIGS if hasattr(config.legacy_cls, "get_params")],
ids=lambda config: config.legacy_cls.__name__,
)
def test_get_params_alias(config):
assert config.prototype_cls.get_params is config.legacy_cls.get_params
@pytest.mark.parametrize(
("transform_cls", "args_kwargs"),
get_params_parametrization = pytest.mark.parametrize(
("config", "get_params_args_kwargs"),
[
pytest.param(
next(config for config in CONSISTENCY_CONFIGS if config.prototype_cls is transform_cls),
get_params_args_kwargs,
id=transform_cls.__name__,
)
for transform_cls, get_params_args_kwargs in [
(prototype_transforms.RandomResizedCrop, ArgsKwargs(make_image(), scale=[0.3, 0.7], ratio=[0.5, 1.5])),
(prototype_transforms.RandomErasing, ArgsKwargs(make_image(), scale=(0.3, 0.7), ratio=(0.5, 1.5))),
(prototype_transforms.ColorJitter, ArgsKwargs(brightness=None, contrast=None, saturation=None, hue=None)),
......@@ -674,12 +671,36 @@ def test_get_params_alias(config):
(prototype_transforms.RandomPerspective, ArgsKwargs(23, 17, 0.5)),
(prototype_transforms.RandomRotation, ArgsKwargs(degrees=[-20.0, 10.0])),
(prototype_transforms.AutoAugment, ArgsKwargs(5)),
]
],
)
def test_get_params_jit(transform_cls, args_kwargs):
args, kwargs = args_kwargs
torch.jit.script(transform_cls.get_params)(*args, **kwargs)
@get_params_parametrization
def test_get_params_alias(config, get_params_args_kwargs):
    """Check that the v2 ``get_params`` is the very same object as the v1 one.

    The alias must hold both on the class and when accessed through an instance,
    since ``get_params`` is bound onto the v2 class as a ``staticmethod``.
    """
    # Class-level alias: the v2 class re-exports the v1 staticmethod verbatim.
    assert config.prototype_cls.get_params is config.legacy_cls.get_params

    # Some configs carry no constructor args; nothing more to check for those.
    if not config.args_kwargs:
        return

    args, kwargs = config.args_kwargs[0]
    legacy_transform = config.legacy_cls(*args, **kwargs)
    prototype_transform = config.prototype_cls(*args, **kwargs)

    # Instance-level alias: attribute access on instances must yield the same function.
    assert prototype_transform.get_params is legacy_transform.get_params
@get_params_parametrization
def test_get_params_jit(config, get_params_args_kwargs):
    """Check that ``get_params`` stays ``torch.jit``-scriptable on the v2 class.

    Scripting is exercised both through the class attribute and through an
    instance, since the staticmethod binding must survive both access paths.
    """
    get_params_args, get_params_kwargs = get_params_args_kwargs

    # Scripting through the class exercises the staticmethod binding directly.
    torch.jit.script(config.prototype_cls.get_params)(*get_params_args, **get_params_kwargs)

    # Some configs carry no constructor args; nothing more to check for those.
    if not config.args_kwargs:
        return

    args, kwargs = config.args_kwargs[0]
    transform = config.prototype_cls(*args, **kwargs)

    # Scripting through an instance must work as well.
    torch.jit.script(transform.get_params)(*get_params_args, **get_params_kwargs)
@pytest.mark.parametrize(
......
......@@ -67,7 +67,7 @@ class Transform(nn.Module):
# Since `get_params` is a `@staticmethod`, we have to bind it to the class itself rather than to an instance.
# This method is called after subclassing has happened, i.e. `cls` is the subclass, e.g. `Resize`.
if cls._v1_transform_cls is not None and hasattr(cls._v1_transform_cls, "get_params"):
cls.get_params = cls._v1_transform_cls.get_params # type: ignore[attr-defined]
cls.get_params = staticmethod(cls._v1_transform_cls.get_params) # type: ignore[attr-defined]
def _extract_params_for_v1_transform(self) -> Dict[str, Any]:
# This method is called by `__prepare_scriptable__` to instantiate the equivalent v1 transform from the current
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment