OpenDAS / vision · Commit c547e5cb (unverified)

Authored Aug 11, 2020 by vfdev; committed via GitHub on Aug 11, 2020.
Parent: a75fdd41

Deprecated F_t.center_crop, F_t.five_crop, F_t.ten_crop (#2568)

- Updated docs
- Put warning in the code
- Updated tests
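In practice, the deprecation means tensor inputs should go through the public dispatching functions in torchvision.transforms.functional rather than the private functional_tensor helpers. A minimal illustrative sketch of the migration (not part of this commit; the F and F_t aliases follow the convention used in the diffs below):

    import torch
    import torchvision.transforms.functional as F
    import torchvision.transforms.functional_tensor as F_t

    img = torch.randint(0, 255, (3, 32, 32), dtype=torch.uint8)

    # Deprecated by this commit: calling the tensor-only helper directly
    # now emits a warning recommending the public API.
    out_old = F_t.center_crop(img, [10, 10])

    # Preferred: the public function handles both PIL Images and Tensors.
    out_new = F.center_crop(img, [10, 10])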
Showing 2 changed files with 91 additions and 80 deletions.
test/test_functional_tensor.py (+55, -77)
torchvision/transforms/functional_tensor.py (+36, -3)
test/test_functional_tensor.py (view file @ c547e5cb)
...
@@ -64,22 +64,24 @@ class Tester(unittest.TestCase):

     def test_crop(self):
         script_crop = torch.jit.script(F_t.crop)
-        img_tensor = torch.randint(0, 255, (3, 16, 16), dtype=torch.uint8)
-        img_tensor_clone = img_tensor.clone()
-        top = random.randint(0, 15)
-        left = random.randint(0, 15)
-        height = random.randint(1, 16 - top)
-        width = random.randint(1, 16 - left)
-        img_cropped = F_t.crop(img_tensor, top, left, height, width)
-        img_PIL = transforms.ToPILImage()(img_tensor)
-        img_PIL_cropped = F.crop(img_PIL, top, left, height, width)
-        img_cropped_GT = transforms.ToTensor()(img_PIL_cropped)
-        self.assertTrue(torch.equal(img_tensor, img_tensor_clone))
-        self.assertTrue(torch.equal(img_cropped, (img_cropped_GT * 255).to(torch.uint8)),
-                        "functional_tensor crop not working")
-        # scriptable function test
-        cropped_img_script = script_crop(img_tensor, top, left, height, width)
-        self.assertTrue(torch.equal(img_cropped, cropped_img_script))
+        img_tensor, pil_img = self._create_data(16, 18)
+
+        test_configs = [
+            (1, 2, 4, 5),   # crop inside top-left corner
+            (2, 12, 3, 4),  # crop inside top-right corner
+            (8, 3, 5, 6),   # crop inside bottom-left corner
+            (8, 11, 4, 3),  # crop inside bottom-right corner
+        ]
+
+        for top, left, height, width in test_configs:
+            pil_img_cropped = F.crop(pil_img, top, left, height, width)
+
+            img_tensor_cropped = F.crop(img_tensor, top, left, height, width)
+            self.compareTensorToPIL(img_tensor_cropped, pil_img_cropped)
+
+            img_tensor_cropped = script_crop(img_tensor, top, left, height, width)
+            self.compareTensorToPIL(img_tensor_cropped, pil_img_cropped)

     def test_hsv2rgb(self):
         shape = (3, 100, 150)
...
@@ -198,71 +200,47 @@ class Tester(unittest.TestCase):
         self.assertTrue(torch.equal(grayscale_script, grayscale_tensor))

     def test_center_crop(self):
-        script_center_crop = torch.jit.script(F_t.center_crop)
-        img_tensor = torch.randint(0, 255, (1, 32, 32), dtype=torch.uint8)
-        img_tensor_clone = img_tensor.clone()
-        cropped_tensor = F_t.center_crop(img_tensor, [10, 10])
-        cropped_pil_image = F.center_crop(transforms.ToPILImage()(img_tensor), [10, 10])
-        cropped_pil_tensor = (transforms.ToTensor()(cropped_pil_image) * 255).to(torch.uint8)
-        self.assertTrue(torch.equal(cropped_tensor, cropped_pil_tensor))
-        self.assertTrue(torch.equal(img_tensor, img_tensor_clone))
-        # scriptable function test
-        cropped_script = script_center_crop(img_tensor, [10, 10])
-        self.assertTrue(torch.equal(cropped_script, cropped_tensor))
+        script_center_crop = torch.jit.script(F.center_crop)
+
+        img_tensor, pil_img = self._create_data(32, 34)
+
+        cropped_pil_image = F.center_crop(pil_img, [10, 11])
+
+        cropped_tensor = F.center_crop(img_tensor, [10, 11])
+        self.compareTensorToPIL(cropped_tensor, cropped_pil_image)
+
+        cropped_tensor = script_center_crop(img_tensor, [10, 11])
+        self.compareTensorToPIL(cropped_tensor, cropped_pil_image)

     def test_five_crop(self):
-        script_five_crop = torch.jit.script(F_t.five_crop)
-        img_tensor = torch.randint(0, 255, (1, 32, 32), dtype=torch.uint8)
-        img_tensor_clone = img_tensor.clone()
-        cropped_tensor = F_t.five_crop(img_tensor, [10, 10])
-        cropped_pil_image = F.five_crop(transforms.ToPILImage()(img_tensor), [10, 10])
-        self.assertTrue(torch.equal(cropped_tensor[0],
-                        (transforms.ToTensor()(cropped_pil_image[0]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[1],
-                        (transforms.ToTensor()(cropped_pil_image[2]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[2],
-                        (transforms.ToTensor()(cropped_pil_image[1]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[3],
-                        (transforms.ToTensor()(cropped_pil_image[3]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[4],
-                        (transforms.ToTensor()(cropped_pil_image[4]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(img_tensor, img_tensor_clone))
-        # scriptable function test
-        cropped_script = script_five_crop(img_tensor, [10, 10])
-        for cropped_script_img, cropped_tensor_img in zip(cropped_script, cropped_tensor):
-            self.assertTrue(torch.equal(cropped_script_img, cropped_tensor_img))
+        script_five_crop = torch.jit.script(F.five_crop)
+
+        img_tensor, pil_img = self._create_data(32, 34)
+
+        cropped_pil_images = F.five_crop(pil_img, [10, 11])
+
+        cropped_tensors = F.five_crop(img_tensor, [10, 11])
+        for i in range(5):
+            self.compareTensorToPIL(cropped_tensors[i], cropped_pil_images[i])
+
+        cropped_tensors = script_five_crop(img_tensor, [10, 11])
+        for i in range(5):
+            self.compareTensorToPIL(cropped_tensors[i], cropped_pil_images[i])

     def test_ten_crop(self):
-        script_ten_crop = torch.jit.script(F_t.ten_crop)
-        img_tensor = torch.randint(0, 255, (1, 32, 32), dtype=torch.uint8)
-        img_tensor_clone = img_tensor.clone()
-        cropped_tensor = F_t.ten_crop(img_tensor, [10, 10])
-        cropped_pil_image = F.ten_crop(transforms.ToPILImage()(img_tensor), [10, 10])
-        self.assertTrue(torch.equal(cropped_tensor[0],
-                        (transforms.ToTensor()(cropped_pil_image[0]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[1],
-                        (transforms.ToTensor()(cropped_pil_image[2]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[2],
-                        (transforms.ToTensor()(cropped_pil_image[1]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[3],
-                        (transforms.ToTensor()(cropped_pil_image[3]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[4],
-                        (transforms.ToTensor()(cropped_pil_image[4]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[5],
-                        (transforms.ToTensor()(cropped_pil_image[5]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[6],
-                        (transforms.ToTensor()(cropped_pil_image[7]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[7],
-                        (transforms.ToTensor()(cropped_pil_image[6]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[8],
-                        (transforms.ToTensor()(cropped_pil_image[8]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(cropped_tensor[9],
-                        (transforms.ToTensor()(cropped_pil_image[9]) * 255).to(torch.uint8)))
-        self.assertTrue(torch.equal(img_tensor, img_tensor_clone))
-        # scriptable function test
-        cropped_script = script_ten_crop(img_tensor, [10, 10])
-        for cropped_script_img, cropped_tensor_img in zip(cropped_script, cropped_tensor):
-            self.assertTrue(torch.equal(cropped_script_img, cropped_tensor_img))
+        script_ten_crop = torch.jit.script(F.ten_crop)
+
+        img_tensor, pil_img = self._create_data(32, 34)
+
+        cropped_pil_images = F.ten_crop(pil_img, [10, 11])
+
+        cropped_tensors = F.ten_crop(img_tensor, [10, 11])
+        for i in range(10):
+            self.compareTensorToPIL(cropped_tensors[i], cropped_pil_images[i])
+
+        cropped_tensors = script_ten_crop(img_tensor, [10, 11])
+        for i in range(10):
+            self.compareTensorToPIL(cropped_tensors[i], cropped_pil_images[i])

     def test_pad(self):
         script_fn = torch.jit.script(F_t.pad)
...
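The updated tests lean on two helpers, self._create_data and self.compareTensorToPIL, whose definitions are not part of this diff. A rough sketch of what such helpers plausibly look like, for readers following along (the exact bodies in the repository may differ):

    import unittest

    import numpy as np
    import torch
    from PIL import Image


    class Tester(unittest.TestCase):
        def _create_data(self, height=3, width=3):
            # Random uint8 CHW tensor plus the equivalent PIL image.
            tensor = torch.randint(0, 255, (3, height, width), dtype=torch.uint8)
            pil_img = Image.fromarray(tensor.permute(1, 2, 0).contiguous().numpy())
            return tensor, pil_img

        def compareTensorToPIL(self, tensor, pil_image):
            # Convert the PIL image back to a CHW uint8 tensor and compare exactly.
            pil_tensor = torch.as_tensor(np.array(pil_image).transpose((2, 0, 1)))
            self.assertTrue(tensor.equal(pil_tensor))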
torchvision/transforms/functional_tensor.py (view file @ c547e5cb)
...
@@ -240,7 +240,12 @@ def adjust_gamma(img: Tensor, gamma: float, gain: float = 1) -> Tensor:

 def center_crop(img: Tensor, output_size: BroadcastingList2[int]) -> Tensor:
-    """Crop the Image Tensor and resize it to desired size.
+    """DEPRECATED. Crop the Image Tensor and resize it to desired size.
+
+    .. warning::
+
+        This method is deprecated and will be removed in future releases.
+        Please, use ``F.center_crop`` instead.

     Args:
         img (Tensor): Image to be cropped.
...
@@ -250,6 +255,11 @@ def center_crop(img: Tensor, output_size: BroadcastingList2[int]) -> Tensor:
     Returns:
         Tensor: Cropped image.
     """
+    warnings.warn(
+        "This method is deprecated and will be removed in future releases. "
+        "Please, use ``F.center_crop`` instead."
+    )
+
     if not _is_tensor_a_torch_image(img):
         raise TypeError('tensor is not a torch image.')
...
@@ -268,8 +278,15 @@ def center_crop(img: Tensor, output_size: BroadcastingList2[int]) -> Tensor:

 def five_crop(img: Tensor, size: BroadcastingList2[int]) -> List[Tensor]:
-    """Crop the given Image Tensor into four corners and the central crop.
+    """DEPRECATED. Crop the given Image Tensor into four corners and the central crop.
+
+    .. warning::
+
+        This method is deprecated and will be removed in future releases.
+        Please, use ``F.five_crop`` instead.

     .. Note::
         This transform returns a List of Tensors and there may be a
         mismatch in the number of inputs and targets your ``Dataset`` returns.
...
@@ -283,6 +300,11 @@ def five_crop(img: Tensor, size: BroadcastingList2[int]) -> List[Tensor]:
        List: List (tl, tr, bl, br, center)
             Corresponding top left, top right, bottom left, bottom right and center crop.
     """
+    warnings.warn(
+        "This method is deprecated and will be removed in future releases. "
+        "Please, use ``F.five_crop`` instead."
+    )
+
     if not _is_tensor_a_torch_image(img):
         raise TypeError('tensor is not a torch image.')
...
@@ -304,10 +326,16 @@ def five_crop(img: Tensor, size: BroadcastingList2[int]) -> List[Tensor]:

 def ten_crop(img: Tensor, size: BroadcastingList2[int], vertical_flip: bool = False) -> List[Tensor]:
-    """Crop the given Image Tensor into four corners and the central crop plus the
+    """DEPRECATED.
+    Crop the given Image Tensor into four corners and the central crop plus the
     flipped version of these (horizontal flipping is used by default).
+
+    .. warning::
+
+        This method is deprecated and will be removed in future releases.
+        Please, use ``F.ten_crop`` instead.

     .. Note::
         This transform returns a List of images and there may be a
         mismatch in the number of inputs and targets your ``Dataset`` returns.
...
@@ -323,6 +351,11 @@ def ten_crop(img: Tensor, size: BroadcastingList2[int], vertical_flip: bool = Fa
        Corresponding top left, top right, bottom left, bottom right and center crop
        and same for the flipped image's tensor.
     """
+    warnings.warn(
+        "This method is deprecated and will be removed in future releases. "
+        "Please, use ``F.ten_crop`` instead."
+    )
+
     if not _is_tensor_a_torch_image(img):
         raise TypeError('tensor is not a torch image.')
...
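Because the deprecation is signalled through warnings.warn, callers can surface or assert it explicitly. A small illustrative snippet (not part of the commit) showing how the new warning can be captured, assuming the F_t alias from the diff above:

    import warnings

    import torch
    import torchvision.transforms.functional_tensor as F_t

    img = torch.randint(0, 255, (3, 32, 32), dtype=torch.uint8)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        F_t.five_crop(img, [10, 10])

    # warnings.warn defaults to UserWarning; the message recommends F.five_crop.
    assert any("deprecated" in str(w.message) for w in caught)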