Unverified Commit 6ca9c76a authored by Philip Meier, committed by GitHub

Upgrade usort to `1.0.2` and black to 22.3.0 (#5106)



* upgrade usort to

* Also update black

* Actually use 1.0.2

* Apply pre-commit
Co-authored-by: Nicolas Hug <contact@nicolas-hug.com>
parent 9293be7e
@@ -6,7 +6,7 @@ from torch import Tensor
 from torchvision.extension import _assert_has_ops
 from ..utils import _log_api_usage_once
-from ._box_convert import _box_cxcywh_to_xyxy, _box_xyxy_to_cxcywh, _box_xywh_to_xyxy, _box_xyxy_to_xywh
+from ._box_convert import _box_cxcywh_to_xyxy, _box_xywh_to_xyxy, _box_xyxy_to_cxcywh, _box_xyxy_to_xywh
 from ._utils import _upcast
@@ -331,7 +331,7 @@ def complete_box_iou(boxes1: Tensor, boxes2: Tensor, eps: float = 1e-7) -> Tensor:
     w_gt = boxes2[:, 2] - boxes2[:, 0]
     h_gt = boxes2[:, 3] - boxes2[:, 1]
-    v = (4 / (torch.pi ** 2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
+    v = (4 / (torch.pi**2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
     with torch.no_grad():
         alpha = v / (1 - iou + v + eps)
     return diou - alpha * v
...
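The hunks above and several below contain no functional change: black 22.x removes the spaces around `**` when both operands are simple (names, literals, attribute access such as `torch.pi`). A minimal, illustrative Python sketch of the before/after formatting (not part of this diff):

# Illustrative sketch of the black 22.x power-operator change (not part of this commit):
x = 3.0
y_old = (4 / (x ** 2)) + x ** 0.5  # spacing that black 21.x left alone
y_new = (4 / (x**2)) + x**0.5      # spacing black 22.3.0 produces for simple operands
assert y_old == y_new              # only whitespace differs; the value is identical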
@@ -58,7 +58,7 @@ def complete_box_iou_loss(
     h_pred = y2 - y1
     w_gt = x2g - x1g
     h_gt = y2g - y1g
-    v = (4 / (torch.pi ** 2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
+    v = (4 / (torch.pi**2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
     with torch.no_grad():
         alpha = v / (1 - iou + v + eps)
...
@@ -37,7 +37,7 @@ def drop_block2d(
     N, C, H, W = input.size()
     block_size = min(block_size, W, H)
     # compute the gamma of Bernoulli distribution
-    gamma = (p * H * W) / ((block_size ** 2) * ((H - block_size + 1) * (W - block_size + 1)))
+    gamma = (p * H * W) / ((block_size**2) * ((H - block_size + 1) * (W - block_size + 1)))
     noise = torch.empty((N, C, H - block_size + 1, W - block_size + 1), dtype=input.dtype, device=input.device)
     noise.bernoulli_(gamma)
@@ -83,7 +83,7 @@ def drop_block3d(
     N, C, D, H, W = input.size()
     block_size = min(block_size, D, H, W)
     # compute the gamma of Bernoulli distribution
-    gamma = (p * D * H * W) / ((block_size ** 3) * ((D - block_size + 1) * (H - block_size + 1) * (W - block_size + 1)))
+    gamma = (p * D * H * W) / ((block_size**3) * ((D - block_size + 1) * (H - block_size + 1) * (W - block_size + 1)))
     noise = torch.empty(
         (N, C, D - block_size + 1, H - block_size + 1, W - block_size + 1), dtype=input.dtype, device=input.device
     )
...
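For context only (this commit touches whitespace, not behaviour), the gamma in the drop_block2d hunk is the Bernoulli probability chosen so that roughly a fraction p of the input ends up dropped. A small worked example with assumed values p=0.1, H=W=14, block_size=3:

# Worked example of the gamma formula from the drop_block2d hunk above,
# using assumed values (p=0.1, H=W=14, block_size=3); illustrative only.
p, H, W, block_size = 0.1, 14, 14, 3
gamma = (p * H * W) / ((block_size**2) * ((H - block_size + 1) * (W - block_size + 1)))
print(round(gamma, 5))  # 0.01512 -> probability later used by noise.bernoulli_(gamma)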
 from collections import OrderedDict
-from typing import Tuple, List, Dict, Callable, Optional
+from typing import Callable, Dict, List, Optional, Tuple
 import torch.nn.functional as F
 from torch import nn, Tensor
...
 import torch
 from ..utils import _log_api_usage_once
-from ._utils import _upcast_non_float, _loss_inter_union
+from ._utils import _loss_inter_union, _upcast_non_float
 def generalized_box_iou_loss(
...
 import warnings
-from typing import Callable, List, Optional, Union, Tuple, Sequence
+from typing import Callable, List, Optional, Sequence, Tuple, Union
 import torch
 from torch import Tensor
...
 import warnings
-from typing import Optional, List, Dict, Tuple, Union
+from typing import Dict, List, Optional, Tuple, Union
 import torch
 import torch.fx
...
@@ -4,7 +4,7 @@ from torch.nn.modules.utils import _pair
 from torchvision.extension import _assert_has_ops
 from ..utils import _log_api_usage_once
-from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
+from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
 def ps_roi_align(
...
@@ -4,7 +4,7 @@ from torch.nn.modules.utils import _pair
 from torchvision.extension import _assert_has_ops
 from ..utils import _log_api_usage_once
-from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
+from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
 def ps_roi_pool(
...
@@ -7,7 +7,7 @@ from torch.nn.modules.utils import _pair
 from torchvision.extension import _assert_has_ops
 from ..utils import _log_api_usage_once
-from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
+from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
 def roi_align(
...
@@ -7,7 +7,7 @@ from torch.nn.modules.utils import _pair
 from torchvision.extension import _assert_has_ops
 from ..utils import _log_api_usage_once
-from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
+from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
 def roi_pool(
...
-from . import datasets
-from . import features
-from . import models
-from . import transforms
-from . import utils
+from . import datasets, features, models, transforms, utils
...
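The import hunks here and below show the usort 1.0.2 style: consecutive from-imports of the same module appear to be merged into one statement, and imported names are sorted case-insensitively (which is why _Feature sorts ahead of BoundingBox). A runnable sketch of the effect on ordinary stdlib imports (illustrative only, not part of the diff):

# Sketch of the usort 1.0.2 import style visible in these hunks (illustrative only):
# imports of the same module are merged, and imported names are sorted case-insensitively.

# before (in the style this commit replaces):
# from collections import OrderedDict
# from collections import defaultdict
# from typing import Tuple, List, Optional

# after usort:
from collections import defaultdict, OrderedDict
from typing import List, Optional, Tuple

print(defaultdict, OrderedDict, List, Optional, Tuple)  # the names remain usable as before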
 import pathlib
-from typing import Any, Dict, List, Callable, Type, Optional, Union, TypeVar
+from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union
 from torchvision.prototype.datasets import home
 from torchvision.prototype.datasets.utils import Dataset
...
@@ -11,7 +11,7 @@ from .fer2013 import FER2013
 from .food101 import Food101
 from .gtsrb import GTSRB
 from .imagenet import ImageNet
-from .mnist import MNIST, FashionMNIST, KMNIST, EMNIST, QMNIST
+from .mnist import EMNIST, FashionMNIST, KMNIST, MNIST, QMNIST
 from .oxford_iiit_pet import OxfordIIITPet
 from .pcam import PCAM
 from .sbd import SBD
...
 import pathlib
 import re
-from typing import Any, Dict, List, Tuple, BinaryIO, Union
+from typing import Any, BinaryIO, Dict, List, Tuple, Union
 import numpy as np
-from torchdata.datapipes.iter import (
-    IterDataPipe,
-    Mapper,
-    Filter,
-    IterKeyZipper,
-)
+from torchdata.datapipes.iter import Filter, IterDataPipe, IterKeyZipper, Mapper
 from torchvision.prototype.datasets.utils import Dataset, GDriveResource, OnlineResource
 from torchvision.prototype.datasets.utils._internal import (
-    INFINITE_BUFFER_SIZE,
-    read_mat,
     hint_sharding,
     hint_shuffling,
+    INFINITE_BUFFER_SIZE,
     read_categories_file,
+    read_mat,
 )
-from torchvision.prototype.features import Label, BoundingBox, _Feature, EncodedImage
+from torchvision.prototype.features import _Feature, BoundingBox, EncodedImage, Label
 from .._api import register_dataset, register_info
...
 import csv
 import pathlib
-from typing import Any, Dict, List, Optional, Tuple, Iterator, Sequence, BinaryIO, Union
+from typing import Any, BinaryIO, Dict, Iterator, List, Optional, Sequence, Tuple, Union
-from torchdata.datapipes.iter import (
-    IterDataPipe,
-    Mapper,
-    Filter,
-    Zipper,
-    IterKeyZipper,
-)
-from torchvision.prototype.datasets.utils import (
-    Dataset,
-    GDriveResource,
-    OnlineResource,
-)
+from torchdata.datapipes.iter import Filter, IterDataPipe, IterKeyZipper, Mapper, Zipper
+from torchvision.prototype.datasets.utils import Dataset, GDriveResource, OnlineResource
 from torchvision.prototype.datasets.utils._internal import (
-    INFINITE_BUFFER_SIZE,
     getitem,
-    path_accessor,
     hint_sharding,
     hint_shuffling,
+    INFINITE_BUFFER_SIZE,
+    path_accessor,
 )
-from torchvision.prototype.features import EncodedImage, _Feature, Label, BoundingBox
+from torchvision.prototype.features import _Feature, BoundingBox, EncodedImage, Label
 from .._api import register_dataset, register_info
...
@@ -2,22 +2,18 @@ import abc
 import io
 import pathlib
 import pickle
-from typing import Any, Dict, List, Optional, Tuple, Iterator, cast, BinaryIO, Union
+from typing import Any, BinaryIO, cast, Dict, Iterator, List, Optional, Tuple, Union
 import numpy as np
-from torchdata.datapipes.iter import (
-    IterDataPipe,
-    Filter,
-    Mapper,
-)
+from torchdata.datapipes.iter import Filter, IterDataPipe, Mapper
 from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
 from torchvision.prototype.datasets.utils._internal import (
+    hint_sharding,
     hint_shuffling,
     path_comparator,
-    hint_sharding,
     read_categories_file,
 )
-from torchvision.prototype.features import Label, Image
+from torchvision.prototype.features import Image, Label
 from .._api import register_dataset, register_info
...
 import pathlib
-from typing import Any, Dict, List, Optional, Tuple, BinaryIO, Union
+from typing import Any, BinaryIO, Dict, List, Optional, Tuple, Union
-from torchdata.datapipes.iter import IterDataPipe, Mapper, Filter, IterKeyZipper, Demultiplexer, JsonParser, UnBatcher
+from torchdata.datapipes.iter import Demultiplexer, Filter, IterDataPipe, IterKeyZipper, JsonParser, Mapper, UnBatcher
 from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
 from torchvision.prototype.datasets.utils._internal import (
-    INFINITE_BUFFER_SIZE,
+    getitem,
     hint_sharding,
     hint_shuffling,
-    path_comparator,
+    INFINITE_BUFFER_SIZE,
     path_accessor,
-    getitem,
+    path_comparator,
 )
-from torchvision.prototype.features import Label, EncodedImage
+from torchvision.prototype.features import EncodedImage, Label
 from .._api import register_dataset, register_info
...
 import pathlib
 import re
-from collections import OrderedDict
-from collections import defaultdict
-from typing import Any, Dict, List, Optional, Tuple, cast, BinaryIO, Union
+from collections import defaultdict, OrderedDict
+from typing import Any, BinaryIO, cast, Dict, List, Optional, Tuple, Union
 import torch
 from torchdata.datapipes.iter import (
-    IterDataPipe,
-    Mapper,
-    Filter,
     Demultiplexer,
+    Filter,
     Grouper,
+    IterDataPipe,
     IterKeyZipper,
     JsonParser,
+    Mapper,
     UnBatcher,
 )
-from torchvision.prototype.datasets.utils import (
-    HttpResource,
-    OnlineResource,
-    Dataset,
-)
+from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
 from torchvision.prototype.datasets.utils._internal import (
-    MappingIterator,
-    INFINITE_BUFFER_SIZE,
     getitem,
-    read_categories_file,
-    path_accessor,
     hint_sharding,
     hint_shuffling,
+    INFINITE_BUFFER_SIZE,
+    MappingIterator,
+    path_accessor,
+    read_categories_file,
 )
-from torchvision.prototype.features import BoundingBox, Label, _Feature, EncodedImage
+from torchvision.prototype.features import _Feature, BoundingBox, EncodedImage, Label
 from .._api import register_dataset, register_info
@@ -151,7 +146,7 @@ class Coco(Dataset):
     )
     _META_FILE_PATTERN = re.compile(
-        fr"(?P<annotations>({'|'.join(_ANN_DECODERS.keys())}))_(?P<split>[a-zA-Z]+)(?P<year>\d+)[.]json"
+        rf"(?P<annotations>({'|'.join(_ANN_DECODERS.keys())}))_(?P<split>[a-zA-Z]+)(?P<year>\d+)[.]json"
     )
     def _filter_meta_files(self, data: Tuple[str, Any]) -> bool:
...
 import pathlib
 from typing import Any, Dict, List, Tuple, Union
-from torchdata.datapipes.iter import IterDataPipe, Mapper, Filter
+from torchdata.datapipes.iter import Filter, IterDataPipe, Mapper
 from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
 from torchvision.prototype.datasets.utils._internal import (
-    path_comparator,
     hint_sharding,
     hint_shuffling,
+    path_comparator,
     read_categories_file,
 )
 from torchvision.prototype.features import EncodedImage, Label
...