Unverified commit 6ca9c76a authored by Philip Meier, committed by GitHub

Upgrade usort to `1.0.2` and black to `22.3.0` (#5106)



* upgrade usort to

* Also update black

* Actually use 1.0.2

* Apply pre-commit
Co-authored-by: Nicolas Hug <contact@nicolas-hug.com>
parent 9293be7e
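
Both tool upgrades produce purely mechanical changes in the hunks below: usort `1.0.2` re-sorts imports case-insensitively and merges consecutive `from . import ...` lines (see the five-line block collapsed into one below), while the new black style hugs the `**` operator when both operands are simple names or literals, which accounts for every `** 2` / `** 3` -> `**2` / `**3` change. As a minimal illustrative sketch (not part of the commit) of the black rule, using black's public `format_str` API on a shortened variant of a line from the `drop_block2d` hunk:

```python
import black

# black 22.3.0 hugs the power operator when both operands are simple,
# which is what turns `block_size ** 2` into `block_size**2` below.
src = "gamma = (p * H * W) / ((block_size ** 2) * (H - block_size + 1))\n"
print(black.format_str(src, mode=black.Mode()))
# -> gamma = (p * H * W) / ((block_size**2) * (H - block_size + 1))
```
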
@@ -6,7 +6,7 @@ from torch import Tensor
from torchvision.extension import _assert_has_ops
from ..utils import _log_api_usage_once
from ._box_convert import _box_cxcywh_to_xyxy, _box_xyxy_to_cxcywh, _box_xywh_to_xyxy, _box_xyxy_to_xywh
from ._box_convert import _box_cxcywh_to_xyxy, _box_xywh_to_xyxy, _box_xyxy_to_cxcywh, _box_xyxy_to_xywh
from ._utils import _upcast
@@ -331,7 +331,7 @@ def complete_box_iou(boxes1: Tensor, boxes2: Tensor, eps: float = 1e-7) -> Tenso
w_gt = boxes2[:, 2] - boxes2[:, 0]
h_gt = boxes2[:, 3] - boxes2[:, 1]
v = (4 / (torch.pi ** 2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
v = (4 / (torch.pi**2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
with torch.no_grad():
alpha = v / (1 - iou + v + eps)
return diou - alpha * v
......
@@ -58,7 +58,7 @@ def complete_box_iou_loss(
h_pred = y2 - y1
w_gt = x2g - x1g
h_gt = y2g - y1g
v = (4 / (torch.pi ** 2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
v = (4 / (torch.pi**2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)
with torch.no_grad():
alpha = v / (1 - iou + v + eps)
......
@@ -37,7 +37,7 @@ def drop_block2d(
N, C, H, W = input.size()
block_size = min(block_size, W, H)
# compute the gamma of Bernoulli distribution
gamma = (p * H * W) / ((block_size ** 2) * ((H - block_size + 1) * (W - block_size + 1)))
gamma = (p * H * W) / ((block_size**2) * ((H - block_size + 1) * (W - block_size + 1)))
noise = torch.empty((N, C, H - block_size + 1, W - block_size + 1), dtype=input.dtype, device=input.device)
noise.bernoulli_(gamma)
@@ -83,7 +83,7 @@ def drop_block3d(
N, C, D, H, W = input.size()
block_size = min(block_size, D, H, W)
# compute the gamma of Bernoulli distribution
gamma = (p * D * H * W) / ((block_size ** 3) * ((D - block_size + 1) * (H - block_size + 1) * (W - block_size + 1)))
gamma = (p * D * H * W) / ((block_size**3) * ((D - block_size + 1) * (H - block_size + 1) * (W - block_size + 1)))
noise = torch.empty(
(N, C, D - block_size + 1, H - block_size + 1, W - block_size + 1), dtype=input.dtype, device=input.device
)
......
from collections import OrderedDict
from typing import Tuple, List, Dict, Callable, Optional
from typing import Callable, Dict, List, Optional, Tuple
import torch.nn.functional as F
from torch import nn, Tensor
......
import torch
from ..utils import _log_api_usage_once
from ._utils import _upcast_non_float, _loss_inter_union
from ._utils import _loss_inter_union, _upcast_non_float
def generalized_box_iou_loss(
......
import warnings
from typing import Callable, List, Optional, Union, Tuple, Sequence
from typing import Callable, List, Optional, Sequence, Tuple, Union
import torch
from torch import Tensor
......
import warnings
from typing import Optional, List, Dict, Tuple, Union
from typing import Dict, List, Optional, Tuple, Union
import torch
import torch.fx
......
@@ -4,7 +4,7 @@ from torch.nn.modules.utils import _pair
from torchvision.extension import _assert_has_ops
from ..utils import _log_api_usage_once
from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
def ps_roi_align(
......
@@ -4,7 +4,7 @@ from torch.nn.modules.utils import _pair
from torchvision.extension import _assert_has_ops
from ..utils import _log_api_usage_once
from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
def ps_roi_pool(
......
@@ -7,7 +7,7 @@ from torch.nn.modules.utils import _pair
from torchvision.extension import _assert_has_ops
from ..utils import _log_api_usage_once
from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
def roi_align(
......
@@ -7,7 +7,7 @@ from torch.nn.modules.utils import _pair
from torchvision.extension import _assert_has_ops
from ..utils import _log_api_usage_once
from ._utils import convert_boxes_to_roi_format, check_roi_boxes_shape
from ._utils import check_roi_boxes_shape, convert_boxes_to_roi_format
def roi_pool(
......
from . import datasets
from . import features
from . import models
from . import transforms
from . import utils
from . import datasets, features, models, transforms, utils
import pathlib
from typing import Any, Dict, List, Callable, Type, Optional, Union, TypeVar
from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union
from torchvision.prototype.datasets import home
from torchvision.prototype.datasets.utils import Dataset
......
@@ -11,7 +11,7 @@ from .fer2013 import FER2013
from .food101 import Food101
from .gtsrb import GTSRB
from .imagenet import ImageNet
from .mnist import MNIST, FashionMNIST, KMNIST, EMNIST, QMNIST
from .mnist import EMNIST, FashionMNIST, KMNIST, MNIST, QMNIST
from .oxford_iiit_pet import OxfordIIITPet
from .pcam import PCAM
from .sbd import SBD
......
import pathlib
import re
from typing import Any, Dict, List, Tuple, BinaryIO, Union
from typing import Any, BinaryIO, Dict, List, Tuple, Union
import numpy as np
from torchdata.datapipes.iter import (
IterDataPipe,
Mapper,
Filter,
IterKeyZipper,
)
from torchdata.datapipes.iter import Filter, IterDataPipe, IterKeyZipper, Mapper
from torchvision.prototype.datasets.utils import Dataset, GDriveResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
INFINITE_BUFFER_SIZE,
read_mat,
hint_sharding,
hint_shuffling,
INFINITE_BUFFER_SIZE,
read_categories_file,
read_mat,
)
from torchvision.prototype.features import Label, BoundingBox, _Feature, EncodedImage
from torchvision.prototype.features import _Feature, BoundingBox, EncodedImage, Label
from .._api import register_dataset, register_info
......
import csv
import pathlib
from typing import Any, Dict, List, Optional, Tuple, Iterator, Sequence, BinaryIO, Union
from torchdata.datapipes.iter import (
IterDataPipe,
Mapper,
Filter,
Zipper,
IterKeyZipper,
)
from torchvision.prototype.datasets.utils import (
Dataset,
GDriveResource,
OnlineResource,
)
from typing import Any, BinaryIO, Dict, Iterator, List, Optional, Sequence, Tuple, Union
from torchdata.datapipes.iter import Filter, IterDataPipe, IterKeyZipper, Mapper, Zipper
from torchvision.prototype.datasets.utils import Dataset, GDriveResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
INFINITE_BUFFER_SIZE,
getitem,
path_accessor,
hint_sharding,
hint_shuffling,
INFINITE_BUFFER_SIZE,
path_accessor,
)
from torchvision.prototype.features import EncodedImage, _Feature, Label, BoundingBox
from torchvision.prototype.features import _Feature, BoundingBox, EncodedImage, Label
from .._api import register_dataset, register_info
......
@@ -2,22 +2,18 @@ import abc
import io
import pathlib
import pickle
from typing import Any, Dict, List, Optional, Tuple, Iterator, cast, BinaryIO, Union
from typing import Any, BinaryIO, cast, Dict, Iterator, List, Optional, Tuple, Union
import numpy as np
from torchdata.datapipes.iter import (
IterDataPipe,
Filter,
Mapper,
)
from torchdata.datapipes.iter import Filter, IterDataPipe, Mapper
from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
hint_sharding,
hint_shuffling,
path_comparator,
hint_sharding,
read_categories_file,
)
from torchvision.prototype.features import Label, Image
from torchvision.prototype.features import Image, Label
from .._api import register_dataset, register_info
......
import pathlib
from typing import Any, Dict, List, Optional, Tuple, BinaryIO, Union
from typing import Any, BinaryIO, Dict, List, Optional, Tuple, Union
from torchdata.datapipes.iter import IterDataPipe, Mapper, Filter, IterKeyZipper, Demultiplexer, JsonParser, UnBatcher
from torchdata.datapipes.iter import Demultiplexer, Filter, IterDataPipe, IterKeyZipper, JsonParser, Mapper, UnBatcher
from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
INFINITE_BUFFER_SIZE,
getitem,
hint_sharding,
hint_shuffling,
path_comparator,
INFINITE_BUFFER_SIZE,
path_accessor,
getitem,
path_comparator,
)
from torchvision.prototype.features import Label, EncodedImage
from torchvision.prototype.features import EncodedImage, Label
from .._api import register_dataset, register_info
......
import pathlib
import re
from collections import OrderedDict
from collections import defaultdict
from typing import Any, Dict, List, Optional, Tuple, cast, BinaryIO, Union
from collections import defaultdict, OrderedDict
from typing import Any, BinaryIO, cast, Dict, List, Optional, Tuple, Union
import torch
from torchdata.datapipes.iter import (
IterDataPipe,
Mapper,
Filter,
Demultiplexer,
Filter,
Grouper,
IterDataPipe,
IterKeyZipper,
JsonParser,
Mapper,
UnBatcher,
)
from torchvision.prototype.datasets.utils import (
HttpResource,
OnlineResource,
Dataset,
)
from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
MappingIterator,
INFINITE_BUFFER_SIZE,
getitem,
read_categories_file,
path_accessor,
hint_sharding,
hint_shuffling,
INFINITE_BUFFER_SIZE,
MappingIterator,
path_accessor,
read_categories_file,
)
from torchvision.prototype.features import BoundingBox, Label, _Feature, EncodedImage
from torchvision.prototype.features import _Feature, BoundingBox, EncodedImage, Label
from .._api import register_dataset, register_info
@@ -151,7 +146,7 @@ class Coco(Dataset):
)
_META_FILE_PATTERN = re.compile(
fr"(?P<annotations>({'|'.join(_ANN_DECODERS.keys())}))_(?P<split>[a-zA-Z]+)(?P<year>\d+)[.]json"
rf"(?P<annotations>({'|'.join(_ANN_DECODERS.keys())}))_(?P<split>[a-zA-Z]+)(?P<year>\d+)[.]json"
)
def _filter_meta_files(self, data: Tuple[str, Any]) -> bool:
......
import pathlib
from typing import Any, Dict, List, Tuple, Union
from torchdata.datapipes.iter import IterDataPipe, Mapper, Filter
from torchdata.datapipes.iter import Filter, IterDataPipe, Mapper
from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import (
path_comparator,
hint_sharding,
hint_shuffling,
path_comparator,
read_categories_file,
)
from torchvision.prototype.features import EncodedImage, Label
......