"torchvision/git@developer.sourcefind.cn:OpenDAS/vision.git" did not exist on "5f0532daea1f4bdf5e2c511361b484cdb002b50b"
Unverified commit 6aa0fa30, authored by Nicolas Hug and committed by GitHub

Set allow_redefinition = True for mypy (#4531)



* Allow redefinition for mypy

* appease mypy
Co-authored-by: Philip Meier <github.pmeier@posteo.de>
parent c3062d28
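For context, mypy's `allow_redefinition` option allows a variable to be re-bound with an unrelated type, as long as the new binding is in the same block and nesting level as the original one. A minimal sketch of the pattern this unlocks; the function and variable names below are made up for illustration and are not from torchvision:

```python
from typing import List


def parse_sizes(raw: str) -> List[int]:
    # First binding: mypy infers List[str].
    values = raw.split(",")
    # With allow_redefinition = True this re-binding to List[int] is
    # accepted; under the default settings mypy reports an incompatible
    # assignment here instead.
    values = [int(v) for v in values]
    return values


print(parse_sizes("1,2,3"))  # [1, 2, 3]
```

This is why the prototype-dataset pipelines and the CelebA CSV parsing below can keep re-assigning one name to values of different types without pre-declaring it with a placeholder.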
@@ -3,6 +3,7 @@
 files = torchvision
 show_error_codes = True
 pretty = True
+allow_redefinition = True
 [mypy-torchvision.io._video_opt.*]
......
 import csv
 import os
 from collections import namedtuple
-from functools import partial
 from typing import Any, Callable, List, Optional, Union, Tuple
 import PIL
@@ -115,15 +114,14 @@ class CelebA(VisionDataset):
         filename: str,
         header: Optional[int] = None,
     ) -> CSV:
-        data, indices, headers = [], [], []
-        fn = partial(os.path.join, self.root, self.base_folder)
-        with open(fn(filename)) as csv_file:
+        with open(os.path.join(self.root, self.base_folder, filename)) as csv_file:
             data = list(csv.reader(csv_file, delimiter=" ", skipinitialspace=True))
         if header is not None:
             headers = data[header]
             data = data[header + 1 :]
+        else:
+            headers = []
         indices = [row[0] for row in data]
         data = [row[1:] for row in data]
......
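The `functools.partial` call removed above only pre-bound the leading path components, so inlining `os.path.join` is behaviorally identical. A small sketch of that equivalence, with made-up path values:

```python
import os.path
from functools import partial

root, base_folder, filename = "some_root", "celeba", "list_attr_celeba.txt"

# Pre-binding the leading components, as the removed code did ...
fn = partial(os.path.join, root, base_folder)

# ... produces exactly the same path as a direct os.path.join call.
assert fn(filename) == os.path.join(root, base_folder, filename)
```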
@@ -119,7 +119,7 @@ def parse_devkit_archive(root: str, file: Optional[str] = None) -> None:
     """
     import scipy.io as sio
-    def parse_meta_mat(devkit_root: str) -> Tuple[Dict[int, str], Dict[str, str]]:
+    def parse_meta_mat(devkit_root: str) -> Tuple[Dict[int, str], Dict[str, Tuple[str, ...]]]:
         metafile = os.path.join(devkit_root, "data", "meta.mat")
         meta = sio.loadmat(metafile, squeeze_me=True)["synsets"]
         nums_children = list(zip(*meta))[4]
......
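The return-type fix above reflects that each WordNet ID in the devkit metadata maps to a tuple of class names rather than a single string. A sketch of the shape the corrected annotation describes; the sample entry here is illustrative and is not read from meta.mat:

```python
from typing import Dict, Tuple

# Class names arrive as one comma-separated string per wnid; splitting
# them yields a tuple of names, hence Dict[str, Tuple[str, ...]].
raw: Dict[str, str] = {"n01440764": "tench, Tinca tinca"}

wnid_to_classes: Dict[str, Tuple[str, ...]] = {
    wnid: tuple(names.split(", ")) for wnid, names in raw.items()
}
assert wnid_to_classes["n01440764"] == ("tench", "Tinca tinca")
```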
@@ -117,11 +117,11 @@ class Caltech101(Dataset):
         images_dp, anns_dp = resource_dps
         images_dp = TarArchiveReader(images_dp)
-        images_dp = Filter(images_dp, self._is_not_background_image)
+        images_dp: IterDataPipe = Filter(images_dp, self._is_not_background_image)
         images_dp = Shuffler(images_dp, buffer_size=INFINITE_BUFFER_SIZE)
         anns_dp = TarArchiveReader(anns_dp)
-        anns_dp = Filter(anns_dp, self._is_ann)
+        anns_dp: IterDataPipe = Filter(anns_dp, self._is_ann)
         dp = KeyZipper(
             images_dp,
@@ -136,7 +136,7 @@ class Caltech101(Dataset):
     def generate_categories_file(self, root: Union[str, pathlib.Path]) -> None:
         dp = self.resources(self.default_config)[0].to_datapipe(pathlib.Path(root) / self.name)
         dp = TarArchiveReader(dp)
-        dp = Filter(dp, self._is_not_background_image)
+        dp: IterDataPipe = Filter(dp, self._is_not_background_image)
         dir_names = {pathlib.Path(path).parent.name for path, _ in dp}
         create_categories_file(HERE, self.name, sorted(dir_names))
@@ -185,7 +185,7 @@ class Caltech256(Dataset):
     ) -> IterDataPipe[Dict[str, Any]]:
         dp = resource_dps[0]
         dp = TarArchiveReader(dp)
-        dp = Filter(dp, self._is_not_rogue_file)
+        dp: IterDataPipe = Filter(dp, self._is_not_rogue_file)
         dp = Shuffler(dp, buffer_size=INFINITE_BUFFER_SIZE)
         return Mapper(dp, self._collate_and_decode_sample, fn_kwargs=dict(decoder=decoder))
......
@@ -49,8 +49,8 @@ def from_data_folder(
     root = pathlib.Path(root).expanduser().resolve()
     categories = sorted(entry.name for entry in os.scandir(root) if entry.is_dir())
     masks: Union[List[str], str] = [f"*.{ext}" for ext in valid_extensions] if valid_extensions is not None else ""
-    dp: IterDataPipe = FileLister(str(root), recursive=recursive, masks=masks)
-    dp = Filter(dp, _is_not_top_level_file, fn_kwargs=dict(root=root))
+    dp = FileLister(str(root), recursive=recursive, masks=masks)
+    dp: IterDataPipe = Filter(dp, _is_not_top_level_file, fn_kwargs=dict(root=root))
     dp = Shuffler(dp, buffer_size=INFINITE_BUFFER_SIZE)
     dp = FileLoader(dp)
     return (
......
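The prototype-dataset hunks above all follow the same pattern: a single name (`dp`, `images_dp`, `anns_dp`) is re-bound to successive datapipe wrappers, and one broad `IterDataPipe` annotation is kept where a wider type than the inferred one is wanted. A self-contained sketch of that re-binding style with stand-in classes; `Pipe`, `Source` and `EvenOnly` are invented for illustration and are not torchvision or torchdata APIs:

```python
from typing import Iterable, Iterator


class Pipe:
    """Stand-in for a generic IterDataPipe-like base class."""

    def __iter__(self) -> Iterator[int]:
        raise NotImplementedError


class Source(Pipe):
    """Wraps a plain iterable, the way a file-listing datapipe would."""

    def __init__(self, items: Iterable[int]) -> None:
        self._items = list(items)

    def __iter__(self) -> Iterator[int]:
        return iter(self._items)


class EvenOnly(Pipe):
    """Filters another pipe, the way Filter wraps its input datapipe."""

    def __init__(self, source: Pipe) -> None:
        self._source = source

    def __iter__(self) -> Iterator[int]:
        return (x for x in self._source if x % 2 == 0)


def build_pipeline() -> Pipe:
    # The same name is re-bound at every step.  With allow_redefinition = True
    # each re-binding may change the inferred type, and the explicit `Pipe`
    # annotation widens it back to the base type where that is what the rest
    # of the code should see.
    dp = Source(range(6))
    dp: Pipe = EvenOnly(dp)
    return dp


print(list(build_pipeline()))  # [0, 2, 4]
```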