Commit 7ab42043 authored by Soumith Chintala's avatar Soumith Chintala
Browse files

cifar debugging

parent eeacb39b
@@ -2,10 +2,10 @@
import torch
import torchvision.datasets as dset
import torchvision.transforms as transforms
print('\n\nCifar 10')
a = dset.CIFAR10(root="abc/def/ghi", download=True)
print(a[3])
# print('\n\nCifar 100')
# a = dset.CIFAR100(root="abc/def/ghi", download=True)
@@ -14,10 +14,15 @@
dataset = dset.CIFAR10(root='cifar', download=True, transform=transforms.ToTensor())
dataloader = torch.utils.data.DataLoader(dataset, batch_size=1,
                                         shuffle=True, num_workers=2)
# Smoke-test the DataLoader: print the first few batches, then stop.
# NOTE(review): assumes `dataloader` yields batches indefinitely or at
# least 11 times; stops after batch index 10 (11 batches total).
last_batch_index = 10
for batch_index, batch in enumerate(dataloader):
    print(batch)
    if batch_index == last_batch_index:
        break
# miter = dataloader.__iter__() # miter = dataloader.__iter__()
# def getBatch(): # def getBatch():
# global miter # global miter
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment