# See the official PyTorch documentation for DataLoader,
# in particular the meaning of the batch_size argument.
# Demonstrate DataLoader batching: batch_size=4 stacks 4 samples per iteration.
import torchvision
from torch.utils.data import DataLoader

# Raw string for the Windows path: in a plain literal, '\P' and '\T' are
# invalid escape sequences (DeprecationWarning today, SyntaxError in future
# Python versions).
test_data = torchvision.datasets.CIFAR10(
    r'D:\Pytorch\pythonProject\Transform\dataset',
    train=False,
    transform=torchvision.transforms.ToTensor(),
)
test_loader = DataLoader(dataset=test_data, batch_size=4, shuffle=False,
                         num_workers=0, drop_last=False)

# Inspect one raw sample: img is a single image tensor, target its class index.
img, target = test_data[0]
print(img.shape)
print(target)

# Each loader iteration yields a batch: imgs gains a leading batch dimension
# of 4, and targets is a tensor of the 4 corresponding labels.
for data in test_loader:
    imgs, targets = data
    print(imgs.shape)
    print(targets)
# Same dataset, but batch_size=64 with shuffle=True; log every batch of each
# epoch to TensorBoard so the effect of shuffling can be inspected visually.
import torchvision
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

# Raw string for the Windows path — avoids invalid escape sequences.
test_data = torchvision.datasets.CIFAR10(
    r'D:\Pytorch\pythonProject\Transform\dataset',
    train=False,
    transform=torchvision.transforms.ToTensor(),
)
# drop_last=True discards the final partial batch when the dataset size is
# not a multiple of 64.
test_loader = DataLoader(dataset=test_data, batch_size=64, shuffle=True,
                         num_workers=0, drop_last=True)

img, target = test_data[0]
print(img.shape)
print(target)

# Write image grids under runs tagged per epoch; step indexes the batches.
writer = SummaryWriter('dataloader')
for epoch in range(2):
    step = 0
    for data in test_loader:
        imgs, targets = data
        writer.add_images('Epoch: {}'.format(epoch), imgs, step)
        step += 1
# Close after both epochs so all pending events are flushed to disk.
writer.close()
# With shuffle=True the dataset order is re-randomized at the start of each
# epoch, so the batches differ between epochs.