import torch
import torch.utils.data as Data

# Fix the random seed so the shuffled batch order is reproducible across runs.
torch.manual_seed(1)  # reproducible

# Mini-batch size; the dataset has 10 samples, so 5 gives 2 full batches per epoch.
BATCH_SIZE = 5  # BATCH_SIZE = 8

x = torch.linspace(1, 10, 10)  # this is x data (torch tensor): 1..10
y = torch.linspace(10, 1, 10)  # this is y data (torch tensor): 10..1

# Wrap the tensors so DataLoader yields aligned (x[i], y[i]) pairs.
torch_dataset = Data.TensorDataset(x, y)

loader = Data.DataLoader(
    dataset=torch_dataset,  # torch TensorDataset format
    batch_size=BATCH_SIZE,  # mini batch size
    shuffle=True,           # random shuffle for training
    num_workers=2,          # subprocesses for loading data
)


def show_batch():
    """Iterate the whole dataset 3 times, printing every mini-batch."""
    for epoch in range(3):  # train entire dataset 3 times
        for step, (batch_x, batch_y) in enumerate(loader):  # for each training step
            # train your data...
            print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
                  batch_x.numpy(), '| batch y: ', batch_y.numpy())


if __name__ == '__main__':
    show_batch()
运行结果:
BATCH_SIZE = 5, shuffle=False
import torch
import torch.utils.data as Data

# Fix the random seed so results are reproducible across runs.
torch.manual_seed(1)  # reproducible

# Mini-batch size; the dataset has 10 samples, so 5 gives 2 full batches per epoch.
BATCH_SIZE = 5  # BATCH_SIZE = 8

x = torch.linspace(1, 10, 10)  # this is x data (torch tensor): 1..10
y = torch.linspace(10, 1, 10)  # this is y data (torch tensor): 10..1

# Wrap the tensors so DataLoader yields aligned (x[i], y[i]) pairs.
torch_dataset = Data.TensorDataset(x, y)

loader = Data.DataLoader(
    dataset=torch_dataset,  # torch TensorDataset format
    batch_size=BATCH_SIZE,  # mini batch size
    shuffle=False,          # keep original sample order (no shuffling)
    num_workers=2,          # subprocesses for loading data
)


def show_batch():
    """Iterate the whole dataset 3 times, printing every mini-batch."""
    for epoch in range(3):  # train entire dataset 3 times
        for step, (batch_x, batch_y) in enumerate(loader):  # for each training step
            # train your data...
            print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
                  batch_x.numpy(), '| batch y: ', batch_y.numpy())


if __name__ == '__main__':
    show_batch()
运行结果:
BATCH_SIZE = 8, shuffle=True
import torch
import torch.utils.data as Data

# Fix the random seed so the shuffled batch order is reproducible across runs.
torch.manual_seed(1)  # reproducible

# Mini-batch size; with 10 samples, 8 gives one full batch plus a
# final partial batch of 2 each epoch.
# BATCH_SIZE = 5
BATCH_SIZE = 8

x = torch.linspace(1, 10, 10)  # this is x data (torch tensor): 1..10
y = torch.linspace(10, 1, 10)  # this is y data (torch tensor): 10..1

# Wrap the tensors so DataLoader yields aligned (x[i], y[i]) pairs.
torch_dataset = Data.TensorDataset(x, y)

loader = Data.DataLoader(
    dataset=torch_dataset,  # torch TensorDataset format
    batch_size=BATCH_SIZE,  # mini batch size
    shuffle=True,           # random shuffle for training
    num_workers=2,          # subprocesses for loading data
)


def show_batch():
    """Iterate the whole dataset 3 times, printing every mini-batch."""
    for epoch in range(3):  # train entire dataset 3 times
        for step, (batch_x, batch_y) in enumerate(loader):  # for each training step
            # train your data...
            print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
                  batch_x.numpy(), '| batch y: ', batch_y.numpy())


if __name__ == '__main__':
    show_batch()
运行结果:
BATCH_SIZE = 8, shuffle=False
import torch
import torch.utils.data as Data

# Fix the random seed so results are reproducible across runs.
torch.manual_seed(1)  # reproducible

# Mini-batch size; with 10 samples, 8 gives one full batch plus a
# final partial batch of 2 each epoch.
# BATCH_SIZE = 5
BATCH_SIZE = 8

x = torch.linspace(1, 10, 10)  # this is x data (torch tensor): 1..10
y = torch.linspace(10, 1, 10)  # this is y data (torch tensor): 10..1

# Wrap the tensors so DataLoader yields aligned (x[i], y[i]) pairs.
torch_dataset = Data.TensorDataset(x, y)

loader = Data.DataLoader(
    dataset=torch_dataset,  # torch TensorDataset format
    batch_size=BATCH_SIZE,  # mini batch size
    shuffle=False,          # keep original sample order (no shuffling)
    num_workers=2,          # subprocesses for loading data
)


def show_batch():
    """Iterate the whole dataset 3 times, printing every mini-batch."""
    for epoch in range(3):  # train entire dataset 3 times
        for step, (batch_x, batch_y) in enumerate(loader):  # for each training step
            # train your data...
            print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
                  batch_x.numpy(), '| batch y: ', batch_y.numpy())


if __name__ == '__main__':
    show_batch()
简介 上一篇我们批量执行完用例后,生成的测试报告是文本形式的,不够直观,而且报告一般都是发给leader的,所以最好是直观一目了然,为了更好的展示测试报告,最好是生成 HTML 格式的。unittest 里面是不能生成…