# Loss: cross-entropy for multi-class classification.
# NOTE(review): the original OCR'd line had no assignment target, but
# `loss_func` is used below — the `loss_func =` was evidently lost.
loss_func = nn.CrossEntropyLoss()

# Optimizer: Adam on all network parameters; an SGD alternative is kept
# commented out, as in the original.
optimizer = torch.optim.Adam(net.parameters(), lr=lr)
# optimizer = torch.optim.SGD(net.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)

# Multiply the learning rate by 0.9 every 5 epochs.
scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.9)

# TensorBoard log directory (curly quotes in the source were OCR damage).
if not os.path.exists("logCNN"):
    os.mkdir("logCNN")
writer = tensorboardX.SummaryWriter("logCNN")

for epoch in range(epoch_num):
    # Per-epoch accumulators: total loss plus confusion-matrix counts
    # (correct / false-positive / false-negative / true-positive / true-negative).
    # Their update code is on a later page of the source — not visible here.
    train_sum_loss = 0
    train_sum_correct = 0
    train_sum_fp = 0
    train_sum_fn = 0
    train_sum_tp = 0
    train_sum_tn = 0

    for i, data in enumerate(trainDataLoader):
        net.train()
        inputs, labels = data
        # Insert a channel dimension (presumably (B, H, W) -> (B, 1, H, W)
        # for a Conv2d front end — TODO confirm against the model) and cast
        # to float32 for the network.
        inputs = inputs.unsqueeze(1).to(torch.float32)
        # CrossEntropyLoss requires integer class targets.
        labels = labels.type(torch.LongTensor)
        inputs, labels = inputs.to(device), labels.to(device)

        outputs = net(inputs)
        loss = loss_func(outputs, labels)

        # Standard backprop step. NOTE(review): the source was truncated
        # mid-statement at "l" after optimizer.zero_grad(); completed with
        # the conventional backward()/step() pair — verify against page 5.
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
# [source truncated: this was page 4 of 10 in the original paginated listing;
#  the rest of the training loop (metric accumulation, logging) follows on the next page]