diff --git a/pytorch_nn.py b/pytorch_nn.py
index 64d90ef..3392598 100644
--- a/pytorch_nn.py
+++ b/pytorch_nn.py
@@ -67,7 +67,7 @@ def forward(self, x):
             if batch_idx % log_interval == 0:
                 print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                     epoch, batch_idx * len(data), len(train_loader.dataset),
-                    100. * batch_idx / len(train_loader), loss.data[0]))
+                    100. * batch_idx / len(train_loader), loss.data))
 
     # run a test loop
     test_loss = 0
@@ -77,7 +77,7 @@ def forward(self, x):
         data = data.view(-1, 28 * 28)
         net_out = net(data)
         # sum up batch loss
-        test_loss += criterion(net_out, target).data[0]
+        test_loss += criterion(net_out, target).data
         pred = net_out.data.max(1)[1]  # get the index of the max log-probability
         correct += pred.eq(target.data).sum()
@@ -92,4 +92,4 @@ def forward(self, x):
     if run_opt == 1:
         simple_gradient()
     elif run_opt == 2:
-        create_nn()
\ No newline at end of file
+        create_nn()
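
A minimal sketch of the same scalar-extraction fix, assuming PyTorch >= 0.4 where .item() is the usual way to pull a Python float out of a 0-dim loss tensor. The criterion, tensor shapes, and values below are illustrative stand-ins, not taken from pytorch_nn.py:

    import torch
    import torch.nn as nn

    criterion = nn.CrossEntropyLoss()
    net_out = torch.randn(4, 10)         # stand-in logits: 4 samples, 10 classes
    target = torch.randint(0, 10, (4,))  # stand-in class labels
    loss = criterion(net_out, target)    # 0-dim tensor

    test_loss = 0.0
    test_loss += loss.item()             # accumulate a plain float, not a tensor
    print('Loss: {:.6f}'.format(loss.item()))

Accumulating with .item() keeps test_loss a Python float, so later division and the '{:.6f}' formatting behave the same on every PyTorch version from 0.4 onward.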