From 98b777b73b29be83e686103b3c62d7beb7966e80 Mon Sep 17 00:00:00 2001
From: "Jean A. Senellart"
Date: Wed, 2 Nov 2016 09:33:21 +0100
Subject: [PATCH] test on inputGpu emptiness symmetric in backward and forward

---
 DataParallelTable.lua | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/DataParallelTable.lua b/DataParallelTable.lua
index 9b5f3923..c3d7d739 100644
--- a/DataParallelTable.lua
+++ b/DataParallelTable.lua
@@ -231,8 +231,8 @@ function DataParallelTable:__backward(method, input, gradOutput, scale)
       self:_distribute(self.gradOutputGpu, gradOutput)
 
       self.gradInputGpu = self.impl:exec(function(m, i)
-         if torch.isTensor(inputGpu[i]) and inputGpu[i]:numel() == 0 then
-            return torch.CudaTensor()
+         if not _hasData(inputGpu[i]) then
+            return inputGpu[i]
          else
             return m[method](m, inputGpu[i], gradOutputGpu[i], scale)
          end
@@ -246,8 +246,8 @@
 
    if method == 'accGradParameters' then
       self.impl:exec(function(m, i)
-         if torch.isTensor(inputGpu[i]) and inputGpu[i]:numel() == 0 then
-            return torch.CudaTensor()
+         if not _hasData(inputGpu[i]) then
+            return inputGpu[i]
          else
             return m:accGradParameters(inputGpu[i], gradOutputGpu[i], scale)
          end
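
Note: the backward path now reuses the same _hasData predicate that already guards the
forward path, and returns inputGpu[i] itself rather than a fresh torch.CudaTensor(),
so a replica with no data produces a gradInput of the same type and structure as its
input. For reference, a minimal sketch of such a predicate, consistent with the inline
test this patch removes and assuming the usual DataParallelTable convention that an
input is either a tensor or a nested table of tensors (the authoritative version is
the local _hasData defined in DataParallelTable.lua):

   -- Sketch only: returns true if `input` holds at least one non-empty tensor.
   -- An empty tensor (numel() == 0) counts as having no data; table inputs are
   -- searched recursively so that nested input structures are handled too.
   local function _hasData(input)
      if torch.isTensor(input) then
         return input:numel() ~= 0
      else
         assert(type(input) == 'table')
         for i = 1, #input do
            if _hasData(input[i]) then
               return true
            end
         end
         return false
      end
   end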