Skip to content

Commit

Permalink
Merge pull request #364 from SYSTRAN/master
Browse files Browse the repository at this point in the history
Make the test for inputGpu emptiness symmetric between the backward and forward passes
  • Loading branch information
soumith authored Nov 2, 2016
2 parents 64224a6 + 98b777b commit aa256bc
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions DataParallelTable.lua
Original file line number Diff line number Diff line change
Expand Up @@ -231,8 +231,8 @@ function DataParallelTable:__backward(method, input, gradOutput, scale)
self:_distribute(self.gradOutputGpu, gradOutput)

self.gradInputGpu = self.impl:exec(function(m, i)
if torch.isTensor(inputGpu[i]) and inputGpu[i]:numel() == 0 then
return torch.CudaTensor()
if not _hasData(inputGpu[i]) then
return inputGpu[i]
else
return m[method](m, inputGpu[i], gradOutputGpu[i], scale)
end
Expand All @@ -246,8 +246,8 @@ function DataParallelTable:__backward(method, input, gradOutput, scale)

if method == 'accGradParameters' then
self.impl:exec(function(m, i)
if torch.isTensor(inputGpu[i]) and inputGpu[i]:numel() == 0 then
return torch.CudaTensor()
if not _hasData(inputGpu[i]) then
return inputGpu[i]
else
return m:accGradParameters(inputGpu[i], gradOutputGpu[i], scale)
end
Expand Down

0 comments on commit aa256bc

Please sign in to comment.