Skip to content

Commit

Permalink
Adding Operators (#738)
Browse files Browse the repository at this point in the history
* Adding Floor and Abs Operators in autograd and sonnx

* Update test_operation.py
  • Loading branch information
Shashankwer authored Sep 18, 2020
1 parent 814e2f7 commit fb9be0f
Show file tree
Hide file tree
Showing 3 changed files with 68 additions and 0 deletions.
44 changes: 44 additions & 0 deletions python/singa/autograd.py
Original file line number Diff line number Diff line change
Expand Up @@ -4299,6 +4299,50 @@ def ceil(x):
return Ceil()(x)[0]


class Floor(Operator):
    """
    Element-wise floor operator: produces one output tensor whose entries
    are `y = floor(x)`, applied to the input tensor element-wise.
    """

    def __init__(self):
        super(Floor, self).__init__()

    def forward(self, x):
        """
        Apply floor element-wise.
        Args:
            x (CTensor): input tensor
        Returns:
            the output CTensor with every element rounded down
        """
        return singa.Floor(x)

    def backward(self, dy):
        """
        Gradient of floor; the derivative is zero (almost) everywhere.
        Args:
            dy (CTensor): gradient tensor from the downstream operator
        Returns:
            a zero-filled CTensor with dy's shape and device.
        """
        # Build a fresh zero tensor instead of reusing dy in place.
        dx = singa.Tensor(dy.shape(), dy.device())
        dx.SetFloatValue(0.)
        return dx


def floor(x):
    """
    Compute the element-wise floor of a tensor, `y = floor(x)`.
    Args:
        x(Tensor): input tensor.
    Returns:
        the output tensor whose entries are rounded down
    """
    op = Floor()
    return op(x)[0]


class Split(Operator):
"""
Init a Split, Split a tensor into a list of tensors, along the specified
Expand Down
2 changes: 2 additions & 0 deletions python/singa/sonnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -1089,6 +1089,8 @@ class SingaBackend(Backend):
'Unsqueeze': 'Unsqueeze',
'NonZero': 'NonZero',
'Ceil': 'Ceil',
'Floor': 'Floor',
'Abs': 'Abs',
# special op
'ScatterElements': 'ScatterElements',
'Cast': 'Cast',
Expand Down
22 changes: 22 additions & 0 deletions test/python/test_operation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2881,6 +2881,28 @@ def test_ceil_cpu(self):
def test_ceil_gpu(self):
self.ceil_test(gpu_dev)

def floor_test(self,dev):
X = np.array([-1.9,1.2]).astype(np.float32)
DY = np.ones((2),dtype=np.float32)
y = np.floor(X)
x = tensor.from_numpy(X)
dy = tensor.from_numpy(DY)
x.to_device(dev)
dy.to_device(dev)

result = autograd.floor(x)
dx = result.creator.backward(dy.data)
DX = np.zeros((2),dtype=np.float32)
np.testing.assert_array_almost_equal(tensor.to_numpy(result),y,decimal=5)
np.testing.assert_array_almost_equal(tensor.to_numpy(tensor.from_raw_tensor(dx)),DX,decimal=5)

    def test_floor_cpu(self):
        """Run the floor operator checks on the CPU device."""
        self.floor_test(cpu_dev)

    @unittest.skipIf(not singa_wrap.USE_CUDA, 'CUDA is not enabled')
    def test_floor_gpu(self):
        """Run the floor operator checks on the GPU device (CUDA builds only)."""
        self.floor_test(gpu_dev)

def _test_scatter_elements(self, dev):
# testing witout axis
data = np.zeros((3, 3), dtype=np.float32)
Expand Down

0 comments on commit fb9be0f

Please sign in to comment.