Relu.h
#ifndef LIBDL_RELU_H
#define LIBDL_RELU_H

#include <cstdint>
#include <memory>
#include <optional>

#include "CNode.h"
#include "../Utils.h"

template <typename D, std::int64_t R>
class Relu : public CNode<D, R> {
public:
    Relu(
            const std::shared_ptr<Tensor<D, R>> &x,
            const std::shared_ptr<Tensor<D, R>> &result)
            : CNode<D, R>(Utils::removeOption<std::shared_ptr<CNodeBase>>({x->gradFn}), result),
              x(x->data),
              cx(x->gradFn) {}

    /**
     * \brief Applies the ReLU function elementwise.
     *
     * \param x tensor of any shape
     *
     * \return a new tensor with the same shape as x in which all negative values are set to zero
     */
    static std::shared_ptr<Tensor<D, R>> relu(
            const std::shared_ptr<Tensor<D, R>> &x) {
        // max(x, 0): keep non-negative entries, replace negative ones with zero
        auto tmp = (*x->data >= x->data->constant(0)).select(*x->data, x->data->constant(0));
        auto result = std::make_shared<Tensor<D, R>>(tmp, x->data->dimensions());
        // Register this node in the autograd graph only if a gradient is actually needed
        if (x->needsGradient() && !CNodeBase::noGrad)
            result->setGradFn(std::make_shared<Relu<D, R>>(x, result));
        return result;
    }

    /**
     * \brief Propagates the incoming gradient to the input: the gradient
     * passes through where x >= 0 and is zero elsewhere.
     */
    void computeGradients() override {
        if (cx.has_value()) {
            auto grad = (*x >= x->constant(0)).select(*CNode<D, R>::grad, x->constant(0));
            cx.value()->addGrad(grad);
        }
        CNode<D, R>::finishComputeGradient();
    }

private:
    std::shared_ptr<Eigen::Tensor<D, R>> x;
    std::optional<std::shared_ptr<CNode<D, R>>> cx;
};

#endif //LIBDL_RELU_H
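
A minimal usage sketch, assuming the surrounding libdl API: that `Tensor<D, R>` wraps an `Eigen::Tensor` and can be constructed from a tensor plus its dimensions (mirroring the construction used inside `relu` above), and that the backward pass is driven elsewhere through `computeGradients()`. The exact `Tensor` constructor shown here is an assumption for illustration, not confirmed by this header.

#include <memory>
#include <unsupported/Eigen/CXX11/Tensor>
#include "Relu.h"

int main() {
    // Build a small 2x3 Eigen tensor with a mix of negative and positive values.
    Eigen::Tensor<float, 2> raw(2, 3);
    raw.setValues({{-1.f, 0.f, 2.f}, {3.f, -4.f, 5.f}});

    // Hypothetical construction of a libdl tensor; the (tensor, dimensions)
    // constructor is assumed based on how `relu` builds its result.
    auto x = std::make_shared<Tensor<float, 2>>(raw, raw.dimensions());

    // Forward pass: negative entries become 0, the rest are kept unchanged.
    auto y = Relu<float, 2>::relu(x);

    // If x requires a gradient (and noGrad is not set), y's gradFn now points
    // at a Relu node, and a later backward pass will call computeGradients() on it.
    return 0;
}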