/* test_layer_lineq.cc for LITE
 * Copyright (C) 2017 Mo Zhou <[email protected]>
 * MIT License
 */
#include <iostream>
#include "tensor.hpp"
#include "blob.hpp"
#include "layer.hpp"
using namespace std;
int
main(void)
{
    // Solve AB = C for A, given B and C; A is played by the layer parameters.
    double b[16] = {1.,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1}; // B: 4x4 identity
    double c[4] = {-4., -2, 2, 4};                        // C: 1x4 target row
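    // Since B is the 4x4 identity, AB = A, so minimizing the MSE between the
    // layer output and C should drive the layer parameters toward C itself.
    // That makes this an easy end-to-end correctness check for LinearLayer.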
    cout << "Initialize Test Net" << endl;
    Blob<double> X (4, 4, "", false); X.setName("X"); X.value.copy(b, 16);
    Blob<double> y (1, 4, "", false); y.setName("y"); y.value.copy(c, 4);
    Blob<double> yhat (1, 4); yhat.setName("yhat"); // network output
    Blob<double> loss (1);    loss.setName("loss"); // scalar MSE loss
    LinearLayer<double> fc1 (1, 4, false);
    MSELoss<double> loss1;
    // dump the initial state; the two flags appear to select value/grad output
    X.dump(true, false);
    y.dump(true, false);
    fc1.W.dump(true, false);
    fc1.b.dump(true, false);
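    // Training loop: forward pass, clear the accumulated gradients, backward
    // pass, report the loss, then take a plain SGD parameter step.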
    for (int iteration = 0; iteration < 50; iteration++) {
        cout << ">> Iteration :: " << iteration << endl;
        // -- forward
        fc1.forward(X, yhat);
        loss1.forward(yhat, loss, y);
        // -- zero the gradients before backprop
        yhat.zeroGrad();
        loss.zeroGrad();
        fc1.zeroGrad();
        // -- backward
        loss1.backward(yhat, loss, y);
        fc1.backward(X, yhat);
        // -- report the current loss
        loss1.report();
        //yhat.dump();
        //fc1.W.dump(false, true);
        //fc1.b.dump(false, true);
        // -- update the parameters with SGD (learning rate 0.5)
        fc1.SGD(5e-1);
    }
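    // After 50 iterations the learned weights should be close to the target
    // row C = (-4, -2, 2, 4); dump the parameters for visual inspection.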
    fc1.W.dump();
    fc1.b.dump();
    return 0;
}
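
// Build sketch (an assumption: tensor.hpp, blob.hpp, and layer.hpp are
// header-only and sit next to this file):
//   g++ -std=c++11 -O2 test_layer_lineq.cc -o test_layer_lineq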