-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathgrammar.ne
100 lines (85 loc) · 5.57 KB
/
grammar.ne
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
@{%
// Token matchers for nearley's `%token` syntax: each exposes a `test(x)`
// predicate the parser calls on a lexed token to decide whether it can
// fill that slot in a production.

// Matcher for tokens of exactly one lexer type.
const typeIs = type => ({test: x => x.type === type});

// Matcher for a `variable` token whose resolved `match` is one of the
// given kinds. The loose `!= null` guard also rejects `undefined`, so a
// variable with no match can never throw a TypeError inside the
// predicate (the original strict `!== null` let `undefined` through to
// the `.type` access).
const variableOf = matchTypes => ({
    test: x => x.type === 'variable'
        && x.value.match != null
        && matchTypes.indexOf(x.value.match.type) !== -1
});

const tokenScalar = variableOf(['Scalar']);
const tokenMatrix = variableOf(['Matrix', 'Vector']); // vectors are handled by the matrix grammar
const tokenPlus = typeIs('plus');
const tokenTimes = typeIs('times');
const tokenDivides = typeIs('divides');
const tokenNorm = typeIs('norm');
const tokenNumber = typeIs('number');
const tokenMinus = typeIs('minus');
const tokenPower = typeIs('power');
const tokenTranspose = typeIs('transpose');
const tokenLparen = typeIs('lparen');
const tokenRparen = typeIs('rparen');
const tokenLbracket = typeIs('lbracket');
const tokenRbracket = typeIs('rbracket');
const tokenRelu = typeIs('relu');
const tokenSin = typeIs('sin');
const tokenCos = typeIs('cos');
const tokenSigmoid = typeIs('sigmoid');
const tokenTanh = typeIs('tanh');
const tokenSqrt = typeIs('sqrt');
const tokenAbs = typeIs('abs');
const tokenOnehot = typeIs('onehot');
const tokenSoftmax = typeIs('softmax');
const tokenUnderscore = typeIs('underscore');
const tokenComma = typeIs('comma');
%}
# Grammar entry point: a complete input is a single scalar-valued expression.
main -> sAS {% id %}
# Scalar sub-grammar. Each nonterminal is one precedence level, binding
# tightest to loosest: sP (primary / parenthesised) > sE (power,
# right-associative) > sMD (multiply/divide, left-associative) > sAS
# (add/subtract, with a unary-minus alternative).
# NOTE: every postprocessor returns a thunk (() => ...), so no tf.js op
# runs at parse time — evaluation happens when the final result is called.
sP -> %tokenLparen sAS %tokenRparen {% ([l, s, r]) => s %}
	| s {% id %}
# Power: recursing into sE on the right makes ^ right-associative.
sE -> sP %tokenPower sE {% ([fst, _, snd]) => (() => tf.pow(fst(), snd())) %}
	| sP {% id %}
# Multiplication/division; the bare `sMD sE` alternative implements
# implicit multiplication by juxtaposition ("2 x" means "2 * x").
sMD -> sMD %tokenTimes sE {% ([fst, _, snd]) => (() => tf.mul(fst(), snd())) %}
	| sMD %tokenDivides sE {% ([fst, _, snd]) => (() => tf.div(fst(), snd())) %}
	| sMD sE {% ([fst, snd]) => (() => tf.mul(fst(), snd())) %}
	| sE {% id %}
# Addition/subtraction; unary minus is realised as tf.sub(0, x).
sAS -> sAS %tokenPlus sMD {% ([fst, _, snd]) => (() => tf.add(fst(), snd())) %}
	| sAS %tokenMinus sMD {% ([fst, _, snd]) => (() => tf.sub(fst(), snd())) %}
	| %tokenMinus sMD {% ([_, snd]) => (() => tf.sub(0, snd())) %}
	| sMD {% id %}
# Primary scalars: a bound Scalar variable (its tf.js tensor is read from
# token.value.tfvar), unary functions applied to a scalar expression,
# |x| between two abs delimiters, the norm ||M||_p of a matrix
# expression (order p parsed from the subscript number), or a numeric
# literal.
s -> %tokenScalar {% ([s]) => (() => s.value.tfvar) %}
	| %tokenRelu %tokenLparen sAS %tokenRparen {% ([f, l, s, r]) => (() => tf.relu(s())) %}
	| %tokenSin %tokenLparen sAS %tokenRparen {% ([f, l, s, r]) => (() => tf.sin(s())) %}
	| %tokenCos %tokenLparen sAS %tokenRparen {% ([f, l, s, r]) => (() => tf.cos(s())) %}
	| %tokenSigmoid %tokenLparen sAS %tokenRparen {% ([f, l, s, r]) => (() => tf.sigmoid(s())) %}
	| %tokenTanh %tokenLparen sAS %tokenRparen {% ([f, l, s, r]) => (() => tf.tanh(s())) %}
	| %tokenSqrt %tokenLparen sAS %tokenRparen {% ([f, l, s, r]) => (() => tf.sqrt(s())) %}
	| %tokenAbs sAS %tokenAbs {% ([l, s, r]) => (() => tf.abs(s())) %}
	| %tokenNorm mAS %tokenNorm %tokenUnderscore %tokenNumber {% ([l, m, r, u, o]) => (() => tf.norm(m(), parseFloat(o.value))) %}
	| %tokenNumber {% ([n]) => (() => parseFloat(n.value)) %}
# Matrix sub-grammar, mirroring the scalar precedence ladder:
# mP (primary / parenthesised) > mE (power or transpose) > mMD (matrix
# product) > smMD (mixed scalar*matrix products) > mAS (add/subtract
# with unary minus). All postprocessors return thunks, so tf.js ops only
# run when the parse result is invoked.
mP -> %tokenLparen mAS %tokenRparen {% ([l, m, r]) => m %}
	| m {% id %}
# M ^ s is elementwise tf.pow; M ^ T (the transpose token) is tf.transpose.
mE -> mP %tokenPower sP {% ([fst, _, snd]) => (() => tf.pow(fst(), snd())) %}
	| mP %tokenPower %tokenTranspose {% ([fst, _, t]) => (() => tf.transpose(fst())) %}
	| mP {% id %}
# Matrix-matrix products use tf.matMul; bare juxtaposition (mMD mE) is
# an implicit matrix product.
mMD -> mMD %tokenTimes mE {% ([fst, _, snd]) => (() => tf.matMul(fst(), snd())) %}
	| mMD mE {% ([fst, snd]) => (() => tf.matMul(fst(), snd())) %}
	| mE {% id %}
# Mixed scalar*matrix products: scalars broadcast elementwise via
# tf.mul / tf.div on either side of the matrix chain.
smMD -> sE %tokenTimes smMD {% ([fst, _, snd]) => (() => tf.mul(fst(), snd())) %}
	| smMD %tokenTimes sE {% ([fst, _, snd]) => (() => tf.mul(fst(), snd())) %}
	| smMD %tokenDivides sE {% ([fst, _, snd]) => (() => tf.div(fst(), snd())) %}
	| sE smMD {% ([fst, snd]) => (() => tf.mul(fst(), snd())) %}
	| mMD {% id %}
# Matrix addition/subtraction; unary minus is realised as tf.sub(0, M).
mAS -> mAS %tokenPlus smMD {% ([fst, _, snd]) => (() => tf.add(fst(), snd())) %}
	| mAS %tokenMinus smMD {% ([fst, _, snd]) => (() => tf.sub(fst(), snd())) %}
	| %tokenMinus smMD {% ([_, snd]) => (() => tf.sub(0, snd())) %}
	| smMD {% id %}
# Primary matrices: a bound Matrix/Vector variable (tensor read from
# token.value.tfvar), elementwise unary functions over a matrix
# expression, |M| (elementwise abs), softmax, one-hot encoding (indices
# cast to int, depth parsed as a base-10 integer), or a bracketed
# literal. [s1, s2, ...] builds a 1-D tensor from scalar values;
# [v1, v2, ...] stacks tensor-valued elements along a new leading axis.
# BUG FIX: the vector-literal case previously used tf.tensor1d, which
# only accepts numbers/TypedArray — an array of tensors needs tf.stack.
m -> %tokenMatrix {% ([m]) => (() => m.value.tfvar) %}
	| %tokenRelu %tokenLparen mAS %tokenRparen {% ([f, l, m, r]) => (() => tf.relu(m())) %}
	| %tokenSin %tokenLparen mAS %tokenRparen {% ([f, l, m, r]) => (() => tf.sin(m())) %}
	| %tokenCos %tokenLparen mAS %tokenRparen {% ([f, l, m, r]) => (() => tf.cos(m())) %}
	| %tokenSigmoid %tokenLparen mAS %tokenRparen {% ([f, l, m, r]) => (() => tf.sigmoid(m())) %}
	| %tokenTanh %tokenLparen mAS %tokenRparen {% ([f, l, m, r]) => (() => tf.tanh(m())) %}
	| %tokenSqrt %tokenLparen mAS %tokenRparen {% ([f, l, m, r]) => (() => tf.sqrt(m())) %}
	| %tokenAbs mAS %tokenAbs {% ([l, m, r]) => (() => tf.abs(m())) %}
	| %tokenSoftmax %tokenLparen mAS %tokenRparen {% ([f, l, m, r]) => (() => tf.softmax(m())) %}
	| %tokenOnehot %tokenLparen mAS %tokenComma %tokenNumber %tokenRparen {% ([f, l, m, c, n, r]) => (() => tf.oneHot(m().toInt(), parseInt(n.value, 10))) %}
	| %tokenLbracket scalarSequence %tokenRbracket {% ([l, seq, r]) => (() => tf.tensor1d(seq())) %}
	| %tokenLbracket vectorSequence %tokenRbracket {% ([l, seq, r]) => (() => tf.stack(seq())) %}
# Comma-separated operand lists for bracketed literals. Each
# postprocessor yields a thunk that evaluates every element and returns
# them as a plain array, built head-first down the recursion.
scalarSequence -> sAS %tokenComma scalarSequence {% ([head, comma, rest]) => (() => [head(), ...rest()]) %}
	| sAS {% ([only]) => (() => [only()]) %}
vectorSequence -> mAS %tokenComma vectorSequence {% ([head, comma, rest]) => (() => [head(), ...rest()]) %}
	| mAS {% ([only]) => (() => [only()]) %}
# Either flavour of sequence, forwarded unchanged.
sequence -> scalarSequence {% ([inner]) => (() => inner()) %}
	| vectorSequence {% ([inner]) => (() => inner()) %}