forked from segfault802/nettalk
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathann.py
More file actions
executable file
·116 lines (101 loc) · 3.33 KB
/
ann.py
File metadata and controls
executable file
·116 lines (101 loc) · 3.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
#!/usr/bin/python2
import math
import copy
import random as r
from misc import *
#from bigfloat import *
#functions for the neural network including update, backpropagation,
#sigmoid, etc.
def sigmoid(value):
    """Logistic activation 1 / (1 + e^-value), numerically stable.

    The naive form math.exp(-value) overflows (OverflowError) for
    value below about -709; branching on the sign keeps the exp()
    argument non-positive so it can only underflow to 0.0.
    """
    if value >= 0:
        return 1.0 / (1.0 + math.exp(-value))
    # For negative inputs use the algebraically equivalent e^v / (1 + e^v).
    e = math.exp(value)
    return e / (1.0 + e)
def dsigmoid(value):
    """Derivative of the logistic sigmoid, numerically stable.

    sigmoid'(v) = e^-v / (1 + e^-v)^2, which is an even function, so
    we may evaluate exp() with a non-positive argument for either sign
    of value. The original form overflowed for value below about -709.
    """
    if value >= 0:
        e = math.exp(-value)
    else:
        e = math.exp(value)
    return e / ((e + 1) * (e + 1))
#update the network, w contains the weights
#v is a ragged array of values for each layer (including input)
def update(w,v):
    """Forward pass: recompute every non-input node's pre-activation in place.

    w -- ragged weight array indexed [layer][node][src_layer][src_node];
         w[i][j] holds the incoming weights of node j in layer i from
         every node in every earlier layer (fully cross-connected).
    v -- ragged per-layer value array (v[0] is the input layer); each
         v[i][j] for i >= 1 is overwritten with the weighted sum of the
         sigmoid-activated values of all earlier layers.
    """
    n = len(v)
    for i in range(1, n):            # each non-input layer
        for j in range(len(v[i])):   # each node in that layer
            total = 0                # avoid shadowing builtin sum()
            for k in range(len(w[i][j])):        # each earlier layer
                for l in range(len(w[i][j][k])): # each source node
                    total += sigmoid(v[k][l]) * w[i][j][k][l]
            v[i][j] = total
#finds the sum of squared errors of the output for a given pattern
def error(output,desired):
    """Return the sum of squared errors for one pattern.

    output  -- raw (pre-activation) output-layer values; each is passed
               through sigmoid before comparison.
    desired -- target values, same length as output.
    Note: this is a plain sum, not divided by n (not a true mean).
    """
    total = 0
    for i in range(len(output)):
        # Compute the difference once instead of twice.
        diff = desired[i] - sigmoid(output[i])
        total += diff * diff
    return total
#parameters are lists
def backpropagate(v,desired,w,gradient):
    """Fill gradient[2] and gradient[1] in place for a fixed 3-layer net.

    Layer indices are hard-coded: 0 = input, 1 = hidden, 2 = output.
    v        -- per-layer pre-activation values
    desired  -- target outputs for this pattern
    w        -- ragged weights indexed [layer][node][src_layer][src_node]
    gradient -- ragged per-layer gradient array, written in place
    """
    # Output layer: delta = (target - activation) * sigmoid'(pre-activation).
    for out in range(len(v[2])):
        gradient[2][out] = (desired[out] - sigmoid(v[2][out])) * dsigmoid(v[2][out])
    # Hidden layer: back-propagate each output delta through its weight.
    for hid in range(len(v[1])):
        acc = 0
        for out in range(len(v[2])):
            acc += gradient[2][out] * w[2][out][1][hid] * dsigmoid(v[1][hid])
        gradient[1][hid] = acc
def compute_delta(delta,gradient,output,a,topology):
    """Blend new gradient information into the weight deltas in place.

    Exponential smoothing (momentum): new = a*old + (1-a)*grad*activation.
    delta    -- ragged 4-D array [layer][node][src_layer][src_node]
    gradient -- per-layer node gradients from backpropagate()
    output   -- per-layer pre-activation values (sigmoid applied here)
    a        -- momentum coefficient in [0, 1]
    topology -- node count per layer
    """
    for layer in range(1, len(topology)):
        for node in range(topology[layer]):
            for src_layer in range(layer):
                for src_node in range(topology[src_layer]):
                    previous = delta[layer][node][src_layer][src_node]
                    blended = previous * a + ((1 - a) * gradient[layer][node] * sigmoid(output[src_layer][src_node]))
                    delta[layer][node][src_layer][src_node] = blended
def update_weight(delta,weights,eta,topology):
    """Apply the accumulated deltas to the weights in place, scaled by eta.

    delta    -- ragged 4-D array [layer][node][src_layer][src_node]
    weights  -- same shape as delta, modified in place
    eta      -- learning rate
    topology -- node count per layer
    """
    for layer in range(1, len(topology)):
        for node in range(topology[layer]):
            for src_layer in range(layer):
                for src_node in range(topology[src_layer]):
                    step = delta[layer][node][src_layer][src_node] * eta
                    weights[layer][node][src_layer][src_node] += step
#allocate all the lists to be used in here,
#topology is a vector
def allocate_lists(topology,v,weights,delta,gradient):
    """Allocate (in place) all zero-initialized network containers.

    topology -- node count per layer, e.g. [in, hidden, out]
    v, gradient -- become per-layer zero vectors: x[i] has topology[i] zeros
    weights, delta -- become ragged 4-D zero arrays indexed
        [layer][node][src_layer][src_node] for layer >= 1; index 0 is an
        empty placeholder so layer indices line up with v/gradient.

    Bug fix: the original appended empty lists to delta/weights in BOTH
    loops, leaving len(topology)-1 unused empty entries dangling at the
    end of each; now each gets exactly len(topology) entries. The used
    indices (0 .. len(topology)-1) are unchanged.
    """
    numLayers = len(topology)
    for i in range(numLayers):
        v.append([0] * topology[i])
        gradient.append([0] * topology[i])
        delta.append([])    # placeholder for layer 0 / filled below for i >= 1
        weights.append([])
    for i in range(1, numLayers):
        for j in range(topology[i]):
            delta[i].append([])
            weights[i].append([])
            for k in range(i):  # node j connects to every earlier layer k
                delta[i][j].append([0] * topology[k])
                weights[i][j].append([0] * topology[k])
def initialize_weights(weights,topology):
    """Fill the weight array in place with small random values.

    Each weight gets a magnitude drawn uniformly from [0, 0.3), negated
    with probability ~0.5 via a second draw. The two r.random() calls
    per weight, in this order, keep seeded runs reproducible.
    """
    for layer in range(1, len(topology)):
        for node in range(topology[layer]):
            for src_layer in range(layer):
                for src_node in range(topology[src_layer]):
                    magnitude = r.random() * .3
                    flip = r.random()
                    if(flip < .5):
                        magnitude = -magnitude
                    weights[layer][node][src_layer][src_node] = magnitude