44
44
def error_propagation(self, errors, out_weights):
    """Set this neuron's error to the weighted sum of downstream errors.

    errors      -- error terms of the neurons in the next layer
    out_weights -- weights on this neuron's outgoing connections to them
    """
    self.error = sum([err*ow for (err, ow) in zip(errors, out_weights)])
def error_propagation2(self, next_layer, position):
    """Layer-aware error propagation.

    position   -- index of this neuron within its own layer
    next_layer -- the layer in front of this neuron; each of its neurons
                  carries an `error` and a `weights` list indexed by the
                  neurons of the current layer

    This neuron's error is the sum over the next layer's neurons of
    (weight feeding from this neuron) * (that neuron's error).
    """
    self.error = sum([n.weights[position]*n.error for n in next_layer.neurons])
# NOTE(review): this chunk is diff-mangled -- bare numbers are original line
# numbers and some lines appear in old/new variants. Original line 53, which
# computed the local `changes` (presumably per-weight deltas derived from
# self.error and `inputs` -- TODO confirm against the full file), is missing
# here, so this body is left untouched rather than reconstructed.
def update_weights(self,inputs):
52
52
# `learning` (rate) and `momentum` are module-level globals -- assumed to be
# defined elsewhere in the file; verify before editing.
global learning,momentum
54
54
# Gradient step with momentum: new = old + learning*change + momentum*previous_change.
self.weights=[old_weight+learning*change+momentum*old_change for (old_weight,change,old_change) in zip(self.weights,changes,self.changes)]
55
55
# Remember this step's deltas for the momentum term on the next update.
self.changes=changes
58
58
# NOTE(review): Layer constructor, diff-mangled (old/new variants of renamed
# lines both present; bare numbers are original line numbers). Original lines
# 61-62 -- presumably `self.neurons=[]` -- and line 67 (likely an `else:`
# before the second loop) are missing from this chunk; do not reconstruct
# without the full file.
def __init__(self,num_neurons,num_weights,weight_range=(0,1),bias_range=(0,0)):
59
59
# Sanity check only: prints a warning but does not raise.
if num_neurons<=0 or num_weights<-1:
60
print "class layer: Wrong Initialization"
60
print "class Layer: Wrong Initialization"
63
63
if num_weights==-1:
64
#num_weights==-1 -> input layer
64
#num_weights==-1 -> input Layer
65
65
# Input-layer neurons: fixed single weight [1], zero bias.
for i in range(num_neurons):
66
self.neurons.append(neuron(num_weights,[1],0))
66
self.neurons.append(Neuron(num_weights,[1],0))
68
68
# Hidden/output neurons: random bias, weights drawn from weight_range.
for i in range(num_neurons):
69
69
# NOTE(review): bias is drawn from weight_range, not bias_range -- looks
# suspicious; confirm whether bias_range was intended here.
bias_rand=random_shifted(weight_range)
70
self.neurons.append(neuron(num_weights,[],bias_rand,weight_range))
70
self.neurons.append(Neuron(num_weights,[],bias_rand,weight_range))
71
71
# Input/output buffers sized to the neuron count.
self.inputs=[0.0]*len(self.neurons)
72
72
self.__outputs__=[0.0]*len(self.neurons)
74
# NOTE(review): fragment of Layer.__str__ -- the `def` header (original line
# 73) is missing from this chunk. Old and new variants of the same return
# line both appear; each joins the neurons' string reprs with tabs.
return reduce(lambda x,y: x+"\t"+y,[neuron.__str__() for neuron in self.neurons])
74
return reduce(lambda x,y: x+"\t"+y,[Neuron.__str__() for Neuron in self.neurons])
75
75
def calculate(self, inputs):
    """Feed `inputs` through every neuron in this layer; return the outputs.

    A deep copy of the inputs is stored on the layer so that
    update_weights() can later reuse exactly what this pass saw,
    unaffected by caller-side mutation.
    """
    self.inputs = copy.deepcopy(inputs)
    self.__outputs__ = [n.calculate(self.inputs) for n in self.neurons]
    return self.__outputs__
def error_propagate(self, next_layer):
    """Let every neuron pull its error back from `next_layer`.

    Each neuron's index in this layer is passed along, since the
    next layer's weights are indexed by source-neuron position.
    """
    for position, neuron in enumerate(self.neurons):
        neuron.error_propagation2(next_layer, position)
def update_weights(self):
    """Have every neuron update its weights from this layer's last inputs."""
    for neuron in self.neurons:
        neuron.update_weights(self.inputs)
87
87
# NOTE(review): Network constructor, diff-mangled. Missing from this chunk:
# the guard condition (original line 89) that precedes the warning print,
# and original lines 91-92 (presumably `self.layers=[]`). Also note the
# mutable default arguments `array=[]` / `weight_ranges=[(0,1)]` -- a
# Python pitfall worth fixing once the full file is available.
def __init__(self,array=[],weight_ranges=[(0,1)],bias_range=(0,0)):
88
#array: is a list of the number of the nodes in each layer
88
#array: is a list of the number of the nodes in each Layer
90
90
print "class nNetwork: Network not initialized"
93
# First layer is the input layer: num_weights == -1 signals that.
self.layers.append(layer(array[0],-1,weight_ranges[0],bias_range))
93
self.layers.append(Layer(array[0],-1,weight_ranges[0],bias_range))
94
94
# A single weight range is replicated for every layer.
if len(weight_ranges)==1:
95
95
weight_ranges=weight_ranges*len(array)
97
97
# Warn (but do not raise) when too few ranges were supplied.
if len(weight_ranges)<len(array)-1:
98
98
print "class nNetwork: not enough weight_ranges"
99
99
# Remaining layers: each gets as many weights per neuron as the
# previous layer has neurons.
for i in range(1,len(array)):
100
self.layers.append(layer(array[i],array[i-1],weight_ranges[i],bias_range))
100
self.layers.append(Layer(array[i],array[i-1],weight_ranges[i],bias_range))
def __str__(self):
    """Render the network as one layer per line.

    str.join replaces the original reduce(); it produces identical
    output for a non-empty layer list and, unlike reduce, also
    tolerates an empty one (returns "").
    """
    return "\n".join([str(layer) for layer in self.layers])
# NOTE(review): fragment -- original lines 105 and 107-108 (the loop body
# and any return, presumably collecting each layer's neurons) are missing
# from this chunk; left untouched rather than reconstructed.
def neurons(self):
106
106
for i in self.layers:
109
109
# NOTE(review): Network.calculate fragment -- original line 110, which must
# initialize `output` (presumably from self.layers[0].calculate(inputs)),
# and line 113 (presumably `return output`) are missing from this chunk.
# Old/new variants of the loop appear twice below.
def calculate(self,inputs):
111
# Feed each layer's output forward into the next layer.
for layer in self.layers[1:]:
112
output=layer.calculate(output)
111
for Layer in self.layers[1:]:
112
output=Layer.calculate(output)
114
114
def error_propagate(self, references):
    """Back-propagate errors through the network.

    references -- target output values, one per output-layer neuron.
    Returns the square of the summed output-layer errors as a scalar
    training-error measure.
    """
    output_layer = self.layers[-1]
    # Output-layer error: target minus the output produced by the last
    # forward pass (stored in __outputs__).
    for i in range(len(output_layer.neurons)):
        output_layer.neurons[i].error = -1*output_layer.__outputs__[i] + references[i]
    # Hidden layers, back to front; index 0 is the input layer and is skipped.
    for i in range(len(self.layers)-2, 0, -1):
        self.layers[i].error_propagate(self.layers[i+1])
    return sum([n.error for n in output_layer.neurons])**2
def update_weights(self):
    """Update weights in every layer except the input layer.

    The input layer (index 0) has no incoming weights to train.
    """
    for layer in self.layers[1:]:
        layer.update_weights()
# NOTE(review): fragment -- original lines 126-133 are missing from this
# chunk. `error` is assigned but never used in the visible lines; the
# missing tail presumably returns or logs it. Left untouched.
def back_propagation(self,references):
124
124
# One training step: propagate errors for `references`, then apply updates.
error=self.error_propagate(references)
125
125
self.update_weights()
134
134
# NOTE(review): fragment -- original lines 135 (presumably `genes=[]`),
# 139-140 and 142 (presumably computing `size` and a sanity check that
# triggers the error print) are missing from this chunk. `genetic` is an
# external module not visible here. Old/new variants of renamed lines
# both appear below. Left untouched rather than reconstructed.
def toChromosome(self):
136
# Flatten every trainable weight (input layer skipped) into one gene list.
for layer in self.layers[1:]:
137
for neuron in layer.neurons:
138
genes.extend(neuron.weights)
136
for Layer in self.layers[1:]:
137
for Neuron in Layer.neurons:
138
genes.extend(Neuron.weights)
141
print "Error converting the network to chromosome"
141
print "Error converting the Network to chromosome"
143
return genetic.realChromosome(size,genetic.alwaysGood,genetic.alwaysOK,(-1,1),genes)
143
return genetic.RealChromosome(size,genetic.alwaysGood,genetic.alwaysOK,(-1,1),genes)
# Build a 2-3-1 network: one weight range per layer, biases fixed at (1,1).
foo = Network([2, 3, 1], [(0, 1), (-0.2, 0.2), (-2, 2)], (1, 1))
# XOR training set: four input pairs and their target outputs.
Input = [[0, 0], [0, 1], [1, 0], [1, 1]]
out = [[0], [1], [1], [0]]