User:Elgreengeeto/Python Linear Perceptron: Difference between revisions

From Noisebridge
Jump to navigation Jump to search
(New page: #a dot product is the sum of the products of aligned-elements from two #same-lengthed arrays def dot_product(a, b): sum = 0 i = 0 while i < len(a): sum += a[i] * b[i] ...)
 
No edit summary
Line 1: Line 1:
<pre>
#a dot product is the sum of the products of aligned-elements from two
#a dot product is the sum of the products of aligned-elements from two
#same-lengthed arrays
#same-lengthed arrays
Line 63: Line 64:
train(data, subject)
train(data, subject)
print "final bias weight: %s, final input weights: %s" % (subject.bias_weight, subject.weights)
print "final bias weight: %s, final input weights: %s" % (subject.bias_weight, subject.weights)
</pre>

Revision as of 20:39, 6 May 2009

#a dot product is the sum of the products of aligned elements from two
#same-length arrays
def dot_product(a, b):
   """Return the dot product of sequences a and b.

   a, b -- same-length sequences of numbers (if lengths differ, the
   extra trailing elements are ignored, where the original raised
   IndexError when b was shorter than a).
   """
   #sum/zip runs the pairing loop in C and avoids shadowing builtin sum()
   return sum(x * y for x, y in zip(a, b))

class Perceptron:
	"""A single linear perceptron: a weighted sum of inputs plus a bias.

	NOTE(review): the names "percieve" and "defualt_weight" are historical
	misspellings of "perceive"/"default_weight"; they are kept unchanged
	because they are part of the public interface used by callers.
	"""

	#dot-product helper (sum of products of aligned elements of two
	#same-length sequences), so the class does not depend on module globals
	@staticmethod
	def _dot(a, b):
		return sum(x * y for x, y in zip(a, b))

	#percieve based on a list of inputs: returns the raw activation
	#(dot product of inputs+bias with their weights)
	def percieve(self, inputs):
		#if the weight list is shorter than the input list, grow it with the
		#default weight.  BUG FIX: append only the shortfall -- the original
		#appended len(inputs) weights whenever the list was short, over-growing
		#it when it was non-empty.
		while len(self.weights) < len(inputs):
			self.weights.append(self.defualt_weight)
		#dot product of inputs (plus the bias input) with the input weights
		#(plus the bias weight)
		return self._dot(inputs + [self.bias], self.weights + [self.bias_weight])

	#learn compares the output of percieve() to the expected output and nudges
	#each weight by input * error * learn_rate, decaying the learn rate by a
	#factor of 0.999 per call until it drops below 0.001
	def learn(self, inputs, expected):
		#stop adjusting once the learn rate has decayed below the floor
		if self.learn_rate < 0.001:
			return
		self.learn_rate = self.learn_rate * 0.999
		print("learn rate is %s" % (self.learn_rate))
		train_step = (expected - self.percieve(inputs)) * self.learn_rate
		#BUG FIX: scale the update by the bias INPUT, not the current bias
		#weight -- the original `self.bias_weight * train_step` meant a
		#0.0-initialized bias weight could never change
		self.bias_weight += self.bias * train_step
		print("bias weight is: %s" % (self.bias_weight))
		for i in range(len(inputs)):
			self.weights[i] += inputs[i] * train_step
			print("input %s weight is: %s" % (i, self.weights[i]))

	#this defines how a Perceptron object represents itself in the interpreter
	def __str__(self):
		#BUG FIX: no code ever sets self.threshhold, so reading it directly
		#raised AttributeError; report None instead when it is unset
		return "Weights: %s. Threshhold: %s. Learn rate: %s." % (self.weights, getattr(self, 'threshhold', None), self.learn_rate)
	#same as above
	__repr__ = __str__

	#defines initial values for a new Perceptron object
	def __init__(self):
		self.bias = 1.0	            #constant bias input
		self.bias_weight = 0.0      #learned weight for the bias input
		self.defualt_weight = 0.0   #initial weight for newly-seen inputs
		self.weights = []           #one learned weight per input position
		self.learn_rate = 1         #step-size multiplier, decays per learn()

#trains a perceptron according to data
def train(data, perceptron):
	"""Run one training pass: feed every (inputs, expected) case to learn()."""
	for sample in data:
		inputs, expected = sample[0], sample[1]
		perceptron.learn(inputs, expected)

#define a dataset to try and train a perceptron: ([inputs], expected) pairs
data = [[[1.0], 2.0], [[2.0], 4.0], [[3.0], 6.0]]

#do the damned thing
if __name__ == '__main__':
	subject = Perceptron()
	#repeat the training pass many times so the learn rate fully decays
	for _ in range(100000):
		train(data, subject)
	print("final bias weight: %s, final input weights: %s" % (subject.bias_weight, subject.weights))