Pattern Recognition · Uncategorized

Rosenblatt’s Perceptron on Iris Data using Python

Perceptron Class

from math import exp
import numpy

class Perceptron:
    """Rosenblatt-style single-layer perceptron for two-class problems.

    Only two features (indices ``fi``/``fj``) and two classes (ids
    ``ci``/``cj``) of the dataset are used; samples with any other class
    id are ignored.  The raw class ids passed to :meth:`train` and
    :meth:`test` are mapped internally to the bipolar targets -1/+1 that
    the activation function produces.
    """

    # Default hyper-parameters (all overridden in __init__).
    eta = 0.01        # learning rate
    epochs = 100      # passes over the training data per train() call
    b = 0.001         # initial bias weight
    w = [b, 0, 0]     # weights: [bias, feature fi, feature fj]
    ## constant for feature index in dataset
    fi = 0
    fj = 1
    ## constant for classes to test only; all other classes are skipped
    ci = 0
    cj = 1

    def __init__(self, eta, epochs):
        """Store the learning rate and epoch count and reset the weights."""
        self.eta = eta
        self.epochs = epochs
        self.b = 0.001
        ## INITIAL WEIGHTS: bias first, then one weight per feature.
        self.w = [self.b, 0, 0]

    def signum(self, V):
        """Squash the net input V through the logistic function into (0, 1).

        BUG FIX: the original ``1 / 1 + exp(V)`` parsed as ``1 + exp(V)``
        due to operator precedence, so the output was always > 1 and every
        sample was classified +1.
        """
        return 1.0 / (1.0 + exp(-V))

    def activation(self, PHI):
        """Map a logistic output PHI in (0, 1) to a bipolar label -1/+1.

        BUG FIX: the original thresholded at 0, but the logistic output is
        strictly positive; 0.5 is the sigmoid's decision boundary.
        """
        if PHI > 0.5:
            return +1
        else:
            return -1

    def _bipolar(self, target):
        """Map a raw class id (ci/cj) to the bipolar target -1/+1."""
        return 1 if target == self.cj else -1

    def train(self, data, target):
        """Train on ``data``, every sample of which carries class id ``target``.

        Prints the mean squared error after each epoch.  Samples whose
        class id is neither ``ci`` nor ``cj`` are skipped entirely.
        """
        ## SKIPS CLASSES TO LIMIT PROBLEM TO ONLY TWO CLASSES ##
        if target != self.ci and target != self.cj:
            return
        if len(data) == 0:  # avoid ZeroDivisionError in the MSE print
            return
        # BUG FIX: the original used d = target (0 or 1), which can never
        # equal the -1/+1 activation output for class ci, so the weights
        # oscillated instead of converging.
        d = self._bipolar(target)
        for epo_i in range(self.epochs):
            ee = 0.0
            for T in data:
                # BUG FIX: the original built X from T[self.fj] twice,
                # silently ignoring feature fi.
                X = [1, T[self.fi], T[self.fj]]  # bias input + two features
                V = numpy.inner(self.w, X)
                Y = self.activation(self.signum(V))
                # Perceptron learning rule: w <- w + eta * (d - Y) * x
                self.w = self.w + self.eta * (d - Y) * numpy.array(X)
                ee = ee + pow(Y - d, 2)
            print(ee / len(data))

    def test(self, data, target):
        """Classify ``data`` (all of class id ``target``) and print counts."""
        print('Testing:')
        ## SKIPS CLASSES TO LIMIT PROBLEM TO ONLY TWO CLASSES ##
        if target != self.ci and target != self.cj:
            return
        correct = 0
        wrong = 0
        d = self._bipolar(target)
        for T in data:
            X = [1, T[self.fi], T[self.fj]]  # bias input + two features
            V = numpy.inner(self.w, X)
            Y = self.activation(self.signum(V))
            # BUG FIX: the original counted a sample correct only when
            # Y == 1 and target == ci, so class cj could never score a hit.
            if Y == d:
                correct = correct + 1
            else:
                wrong = wrong + 1
        print('correct=', correct)
        print('wrong=', wrong)

Perceptron Test

from sklearn.datasets import load_iris
from Perceptron import Perceptron

# Driver: train and evaluate the Perceptron on two Iris classes,
# using the first NUM_TRAINING samples of each class for training
# and the remainder for testing.
iris = load_iris()

## Constants
b = 0.001  ## Bias
ETA = 0.03
EPOCHS = 100
NUM_DATASET = 150
NUM_PER_CLASS = 50
NUM_TRAINING = 20
NUM_TESTING = 30

objPerc = Perceptron(ETA, EPOCHS)

# Training slices: the first NUM_TRAINING rows of class 0 and class 1.
l0 = iris.data[:NUM_TRAINING]
l1 = iris.data[NUM_PER_CLASS:NUM_PER_CLASS + NUM_TRAINING]
for label, subset in ((0, l0), (1, l1)):
    objPerc.train(subset, label)

# Test slices: the remaining rows of each class.
t0 = iris.data[NUM_TRAINING:NUM_PER_CLASS]
t1 = iris.data[NUM_PER_CLASS + NUM_TRAINING:2 * NUM_PER_CLASS]
for label, subset in ((0, t0), (1, t1)):
    objPerc.test(subset, label)

Reference:

  1. http://sebastianraschka.com/Articles/2015_singlelayer_neurons.html#frank-rosenblatts-perceptron
Advertisements

Leave a Reply

Fill in your details below or click an icon to log in:

WordPress.com Logo

You are commenting using your WordPress.com account. ( Log Out / Change )

Twitter picture

You are commenting using your Twitter account. ( Log Out / Change )

Facebook photo

You are commenting using your Facebook account. ( Log Out / Change )

Google+ photo

You are commenting using your Google+ account. ( Log Out / Change )

Connecting to %s