% Single Layer Perceptron Neural Network - Binary Classification Example
% Author: Shujaat Khan (shujaat123@gmail.com)

clc
clear all
close all

N = 1000;

% for skewed distribution (unequal variance with both attributes)
if(1)
    Av = [1 10]; % variance in classA samples
    Am = [10,0]; % mean of classA samples
    Bv = [5 2];  % variance in classB samples
    Bm = [0,10]; % mean of classB samples
    % scale unit-variance noise by the standard deviation sqrt(variance)
    S_classA = repmat(sqrt(Av),N,1).*randn(N,2) + Am; % N samples with 2 attributes each
    S_classB = repmat(sqrt(Bv),N,1).*randn(N,2) + Bm; % N samples with 2 attributes each
end;

% for circular distribution (equal variance with both attributes)
if(0)
    Av = 10;     % variance in classA samples
    Am = [10,0]; % mean of classA samples
    Bv = 10;     % variance in classB samples
    Bm = [0,10]; % mean of classB samples
    S_classA = sqrt(Av)*randn(N,2) + Am; % N samples with 2 attributes each
    S_classB = sqrt(Bv)*randn(N,2) + Bm; % N samples with 2 attributes each
end;

class_data  = [S_classA; S_classB];
class_label = [-1*ones(N,1); 1*ones(N,1)]; % -1 for classA and +1 for classB

figure(1)
scatter(S_classA(:,1),S_classA(:,2),'or')
hold on
scatter(S_classB(:,1),S_classB(:,2),'*k')
grid minor
title('Binary Classification')
xlabel('attribute # 1')
ylabel('attribute # 2')
% legend('Class A','Class B')

% Objective (sample classification):
% -x + y < 0 -- classA
% -x + y > 0 -- classB

meu        = 1e-3;               % learning-rate (step-size)
epochs     = 5000;               % number of passes over all samples
iterations = size(class_data,1); % total number of samples

% define classification model Y = tansig(X*W)
X = [class_data, ones(size(class_data,1),1)]; % inputs plus a bias column
% X = [class_data];
W = randn(size(X,2),1); % random initial weights (2 attributes + bias)
Y = tansig(X*W);
J(1) = mse(Y-class_label); % initial error

% classification boundary equation:
% W(1)*X(:,1) + W(2)*X(:,2) + W(3)*X(:,3) = 0
% W(2)*X(:,2) = -W(1)*X(:,1) - W(3)*X(:,3)
% since X(:,3) = 1, W(3)*X(:,3) = W(3)
% X(:,2) = -W(1)/W(2)*X(:,1) - W(3)/W(2)
% i.e. y = m*x + b with slope m = -W(1)/W(2) and intercept b = -W(3)/W(2)

% initial classification boundary
plot(X(:,1), -W(1)/W(2)*X(:,1) - W(3)/W(2),'b')

for epoch = 1:epochs
    for i = 1:iterations
        v = X(i,:)*W;                              % pre-activation (stimulus)
        pred_class = tansig(v);                    % y, neuron output
        pred_error = class_label(i,1)-pred_class;  % e = d - y
        % dJ/dW = (dJ/de)(de/dy)(dy/dv)(dv/dW) --- chain rule
        % for J = e^2/2: (dJ/de) = e, (de/dy) = -1 (since e = d - y)
        % (dy/dv) = dtansig(v,pred_class)
        % (dv/dW) = X(i,:)
        % so dJ/dW = -e*dtansig(v,y)*X(i,:)' and the descent step W = W - meu*dJ/dW is:
        W = W + meu*dtansig(v,pred_class)*X(i,:)'*pred_error;
    end
    Y = tansig(X*W);
    J = [J; mse(Y-class_label)]; % record MSE after each epoch
    % plot(X(:,1), -W(1)/W(2)*X(:,1) - W(3)/W(2),'b')
    % pause(0.25)
end

% final classification boundary
plot(X(:,1), -W(1)/W(2)*X(:,1) - W(3)/W(2),'m')
legend('Class A','Class B','initial-classification-boundary', ...
    'final-classification-boundary','location','best')

figure(2), semilogy(0:epochs,J), ylabel('Mean-Squared-Error'), ...
    xlabel('Epochs'), title('Learning Curve');

figure(3)
x = -10:0.01:10;
plot(x,tansig(x))
hold on
plot(x,dtansig(x,tansig(x)))
legend('tansig \Phi(x)','d\Phi(x)/dx')
xlabel('stimulus')
ylabel('response')
title('Sigmoid/Neural-response function')
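
% A quick sanity check one could append here (a hedged sketch, not part of
% the original demo; pred_sign and accuracy are illustrative names):
% threshold the trained neuron at zero and report the fraction of samples
% that land on the correct side of the boundary.
pred_sign = sign(tansig(X*W));              % -1 -> classA, +1 -> classB
accuracy  = mean(pred_sign == class_label); % fraction correctly classified
fprintf('Training accuracy: %.2f%%\n', 100*accuracy);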
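
% Note: tansig, dtansig, and mse ship with MATLAB's Deep Learning Toolbox.
% If that toolbox is unavailable, the following drop-in replacements (a
% minimal sketch using the standard definitions: tansig(n) = tanh(n), its
% derivative in terms of the output a is 1 - a.^2, and mse is the mean of
% the squared errors) can be defined just after the clear statement above;
% variables shadow functions in MATLAB, so the rest of the script runs
% unchanged.
tansig  = @(n) tanh(n);       % tansig(n) = 2./(1+exp(-2*n)) - 1 = tanh(n)
dtansig = @(n,a) 1 - a.^2;    % derivative written in terms of the output a
mse     = @(e) mean(e(:).^2); % mean of the squared error elements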