% NEURAL NETWORKS AND CHARACTER RECOGNITION -- by Ken Karnofsky
%
% MATLAB listings (classic Neural Network Toolbox API) for training a
% two-layer log-sigmoid network to recognize 35-element (5x7 pixel) letter
% images.  Requires the toolbox functions nwlog, rands, trainbpx, logsig,
% compet and the demo data matrices `alphabet` (35x26) and `targets` (26x26).

% ----- LISTING ONE: network architecture and initial conditions -----
R  = 35;                 % number of inputs (5x7 pixel grid per letter)
S1 = 10;                 % number of neurons in layer 1 (hidden layer)
S2 = 26;                 % number of neurons in layer 2 (one per letter)
[W1,B1] = nwlog(S1,R);   % initial conditions for layer 1
W2 = rands(S2,S1)*0.01;  % small random initial weights for layer 2
B2 = rands(S2,1)*0.01;   % NOTE(review): B2 is used by trainbpx below but was
                         % never initialized in the extracted text; this line
                         % reconstructs it to match W2 -- confirm vs. original

% ----- LISTING TWO: train on noise-free letters -----
P = alphabet;            % input matrix  - 35x26 (one column per letter)
T = targets;             % target matrix - 26x26

disp_freq = 20;          % define training parameters
max_epoch = 5000;        % "
err_goal  = 0.1;         % "
lr        = 0.01;        % "
lr_inc    = 1.05;        % "
lr_dec    = 0.7;         % "
momentum  = 0.95;        % "
err_ratio = 1.04;        % "

% All training parameters are packed into the vector TP.
TP = [disp_freq, max_epoch, err_goal, lr, lr_inc, lr_dec, momentum, err_ratio];

% The function trainbpx trains the network using the specified initial
% conditions and transfer functions.  It returns the trained network
% weights and biases, the number of epochs (iterations) required, and a
% record of errors and learning rate at each epoch.
% (The garbled "UlogsigU" tokens in the extracted text were the quoted
% transfer-function names 'logsig'.)
[W1,B1,W2,B2,epochs,TR] = trainbpx(W1,B1,'logsig',W2,B2,'logsig',P,T,TP);

% ----- LISTING THREE: retrain on clean plus noisy letters -----
max_epoch = 300;         % redefine training parameters: shorter passes
err_goal  = 0.6;         % with a looser error goal
% Reconstructed from the article's "[disp_freq ... AS ABOVE ... err_ratio]"
% shorthand, using the parameter values defined in Listing Two.
TP = [disp_freq, max_epoch, err_goal, lr, lr_inc, lr_dec, momentum, err_ratio];

Q = size(alphabet,2);    % number of letters (26); Q was undefined as extracted
for pass = 1:10,
    P = [alphabet, alphabet, ...           % clean training vectors
         (alphabet + rand(R,Q)*0.1), ...   % and with varying
         (alphabet + rand(R,Q)*0.2)];      % amounts of noise
    T = [targets, targets, targets, targets]; % 4 target vectors batched together
    [W1,B1,W2,B2,TE,TR] = ...              % training procedure
        trainbpx(W1,B1,'logsig',W2,B2,'logsig',P,T,TP);
end

% ----- LISTING FOUR: recognize a noisy letter -----
% Define an input example (the letter 'M', column 13) with random noise added.
noisyM = alphabet(:,13) + randn(35,1)*0.2;

% Output A2 is a function of the input vector, the transfer function for each
% layer, and the trained weights and biases.
A2 = logsig(W2*logsig(W1*noisyM,B1),B2);

% Because A2 may be noisy (not exactly 1 or 0), the competitive function
% picks the element closest to one.  The FIND function returns its index.
answer = find(compet(A2) == 1);