%% Script file to manually compute backpropagation: that is, gradient
%  descent, where the gradient is computed via backprop.
%  (sigma() is called throughout; a sketch of a logistic-sigmoid definition
%  is given at the end of this file.)

% Input patterns and output targets. To be consistent, each data point is
% stored as a column vector.
X = linspace(-7,7,40);
y1 = sigma(0.3*X-0.3)+1.2;
y2 = sigma(-1.3*X+0.5)+0.5;
T = y1+y2;
[dimX,numpts] = size(X);
[dimT,numpts] = size(T);

dimHidden = 2;  % Number of nodes in the hidden layer

% Initialize the neural network parameters:
W1 = randn(dimHidden,dimX);
b1 = zeros(dimHidden,1);
W2 = randn(dimT,dimHidden);
b2 = zeros(dimT,1);

% Loop through the data lots of times. The learning rate lr (also called
% alpha; some use eta) decays geometrically from lrinit to lrfin over the
% run, so no fixed initial assignment is needed.
maxiters = 70*numpts;
lrinit = 0.3;
lrfin = 0.05;
for j = 1:maxiters
    lr = lrinit*(lrfin/lrinit)^((j-1)/maxiters);

    % Pick one training sample at random (stochastic gradient descent):
    k = ceil(rand*numpts);
    x = X(:,k);
    t = T(:,k);

    % Forward pass:
    P = W1*x+b1;   % hidden-layer pre-activation
    S = sigma(P);  % hidden-layer activation
    y = W2*S+b2;   % network output (linear output layer)

    % Backward pass for the squared-error loss E = 0.5*norm(t-y)^2: the
    % output delta is t-y, and sigma'(P) = S.*(1-S) for the logistic sigmoid.
    Delta2 = t-y;
    Delta1 = (W2'*Delta2).*S.*(1-S);

    % Parameter updates (the Delta terms already point downhill in E):
    DW1 = Delta1*x';
    DW2 = Delta2*S';
    Db1 = Delta1;
    Db2 = Delta2;
    W1 = W1+lr*DW1;
    W2 = W2+lr*DW2;
    b1 = b1+lr*Db1;
    b2 = b2+lr*Db2;

    % Every 20 iterations, plot the current fit against the training data
    % and report the RMS error over the whole training set:
    if mod(j,20)==0
        Z = W2*sigma(W1*X+repmat(b1,1,numpts))+repmat(b2,1,numpts);
        xx = linspace(-7,7);
        gg = W2*sigma(W1*xx+repmat(b1,1,numel(xx)))+repmat(b2,1,numel(xx));
        plot(xx,gg,X,T,'r*')
        axis([-8 8 2.2 3])
        title(sprintf('iteration %d, RMS error %.4f',j,norm(T-Z)/sqrt(numpts)))
        pause(0.1);
    end
end
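
%% Optional sanity check (a sketch added for illustration, not part of the
%  original training loop): compare the backprop gradient for one sample
%  against a centered finite-difference estimate of the squared-error loss
%  E = 0.5*norm(t-y)^2. The step size epsFD and the all-ones probe
%  direction are illustrative choices, not from the original script.
k = 1;  x = X(:,k);  t = T(:,k);
P = W1*x+b1;  S = sigma(P);  y = W2*S+b2;      % fresh forward pass
Delta2 = t-y;
Delta1 = (W2'*Delta2).*S.*(1-S);
DW1 = Delta1*x';                               % backprop gives DW1 = -dE/dW1
epsFD = 1e-6;                                  % illustrative step size
lossW1 = @(W) 0.5*norm(t-(W2*sigma(W*x+b1)+b2))^2;
% Directional derivative of E along the all-ones direction in W1, which
% should match the sum of the entries of dE/dW1 = -DW1:
gFD = (lossW1(W1+epsFD)-lossW1(W1-epsFD))/(2*epsFD);
fprintf('finite difference: %.6g   backprop: %.6g\n', gFD, -sum(DW1(:)));

%% The script calls sigma(), which must be on the MATLAB path or defined as
%  a local function. A minimal definition is sketched below, assuming the
%  logistic sigmoid; that assumption matches the derivative term S.*(1-S)
%  used in the Delta1 update. (Local functions in scripts require R2016b or
%  later; on older releases, save this in its own file sigma.m.)
function S = sigma(P)
% Logistic sigmoid, applied elementwise.
S = 1./(1+exp(-P));
end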