%% Housing prices example using Stochastic Gradient Descent.
%  Fits a line price = m*sqft + b by SGD on standardized data.
%  Compare to HousingPrices.m (regular gradient descent with a fixed step
%  size) and HousingPricesAdaptiveStep.m (backtracking line search).
close all
clear
clc

% Data: one house per row; column 1 = square footage, column 2 = price.
X=[1100 199000;
   1400 245000;
   1425 319000;
   1550 240000;
   1600 312000;
   1700 279000;
   1700 310000;
   1875 308000;
   2350 405000;
   2450 324000];

%% Step 0: Preprocess the data
plot(X(:,1),X(:,2),'k^-')

% Rescale (z-score) each column.
mx=mean(X,1);
sx=std(X,1);        % Keep these around in case you need to rescale
Xs=(X-mx)./sx;      % new data.

figure(1)
plot(Xs(:,1),Xs(:,2),'k^-')

% We'll break up the data for Step 2:
x=Xs(:,1);
t=Xs(:,2);          % Targets

%% Step 1: Randomly initialize the training parameters and other constants
m=randn;
b=randn;
MaxIters=150;
alpha=0.01;         % Learning rate (fixed step size)
tol=1e-6;           % How close to zero should grad f be to stop

Error=zeros(1,MaxIters);   % Preallocate the error history

%% Main Loop: Steps 2-4
for i=1:MaxIters
    % Step 2: Calculate the error at each point (full data, monitoring only)
    y=m*x+b;
    ErrVec=t-y;
    Error(i)=sum(ErrVec.*ErrVec);

    % Step 3: Stochastic Gradient Descent: gradient from only 1 data point
    r=randi([1,length(x)]);
    Em=2*ErrVec(r)*(-x(r));   % d/dm of (t_r - (m*x_r + b))^2
    Eb=2*ErrVec(r)*(-1);      % d/db of (t_r - (m*x_r + b))^2

    % Step 4: Adjust the parameters using gradient descent.
    m=m-alpha*Em;
    b=b-alpha*Eb;

    % Stop if the gradient is very close to zero
    fprintf('Gradient is %f\n',Em^2+Eb^2);
    if (Em^2+Eb^2)<=tol
        fprintf('Solution found in %d steps\n',i)
        Error=Error(1:i);     % Trim unused (preallocated) entries
        break
    end
end

%% Closing: We'll visualize our results:
figure(2)
plot(Error)

figure(1)
hold on
xfit=linspace(min(x),max(x));   % plotting grid (do not clobber targets t)
z=m*xfit+b;
plot(xfit,z,'r-');
hold off