function [thetahat,yhat,Phat] = rlsff(YU,nk,lambda)
%
% The function [thetahat,yhat,Phat] = rlsff(YU,nk,lambda) implements
% the recursive least squares (RLS) algorithm with forgetting factor
% lambda, once the assumed model order nk has been specified.
% The matrix YU contains the output vector followed by the column
% vectors of the inputs, if present. The function returns the matrix
% thetahat with the estimates of the model parameters at each step,
% the vector of the predicted (reconstructed) outputs, yhat, and the
% matrix Phat whose columns hold the diagonal of the covariance
% matrix of the estimates at each step.
%
rho = 100;                       % Variance of the initial estimate thetahat(0):
                                 % P(0) = rho * I
if(nargin < 3), lambda = 1; end; % classical RLS (no forgetting)
[N,nio] = size(YU);              % N samples and one column at least:
                                 % nio = 1 + number of inputs, nio >= 1
thetahat = zeros(nk*nio,N);      % thetahat(0) = 0
yhat = zeros(N,1);               % MISO model, 1 output
Phat = zeros(nk*nio,N);          % diagonal of the covariance matrix
Pt = rho * eye(nk*nio);
for t = nk+1:N,
    % Build the column regressor psit from past outputs (and inputs, if any)
    if(nio == 1), psit = YU(t-1:-1:t-nk,1); end;                      % AR model
    if(nio == 2), psit = [YU(t-1:-1:t-nk,1); YU(t-1:-1:t-nk,2)]; end; % ARX SISO model
    if(nio > 2),                                                      % ARX MISO model
        psit = YU(t-1:-1:t-nk,1);                 % past outputs first,
        for indxi = 2:nio,                        % then past samples of each input
            psit = [psit; YU(t-1:-1:t-nk,indxi)];
        end;
    end;
    yhat(t,1) = (psit')*thetahat(:,t-1);          % one-step-ahead prediction
    epst = YU(t,1) - yhat(t,1);                   % prediction error (innovation)
    % Covariance update via the matrix inversion lemma; note the gain
    % Kt = Pt*psit computed from the updated Pt is algebraically equal to
    % P(t-1)*psit / (lambda + psit'*P(t-1)*psit)
    Pt = (1/lambda)*(Pt - (Pt*psit*(psit')*Pt)/(lambda + (psit')*Pt*psit));
    Kt = Pt * psit;
    thetahat(:,t) = thetahat(:,t-1) + Kt * epst;
    Phat(:,t) = diag(Pt);
end
return
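
% --- Usage sketch (illustrative, not part of the original function):
% identify a first-order ARX SISO model y(t) = a*y(t-1) + b*u(t-1) + e(t)
% from simulated data. The signals y, u and the "true" values a = 0.8,
% b = 0.5 below are assumptions chosen for the example. Run this from a
% separate script, since executable code cannot follow the function in
% this file:
%
%   N = 500;
%   u = randn(N,1);                              % white-noise input
%   y = zeros(N,1);
%   for t = 2:N,
%       y(t) = 0.8*y(t-1) + 0.5*u(t-1) + 0.1*randn;
%   end;
%   [thetahat,yhat,Phat] = rlsff([y u],1,0.98);  % nk = 1, lambda = 0.98
%   thetahat(:,end)                              % should approach [0.8; 0.5]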