nllsw.m (1751B)
1 function [u,beta,se,tstat]=nllsw(beta0,X,mname,verbose,maxit,param,weights); 2 3 % nllsw.m 10/16/97 from nlls.m Weighted NLLS -- takes weights and 4 % computes as if each observation appeared in the data weights(i) times. 5 % 6 % A generic NLLS routine, to be adapted to whichever problem is at hand 7 % 8 % beta0=initial guess 9 % X = [y x] 10 % 11 % Model: y=m(beta,x)+u 12 % 13 % The function uufct calls m(beta,x) to compute u'*u. 14 % mname is a user-defined function (e.g. 'mfct') giving 15 % m(beta,x). 16 17 options=foptions; 18 if exist('verbose')==1; options(1)=verbose; end; 19 if exist('weights')~=1; disp 'error -- no weights!'; abc; end; 20 if exist('param')~=1; param=[]; end; 21 if exist('maxit')==1; options(14)=maxit; end; 22 23 beta=fmins('uufctw',beta0,options,[],X,mname,param,weights); 24 25 % Now calculate standard errors using the Gradients 26 % Basically, make sure that y and m (and therefore u) are weighted. 27 [N col]=size(X); 28 y=X(:,1); 29 x=X(:,2:col); 30 k=length(beta); 31 M=gradp(mname,beta,[],x,param); 32 % To adjust for weights 33 M=M.*kron(ones(1,k),sqrt(weights)); 34 35 if isempty(param); 36 eval(['u=y-' mname '(beta,x);']); 37 else; 38 eval(['u=y-' mname '(beta,x,param);']); 39 end; 40 41 % Weights 42 ux=u.*sqrt(weights); 43 uu=ux'*ux; 44 45 sigma2=uu/(N-k); 46 mminv=inv(M'*M); 47 vcv=sigma2*mminv; 48 se=sqrt(diag(vcv)); 49 tstat=beta./se; 50 51 % White robust vcv 52 f=mult(M,ux); 53 Sw=f'*f/(N-k); 54 robvcv=N*mminv*Sw*mminv; 55 robse=sqrt(diag(robvcv)); 56 robt =beta./robse; 57 58 yx=y.*sqrt(weights); 59 ybar=1/N*(ones(1,N)*yx); 60 rsq=1-(uu)/(yx'*yx - N*ybar^2); 61 62 fprintf('R-Squared: %7.4f\n',rsq); 63 fprintf('uu*100: %7.4f\n',u'*u*100); 64 disp ' Beta S.E. t-stat WhiteSE WhiteT'; 65 disp '------------------------------------------------------------'; 66 cshow(' ',[beta se tstat robse robt],'%12.6f'); 67 68
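
A minimal usage sketch follows, based only on the interface described in the header comments above. It assumes nllsw.m and the helper routines it calls (uufctw, gradp, mult, cshow) are on the MATLAB path; the model file expmodel.m, the simulated data, and the weights are purely illustrative and not part of the original file.

% --- expmodel.m (hypothetical model file): m(beta,x) = beta(1)*exp(beta(2)*x)
%     function m = expmodel(beta,x);
%     m = beta(1)*exp(beta(2)*x);

% --- driver script (illustrative data and weights) ---
N     = 200;
x     = 10*rand(N,1);                          % regressor
btrue = [2; -0.3];                             % parameters used to simulate y
y     = btrue(1)*exp(btrue(2)*x) + 0.1*randn(N,1);
w     = ones(N,1); w(1:50) = 2;                % count the first 50 observations twice

beta0 = [1; -0.1];                             % initial guess
X     = [y x];                                 % first column is y, the rest is x
[u,beta,se,tstat] = nllsw(beta0,X,'expmodel',1,500,[],w);

Note that fmins and the foptions vector belong to older MATLAB releases; on current versions the equivalent minimizer is fminsearch with an options structure, so the sketch runs as written only where those legacy routines are still available.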