%=========================================================================%
% W-GME: weighted generalized maximum entropy linear regression,
% building from Wu (2009).
%
% Inputs:
%   y     - (n x 1) dependent variable
%   x     - (n x k) regressor matrix
%   z     - (1 x i) support points for the coefficient (signal) entropy
%           (optional; default [-0.6 -0.2 0.2 0.6 0.9])
%   v     - (1 x j) support points for the error (noise) entropy
%           (optional; default symmetric 5-point support at +/- 3*std(y))
%   delta - weight on the data (noise) entropy, 0 < delta < 1
%           (optional; default 0.2)
%
% Outputs:
%   beta   - (k x 1) estimated coefficients
%   it     - number of Newton iterations performed
%   vobj   - value of the dual objective function at the solution
%   lambda - (n x 1) Lagrange multipliers (shadow prices of the data
%            constraints)
%=========================================================================%
function [beta,it,vobj,lambda]=gme_w(y,x,z,v,delta)
%=========================================================================%
% Defaults (previously hard-coded, clobbering the input arguments; the
% old code also overwrote y and x from an undefined `data` matrix with
% hard-coded columns 1 and 2:14 -- y and x are now taken as passed in)
%=========================================================================%
if nargin<3 || isempty(z), z=[-0.6 -0.2 0.2 0.6 0.9]; end
if nargin<4 || isempty(v), ss=std(y)*6; v=[-ss -ss/2 0 ss/2 ss]; end
if nargin<5 || isempty(delta), delta=0.2; end

%n-number of observations; k-number of regressors
[n,k]=size(x);
%i-dimension of support for z-entropy
i=size(z,2);
%j-dimension of support for v-entropy
j=size(v,2);

%replicate supports: one row of z per coefficient, one row of v per
%observation (full support for the errors)
z=ones(k,1)*z;
v=repmat(v,n,1);

%=========================================================================%
%Initial Values
%=========================================================================%
len=1;
change=1;
lambda=zeros(n,1);
it=0;

%=========================================================================%
%Estimation Procedure: Newton iterations on the dual problem in lambda
%=========================================================================%
while change>1e-6
    it=it+1;
    %evaluate the prob. p and w of z,v at the current value of lambda
    newz=exp(-(z/(1-delta).*kron(transpose(x)*lambda,ones(1,i))));
    sum_newz=newz*ones(i,1);
    p=newz./repmat(sum_newz,1,i);
    newv=exp(-lambda*ones(1,j).*v/delta);
    sum_newv=newv*ones(j,1);
    w=newv./repmat(sum_newv,1,j);
    %evaluate the gradient (Jacobian) of the dual objective
    g=y-x*(z.*p*ones(i,1))-v.*w*ones(j,1);
    %evaluate the Hessian. Inverting it directly would involve an nxn
    %matrix, which is expensive; use the updating formula
    %(Greene, 4th Edition, p.32) instead, which requires only the
    %inverse of a kxk matrix.
    %first get the inverse of the var-cov matrices of z and v
    inv_z=diag((sum((p.*(z.^2))')-sum((p.*z)').^2).^(-1))*(1-delta);
    inv_v=diag((sum((w.*(v.^2))')-sum((w.*v)').^2).^(-1))*delta;
    %get the inverse of the Hessian
    temp=inv_v*x;
    inv_H=-inv_v+temp/(inv_z+x'*temp)*temp';
    %evaluate step direction and update lambda
    increm=inv_H*g;
    lambda=lambda+increm;
    %tolerance: change in g'*inv(H)*g, which is scale free
    len0=len;
    len=g'*increm;
    change=abs(len-len0);
end

%evaluate BETA: expected value of the coefficient supports under p
beta=sum((p.*z)')';

%evaluate the dual objective function
vobj=transpose(y)*(lambda) ...
    +sum(log(sum(exp(-transpose(z.*kron(transpose(x)*(lambda),ones(1,size(z,2)))))))) ...
    +sum(log(sum(transpose(exp(-(lambda)*ones(1,j)*v')))));

%classical OLS-based standard errors and t-stats (diagnostic output only)
b=x\y;
s2=(y-x*beta)'*(y-x*beta)./(length(y)-length(b));
se=sqrt(diag(inv(x'*x))).*sqrt(s2);
%t-stats
t=(beta./se);
dtt=sum(y-x*beta)^2;          %squared sum of residuals
med_resid=median(y-x*beta);   %median residual (renamed from `mad`,
                              %which shadowed the MATLAB builtin)
[beta se t]

%entropy-information stats
%NOTE(review): relies on an external `entropy` function -- confirm it
%returns the (normalized) Shannon entropy of a probability matrix.
norment=entropy(p)/entropy(w);
infindex=1-norment

%Goodness of fit stats
%NOTE(review): the constants 0.1197 and 0.0167 appear hard-coded for a
%specific dataset (TVA application) -- parameterize before reusing.
Rsq=0.1197*infindex/0.0167
Rsqadj=Rsq-(1-Rsq)*k/(n-k-1)
vobj
dtt
med_resid

%Plot shadow prices of entropy (TVA)
plot(lambda)
it
end