% NLS ESTIMATION OF COBB-DOUGLAS PRODUCTION FUNCTION
%
% Estimates Q = b1 * L^b2 * K^b3 + e by nonlinear least squares using
% several algorithms (Gauss-Newton, Newton-Raphson, steepest descent,
% quadratic hill-climbing), optionally tests the constant-returns-to-scale
% restriction b2 + b3 = 1 (F, Wald, LR), and plots the SSR surface.
clc;

% Flags selecting which routines run (1 = on, 0 = off).
gradient_method = 1;         % run the Newton/gradient family
newton = 1;                  % within gradient_method: full Newton step
steepest_descent = 0;        % within gradient_method: steepest-descent step
quadratic_hillclimbing = 0;  % within gradient_method: ridge-adjusted Newton
hypotest = 0;                % restricted estimation + F/Wald/LR tests
                             % NOTE: hypotest == 1 requires gaussnewton == 1,
                             % because the tests reuse resid, g and sb from
                             % the unrestricted Gauss-Newton fit.
gaussnewton = 0;             % unrestricted Gauss-Newton estimation
artificial = 1;              % replace observed Q with simulated data

% Load data & define variables.
M = importdata('matlabex14data.txt',' ',1);
data = M.data;
L = data(:,1);               % labor input
K = data(:,2);               % capital input
Q = data(:,3);               % output
n = size(data,1);            % number of observations

% Simulate artificial data: Q = b1 * L^b2 * K^b3 + sig*N(0,1).
% (Not seeded, so each run draws a different sample.)
if artificial == 1
    beta = [1;0.5;0.5];
    sig = 0.1;
    Q = beta(1)*(L.^beta(2)).*(K.^beta(3)) + sig*randn(n,1);
end

% Initial values.
b = [1;0.5;0.5];
b0 = b;                      % saved so each algorithm starts from the same point
k = size(b,1);               % number of parameters

% ***********************
% Gauss-Newton Estimation
% ***********************
if gaussnewton == 1
    loops = 1;
    g = zeros(n,k);          % Jacobian of the regression function w.r.t. b
    disp(['GAUSS-NEWTON ALGORITHM']);
    disp([' loops b1 b2 b3 ']);
    disp([loops b(1,1) b(2,1) b(3,1)]);
    cc = 1;                  % squared step length; convergence criterion
    while cc > 1e-8
        h = b(1,1).*(L.^b(2,1)).*(K.^b(3,1));   % fitted values
        g(:,1) = h./b(1,1);                     % dh/db1
        g(:,2) = log(L).*h;                     % dh/db2
        g(:,3) = log(K).*h;                     % dh/db3
        resid = Q - h;
        oldb = b;
        % Gauss-Newton step: solve (g'g) d = g'resid.
        % Backslash is preferred to inv() for solving linear systems.
        b = b + (g'*g)\(g'*resid);
        cc = (b - oldb)'*(b - oldb);
        loops = loops + 1;
        disp([loops b(1,1) b(2,1) b(3,1)]);
    end
end

% *********************************
% Restricted Gauss-Newton Algorithm
% *********************************
% Imposes constant returns to scale, b3 = 1 - b2, leaving two free
% parameters rb = [b1; b2].
if hypotest == 1
    rb = [1; 0.5];
    loops = 1;
    rg = zeros(n,k-1);       % Jacobian under the restriction
    disp(' ');
    disp(['RESTRICTED GAUSS-NEWTON ALGORITHM']);
    disp([' loops b1 b2']);
    disp([loops rb(1,1) rb(2,1)]);
    cc = 1;
    while cc > 1e-6
        h = rb(1,1)*(L.^rb(2,1)).*(K.^(1-rb(2,1)));
        rg(:,1) = h/rb(1,1);                    % dh/db1
        rg(:,2) = (log(L)-log(K)).*h;           % dh/db2 (b3 = 1 - b2)
        rresid = Q - h;
        oldrb = rb;
        rb = rb + (rg'*rg)\(rg'*rresid);
        cc = (rb - oldrb)'*(rb - oldrb);
        loops = loops + 1;
        disp([loops rb(1,1) rb(2,1)]);
    end

    % ******************
    % Hypothesis Testing
    % ******************
    % All three tests compare the restricted fit against the unrestricted
    % Gauss-Newton fit (resid, g from above).

    % Asymptotically valid F test: one restriction, k parameters.
    sb = resid'*resid;       % unrestricted SSR
    srb = rresid'*rresid;    % restricted SSR
    F = (srb - sb)/(sb/(n-k));
    disp(' ');
    disp(['Asymptotically Valid F Statistic = ' num2str(F')]);
    disp(' ');

    % Wald test of R*b = q, i.e. b2 + b3 = 1.
    R = [0 1 1];
    q = 1;
    s2 = sb/n;               % error-variance estimate
    V = s2*inv(g'*g);        % asymptotic covariance of b (inverse needed explicitly)
    W = (R*b - q)'*((R*V*R')\(R*b - q));
    disp(['Wald Statistic = ' num2str(W')]);
    disp(' ');

    % Likelihood ratio test (Gaussian likelihood).
    LR = n*(log(srb)-log(sb));
    disp(['Likelihood Ratio Statistic = ' num2str(LR')]);
    disp(' ');
end

% **************************
% Newton's Method Estimation
% **************************
% Minimizes the SSR directly using the analytic gradient g and Hessian Hs
% of S(b) = sum((Q - h)^2).
if gradient_method == 1
    b = b0;                  % restart from the common initial values
    loops = 1;
    g = zeros(k,1);          % gradient of the SSR
    Hs = zeros(k,k);         % Hessian of the SSR
    disp(' ');
    disp(['NEWTON-RAPHSON ALGORITHM']);
    disp([' loops b1 b2 b3 ']);
    disp([loops b(1,1) b(2,1) b(3,1)]);
    cc = 1;
    while cc > 1e-8
        h = b(1,1)*(L.^b(2,1)).*(K.^b(3,1));
        resid = Q - h;
        % Gradient: dS/dbi = -2*sum(resid .* dh/dbi).
        g(1,1) = -2*sum(resid.*h/b(1,1));
        g(2,1) = -2*sum(resid.*(log(L).*h));
        g(3,1) = -2*sum(resid.*(log(K).*h));
        % Hessian (symmetric; cross terms mirrored below).
        Hs(1,1) = 2*sum((h/b(1,1)).^2);
        Hs(1,2) = -2*sum(resid.*log(L).*(h/b(1,1)) - (h.^2).*log(L)/b(1,1));
        Hs(1,3) = -2*sum(resid.*log(K).*(h/b(1,1)) - (h.^2).*log(K)/b(1,1));
        Hs(2,1) = Hs(1,2);
        Hs(2,2) = -2*sum(resid.*(log(L).^2).*h - (h.^2).*(log(L).^2));
        Hs(2,3) = -2*sum(resid.*log(L).*log(K).*h - (h.^2).*log(L).*log(K));
        Hs(3,1) = Hs(1,3);
        Hs(3,2) = Hs(2,3);
        Hs(3,3) = -2*sum(resid.*(log(K).^2).*h - (h.^2).*(log(K).^2));
        oldb = b;
        if newton == 1
            b = b - Hs\g;                        % full Newton step
        end
        if steepest_descent == 1
            lambda = g'*g/(g'*Hs*g);             % exact line-search step size
            %lambda = 0.1;
            b = b - lambda*g;
        end
        if quadratic_hillclimbing == 1
            alpha = 0.1;                         % ridge constant
            b = b - (Hs+alpha*eye(k))\g;
        end
        cc = (b - oldb)'*(b - oldb);
        loops = loops + 1;
        disp([loops b(1,1) b(2,1) b(3,1)]);
    end
end

% *************
% Graph Surface
% *************
% SSR over a (b2, b3) grid, holding b1 at its estimated value.
b2 = (0.2:0.02:1)';
b3 = (0.2:0.02:1)';
m = length(b2);              % grid size; avoids floating-point count arithmetic
ssresid = zeros(m,m);
for ii = 1:m
    for jj = 1:m
        resid = Q - b(1,1)*(L.^b2(ii,1)).*(K.^b3(jj,1));
        ssresid(ii,jj) = resid'*resid;   % rows index b2, columns index b3
    end
end
% surf(x,y,Z) maps y to rows and x to columns of Z, so transpose ssresid
% so the x-axis really is b2 and the y-axis b3 (original call had the
% surface transposed relative to its axis labels).
surf(b2,b3,ssresid');
title('Nonlinear Minimization of the Sum of Squared Residuals');
xlabel('b2');
ylabel('b3');
zlabel('Sum of Squared Residuals');