Saturday, June 6, 2015

Logistic Regression with Regularization - Machine Learning

My solutions to the Week 3 exercises, Regularization and Logistic Regression, from the Coursera.org Machine Learning course:

1. Sigmoid Function

function g = sigmoid(z)
%SIGMOID Compute sigmoid function
%   J = SIGMOID(z) computes the sigmoid of z.

% You need to return the following variables correctly 
g = zeros(size(z));

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the sigmoid of each value of z (z can be a matrix,
%               vector or scalar).

% 1/(1 + e^-z), applied element-wise, so z can be a scalar, vector or matrix
g = 1 ./ (1 + exp(-z));
% =============================================================

end
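A quick sanity check (my own addition, assuming the function above is saved as sigmoid.m on the Octave path): sigmoid(0) is exactly 0.5, large positive inputs approach 1, and large negative inputs approach 0.

z = [-10 0 10];
g = sigmoid(z)     % expected: approximately 0.0000  0.5000  1.0000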

2. Compute cost for logistic regression

function [J, grad] = costFunction(theta, X, y)
%COSTFUNCTION Compute cost and gradient for logistic regression
%   J = COSTFUNCTION(theta, X, y) computes the cost of using theta as the
%   parameter for logistic regression and the gradient of the cost
%   w.r.t. to the parameters.

% Initialize some useful values
m = length(y); % number of training examples

% You need to return the following variables correctly 
J = 0;
grad = zeros(size(theta));




% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost of a particular choice of theta.
%               You should set J to the cost.
%               Compute the partial derivatives and set grad to the partial
%               derivatives of the cost w.r.t. each parameter in theta
%
% Note: grad should have the same dimensions as theta
% 
thetaTx = (theta'*X')';                       % theta'*x for every example (same as X*theta)
h = sigmoid(thetaTx);                         % hypothesis h_theta(x) for every example
J = -(1/m)*(sum(y'*log(h)+(1-y)'*log(1-h)));  % vectorized cross-entropy cost
grad = (1/m)*(((h-y)'*X)');                   % gradient, same dimensions as theta
% =============================================================

end
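For reference, this is roughly how the course's ex2.m script drives the function (a sketch, not part of the graded code): the cost function is wrapped in an anonymous function and passed to fminunc, with 'GradObj' set to 'on' so the optimizer uses the returned gradient.

% Sketch: assumes X already contains the column of ones and
% initial_theta = zeros(size(X, 2), 1)
options = optimset('GradObj', 'on', 'MaxIter', 400);
[theta, cost] = fminunc(@(t)(costFunction(t, X, y)), initial_theta, options);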

3. Gradient for logistic regression

This part is graded from the same costFunction.m shown above: the gradient is the final assignment in that file, grad = (1/m)*(((h-y)'*X)'), which already has the same dimensions as theta.
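A quick way to convince yourself the analytic gradient is right is to compare it against numerical finite differences. The sketch below is my own addition; the tiny data set is made up purely for illustration and is not the course data.

% Numerical gradient check for costFunction (illustrative data only)
X = [ones(5, 1), (1:5)'];        % 5 examples: intercept plus one feature
y = [0; 0; 1; 1; 1];
theta = [0.1; -0.2];
[J, grad] = costFunction(theta, X, y);

epsilon = 1e-4;
numgrad = zeros(size(theta));
for k = 1:numel(theta)
 d = zeros(size(theta));
 d(k) = epsilon;
 numgrad(k) = (costFunction(theta + d, X, y) - costFunction(theta - d, X, y)) / (2*epsilon);
end
disp([grad numgrad])             % the two columns should agree to several decimals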

4. Predict Function

function p = predict(theta, X)
%PREDICT Predict whether the label is 0 or 1 using learned logistic 
%regression parameters theta
%   p = PREDICT(theta, X) computes the predictions for X using a 
%   threshold at 0.5 (i.e., if sigmoid(theta'*x) >= 0.5, predict 1)

m = size(X, 1); % Number of training examples

% You need to return the following variables correctly
p = zeros(m, 1);

% ====================== YOUR CODE HERE ======================
% Instructions: Complete the following code to make predictions using
%               your learned logistic regression parameters. 
%               You should set p to a vector of 0's and 1's
%

thetaTx = (theta'*X')';      % theta'*x for every example (same as X*theta)
h = sigmoid(thetaTx);        % probability that each example's label is 1

% Threshold the hypothesis at 0.5
for i = 1:m
 if h(i) >= 0.5
  p(i) = 1;
 else
  p(i) = 0;
 end
end

% =========================================================================
end
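And roughly how the course script scores these predictions (again a sketch; theta is assumed to be the vector returned by fminunc above):

p = predict(theta, X);
fprintf('Train accuracy: %.1f%%\n', mean(double(p == y)) * 100);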

5. Compute cost for regularized LR

function [J, grad] = costFunctionReg(theta, X, y, lambda)
%COSTFUNCTIONREG Compute cost and gradient for logistic regression with regularization
%   J = COSTFUNCTIONREG(theta, X, y, lambda) computes the cost of using
%   theta as the parameter for regularized logistic regression and the
%   gradient of the cost w.r.t. to the parameters. 

% Initialize some useful values
m = length(y); % number of training examples 
% You need to return the following variables correctly 
J = 0;
grad = zeros(size(theta));

% ====================== YOUR CODE HERE ======================
% Instructions: Compute the cost of a particular choice of theta.
%               You should set J to the cost.
%               Compute the partial derivatives and set grad to the partial
%               derivatives of the cost w.r.t. each parameter in theta


thetaTx = (theta'*X')';                          % theta'*x for every example (same as X*theta)
h = sigmoid(thetaTx);                            % hypothesis h_theta(x) for every example
leftJ = -(1/m)*(sum(y'*log(h)+(1-y)'*log(1-h))); % unregularized cross-entropy cost
rightJ = (lambda/(2*m))*sum(theta(2:end).^2);    % regularization term; theta(1) is not penalized
J = leftJ + rightJ;

error = h - y;
grad(1) = (error'*X(:,1))/m;                     % intercept term: no regularization
for i = 2:size(grad, 1)
 grad(i) = (error'*X(:,i) + lambda*theta(i))/m;  % regularized partial derivatives
end

% =============================================================

end
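Driving the regularized version looks the same as before, except that lambda is wired through the anonymous function (a sketch; lambda = 1 is just an example value):

lambda = 1;
options = optimset('GradObj', 'on', 'MaxIter', 400);
[theta, J] = fminunc(@(t)(costFunctionReg(t, X, y, lambda)), initial_theta, options);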

6. Gradient for regularized LR

As in part 3, this is graded from the same costFunctionReg.m shown above: the loop at the end of that file computes the regularized gradient for theta(2:end), while grad(1), the intercept term, is left unregularized.
