diff --git a/costFunction.m b/costFunction.m
index 94336bb..4234ffb 100644
--- a/costFunction.m
+++ b/costFunction.m
@@ -1,25 +1,15 @@
 function [J, grad] = costFunction(theta, X, y)
 %COSTFUNCTION Compute cost and gradient for logistic regression
 %   J = COSTFUNCTION(theta, X, y) computes the cost of using theta as the
 %   parameter for logistic regression and the gradient of the cost
 %   w.r.t. the parameters.
 
 % Initialize some useful values
 m = length(y); % number of training examples
 
-% You need to return the following variables correctly
 J = 0;
 grad = zeros(size(theta));
 
-% ====================== YOUR CODE HERE ======================
-% Instructions: Compute the cost of a particular choice of theta.
-%               You should set J to the cost.
-%               Compute the partial derivatives and set grad to the partial
-%               derivatives of the cost w.r.t. each parameter in theta
-%
-% Note: grad should have the same dimensions as theta
-%
-
 h = sigmoid(X*theta);
 y1 = -y.*log(h);
 y0 = -(1-y).*log(1-h);
@@ -28,7 +18,4 @@
 
 
 grad = (X'*(h-y))/m;
-
-% =============================================================
-
 end
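
For reference, a minimal smoke test of the cleaned-up function; this is a sketch, not part of the patch. It assumes sigmoid.m from the same repo is on the path, and that the unchanged lines between the two hunks combine y1 and y0 into the cost (e.g. J = sum(y1 + y0)/m, which is not shown above).

% Toy dataset: three examples, an intercept column plus one feature.
X = [ones(3,1), [1; 2; 3]];
y = [0; 0; 1];
theta = zeros(2, 1);            % all-zero parameters, so h = 0.5 everywhere

[J, grad] = costFunction(theta, X, y);
fprintf('J = %f\n', J);         % expect -log(0.5), i.e. about 0.693
disp(grad);                     % gradient has the same 2x1 shape as theta

% One common way to fit theta from here is fminunc with the gradient enabled:
% options = optimset('GradObj', 'on', 'MaxIter', 400);
% [theta, cost] = fminunc(@(t) costFunction(t, X, y), theta, options);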