diff --git a/machine-learning-ex2/ex2/costFunctionReg.m b/machine-learning-ex2/ex2/costFunctionReg.m
index ddcf0cb..ace082f 100644
--- a/machine-learning-ex2/ex2/costFunctionReg.m
+++ b/machine-learning-ex2/ex2/costFunctionReg.m
@@ -17,16 +17,9 @@
 %               Compute the partial derivatives and set grad to the partial
 %               derivatives of the cost w.r.t. each parameter in theta
-tempTheta = theta;
-tempTheta(1) = 0;
-
-J = (-1 / m) * sum(y.*log(sigmoid(X * theta)) + (1 - y).*log(1 - sigmoid(X * theta))) + (lambda / (2 * m))*sum(tempTheta.^2);
-temp = sigmoid (X * theta);
-error = temp - y;
-grad = (1 / m) * (X' * error) + (lambda/m)*tempTheta;
-
-
-
+[Jt, gt] = costFunction(theta, X, y);
+J = Jt + (lambda / (2 * m)) * sum(theta(2:end).^2);
+grad = gt + [0; (lambda / m) * theta(2:end)];
 
 % =============================================================
 
 end
diff --git a/machine-learning-ex2/ex2/predict.m b/machine-learning-ex2/ex2/predict.m
index d5d0620..6566ed4 100644
--- a/machine-learning-ex2/ex2/predict.m
+++ b/machine-learning-ex2/ex2/predict.m
@@ -8,19 +8,12 @@
 % You need to return the following variables correctly
 p = zeros(m, 1);
 
-p = round(sigmoid(X * theta));
 % ====================== YOUR CODE HERE ======================
 % Instructions: Complete the following code to make predictions using
 %               your learned logistic regression parameters.
 %               You should set p to a vector of 0's and 1's
 %
-
-
-
-
-
-
-
+p = sigmoid(X * theta) > 0.5;
 
 % =========================================================================
 
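
Note: below is a minimal sketch of how the two updated functions are typically driven, for anyone who wants to sanity-check the change locally. It assumes the standard ex2 workspace from the course scripts (X already carries the intercept column of ones, y is a 0/1 label vector, and sigmoid/costFunction are on the path); the lambda and fminunc settings follow the conventions of the course driver script ex2_reg.m rather than anything in this patch.

% Sanity check (Octave/MATLAB), assuming the standard ex2 workspace:
initial_theta = zeros(size(X, 2), 1);   % one parameter per feature, incl. intercept
lambda = 1;                             % regularization strength
options = optimset('GradObj', 'on', 'MaxIter', 400);
[theta, J] = fminunc(@(t) costFunctionReg(t, X, y, lambda), initial_theta, options);
p = predict(theta, X);                  % thresholds sigmoid(X * theta) at 0.5
fprintf('Train accuracy: %f\n', mean(double(p == y)) * 100);

Design note on the costFunctionReg hunk: reusing costFunction for the unregularized cost and gradient reduces the regularized version to two additive corrections, and the [0; (lambda / m) * theta(2:end)] construction guarantees the intercept parameter theta(1) is never penalized.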