From f9aeb2c15871b6dd501614689b9d85c1af3c3c37 Mon Sep 17 00:00:00 2001
From: Apoorva Raj Bhadani
Date: Wed, 22 May 2019 09:37:54 +0530
Subject: [PATCH 1/2] Better predict function

---
 machine-learning-ex2/ex2/predict.m | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

diff --git a/machine-learning-ex2/ex2/predict.m b/machine-learning-ex2/ex2/predict.m
index d5d0620..6566ed4 100644
--- a/machine-learning-ex2/ex2/predict.m
+++ b/machine-learning-ex2/ex2/predict.m
@@ -8,19 +8,12 @@
 % You need to return the following variables correctly
 p = zeros(m, 1);
-p = round(sigmoid(X * theta));
 
 % ====================== YOUR CODE HERE ======================
 % Instructions: Complete the following code to make predictions using
 %               your learned logistic regression parameters.
 %               You should set p to a vector of 0's and 1's
 %
-
-
-
-
-
-
-
+p = sigmoid(X * theta) > 0.5;   % comparison yields a logical 0/1 vector
 
 
 % =========================================================================

From ea430eeae8212ba6f105e45a842568546722590a Mon Sep 17 00:00:00 2001
From: Apoorva Raj Bhadani
Date: Wed, 22 May 2019 10:07:41 +0530
Subject: [PATCH 2/2] Better code for regularized cost and gradient

---
 machine-learning-ex2/ex2/costFunctionReg.m | 13 +++----------
 1 file changed, 3 insertions(+), 10 deletions(-)

diff --git a/machine-learning-ex2/ex2/costFunctionReg.m b/machine-learning-ex2/ex2/costFunctionReg.m
index ddcf0cb..ace082f 100644
--- a/machine-learning-ex2/ex2/costFunctionReg.m
+++ b/machine-learning-ex2/ex2/costFunctionReg.m
@@ -17,16 +17,9 @@
 %               Compute the partial derivatives and set grad to the partial
 %               derivatives of the cost w.r.t. each parameter in theta
 
-tempTheta = theta;
-tempTheta(1) = 0;
-
-J = (-1 / m) * sum(y.*log(sigmoid(X * theta)) + (1 - y).*log(1 - sigmoid(X * theta))) + (lambda / (2 * m))*sum(tempTheta.^2);
-temp = sigmoid (X * theta);
-error = temp - y;
-grad = (1 / m) * (X' * error) + (lambda/m)*tempTheta;
-
-
-
+[Jt, gt] = costFunction(theta, X, y);                % unregularized cost and gradient
+J = Jt + (lambda / (2 * m)) * sum(theta(2:end).^2);  % penalty excludes the bias theta(1)
+grad = gt + [0; (lambda / m) * theta(2:end)];        % no penalty gradient for the bias
 % =============================================================
 
 end
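
Note on PATCH 1/2: a minimal standalone Octave sketch (not part of the
patch itself) of why the comparison-based predictor matches the
round-based one it replaces, except at a probability of exactly 0.5.
The inline sigmoid definition and the toy X and theta values are
assumptions for illustration; in the exercise, sigmoid is provided in
its own sigmoid.m.

1;  % script-file marker so Octave accepts the function definition below

function g = sigmoid(z)
  g = 1 ./ (1 + exp(-z));            % standard logistic function
end

X = [1 0.5; 1 -2.0; 1 3.0];          % toy design matrix, intercept column first
theta = [0.1; 1.2];                  % toy parameter vector

p_new = sigmoid(X * theta) > 0.5;    % patched version: logical comparison
p_old = round(sigmoid(X * theta));   % replaced version
assert(isequal(double(p_new), p_old))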
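
Note on PATCH 2/2: a self-contained sketch (again not part of the patch)
of the reuse idea: compute the unregularized cost and gradient once via
costFunction, then add the penalty terms while skipping the bias term
theta(1), hence theta(2:end) and the leading zero in the gradient
adjustment. The inlined costFunction body mirrors the unregularized part
of the formula the patch removes; the toy data and lambda are assumptions.

1;  % script-file marker

function g = sigmoid(z)
  g = 1 ./ (1 + exp(-z));
end

function [J, grad] = costFunction(theta, X, y)
  % unregularized logistic-regression cost and gradient
  m = length(y);
  h = sigmoid(X * theta);
  J = (-1 / m) * sum(y .* log(h) + (1 - y) .* log(1 - h));
  grad = (1 / m) * (X' * (h - y));
end

X = [1 0.5; 1 -2.0; 1 3.0];   % toy data for illustration
y = [1; 0; 1];
theta = [0.1; 1.2];
lambda = 1;
m = length(y);

[Jt, gt] = costFunction(theta, X, y);
J = Jt + (lambda / (2 * m)) * sum(theta(2:end).^2);   % penalty skips theta(1)
grad = gt + [0; (lambda / m) * theta(2:end)];         % zero entry for theta(1)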