diff --git a/mlclass-ex1/gradientDescent.m b/mlclass-ex1/gradientDescent.m
index 56bf8f2..25b4cf3 100644
--- a/mlclass-ex1/gradientDescent.m
+++ b/mlclass-ex1/gradientDescent.m
@@ -18,19 +18,12 @@
     %
-    x = X(:,2);
-    h = theta(1) + (theta(2)*x);
-
-    theta_zero = theta(1) - alpha * (1/m) * sum(h-y);
-    theta_one = theta(2) - alpha * (1/m) * sum((h - y) .* x);
-
-    theta = [theta_zero; theta_one];
-
     % ============================================================
-
-    % Save the cost J in every iteration
-    J_history(iter) = computeCost(X, y, theta);
-    % disp(J_history(iter));
+    % The same update is also possible in one vectorized line
+    theta = theta - alpha * (1/m) * sum((X * theta - y) .* X)';
+
+    % Save the cost J in every iteration (keeps min(J_history) below meaningful)
+    J_history(iter) = computeCost(X, y, theta);
 end
 
 disp(min(J_history));
 
-end
\ No newline at end of file
+end
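
A quick sanity check of the one-liner above (not part of the diff; the toy data, alpha, and iteration count below are chosen only for illustration): in Octave, or MATLAB R2016b+ with implicit expansion, the broadcast form sum((X * theta - y) .* X)' produces the same gradient as the matrix form X' * (X * theta - y), so the update recovers the parameters that generated the data.

% Sanity-check sketch on assumed toy data (not from the exercise):
% verifies the broadcast gradient equals the matrix-product gradient.
m = 5;                          % number of training examples
X = [ones(m, 1), (1:m)'];       % design matrix with an intercept column
y = 2 + 3 * (1:m)';             % targets generated by theta = [2; 3]
theta = zeros(2, 1);
alpha = 0.05;

for iter = 1:1500
    grad_broadcast = sum((X * theta - y) .* X)' / m;  % form used in the diff
    grad_matrix    = X' * (X * theta - y) / m;        % equivalent matrix product
    assert(max(abs(grad_broadcast - grad_matrix)) < 1e-9);
    theta = theta - alpha * grad_matrix;
end

disp(theta);                    % approaches [2; 3]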