diff --git a/mlclass-ex1/gradientDescent.m b/mlclass-ex1/gradientDescent.m
index 56bf8f2..7b78865 100644
--- a/mlclass-ex1/gradientDescent.m
+++ b/mlclass-ex1/gradientDescent.m
@@ -17,14 +17,8 @@
 % of the cost function (computeCost) and gradient here.
 %
 
+theta = theta - alpha * (1/m) * sum((X*theta-y).*X)'
-    x = X(:,2);
-    h = theta(1) + (theta(2)*x);
-
-    theta_zero = theta(1) - alpha * (1/m) * sum(h-y);
-    theta_one = theta(2) - alpha * (1/m) * sum((h - y) .* x);
-
-    theta = [theta_zero; theta_one];
 
 % ============================================================
 
 % Save the cost J in every iteration
@@ -33,4 +27,4 @@
 end
 
 disp(min(J_history));
-end
\ No newline at end of file
+end