Commit 0314610: Minor fixes

jtlowery committed Mar 12, 2017 (parent: 3bb3024)
Showing 10 changed files with 44 additions and 44 deletions.
18 changes: 9 additions & 9 deletions ex2/costFunction.py
@@ -1,4 +1,4 @@
-from numpy import log
+import numpy as np
from sigmoid import sigmoid


@@ -10,13 +10,13 @@ def costFunction(theta, X, y):
# Initialize some useful values
m = y.size # number of training examples

-# ====================== YOUR CODE HERE ======================
-# Instructions: Compute the cost of a particular choice of theta.
-# You should set J to the cost.
-# Compute the partial derivatives and set grad to the partial
-# derivatives of the cost w.r.t. each parameter in theta
-#
-# Note: grad should have the same dimensions as theta
-# =============================================================

+# ====================== YOUR CODE HERE ======================
+# Instructions: Compute the cost of a particular choice of theta.
+# You should set J to the cost.
+# Compute the partial derivatives and set grad to the partial
+# derivatives of the cost w.r.t. each parameter in theta
+#
+# Note: grad should have the same dimensions as theta
+# =============================================================
return J
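For readers working through the exercise, a minimal vectorized completion of this stub (my sketch, not part of the commit; it assumes X already carries the intercept column and y is a flat 0/1 vector):

    import numpy as np
    from sigmoid import sigmoid

    def costFunction(theta, X, y):
        m = y.size                         # number of training examples
        h = sigmoid(X.dot(theta))          # predictions for all m examples
        # average cross-entropy over the training set
        J = (-y.dot(np.log(h)) - (1 - y).dot(np.log(1 - h))) / m
        return J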
13 changes: 7 additions & 6 deletions ex2/costFunctionReg.py
@@ -1,3 +1,4 @@
+import numpy as np
from costFunction import costFunction


@@ -11,11 +12,11 @@ def costFunctionReg(theta, X, y, Lambda):
# Initialize some useful values
m = len(y) # number of training examples

-# ====================== YOUR CODE HERE ======================
-# Instructions: Compute the cost of a particular choice of theta.
-# You should set J to the cost.
-# Compute the partial derivatives and set grad to the partial
-# derivatives of the cost w.r.t. each parameter in theta
-# =============================================================
+# ====================== YOUR CODE HERE ======================
+# Instructions: Compute the cost of a particular choice of theta.
+# You should set J to the cost.
+# Compute the partial derivatives and set grad to the partial
+# derivatives of the cost w.r.t. each parameter in theta
+# =============================================================

return J
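Likewise a sketch, not part of the commit: the regularized cost is the unregularized cost plus an L2 penalty that, by convention, skips the intercept parameter theta[0]:

    import numpy as np
    from costFunction import costFunction

    def costFunctionReg(theta, X, y, Lambda):
        m = len(y)
        # unregularized cost plus the L2 penalty; theta[0] is not penalized
        return costFunction(theta, X, y) + (Lambda / (2.0 * m)) * np.sum(theta[1:] ** 2)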
6 changes: 4 additions & 2 deletions ex2/ex2.py
@@ -11,6 +11,8 @@
from sigmoid import sigmoid
from predict import predict
from show import show
+from ml import plotData, plotDecisionBoundary
+

## Machine Learning Online Class - Exercise 2: Logistic Regression
#
@@ -20,7 +22,7 @@
# This file contains code that helps you get started on the second part
# of the exercise which covers regularization with logistic regression.
#
-# You will need to complete the following functions in this exericse:
+# You will need to complete the following functions in this exercise:
#
# sigmoid.py
# costFunction.py
@@ -37,7 +39,7 @@
# or any other files other than those mentioned above.
#

-from ml import plotData, plotDecisionBoundary
+
# Load Data
# The first two columns contains the exam scores and the third column
# contains the label.
1 change: 0 additions & 1 deletion ex2/ex2_reg.py
@@ -35,7 +35,6 @@ def plotBoundary(theta, X, y):
show()

-

# Initialization

# Load Data
12 changes: 6 additions & 6 deletions ex2/gradientFunction.py
@@ -1,5 +1,5 @@
+import numpy as np
from sigmoid import sigmoid
-from numpy import squeeze, asarray


def gradientFunction(theta, X, y):
@@ -12,10 +12,10 @@ def gradientFunction(theta, X, y):

m = len(y) # number of training examples

-# ====================== YOUR CODE HERE ======================
-# Instructions: Compute the gradient of a particular choice of theta.
-# Compute the partial derivatives and set grad to the partial
-# derivatives of the cost w.r.t. each parameter in theta
-# =============================================================
+# ====================== YOUR CODE HERE ======================
+# Instructions: Compute the gradient of a particular choice of theta.
+# Compute the partial derivatives and set grad to the partial
+# derivatives of the cost w.r.t. each parameter in theta
+# =============================================================

return grad
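A sketch of the vectorized gradient this stub describes (again not part of the commit; same shape assumptions as above):

    from sigmoid import sigmoid

    def gradientFunction(theta, X, y):
        m = len(y)                     # number of training examples
        h = sigmoid(X.dot(theta))      # predictions for all m examples
        grad = X.T.dot(h - y) / m      # one partial derivative per parameter
        return grad

As the deleted comment block in costFunction.py notes, grad comes out with the same dimensions as theta.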
13 changes: 6 additions & 7 deletions ex2/gradientFunctionReg.py
@@ -1,5 +1,4 @@
-from numpy import asfortranarray, squeeze, asarray
-
+import numpy as np
from gradientFunction import gradientFunction


@@ -12,10 +11,10 @@ def gradientFunctionReg(theta, X, y, Lambda):
"""
m = len(y) # number of training examples

-# ====================== YOUR CODE HERE ======================
-# Instructions: Compute the gradient of a particular choice of theta.
-# Compute the partial derivatives and set grad to the partial
-# derivatives of the cost w.r.t. each parameter in theta
-# =============================================================
+# ====================== YOUR CODE HERE ======================
+# Instructions: Compute the gradient of a particular choice of theta.
+# Compute the partial derivatives and set grad to the partial
+# derivatives of the cost w.r.t. each parameter in theta
+# =============================================================

return grad
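And the regularized variant, sketched under the same assumptions: add (Lambda / m) * theta to every component except the intercept:

    from gradientFunction import gradientFunction

    def gradientFunctionReg(theta, X, y, Lambda):
        m = len(y)
        grad = gradientFunction(theta, X, y)
        grad[1:] = grad[1:] + (Lambda / m) * theta[1:]   # intercept term untouched
        return grad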
2 changes: 1 addition & 1 deletion ex2/ml.py
@@ -4,7 +4,7 @@
from mpl_toolkits.mplot3d import axes3d


-def plotData(X,y):
+def plotData(X, y):
pos = X[np.where(y == 1, True, False).flatten()]
neg = X[np.where(y == 0, True, False).flatten()]
plt.plot(pos[:, 0], pos[:, 1], '+', markersize=7, markeredgecolor='black', markeredgewidth=2)
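For context, a hypothetical call (the data values here are made up): X is an m x 2 matrix of exam scores and y a column of 0/1 labels, and the flatten() calls above accept either flat or column-shaped y:

    import numpy as np
    from ml import plotData

    X = np.array([[34.6, 78.0], [60.2, 86.3], [75.0, 30.6]])   # made-up scores
    y = np.array([[0], [1], [0]])                              # made-up labels
    plotData(X, y)

The figure is then displayed with the repo's show() helper from show.py.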
15 changes: 7 additions & 8 deletions ex2/predict.py
@@ -1,5 +1,4 @@
-from numpy import round
-
+import numpy as np
from sigmoid import sigmoid


@@ -9,11 +8,11 @@ def predict(theta, X):
(i.e., if sigmoid(theta'*x) >= 0.5, predict 1)
"""

-# ====================== YOUR CODE HERE ======================
-# Instructions: Complete the following code to make predictions using
-# your learned logistic regression parameters.
-# You should set p to a vector of 0's and 1's
-#
-# =========================================================================
+# ====================== YOUR CODE HERE ======================
+# Instructions: Complete the following code to make predictions using
+# your learned logistic regression parameters.
+# You should set p to a vector of 0's and 1's
+#
+# =========================================================================

return p
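A minimal sketch of the prediction rule in the stub's docstring (not part of the commit): threshold the sigmoid output at 0.5.

    from sigmoid import sigmoid

    def predict(theta, X):
        # sigmoid(X.dot(theta)) >= 0.5 exactly when X.dot(theta) >= 0
        p = (sigmoid(X.dot(theta)) >= 0.5).astype(int)
        return p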
2 changes: 1 addition & 1 deletion ex2/sigmoid.py
@@ -1,4 +1,4 @@
-from numpy import e
+import numpy as np


def sigmoid(z):
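The switch from `from numpy import e` to `import numpy as np` suggests the body now goes through the np namespace. A likely-looking implementation (an assumption on my part, since the body is not shown in this diff):

    import numpy as np

    def sigmoid(z):
        # works elementwise on scalars and numpy arrays alike
        return 1.0 / (1.0 + np.exp(-z))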
6 changes: 3 additions & 3 deletions ex2/submit.py
@@ -28,12 +28,12 @@

def output(part_id):
X = np.column_stack((np.ones(20),
-(np.exp(1) * np.sin(np.linspace(1, 20, 20))),
-(np.exp(0.5) * np.cos(np.linspace(1, 20, 20)))))
+(np.exp(1) * np.sin(np.linspace(1, 20, 20))),
+(np.exp(0.5) * np.cos(np.linspace(1, 20, 20)))))
Y = np.sin(X[:, 0] + X[:, 1]) > 0

fname = srcs[part_id - 1].rsplit('.', 1)[0]
-mod = __import__(fname, fromlist=[fname], level=1)
+mod = __import__(fname, fromlist=[fname], level=0)
func = getattr(mod, fname)

if part_id == 1:
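The level change is the substantive fix here. In `__import__`, level=1 requests a relative import (one package level up), which only works from inside a package and fails when submit.py runs as a plain script; level=0 forces an absolute import from sys.path. A small illustration (loading the repo's sigmoid.py by name):

    # level=0 asks for an absolute import; level=1 would attempt a relative
    # import, which fails when this file runs as a top-level script.
    mod = __import__('sigmoid', fromlist=['sigmoid'], level=0)
    sigmoid = getattr(mod, 'sigmoid')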
