@@ -19,71 +19,71 @@ def save_csv(filename, X, y_true, y_pred):
1919
class LinearRegression:
    """Ordinary least squares regression solved via the normal equations."""

    def __init__(self):
        # Learned parameter vector (intercept first); populated by fit().
        self.theta = None

    def fit(self, X, y):
        """Fit theta = (Xb' Xb)^+ Xb' y, where Xb is X with a bias column."""
        Xb = self._with_bias(X)
        self.theta = np.linalg.pinv(Xb.T @ Xb) @ Xb.T @ y

    def predict(self, X):
        """Return predictions Xb @ theta for the rows of X."""
        return self._with_bias(X) @ self.theta

    @staticmethod
    def _with_bias(X):
        # Prepend a column of ones so theta[0] plays the intercept role.
        return np.c_[np.ones((X.shape[0], 1)), X]
3131
class RidgeRegression:
    """Linear regression with an L2 penalty on the non-intercept weights."""

    def __init__(self, lam=1.0):
        self.lam = lam     # regularization strength
        self.theta = None  # learned weights (intercept first); set by fit()

    def fit(self, X, y):
        """Solve the regularized normal equations; the intercept is not penalized."""
        Xb = np.c_[np.ones((X.shape[0], 1)), X]
        penalty = self.lam * np.eye(Xb.shape[1])
        penalty[0, 0] = 0.0  # leave the intercept term unregularized
        self.theta = np.linalg.pinv(Xb.T @ Xb + penalty) @ Xb.T @ y

    def predict(self, X):
        """Return predictions for the rows of X using the fitted theta."""
        Xb = np.c_[np.ones((X.shape[0], 1)), X]
        return Xb @ self.theta
4747
class LassoRegression:
    """L1-regularized linear regression fit by cyclic coordinate descent.

    Minimizes ||y - Xb @ theta||^2 + lam * ||theta[1:]||_1, where Xb is X
    with a prepended bias column; the intercept theta[0] is unpenalized.
    """

    def __init__(self, lam=1.0, max_iter=1000, tol=1e-4):
        self.lam = lam            # L1 penalty strength
        self.max_iter = max_iter  # maximum number of full coordinate sweeps
        self.tol = tol            # stop when the L1 change in theta falls below this
        self.theta = None         # learned weights (intercept first); set by fit()

    def fit(self, X, y):
        """Fit theta via soft-thresholded coordinate descent."""
        X_bias = np.c_[np.ones((X.shape[0], 1)), X]
        n_samples, n_features = X_bias.shape
        self.theta = np.zeros(n_features)

        # Column squared norms are loop-invariant: compute them once up
        # front instead of re-summing np.sum(X_bias[:, j] ** 2) inside
        # every coordinate update of every sweep.
        col_sq_norms = np.sum(X_bias ** 2, axis=0)

        for _ in range(self.max_iter):
            theta_old = self.theta.copy()
            for j in range(n_features):
                # Partial prediction with coordinate j zeroed out.
                partial = X_bias @ self.theta - X_bias[:, j] * self.theta[j]
                rho = np.dot(X_bias[:, j], y - partial)
                if j == 0:
                    # Intercept: plain least-squares update, no shrinkage.
                    self.theta[j] = rho / col_sq_norms[j]
                elif rho < -self.lam / 2:
                    self.theta[j] = (rho + self.lam / 2) / col_sq_norms[j]
                elif rho > self.lam / 2:
                    self.theta[j] = (rho - self.lam / 2) / col_sq_norms[j]
                else:
                    # Inside the soft-threshold band: coefficient is exactly zero.
                    self.theta[j] = 0
            if np.linalg.norm(self.theta - theta_old, ord=1) < self.tol:
                break

    def predict(self, X):
        """Return predictions for the rows of X using the fitted theta."""
        X_bias = np.c_[np.ones((X.shape[0], 1)), X]
        return X_bias @ self.theta
8080
8181class KernelRidgeRegression :
82- def __init__ (self , theta = 1.0 , gamma = 0.1 ):
83- self .theta = theta
82+ def __init__ (self , lam = 1.0 , gamma = 0.1 ):
83+ self .lam = lam
8484 self .gamma = gamma
8585 self .X_train = None
86- self .theta_vec = None
86+ self .lam_vec = None
8787
8888 def _rbf_kernel (self , X1 , X2 ):
8989 dists = np .sum ((X1 [:, np .newaxis ] - X2 [np .newaxis , :]) ** 2 , axis = 2 )
@@ -93,11 +93,11 @@ def fit(self, X, y):
9393 self .X_train = X
9494 K = self ._rbf_kernel (X , X )
9595 n = K .shape [0 ]
96- self .theta_vec = np .linalg .pinv (K + self .theta * np .eye (n )) @ y
96+ self .lam_vec = np .linalg .pinv (K + self .lam * np .eye (n )) @ y
9797
9898 def predict (self , X ):
9999 K = self ._rbf_kernel (X , self .X_train )
100- return K @ self .theta_vec
100+ return K @ self .lam_vec
101101
102102if __name__ == "__main__" :
103103 np .random .seed (42 )
@@ -106,9 +106,9 @@ def predict(self, X):
106106
107107 models = {
108108 "linear" : LinearRegression (),
109- "ridge" : RidgeRegression (theta = 1.0 ),
110- "lasso" : LassoRegression (theta = 0.1 ),
111- "kernel_ridge" : KernelRidgeRegression (theta = 1.0 , gamma = 5.0 )
109+ "ridge" : RidgeRegression (lam = 1.0 ),
110+ "lasso" : LassoRegression (lam = 0.1 ),
111+ "kernel_ridge" : KernelRidgeRegression (lam = 1.0 , gamma = 5.0 )
112112 }
113113
114114 for name , model in models .items ():
0 commit comments