时间:2021-05-22
公共的抽象基类
import numpy as np
from abc import ABCMeta, abstractmethod


class StandardScaler:
    """Standardize features to zero mean and unit variance.

    NOTE(review): the original code used ``StandardScaler`` without any
    visible import (presumably ``sklearn.preprocessing.StandardScaler``).
    This minimal numpy implementation provides the same ``fit``/``transform``
    interface so the module is self-contained; it also accepts 1-D input,
    which the demo code below passes.
    """

    def fit(self, X):
        """Learn per-feature mean and standard deviation.

        :param X: array-like, shape = (n_samples,) or (n_samples, n_features)
        :return: self
        """
        X = np.asarray(X, dtype=float)
        self.mean_ = X.mean(axis=0)
        scale = X.std(axis=0)
        # Constant features would divide by zero; map their scale to 1.
        self.scale_ = np.where(scale == 0, 1.0, scale)
        return self

    def transform(self, X):
        """Return the standardized copy of X using the fitted statistics."""
        return (np.asarray(X, dtype=float) - self.mean_) / self.scale_


class LinearModel(metaclass=ABCMeta):
    """Abstract base class of linear models.

    Subclasses implement ``fit`` and must set ``self.coef_`` (bias term
    first, then one weight per standardized feature).
    """

    def __init__(self):
        # Samples are standardized (mean 0, var 1) before fit and predict.
        self.scaler = StandardScaler()

    @abstractmethod
    def fit(self, X, y):
        """Fit the model to training data; must set ``self.coef_``."""

    def predict(self, X):
        """Predict targets for X.

        :param X: samples in the same (unscaled) space as passed to ``fit``
        :return: predicted values, shape = (n_samples,)
        :raises Exception: if called before ``fit``
        """
        # ``fit`` is the only place coef_ is created, so its absence means
        # the model was never trained.
        if not hasattr(self, 'coef_'):
            raise Exception('Please run `fit` before predict')
        X = self.scaler.transform(X)
        # Prepend a column of ones so coef_[0] acts as the bias term.
        X = np.c_[np.ones(X.shape[0]), X]
        # `x @ y` == `np.dot(x, y)`
        return X @ self.coef_
class LinearRegression(LinearModel):
    """Ordinary least-squares linear regression, fit in closed form."""

    def __init__(self):
        super().__init__()

    def fit(self, X, y):
        """Fit by solving the least-squares problem for the design matrix.

        :param X: shape = (n_samples,) or (n_samples, n_features)
        :param y: shape = (n_samples,)
        :return: self
        """
        self.scaler.fit(X)
        X = self.scaler.transform(X)
        X = np.c_[np.ones(X.shape[0]), X]
        # BUGFIX: the original used np.linalg.inv(X.T @ X) @ X.T @ y, which
        # raises LinAlgError on singular designs and is numerically unstable.
        # lstsq gives the same solution on full-rank input and a minimum-norm
        # solution otherwise.
        self.coef_, *_ = np.linalg.lstsq(X, y, rcond=None)
        return self
class Lasso(LinearModel):
    """Lasso regression trained by (Jacobi-style) coordinate descent.

    cost = ||y - X @ coef_||^2 + alpha * ||coef_||_1

    Each sweep computes all coordinate updates against the coefficients
    from the previous sweep, then applies them at once.
    """

    def __init__(self, alpha=1.0, n_iter=1000, e=0.1):
        """
        :param alpha: L1 regularization strength
        :param n_iter: maximum number of coordinate-descent sweeps
        :param e: convergence tolerance on the update norm
        """
        self.alpha = alpha
        self.n_iter = n_iter
        self.e = e
        super().__init__()

    def fit(self, X, y):
        """Fit the lasso model.

        :param X: shape = (n_samples,) or (n_samples, n_features)
        :param y: shape = (n_samples,)
        :return: self
        """
        self.scaler.fit(X)
        X = self.scaler.transform(X)
        X = np.c_[np.ones(X.shape[0]), X]
        self.coef_ = np.zeros(X.shape[1])
        # z[k] = ||X[:, k]||^2 never changes; hoisted out of the sweeps
        # (the original recomputed it every iteration).
        z = np.sum(X * X, axis=0)
        for _ in range(self.n_iter):
            tmp = np.zeros(X.shape[1])
            for k in range(X.shape[1]):
                wk = self.coef_[k]
                # Zero coordinate k so the residual excludes its contribution.
                self.coef_[k] = 0
                p_k = X[:, k] @ (y - X @ self.coef_)
                # Soft-thresholding of the coordinate-wise optimum.
                if p_k < -self.alpha / 2:
                    w_k = (p_k + self.alpha / 2) / z[k]
                elif p_k > self.alpha / 2:
                    w_k = (p_k - self.alpha / 2) / z[k]
                else:
                    w_k = 0
                tmp[k] = w_k
                self.coef_[k] = wk
            # BUGFIX: the original broke out of the loop *before* assigning
            # tmp, silently discarding the final (converged) update.
            converged = np.linalg.norm(self.coef_ - tmp) < self.e
            self.coef_ = tmp
            if converged:
                break
        return self
class Ridge(LinearModel):
    """Ridge regression (L2-regularized least squares), fit in closed form."""

    def __init__(self, alpha=1.0):
        """
        :param alpha: L2 regularization strength
        """
        self.alpha = alpha
        super().__init__()

    def fit(self, X, y):
        """Fit by solving (D^T D + alpha * I) coef = D^T y for the design D.

        :param X: shape = (n_samples,) or (n_samples, n_features)
        :param y: shape = (n_samples,)
        :return: self

        NOTE(review): the identity penalty also shrinks the bias column of
        the design matrix; confirm that penalizing the intercept is intended.
        """
        self.scaler.fit(X)
        standardized = self.scaler.transform(X)
        design = np.c_[np.ones(standardized.shape[0]), standardized]
        gram = design.T @ design + self.alpha * np.eye(design.shape[1])
        self.coef_ = np.linalg.inv(gram) @ design.T @ y
        return self
import matplotlib.pyplot as plt
import numpy as np


def gen_reg_data():
    """Generate a noisy 1-D linear dataset: y ≈ 2x + 10 plus uniform noise.

    :return: (X, y), each of shape (450,)
    """
    X = np.arange(0, 45, 0.1)
    X = X + np.random.random(size=X.shape[0]) * 20
    y = 2 * X + np.random.random(size=X.shape[0]) * 20 + 10
    return X, y


def _demo(clf, title):
    """Fit `clf` on synthetic data and plot the fitted line.

    Extracted because the three demo functions below were byte-for-byte
    duplicates apart from the model class and plot title.
    """
    X, y = gen_reg_data()
    clf.fit(X, y)
    plt.plot(X, y, '.')
    X_axis = np.arange(-5, 75, 0.1)
    plt.plot(X_axis, clf.predict(X_axis))
    plt.title(title)
    plt.show()


def test_linear_regression():
    _demo(LinearRegression(), "Linear Regression")


def test_lasso():
    _demo(Lasso(), "Lasso")


def test_ridge():
    _demo(Ridge(), "Ridge")
更多机器学习代码,请访问 https://github.com/WiseDoge/plume
以上就是Python 实现 3 种回归模型(Linear Regression,Lasso,Ridge)的示例的详细内容,更多关于Python 实现 回归模型的资料请关注其它相关文章!
声明:本页内容来源网络,仅供用户参考;我单位不保证亦不表示资料全面及准确无误,也不保证亦不表示这些资料为最新信息,如因任何原因,本网内容或者用户因倚赖本网内容造成任何损失或损害,我单位将不会负任何法律责任。如涉及版权问题,请提交至online#300.cn邮箱联系删除。
也有些正则方法可以限制回归算法输出结果中系数的影响,其中最常用的两种正则方法是lasso回归和岭回归。lasso回归和岭回归算法跟常规线性回归算法极其相似,有一
Lasso原理 Lasso与弹性拟合比较 python实现 import numpy as np import matplotlib.pyplot as plt from sklear
前言说到如何用Python执行线性回归,大部分人会立刻想到用sklearn的linear_model,但事实是,Python至少有8种执行线性回归的方法,skl
Python3利用Axes3D库画3D模型图,供大家参考,具体内容如下最近在学习机器学习相关的算法,用python实现。自己实现两个特征的线性回归,用Axes3
本文实例讲述了Python实现的简单线性回归算法。分享给大家供大家参考,具体如下:用python实现R的线性模型(lm)中一元线性回归的简单方法,使用R的wom