标签:Python 梯度 self 线性 train pred test import model
中文网站上很难找到一个简洁、像样的用梯度下降法实现的多元线性回归算法。
简洁的公式推导需要用同样简洁的代码来实现。
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import StandardScaler
class Linear_Regression():
    """Multivariate linear regression fitted by full-batch gradient descent.

    Minimizes the cost J(w) = 1/(2N) * ||Xw - y||^2. A bias column of ones
    is appended to X internally, so the intercept is the last entry of
    ``self.w``.
    """

    def __init__(self, lr=0.01, num_iter=1000):
        """Configure the optimizer.

        lr:       gradient-descent step size.
        num_iter: number of full-batch update steps.
        Defaults match the original hard-coded values, so existing
        ``Linear_Regression()`` callers behave identically.
        """
        self.lr = lr
        self.num_iter = num_iter
        self.w = None          # learned weights, shape (n_features + 1,)
        self.loss_list = []    # cost after each update, for convergence plots

    def fit(self, X, y):
        """Fit weights to (X, y) by ``num_iter`` gradient-descent steps.

        X: array of shape (N, n_features); y: array of shape (N,).
        """
        # Append a column of ones so the intercept is learned as a weight.
        X = np.hstack((X, np.ones(X.shape[0]).reshape(-1, 1)))
        N, m = X.shape
        self.w = np.zeros(m)
        for _ in range(self.num_iter):
            # Gradient of J(w): X^T (Xw - y) / N.
            grad = X.T @ (X @ self.w - y) / N
            self.w -= self.lr * grad
            residual = X @ self.w - y
            # BUG FIX: the original wrote (1/2*N)*SSE, which by operator
            # precedence is (N/2)*SSE; the cost is SSE / (2N).
            loss = (residual @ residual) / (2 * N)
            self.loss_list.append(loss)

    def predict(self, X):
        """Return predictions for X (bias column appended internally)."""
        X = np.hstack((X, np.ones(X.shape[0]).reshape(-1, 1)))
        y_pred = X @ self.w
        return y_pred
if __name__ == '__main__':
    # NOTE(review): load_boston was deprecated in scikit-learn 1.0 and
    # removed in 1.2; replace with e.g. fetch_california_housing when
    # upgrading the sklearn dependency.
    X, y = datasets.load_boston(return_X_y=True)
    # Standardize features so one learning rate suits every coordinate.
    scaler = StandardScaler()
    X = scaler.fit_transform(X)

    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)

    # Our gradient-descent implementation.
    gd_model = Linear_Regression()
    gd_model.fit(X=X_train, y=y_train)
    y_pred = gd_model.predict(X=X_test)
    plt.plot(np.arange(len(gd_model.loss_list)), gd_model.loss_list)
    plt.show()
    print("gradient-descent MSE:", mean_squared_error(y_true=y_test, y_pred=y_pred))

    # Baseline: sklearn's closed-form LinearRegression on the same split.
    sk_model = LinearRegression()
    sk_model.fit(X=X_train, y=y_train)
    y_pred = sk_model.predict(X=X_test)
    print("sklearn LinearRegression MSE:", mean_squared_error(y_true=y_test, y_pred=y_pred))
标签:Python,梯度,self,线性,train,pred,test,import,model 来源: https://blog.csdn.net/DeniuHe/article/details/119424707
本站声明: 1. iCode9 技术分享网(下文简称本站)提供的所有内容,仅供技术学习、探讨和分享; 2. 关于本站的所有留言、评论、转载及引用,纯属内容发起人的个人观点,与本站观点和立场无关; 3. 关于本站的所有言论和文字,纯属内容发起人的个人观点,与本站观点和立场无关; 4. 本站文章均是网友提供,不完全保证技术分享内容的完整性、准确性、时效性、风险性和版权归属;如您发现该文章侵犯了您的权益,可联系我们第一时间进行删除; 5. 本站为非盈利性的个人网站,所有内容不会用来进行牟利,也不会利用任何形式的广告来间接获益,纯粹是为了广大技术爱好者提供技术内容和技术思想的分享性交流网站。