The printed shape is (100, 2), but inside the function it shows up as (100, 1)? Where did I go wrong?
Source: 6-4 Implementing Gradient Descent for Linear Regression
Jason_chen0755
2020-02-08
from playML.LinearRegression import LinearRegression
import numpy as np
import matplotlib.pyplot as plt

if __name__ == "__main__":
    x = np.random.random(size=100)
    y = x * 3. + 4. + np.random.normal(size=100)
    # plt.scatter(x,y)
    # plt.show()
    X = x.reshape(-1,1)
    X_b = np.hstack([np.ones((len(y),1)),X])
    print(X_b.shape)
    lin_reg = LinearRegression()
    lin_reg.fit_gd(X,y)
The fit_gd method in playML/LinearRegression.py:

def fit_gd(self, X, y, eta=0.01, n_iters=1e4):
    assert X.shape[0] == y.shape[0], \
        "The size of X_train and y_train must be same!"
    X_b = np.hstack([np.ones((len(y), 1)),X])
    initial_theta = np.zeros(X_b.shape[1])

    def J(theta, X_b, y):
        try:
            return (y - X_b.dot(theta)) ** 2 / len(y)
        except:
            return float('inf')

    def dJ(theta, X_b, y):
        # res = np.empty(len(theta))
        # res[0] = np.sum(X_b.dot(theta) - y)
        # for i in range(1,len(theta)):
        #     res[i] = (X_b.dot(theta - y)).dot(X_b[:,i])
        return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(theta)

    def gradient_descend(X_b, y, initial_theta, eta=0.01, n_iters=1e4, epsilon=1e-8):
        theta = initial_theta
        i_iters = 0
        while i_iters < n_iters:
            gradient = dJ(theta,X_b,y)
            last_theta = theta
            theta = theta - eta * gradient
            if abs(J(last_theta,X_b,y) - J(theta,X_b,y)) < epsilon:
                break
            i_iters += 1
        return theta

    self._theta = gradient_descend(X,y,initial_theta,eta,n_iters=1e4,epsilon=1e-8)
    self.interception_ = self._theta[0]
    self.coef_ = self._theta[1:]
    return self
Output:
/Users/jason/anaconda3/bin/python3.7 /Users/jason/Desktop/MarchineLearning/main-gradient-descendent.py
Traceback (most recent call last):
  File "/Users/jason/Desktop/MarchineLearning/main-gradient-descendent.py", line 15, in <module>
    lin_reg.fit_gd(X,y)
  File "/Users/jason/Desktop/MarchineLearning/playML/LinearRegression.py", line 52, in fit_gd
    self._theta = gradient_descend(X,y,initial_theta,eta,n_iters=1e4,epsilon=1e-8)
  File "/Users/jason/Desktop/MarchineLearning/playML/LinearRegression.py", line 44, in gradient_descend
    gradient = dJ(theta,X_b,y)
  File "/Users/jason/Desktop/MarchineLearning/playML/LinearRegression.py", line 38, in dJ
    return X_b.T.dot(X_b.dot(theta) - y) * 2 / len(theta)
ValueError: shapes (100,1) and (2,) not aligned: 1 (dim 1) != 2 (dim 0)
**(100, 2)** is the shape that gets printed.
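For reference, the mismatch in the traceback can be reproduced in isolation. This is just an illustrative sketch (the variable names are mine, not taken from the course code): a theta of length 2 only lines up with a matrix that carries the bias column.

```python
import numpy as np

X = np.random.random((100, 1))            # raw features, shape (100, 1)
X_b = np.hstack([np.ones((100, 1)), X])   # with the bias column, shape (100, 2)
theta = np.zeros(X_b.shape[1])            # shape (2,)

print(X_b.dot(theta).shape)               # (100,) -- the shapes line up
try:
    X.dot(theta)                          # the same mismatch as in the traceback
except ValueError as e:
    print(e)                              # shapes (100,1) and (2,) not aligned ...
```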
1 Answer
liuyubobobo
2020-02-09
You're passing X, not X_b?
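In other words, line 52 of LinearRegression.py calls gradient_descend(X, y, ...) with the raw X of shape (100, 1), while initial_theta was sized from X_b.shape[1]. Below is a minimal standalone sketch of the corrected flow, keeping the names from the question; besides passing X_b, it also folds in two small fixes the answer does not mention: J sums over the samples so the convergence check compares scalars, and dJ divides by the number of samples len(y) rather than len(theta).

```python
import numpy as np

def fit_gd(X, y, eta=0.01, n_iters=1e4, epsilon=1e-8):
    """Batch gradient descent for linear regression; returns [intercept, coefficients...]."""
    assert X.shape[0] == y.shape[0], \
        "The size of X_train and y_train must be same!"

    X_b = np.hstack([np.ones((len(y), 1)), X])   # add the bias column: shape (m, n+1)
    initial_theta = np.zeros(X_b.shape[1])

    def J(theta, X_b, y):
        try:
            # sum over the samples so the cost is a scalar
            return np.sum((y - X_b.dot(theta)) ** 2) / len(y)
        except Exception:
            return float('inf')

    def dJ(theta, X_b, y):
        # vectorized gradient of the cost; divide by the sample count, not len(theta)
        return X_b.T.dot(X_b.dot(theta) - y) * 2. / len(y)

    def gradient_descend(X_b, y, initial_theta, eta, n_iters, epsilon):
        theta = initial_theta
        i_iters = 0
        while i_iters < n_iters:
            gradient = dJ(theta, X_b, y)
            last_theta = theta
            theta = theta - eta * gradient
            if abs(J(last_theta, X_b, y) - J(theta, X_b, y)) < epsilon:
                break
            i_iters += 1
        return theta

    # the key fix: descend on X_b (with the bias column), not on the raw X
    return gradient_descend(X_b, y, initial_theta, eta, n_iters, epsilon)

if __name__ == "__main__":
    x = np.random.random(size=100)
    y = x * 3. + 4. + np.random.normal(size=100)
    theta = fit_gd(x.reshape(-1, 1), y)
    print(theta)   # roughly [4., 3.]: intercept first, then the slope
```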