## 算法推导

$$y = w_0*x_0^2+w_1*x_1^2+w_2*x_0*x_1+w_3*x_0+w_4*x_1+b$$

$$y = w_3*x_0+w_4*x_1+b$$

### 如何增加约束条件

$y = w_0*x + w_1*b \\ s.t. ||W||^2 < r^2, W=(w_0, w_1)$

### 约束条件下的公式推导

$argmin \frac{1}{N}\sum_{i=1}^{N}(w*x_i+b-y_i)^2$

$argmin \frac{1}{N}\sum_{i=1}^{N}(w*x_i+b-y_i)^2 \\ s.t. ||W||^2 < r^2, W=(w_0, w_1)$

$argmin \frac{1}{N}\sum_{i=1}^{N}(Wx_i-y_i)^2 + \lambda(||W||^2-r^2)$

$2X^TXW-2X^Ty+2\lambda W = 0 \\ X^TXW + \lambda W = X^Ty \\ (X^TX+\lambda I)W = X^Ty, I为单位阵 \\ W = (X^TX+\lambda I)^{-1}X^Ty$

## 代码实现

### 岭回归对象初始化

if self.lambdaVal == 0:
return super(RidgeRegression,self).train()

### 参数W计算

xTx = self.X.T @ self.X
I = np.eye(xTx.shape[0])
self.w = np.linalg.inv(xTx + self.lambdaVal*I) @ self.X.T @ self.y
self.w = self.w.reshape(-1)
self.w,self.b = self.w[1:],self.w[0]

## 全部代码

import numpy as np
import matplotlib.pyplot as plt
from 线性回归最小二乘法矩阵实现 import LinearRegression as LR
from 多项式回归 import PolynomialRegression as PR
'''

'''
class RidgeRegression(PR):
    """Polynomial regression with L2 (ridge) regularization.

    Solves the closed form W = (X^T X + lambda*I)^{-1} X^T y; when
    lambdaVal == 0 this reduces to ordinary least squares and the
    parent implementation is reused.
    """

    def __init__(self, X, y, degrees=1, lambdaVal=0):
        # lambdaVal is the regularization strength; the parent class is
        # assumed to build the bias-augmented design matrix self.X.
        super().__init__(X, y, degrees)
        self.lambdaVal = lambdaVal

    def train(self):
        # With no regularization, delegate to the plain least-squares solver.
        if self.lambdaVal == 0:
            return super().train()
        gram = self.X.T @ self.X
        identity = np.eye(gram.shape[0])
        # Closed-form ridge solution: (X^T X + lambda*I)^{-1} X^T y.
        # NOTE(review): lambda*I also penalizes the bias column of X —
        # confirm that is intended (ridge often leaves the bias unpenalized).
        solution = np.linalg.inv(gram + self.lambdaVal * identity) @ self.X.T @ self.y
        solution = solution.reshape(-1)
        # The first coefficient corresponds to the bias column of X.
        self.w, self.b = solution[1:], solution[0]
        return self.w, self.b
def pain(pos=141, xlabel='x', ylabel='y', title='', x=(), y=(), line_x=(), line_y=()):
    """Draw one subplot: a scatter of the samples plus the fitted curve.

    Args:
        pos: matplotlib subplot position code (e.g. 141, 331).
        xlabel, ylabel, title: axis labels and subplot title.
        x, y: sample coordinates for the scatter plot.
        line_x, line_y: coordinates of the fitted regression curve.
    """
    # Defaults are immutable tuples rather than the original mutable []
    # (a classic Python pitfall); plotting behavior is unchanged.
    plt.subplot(pos)
    plt.title(title)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.scatter(x, y)
    plt.plot(line_x, line_y)
if __name__ == '__main__':
    rnd = np.random.RandomState(3)  # fixed seed => reproducible dataset
    x_min, x_max = 0, 10

    # The plotting helper `pain` defined at module level is reused here;
    # the original duplicated an identical definition inside this guard.

    # "God" function y = f(x): the true underlying curve.
    def f(x):
        return x**5 - 22*x**4 + 161*x**3 - 403*x**2 + 36*x + 938

    # "God" distribution P(Y|X): the true curve plus Gaussian noise.
    def P(X):
        return f(X) + rnd.normal(scale=30, size=X.shape)

    # Sample a small dataset D from P(X, Y).
    X = rnd.uniform(x_min, x_max, 10)   # draw X from a uniform distribution
    y = P(X)                            # draw y from P(Y|X)
    X, y = X.reshape(-1, 1), y.reshape(-1, 1)
    # NOTE(review): min/max over a 2-D array return 1-element arrays here;
    # kept as-is because all downstream code handles them.
    x_min, x_max = min(X), max(X)

    # Row 1: unregularized polynomial fits of increasing degree.
    for pos, deg in zip([331, 332, 333], [2, 5, 10]):
        model = PR(X=X, y=y, degrees=deg)
        w, b = model.train()
        print(f'最小二乘法的矩阵方式结果为：w={w} b={b}')
        line_x = [x_min + (x_max - x_min) * (i / 100) for i in range(-1, 102, 1)]
        line_y = [model.predict(x) for x in line_x]
        pain(pos, 'X', 'y', 'DEG=' + str(deg), X[:, 0], y[:, 0], line_x, line_y)

    # Rows 2-3: ridge fits. The original had two near-identical copy-paste
    # loops (deg=5 and deg=10); they are merged into one loop over
    # (subplot position, degree, lambda) configurations, same order.
    ridge_configs = [
        (334, 5, 0.1), (335, 5, 1), (336, 5, 10),
        (337, 10, 0.1), (338, 10, 1), (339, 10, 10),
    ]
    for pos, deg, lambdaVal in ridge_configs:
        model = RidgeRegression(X=X, y=y, degrees=deg, lambdaVal=lambdaVal)
        w, b = model.train()
        print(f'最小二乘法的矩阵方式结果为：w={w} b={b}')
        line_x = [x_min + (x_max - x_min) * (i / 100) for i in range(-1, 102, 1)]
        line_y = [model.predict(x) for x in line_x]
        pain(pos, 'X', 'y', 'DEG=' + str(deg) + ', λ=' + str(lambdaVal),
             X[:, 0], y[:, 0], line_x, line_y)

    plt.show()