"""Least-squares learning with a Gaussian kernel.

NOTE: the original title promises stochastic gradient descent, but the
current implementation solves the least-squares problem in closed form
via the Moore-Penrose pseudo-inverse.
"""

import math
import numpy as np
import matplotlib.pyplot as plt

if 1:   # grouping block kept as-is: sibling definitions below share this indent level
    def generx2(sta, end, num):  # generate a noisy y = x^2 data set
        """Sample ``num`` points of y = x**2 + Gaussian noise on [sta, end].

        The RNG is re-seeded with 0 on every call, so the noise (mean 0.01,
        std 0.1) is deterministic. Returns column vectors (x, y) of shape
        (num, 1).
        """
        np.random.seed(0)                           # reproducible noise
        noise = np.random.normal(0.01, 0.1, num)

        xs = np.linspace(sta, end, num)             # evenly spaced abscissae
        ys = xs * xs + noise

        return xs.reshape(-1, 1), ys.reshape(-1, 1)


    def generx3(sta, end, num):  # generate a noisy y = x^3 data set
        """Sample ``num`` points of y = x**3 + Gaussian noise on [sta, end].

        The RNG is re-seeded with 0 on every call, so the noise (mean 0.01,
        std 0.1) is deterministic. Returns column vectors (x, y) of shape
        (num, 1).
        """
        np.random.seed(0)                           # reproducible noise
        noise = np.random.normal(0.01, 0.1, num)

        xs = np.linspace(sta, end, num)             # evenly spaced abscissae
        ys = xs ** 3 + noise

        return xs.reshape(-1, 1), ys.reshape(-1, 1)


    def polyfunc(x, dim):  # polynomial basis for a linear-in-parameters model
        """Build the polynomial design matrix: column j is x**j, j = 0..dim-1.

        Replaces the original O(n*dim) Python-level double loop with a single
        NumPy broadcasting expression (0**0 evaluates to 1, matching the
        original bias column). The result stays an ``np.matrix`` because
        callers rely on ``*`` meaning matrix multiplication.

        :param x: array-like of shape (n, 1) (or anything reshapable to it)
        :param dim: number of basis functions, i.e. highest power + 1
        :return: np.matrix of shape (n, dim)
        """
        col = np.asarray(x).reshape(-1, 1)          # (n, 1)
        px = np.matrix(col ** np.arange(dim))       # broadcast to (n, dim)
        print('px shape:', np.shape(px))

        return px


    def trigofunc(x, dim):  # trigonometric (Fourier) basis for a linear model
        """Build the Fourier design matrix [1, sin x, cos x, sin 2x, cos 2x, ...].

        Column j uses frequency k = (j + 1) // 2: sine for odd j, cosine for
        even j (j >= 1); column 0 is the bias.

        Bug fix: the original computed the frequency as ``j >> 1`` and put
        the sine on even j, so column 1 became cos(0*x) == 1 — an exact
        duplicate of the bias column — and the highest frequency promised by
        its own comment was never produced. The pseudo-inverse downstream
        tolerated the rank deficiency, but the basis did not match its
        documentation.

        :param x: array-like of shape (n, 1)
        :param dim: number of basis functions
        :return: np.matrix of shape (n, dim)
        """
        px = []
        for row in x:
            t = row[0]
            cell = []
            for j in range(dim):
                if j == 0:
                    cell.append(1)                          # bias term
                elif j & 0x1:
                    cell.append(math.sin(((j + 1) >> 1) * t))  # odd j -> sin(k*t)
                else:
                    cell.append(math.cos((j >> 1) * t))        # even j -> cos(k*t)
            px.append(cell)

        px = np.matrix(px)
        print('px shape:', np.shape(px))

        return px

    def corefunc(x):    # Gaussian "kernel" features
        """Map each sample to exp(-0.5 * sum_j (x_i - x_j)^2 / (2*h^2)), h = 0.3.

        Returns an np.matrix of shape (n, 1): one scalar per sample, built
        from the *sum* of its squared distances to every training point.

        NOTE(review): a standard Gaussian kernel model would produce the
        full n x n matrix K[i][j] = exp(-(x_i - x_j)^2 / (2*h^2)) instead of
        collapsing the distances into one value per row. Changing that would
        alter the return shape callers depend on, so the behavior is kept
        as-is here — confirm the intent before "fixing" it.
        """
        bandwidth = 0.3
        denom = 2 * bandwidth * bandwidth
        rows = []
        for xi in x:
            sq_dist_total = sum((xi - xj) ** 2 for xj in x)
            rows.append(np.exp(-(sq_dist_total * 0.5 / denom)))

        Kx = np.matrix(rows)
        print('Kx shape:', np.shape(Kx))

        return Kx

    def linear_model_test(i, dim):  # linear-in-parameters model demo
        """Fit a trigonometric-basis linear model to noisy y = x**3 samples.

        Trains on 400 points over [-1, 1], then plots the prediction on the
        wider grid [-1.5, 1.5] (2000 points) to show extrapolation; the
        curve is labeled with ``i``.
        """
        x, y = generx3(-1, 1, 400)
        plt.plot(x, y)

        design = trigofunc(x, dim)
        # Least-squares weights via the pseudo-inverse: theta^T = y^T pinv(Phi)^T
        weights = y.T * np.linalg.pinv(design).T

        # Denser, wider evaluation grid (its y values are unused).
        grid_x, _ = generx3(-1.5, 1.5, 2000)
        grid_design = trigofunc(grid_x, dim)
        prediction = weights * grid_design.T

        plt.plot(grid_x, prediction.T, label=str(i))


    def core_model_test():  # Gaussian-kernel model demo
        """Fit the Gaussian "kernel" model on noisy y = x**2 samples and plot.

        NOTE(review): the "test set" is generated with the same interval,
        count, and seed as the training set, so this only visualizes the
        in-sample fit — confirm whether a held-out grid was intended.
        """
        x, y = generx2(-1, 1, 400)
        print(x.shape, y.shape)
        plt.plot(x, y)

        kx = corefunc(x)
        # Closed-form least squares via the pseudo-inverse.
        coeffs = y * np.linalg.pinv(kx)

        tx, _ = generx2(-1, 1, 400)
        ktx = corefunc(tx)
        prediction = coeffs * ktx

        plt.plot(tx, prediction)


if __name__ == '__main__':
    # Swap in the loop below to sweep basis sizes with the linear model:
    # for i in range(10):
    #     linear_model_test(i, i)
    core_model_test()
    plt.legend()
    plt.show()

# TODO: plot the gradient-descent learning curve
# TODO: implement least-squares learning of the Gaussian kernel model
#       via stochastic gradient descent (as the module title promises)