欢迎您访问程序员文章站,本站旨在为大家分享程序员计算机编程知识!
您现在的位置是: 首页

机器学习之二分类画出决策边界

程序员文章站 2022-05-26 19:09:50
...

二分类画出决策边界

// 二分类决策边界
import numpy as np
import matplotlib.pyplot as plt

# Load the training set: each row is "feature1,feature2,label".
data = np.loadtxt('ex2data2.txt', delimiter=',')
x = data[:, :-1]   # all feature columns
y = data[:, -1:]   # label column, kept 2-D for the matrix math below
X = np.insert(x, 0, 1, axis=1)  # prepend an intercept column of ones


def Map(x1, x2, degrees=6):
    """Expand two 1-D feature vectors into polynomial feature columns.

    Emits a bias column of ones followed by every monomial
    ``x1**(i-j) * x2**j`` for ``1 <= i <= degrees`` and ``0 <= j <= i``.

    Parameters:
        x1, x2 : 1-D arrays of equal length (one value per sample).
        degrees : highest total polynomial degree. Defaults to 6, the
            value previously hard-coded, so existing callers are unchanged.

    Returns:
        2-D array of shape ``(len(x1), 1 + degrees * (degrees + 3) // 2)``.
    """
    out = np.ones((x1.shape[0], 1))
    for i in range(1, degrees + 1):
        for j in range(i + 1):
            # Column for the monomial x1^(i-j) * x2^j.
            term = (x1 ** (i - j) * x2 ** j).reshape(-1, 1)
            out = np.hstack((out, term))
    return out


# Expand the two raw features into the degree-6 polynomial design matrix.
onex = Map(X[:, 1], X[:, 2])

# Forward pass of logistic regression.
def model(X, theta):
    """Return sigmoid(X @ theta): predicted probabilities, one per row."""
    logits = X.dot(theta)
    return 1.0 / (1.0 + np.exp(-logits))

# Loss function.
def Cost(theta, X, y, lamda):
    """Regularized cross-entropy cost; the bias theta[0] is not penalized."""
    m = len(y)
    # Sigmoid hypothesis, written inline instead of calling the helper.
    h = 1.0 / (1.0 + np.exp(-X.dot(theta)))
    data_term = -np.mean(y * np.log(h) + (1 - y) * np.log(1 - h))
    penalty = lamda * np.sum(theta[1:] ** 2) / (2 * m)
    return data_term + penalty

# Gradient of the regularized cost.
def Grad(theta, X, y, lamda):
    """Return d(Cost)/d(theta) with shape (n, 1).

    Fix: the sample count ``m`` was read from a module-level global that
    only exists after the ``__main__`` block runs; it is now computed
    locally (as ``Cost`` already does), making the function self-contained.
    """
    m = len(y)
    h = 1 / (1 + np.exp(-X.dot(theta)))  # sigmoid(X @ theta)
    grad = (1 / m) * X.T.dot(h - y)
    # Regularize every weight except the intercept theta[0].
    reg = np.insert(theta[1:], 0, 0, axis=0)
    return grad + lamda / m * reg

# Train with batch gradient descent.
def train(theta, X, y, lamda, iters=1000, alpha=1):
    """Run ``iters`` gradient-descent steps.

    Returns (final theta, list of costs sampled every 200 iterations).

    Fixes: the original recomputed and discarded the hypothesis ``h`` on
    every iteration, and evaluated ``Cost`` on every step although the
    value was only used every 200th; both are now done only when logging.
    The printed output and returned values are unchanged.
    """
    Jlist = []
    for i in range(iters):
        if i % 200 == 0:
            # Cost of the *current* theta, before this step's update.
            cost = Cost(theta, X, y, lamda)
            print('第{}次,代价{}'.format(i, cost))
            Jlist.append(cost)
        theta = theta - alpha * Grad(theta, X, y, lamda)
    return theta, Jlist


def comput(theta):
    h = model(onex, theta)
    tiao = np.where(h >= 0.5, 1, 0)
    jisuan = np.mean(np.equal(tiao, y))
    return jisuan


def plot_show(theta, lamda):
    """Draw the decision boundary (score == 0) as a labeled contour line."""
    size = 100
    axis1 = np.linspace(-1, 1.5, size)
    axis2 = np.linspace(-1, 1.5, size)
    scores = np.zeros((size, size))
    for r, u in enumerate(axis1):
        for c, v in enumerate(axis2):
            features = Map(np.array([u]), np.array([v]))
            scores[r][c] = features.dot(theta)
    # contour indexes z as [x2, x1], hence the transpose.
    boundary = plt.contour(axis1, axis2, scores.T, [0])
    plt.clabel(boundary, inline=1, fontsize=10, fmt='lamda%d' % lamda)


if __name__ == '__main__':
    # Regularization strength for this run.
    lamda = 10
    # NOTE(review): Grad() reads this module-level ``m`` — this assignment
    # must stay before the train() call below.
    m, n = onex.shape
    theta = np.zeros((n, 1))
    theta, cost = train(theta, onex, y, lamda)
    # Print the regularization strength and the training-set accuracy.
    print(lamda, comput(theta))

    # Scatter the raw features colored by label, then overlay the boundary.
    plt.scatter(X[:, 1], X[:, 2], c=y.ravel(), s=18)
    plot_show(theta, lamda)
    plt.show()

本文中数据集下载地址:
链接:https://pan.baidu.com/s/1io3aZH-QRCqnGardXOH5yg 提取码:83mu

学习讨论群请加QQ群:521364338,扫码进群领取人工智能学习资料,转载标明出处,侵权必究!