Implementing a Fully-Connected Neural Network in Python (Multi-layer Perceptron)

Author: bdd1b3ad7323 | Published 2017-10-13 22:26

Code

import numpy as np

# Activation functions and their derivatives.
# Note: each derivative takes the *output* of its activation, not the input.
def sigmoid(x):
    return 1 / (1 + np.exp(-x))


def dsigmoid(y):
    return y * (1 - y)


def tanh(x):
    return np.tanh(x)


def dtanh(y):
    return 1.0 - y ** 2


def relu(x):
    tmp = x.copy()
    tmp[tmp < 0] = 0
    return tmp


def drelu(y):
    # y is the ReLU output, so y >= 0 everywhere; units that were clipped
    # to 0 get derivative 0, active units get derivative 1.
    tmp = y.copy()
    tmp[tmp > 0] = 1
    return tmp
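
# Optional sanity check (my addition, not part of the original post):
# dsigmoid takes the activation's *output*, so dsigmoid(sigmoid(x)) must
# match a numerical estimate of d(sigmoid)/dx.
_x = np.linspace(-2.0, 2.0, 5)
_numeric = (sigmoid(_x + 1e-6) - sigmoid(_x - 1e-6)) / 2e-6
assert np.allclose(_numeric, dsigmoid(sigmoid(_x)), atol=1e-6)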


class MLPClassifier(object):
    """Multi-layer perceptron, trained with backpropagation (BP)."""

    def __init__(self,
                 layers,
                 activation='tanh',
                 epochs=20, batch_size=1, learning_rate=0.01):
        """
        :param layers: sizes of the network layers, e.g. (2, 3, 1)
        :param activation: activation function ('tanh', 'sigmoid' or 'relu')
        :param epochs: number of training epochs
        :param batch_size: mini-batch size
        :param learning_rate: learning rate
        """
        self.epochs = epochs
        self.learning_rate = learning_rate
        self.layers = []
        self.weights = []
        self.batch_size = batch_size

        # One weight matrix per pair of adjacent layers, plus a buffer
        # for each layer's output.
        for i in range(0, len(layers) - 1):
            weight = np.random.random((layers[i], layers[i + 1]))
            layer = np.ones(layers[i])
            self.layers.append(layer)
            self.weights.append(weight)
        self.layers.append(np.ones(layers[-1]))

        # One threshold (bias) vector for every layer after the input.
        self.thresholds = []
        for i in range(1, len(layers)):
            threshold = np.random.random(layers[i])
            self.thresholds.append(threshold)

        if activation == 'tanh':
            self.activation = tanh
            self.dactivation = dtanh
        elif activation == 'sigmoid':
            self.activation = sigmoid
            self.dactivation = dsigmoid
        elif activation == 'relu':
            self.activation = relu
            self.dactivation = drelu
        else:
            raise ValueError("unknown activation: %r" % activation)

    def fit(self, X, y):
        """
        :param X: shape = [n_samples, n_features]
        :param y: shape = [n_samples, n_outputs]
        :return: self
        """
        for _ in range(self.epochs * (X.shape[0] // self.batch_size)):
            # Draw a random mini-batch, run the forward pass, then backprop.
            i = np.random.choice(X.shape[0], self.batch_size)
            self.update(X[i])
            self.back_propagate(y[i])
        return self

    def predict(self, X):
        """
        :param X: shape = [n_samples, n_features]
        :return: shape = [n_samples, n_outputs]
        """
        self.update(X)
        return self.layers[-1].copy()

    def update(self, inputs):
        """Forward pass: feed `inputs` through the network."""
        self.layers[0] = inputs
        for i in range(len(self.weights)):
            next_layer_in = self.layers[i] @ self.weights[i] - self.thresholds[i]
            self.layers[i + 1] = self.activation(next_layer_in)

    def back_propagate(self, y):
        errors = y - self.layers[-1]

        # Per-sample delta of the output layer, shape = [batch_size, n_outputs].
        deltas = [self.dactivation(self.layers[-1]) * errors]

        # Propagate the deltas back through the hidden layers.
        for i in range(len(self.weights) - 1, 0, -1):
            deltas.append(deltas[-1] @ self.weights[i].T * self.dactivation(self.layers[i]))
        deltas.reverse()

        # Apply the updates, averaged over the mini-batch.
        for i in range(len(self.weights)):
            self.weights[i] += self.learning_rate * (self.layers[i].T @ deltas[i]) / self.batch_size
            self.thresholds[i] -= self.learning_rate * deltas[i].mean(axis=0)
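
The backward pass above is the classical delta rule: the output-layer delta is dactivation(output) * (y - output), each hidden delta is the next layer's delta pushed back through the weights and scaled by dactivation of that layer's output, and every weight matrix moves by learning_rate * input.T @ delta, averaged over the mini-batch.

As a quick smoke test, here is a minimal sketch (my addition, not from the original post) that fits XOR, a problem a single-layer perceptron cannot solve. The hyperparameters are guesses, so the predictions should only drift toward the targets:

import numpy as np

X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
y = np.array([[0], [1], [1], [0]], dtype=float)
clf = MLPClassifier((2, 4, 1), activation='tanh', epochs=2000, learning_rate=0.1)
clf.fit(X, y)
print(clf.predict(X))  # expected to drift toward [[0], [1], [1], [0]]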

Test Code

import sklearn.datasets
import numpy as np
import matplotlib.pyplot as plt

def plot_decision_boundary(pred_func, X, y, title=None):
    """Plot the training samples and the classifier's decision boundary.
    :param pred_func: the classifier's predict function
    :param X: training set X
    :param y: training set y
    :return: None
    """

    # Set min and max values and give it some padding
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    h = 0.01
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid
    Z = pred_func(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.scatter(X[:, 0], X[:, 1], s=40, c=y, cmap=plt.cm.Spectral)

    if title:
        plt.title(title)
    plt.show()


def test_mlp():
    X, y = sklearn.datasets.make_moons(200, noise=0.20)
    y = y.reshape((-1, 1))
    n = MLPClassifier((2, 3, 1), activation='tanh', epochs=300, learning_rate=0.01)
    n.fit(X, y)

    def tmp(X_):
        # Threshold the network's continuous output at 0.5 to get class labels.
        sign = np.vectorize(lambda x: 1 if x >= 0.5 else 0)
        return sign(n.predict(X_))

    plot_decision_boundary(tmp, X, y, 'Neural Network')
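
The results section shows one plot per activation. A hypothetical helper for reproducing both figures (the function name and loop are my addition, not from the original post):

def test_activations():
    X, y = sklearn.datasets.make_moons(200, noise=0.20)
    y = y.reshape((-1, 1))
    for act in ('tanh', 'relu'):
        n = MLPClassifier((2, 3, 1), activation=act, epochs=300, learning_rate=0.01)
        n.fit(X, y)
        predict = lambda X_, n=n: (n.predict(X_) >= 0.5).astype(int)
        plot_decision_boundary(predict, X, y, act)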

Results

[Figures: decision boundaries learned with the tanh (left) and relu (right) activations]

For more machine learning code, visit https://github.com/WiseDoge/plume

