
Error Backpropagation
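
To warm up, here is the NumPy masking trick that the ReLU layer's forward pass is built on: a boolean mask marks the non-positive entries, and indexing with the mask zeroes exactly those entries.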

    import numpy as np
    
    x = np.array([[1.0, -0.5], [-2.0, 3.0]])
    
    print(x)
    # [[ 1.  -0.5]
    #  [-2.   3. ]]
    mask = (x <= 0)
    print(mask)
    # [[False  True]
    #  [ True False]]
    
    out = x.copy()
    print(out)
    # [[ 1.  -0.5]
    #  [-2.   3. ]]
    
    out[mask] = 0
    print(out)
    # [[1. 0.]
    #  [0. 3.]]
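
The second building block is for the Affine layer: broadcasting adds the bias B to every row of x_dot_W in the forward pass, so the backward pass recovers the bias gradient dB by summing dY over the batch axis (axis=0).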
    
    import numpy as np
    
    x_dot_W = np.array([[0, 0, 0], [10, 10, 10]])
    B = np.array([1, 2, 3])
    print(x_dot_W + B)
    # [[ 1  2  3]
    #  [11 12 13]]
    
    dY = np.array([[1, 2, 3], [4, 5, 6]])
    dB = np.sum(dY, axis=0)
    print(dB)  # [5 7 9]
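
This axis=0 sum is exactly how the Affine layer below computes self.db.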
    

Implementing Each Layer

    import numpy as np
    from common_functions import softmax, cross_entropy_error  # helpers defined elsewhere in the project
    
    
    class MulLayer:
        def __init__(self):
            self.x = None
            self.y = None
    
        def forward(self, x, y):
            self.x = x
            self.y = y
            out = x * y
            return out
    
        def backward(self, dout):
            dx = dout * self.y  # swap x and y: d(x*y)/dx = y
            dy = dout * self.x  # d(x*y)/dy = x
    
            return dx, dy
    
    
    class AddLayer:
        def __init__(self):
            pass
    
        def forward(self, x, y):
            out = x + y
            return out
    
        def backward(self, dout):
            dx = dout * 1  # addition passes the upstream gradient through unchanged
            dy = dout * 1
            return dx, dy
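
As a quick sanity check of these two layers, here is a minimal sketch of the classic shopping example (the prices, counts, and the 1.1 tax rate are made-up values): the forward pass computes the total price, and running the layers backward in reverse order yields the derivative of the total with respect to every input.

    apple_layer = MulLayer()
    orange_layer = MulLayer()
    add_layer = AddLayer()
    tax_layer = MulLayer()
    
    # forward: (100 * 2 apples) + (150 * 3 oranges), then * 1.1 tax
    apple_total = apple_layer.forward(100, 2)
    orange_total = orange_layer.forward(150, 3)
    subtotal = add_layer.forward(apple_total, orange_total)
    price = tax_layer.forward(subtotal, 1.1)
    print(price)  # ≈ 715.0
    
    # backward: run the layers in reverse, starting from dprice = 1
    dsubtotal, dtax = tax_layer.backward(1)
    dapple_total, dorange_total = add_layer.backward(dsubtotal)
    dapple, dapple_num = apple_layer.backward(dapple_total)
    dorange, dorange_num = orange_layer.backward(dorange_total)
    print(dapple, dapple_num, dorange, dorange_num, dtax)
    # ≈ 2.2 110.0 3.3 165.0 650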
    
    
    # ReLU layer
    class ReLU:
        def __init__(self):
            self.mask = None
    
        def forward(self, x):
            self.mask = (x <= 0)
            out = x.copy()
            out[self.mask] = 0  # zero the entries where mask is True; the rest pass through unchanged
            return out
    
        def backward(self, dout):
            dout[self.mask] = 0  # block the gradient where the input was <= 0 (modifies dout in place)
            dx = dout
            return dx
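
A minimal check of the layer, reusing the array from the opening demo. Note that backward writes into dout in place, so the caller's upstream-gradient array is modified:

    relu = ReLU()
    x = np.array([[1.0, -0.5], [-2.0, 3.0]])
    print(relu.forward(x))
    # [[1. 0.]
    #  [0. 3.]]
    
    dout = np.ones_like(x)
    print(relu.backward(dout))  # gradient flows only where x > 0
    # [[1. 0.]
    #  [0. 1.]]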
    
    
    # Sigmoid layer
    
    class Sigmoid:
        def __init__(self):
            self.out = None
    
        def forward(self, x):
            out = 1 / (1 + np.exp(-x))
            self.out = out
            return out
    
        def backward(self, dout):
            dx = dout * (1.0 - self.out) * self.out
            return dx
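
The backward pass uses the identity dy/dx = y * (1 - y) for y = sigmoid(x), which is why caching self.out is all the layer needs. A quick numeric check at x = 0, where y = 0.5 and the derivative is 0.25:

    sigmoid = Sigmoid()
    y = sigmoid.forward(np.array([0.0]))
    dx = sigmoid.backward(np.array([1.0]))
    print(y, dx)  # [0.5] [0.25]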
    
    
    class Affine:
        def __init__(self, W, b):
            self.W = W
            self.b = b
            self.x = None
            self.dW = None
            self.db = None
    
        def forward(self, x):
            self.x = x
            out = np.dot(self.x, self.W) + self.b
            return out
    
        def backward(self, dout):
            dx = np.dot(dout, self.W.T)
            self.dW = np.dot(self.x.T, dout)
            self.db = np.sum(dout, axis=0)  # sum over the batch axis, as in the dB demo above
            return dx
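
A quick shape check on a small batch (W, b, and x are arbitrary placeholder values): the bias is broadcast across all four samples in the forward pass, and db collapses back to shape (3,) in the backward pass, matching the dB demo at the top.

    W = np.random.randn(2, 3)  # placeholder weights: 2 inputs -> 3 outputs
    b = np.zeros(3)
    affine = Affine(W, b)
    
    x = np.random.randn(4, 2)  # batch of 4 samples
    out = affine.forward(x)    # shape (4, 3); b is broadcast over the rows
    dx = affine.backward(np.ones((4, 3)))
    print(dx.shape, affine.dW.shape, affine.db.shape)  # (4, 2) (2, 3) (3,)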
    
    
    class SoftmaxWithLoss:
        def __init__(self):
            self.loss = None  # cross-entropy loss
            self.y = None     # output of softmax
            self.t = None     # one-hot teacher labels
    
        def forward(self, x, t):
            self.t = t
            self.y = softmax(x)
            self.loss = cross_entropy_error(self.y, self.t)
            return self.loss
    
        def backward(self, dout=1):
            batch_size = self.t.shape[0]
            dx = (self.y - self.t) / batch_size  # divide by batch_size so the signal passed backward is the per-sample error
            return dx
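
Finally, a sketch of the loss layer on a single sample, assuming common_functions provides the usual batch-aware softmax and cross_entropy_error (the scores and one-hot label below are made up):

    layer = SoftmaxWithLoss()
    x = np.array([[0.3, 2.9, 4.0]])  # raw scores for one sample
    t = np.array([[0, 0, 1]])        # one-hot label: class 2 is correct
    
    loss = layer.forward(x, t)  # ≈ 0.306, the cross-entropy of softmax(x) against t
    dx = layer.backward()       # (y - t) / 1 for this single-sample batch
    print(dx)                   # negative only at the correct class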
    
