主要内容
在这里主要是介绍下基于keras的去噪自编码器的实现。其实,对于一个普通的autoencoder,简单地说就是存在一个latent空间,用该空间中的向量来表征输入,所以输入输出的维度是一样的;同时autoencoder也可以用来实现分类等功能,主要是在网络的最后一层换一个不同的激活函数而已。再就是后来提出的基于stack的堆栈自编码器(stacked autoencoder),可以看作普通自编码器的升级版本,主要是在latent space中叠加多个隐藏层:前一层的编码输出作为后一层的输入,后层继续编码,一直到最后一层,简单的示意图可以如下
堆栈自编码器
下面是实现的简单代码
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 18 14:49:54 2017
@author: Administrator
"""
#! /usr/bin/python
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import keras
from keras.layers import Activation, Dense, Input
from keras.layers import Conv2D, Flatten
from keras.layers import Reshape, Conv2DTranspose
from keras.models import Model
from keras.layers import GaussianNoise
from keras import backend as K
from keras.datasets import mnist
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
from keras import regularizers
import scipy.io as scio
# --- Data preparation -------------------------------------------------
# Load the endmember matrix from the MATLAB file and build clean/noisy
# train/test splits for the denoising autoencoder.
dataFile = 'endUse.mat'
datadic = scio.loadmat(dataFile)
# Samples are stored column-wise in the .mat file; transpose so each
# row is one sample.
data = datadic['Endmatrix'].T

# 80/20 train/test split along the sample axis.
totalnumber = data.shape[0]
trainsplit = int(0.8 * totalnumber)
data_train, data_test = data[:trainsplit], data[trainsplit:]

# Corrupt both splits with additive zero-mean Gaussian noise (sigma=0.4);
# the clean arrays remain the reconstruction targets.
data_train_noisy = data_train + np.random.normal(loc=0, scale=0.4, size=data_train.shape)
data_test_noisy = data_test + np.random.normal(loc=0, scale=0.4, size=data_test.shape)
def contractive_autoencoder(data_train_noisy, data_train, data_test_noisy, data_test, lam=0.1):
    """Train a single-hidden-layer contractive denoising autoencoder.

    The network is trained to reconstruct the clean samples from their
    noisy versions; the reconstruction MSE is augmented with the squared
    Frobenius norm of the Jacobian of the hidden activations w.r.t. the
    inputs (the contractive penalty), weighted by ``lam``.

    Parameters
    ----------
    data_train_noisy, data_train : ndarray, shape (M, N)
        Noisy inputs and their clean reconstruction targets for training.
    data_test_noisy, data_test : ndarray, shape (M_test, N)
        Validation pair with the same feature dimension N.
    lam : float
        Weight of the contractive (Jacobian) penalty.

    Returns
    -------
    (model, encoder) : tuple of keras ``Model``
        The full autoencoder and an encoder-only model sharing its weights.
    """
    M, N = data_train_noisy.shape
    N_hidden = 2000
    N_batch = 128

    inputs = Input(shape=(N,))
    # Sigmoid activation so the hidden Jacobian factors as h*(1-h)*W,
    # which the penalty below relies on.  The original used 'relu',
    # whose derivative is NOT h*(1-h), making the penalty incorrect.
    encoded = Dense(N_hidden, activation='sigmoid', name='encoded')(inputs)
    outputs = Dense(N, activation='linear')(encoded)
    # Keras 2 keyword names are `inputs`/`outputs`; the Keras 1
    # `input`/`output` keywords were removed.
    model = Model(inputs=inputs, outputs=outputs)

    def contractive_loss(y_true, y_pred):
        # Keras invokes losses as loss(y_true, y_pred); the original
        # signature had the two swapped (harmless for the symmetric MSE,
        # but misleading and fragile).
        mse = K.mean(K.square(y_true - y_pred), axis=1)
        # Reference the live kernel tensor so the penalty tracks the
        # weights during training; a K.variable snapshot of
        # get_weights() would freeze it at the initial values.
        W = K.transpose(model.get_layer('encoded').kernel)  # N_hidden x N
        h = model.get_layer('encoded').output
        dh = h * (1 - h)  # sigmoid derivative, N_batch x N_hidden
        # ||J||_F^2 = sum_j dh_j^2 * ||W_j||^2  ->  shape (N_batch,)
        contractive = lam * K.sum(dh ** 2 * K.sum(W ** 2, axis=1), axis=1)
        return mse + contractive

    model.compile(optimizer='adam', loss=contractive_loss)
    model.fit(data_train_noisy,
              data_train,
              validation_data=(data_test_noisy, data_test),
              epochs=100,
              batch_size=N_batch)
    # The second Model shares layers with `model`: it maps inputs to the
    # learned latent representation for downstream use.
    return model, Model(inputs=inputs, outputs=encoded)
# Train the contractive denoising autoencoder and keep both the full
# model and the encoder-only model (latent representation extractor).
model, representation = contractive_autoencoder(data_train_noisy,data_train,data_test_noisy,data_test)
在这里面损失函数用的是mse加雅可比约束项(contractive penalty)。这里是在keras里面的写法
网友评论