# Understanding the multilayer perceptron (MLP) in depth:
# how to choose the number of hidden layers and the hidden-layer sizes.
# The example below is rough — for reference only.
# Environment setup ----
# NOTE(review): the original began with `rm(list = ls())`; removed because
# clearing the global environment in a script is an anti-pattern — it silently
# destroys the caller's workspace and does NOT detach packages or reset
# options. Restart R instead for a truly clean session.
suppressMessages(library(keras))
# install_keras()  # one-time installation of the Python/TensorFlow backend
# Seeds R's RNG so the train/test split below is reproducible.
# NOTE(review): this does NOT seed TensorFlow's RNG, so weight initialization
# and dropout masks still vary between runs — confirm if exact reproducibility
# of training is required.
set.seed(23404)
# suppressMessages(library(tensorflow))
# install_tensorflow()
# Data loading and 70/30 train/test split ----
# Expression matrices with row names taken from the first CSV column;
# presumably rows are samples and columns are features — TODO confirm.
# `header = FALSE` (not the reassignable shorthand `F`).
con_data <- read.csv("~/positive_gene_expression_matrix.csv",
                     header = FALSE, row.names = 1)
non_con_data <- read.csv("~/negative_gene_expression_matrix.csv",
                         header = FALSE, row.names = 1)
con_label <- read.csv("~/positive_label.csv",
                      header = FALSE, row.names = 1)
non_con_label <- read.csv("~/negative_label.csv",
                          header = FALSE, row.names = 1)

data <- rbind(con_data, non_con_data)
# Row-wise z-score normalization: scale() standardizes columns, so transpose,
# scale, and transpose back — each row ends up with mean 0 and sd 1.
data <- t(scale(t(data)))
label <- rbind(con_label, non_con_label)

# Random 70% of rows for training; set.seed() above fixes the sampled indices.
smp_size <- floor(0.7 * nrow(data))
train_ind <- sample(seq_len(nrow(data)), size = smp_size)
# drop = FALSE keeps a matrix even if a split ever contains a single row.
train_data <- as.matrix(data[train_ind, , drop = FALSE])
test_data <- as.matrix(data[-train_ind, , drop = FALSE])
# Labels are aligned to each split by row name, not by position.
train_data_label <- as.matrix(label[row.names(train_data), ])
test_data_label <- as.matrix(label[row.names(test_data), ])
# Model definition ----
# MLP binary classifier: three ReLU hidden layers with dropout regularization,
# single sigmoid output unit for a probability in [0, 1].
model <- keras_model_sequential()
model %>%
  # input_shape is derived from the data rather than the original hard-coded
  # 68, so the model stays valid if the feature count of the CSVs changes.
  layer_dense(units = 69, activation = 'relu',
              input_shape = c(ncol(train_data))) %>%
  layer_dropout(rate = 0.2) %>%
  layer_dense(units = 32, activation = 'relu') %>%
  layer_dropout(rate = 0.2) %>%
  layer_dense(units = 16, activation = 'relu') %>%
  # Heavier dropout before the output layer.
  layer_dropout(rate = 0.5) %>%
  # layer_dense(units = 32, activation = 'relu') %>%
  # layer_dropout(rate = 0.5) %>%
  layer_dense(units = 1, activation = 'sigmoid') %>%
  compile(
    loss = 'binary_crossentropy',  # standard loss for 0/1 targets
    optimizer = 'rmsprop',
    metrics = c('accuracy')
  )
# Training and evaluation ----
model %>% fit(train_data, train_data_label, epochs = 100, batch_size = 100)

# Held-out test performance (loss and accuracy). Use `<-` for assignment
# (not `=`) and print the result so the script reports its outcome when
# sourced non-interactively.
score <- model %>% evaluate(test_data, test_data_label, batch_size = 100)
print(score)
# (End of article — "reader comments" section header from the scraped web page.)