- K.layers.Dense(layer_size, activation='relu')
  layer_size is the output dimension of the layer. If this is the first layer of the network, the input dimension must also be specified with input_dim=input_size; subsequent layers infer their input size from the previous layer's output.
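A minimal sketch of this rule (assuming K is an alias for tf.keras, matching the function below; the sizes 100, 64, and 32 are arbitrary examples):

import tensorflow as tf

K = tf.keras  # assumed alias used by the snippets in this post

example = K.models.Sequential()
example.add(K.layers.Dense(64, activation='relu', input_dim=100))  # first layer: input size must be given
example.add(K.layers.Dense(32, activation='relu'))                 # later layers infer it automatically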
def cl_logits_subgraph(layer_sizes, input_size, num_classes, keep_prob=1.):
  """Construct multiple ReLU layers with dropout and a linear layer."""
  subgraph = K.models.Sequential(name='cl_logits')
  for i, layer_size in enumerate(layer_sizes):
    if i == 0:
      # The first layer must be told the input dimension explicitly.
      subgraph.add(
          K.layers.Dense(layer_size, activation='relu', input_dim=input_size))
    else:
      subgraph.add(K.layers.Dense(layer_size, activation='relu'))
    if keep_prob < 1.:
      # Keras Dropout takes a drop rate, so convert the keep probability.
      subgraph.add(K.layers.Dropout(1. - keep_prob))
  # Binary classification uses a single logit; otherwise one logit per class.
  subgraph.add(K.layers.Dense(1 if num_classes == 2 else num_classes))
  return subgraph
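A short usage sketch with made-up sizes: with two classes the model ends in a single linear logit, otherwise in num_classes logits, and keep_prob=0.9 becomes Dropout(0.1) after each ReLU layer.

logits_model = cl_logits_subgraph(layer_sizes=[256, 128], input_size=512,
                                  num_classes=2, keep_prob=0.9)
logits_model.summary()  # Dense(256) -> Dropout(0.1) -> Dense(128) -> Dropout(0.1) -> Dense(1)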