- 只有第一个层需要指定输入的 dim,后面的都不需要,可以看到第二个 dense 只有一个为 32 的输出了。
- 注意dense全连接层的shape都是两维。
def cl_logits_subgraph(layer_sizes, input_size, num_classes, keep_prob=1.,name = None):
    """Build a stack of ReLU fully-connected layers topped by a linear layer.

    Args:
        layer_sizes: iterable of hidden-layer widths, one Dense layer each.
        input_size: input dimensionality, given explicitly to the first layer
            only; subsequent layers infer their input shape.
        num_classes: number of target classes. A binary problem gets a single
            output unit; otherwise one unit per class.
        keep_prob: keep probability; when below 1, a Dropout layer with rate
            ``1 - keep_prob`` follows every hidden layer.
        name: optional name for the returned model.

    Returns:
        A Keras ``Sequential`` model ending in a linear (no-activation)
        Dense layer producing the classification logits.
    """
    model = K.models.Sequential(name=name)
    for idx, width in enumerate(layer_sizes):
        dense_kwargs = {'activation': 'relu'}
        if idx == 0:
            # Only the first layer needs the input dimension spelled out.
            dense_kwargs['input_dim'] = input_size
        model.add(K.layers.Dense(width, **dense_kwargs))
        if keep_prob < 1.:
            model.add(K.layers.Dropout(1. - keep_prob))
    # Final linear projection: 1 logit for binary, num_classes otherwise.
    model.add(K.layers.Dense(1 if num_classes == 2 else num_classes))
    return model