# “让Keras更酷一些！”：层与模型的重用技巧

# Demo: weight sharing by reusing one layer object.
inputs = Input(shape=(784,))

shared = Dense(784, activation='relu')  # create the layer once, keep a reference

# Every call of the SAME layer object reuses the same kernel/bias.
h = shared(inputs)  # first call
h = shared(h)       # second call — shares weights with the first
h = shared(h)       # third call — still the same weights

# By contrast, each expression below constructs a brand-new Dense layer,
# so none of these share weights with each other or with `shared`.
h = Dense(784, activation='relu')(h)
h = Dense(784, activation='relu')(h)
h = Dense(784, activation='relu')(h)

Keras的模型有着类似层的表现，在调用时可以用跟层一样的方式，比如：

# Demo: a Keras Model behaves like a layer — it can be called on tensors.
inp = Input(shape=(784,))
out = Dense(10, activation='softmax')(inp)
model = Model(inp, out)  # build the model

# Use `model` as an ordinary layer inside a larger graph.
x_in = Input(shape=(100,))
x = Dense(784, activation='relu')(x_in)
x = model(x)  # call the model exactly like a layer

model2 = Model(x_in, x)

from keras.models import clone_model

# clone_model copies the architecture only: model2 starts with freshly
# initialized weights, NOT model1's trained weights, so predicting with
# model2 right after cloning gives untrained outputs.
# (Removed a dangling no-op `model2.predict` expression — an extraction
# artifact of inline code from the surrounding prose.)
model2 = clone_model(model1)

# Explicitly copy the weights across; K.batch_get_value evaluates all
# weight tensors of model1 in a single backend call.
model2.set_weights(K.batch_get_value(model1.weights))

class EmbeddingDense(Layer):
    """Dense-like layer whose kernel is an Embedding layer's matrix.

    Computes the same operation as Dense, except the kernel is the
    transposed embedding matrix of `embedding_layer`, i.e. the output
    projection is weight-tied to the input embedding.
    """
    def __init__(self, embedding_layer, activation='softmax', **kwargs):
        super(EmbeddingDense, self).__init__(**kwargs)
        # Transposed embedding matrix, shape (embed_dim, vocab_size).
        self.kernel = K.transpose(embedding_layer.embeddings)
        self.activation = activation
        self.units = K.int_shape(self.kernel)[1]

    def build(self, input_shape):
        super(EmbeddingDense, self).build(input_shape)
        # Only the bias is a new trainable weight; the kernel is shared.
        # Reconstructed: the source dropped the `self.bias = self.add_weight(...)`
        # call, leaving its keyword arguments orphaned.
        self.bias = self.add_weight(name='bias',
                                    shape=(self.units,),
                                    initializer='zeros')

    def call(self, inputs):
        outputs = K.dot(inputs, self.kernel)
        # NOTE(review): self.bias created in build() is never applied here;
        # presumably `outputs = outputs + self.bias` was intended — confirm
        # against the original article before relying on this.
        outputs = Activation(self.activation).call(outputs)
        return outputs

    def compute_output_shape(self, input_shape):
        return input_shape[:-1] + (self.units,)

# Usage: tie the output projection to the embedding matrix.
embedding_layer = Embedding(10000, 128)
x = embedding_layer(x) # apply the Embedding layer
x = EmbeddingDense(embedding_layer)(x) # apply the weight-tied EmbeddingDense layer

from keras.applications.resnet50 import ResNet50
model = ResNet50(weights='imagenet')

# Expose two intermediate ResNet50 activations as model outputs;
# layers are looked up by name in the pretrained graph.
# (Illustrative: the resulting Model is deliberately not assigned.)
Model(
    inputs=model.input,
    outputs=[
        model.get_layer('res5a_branch1').output,
        model.get_layer('activation_47').output,
    ]
)

由此可见，这样的功能确实是有价值的。

$$\text{inputs}\to h_1 \to h_2 \to h_3 \to h_4 \to \text{outputs}$$

$$\text{inputs} \to h_3 \to h_4 \to \text{outputs}$$

# Rebuild a sub-network that skips the first two layers of `model`:
# feed a fresh 100-dim input through model.layers[2:] in order.
new_input = Input(shape=(100,))
h = new_input

for lyr in model.layers[2:]:
    h = lyr(h)

model2 = Model(new_input, h)

实现细节可参考 keras/engine/network.py 中的 run_internal_graph 函数。

def get_outputs_of(model, start_tensors, input_layers=None):
    """Cut `model` open at `start_tensors` and return (inputs, outputs).

    `start_tensors` marks where the model is split: new Input placeholders
    (or the given `input_layers`) are substituted there, and the returned
    `outputs` are the original model outputs recomputed from that point on.
    Relies on private Keras attributes (`_input_layers`, `_nodes_by_depth`,
    `_layers`) — see keras/engine/network.py, run_internal_graph.
    """
    # Wrap in a fresh Model so we can mutate its graph bookkeeping safely.
    model = Model(inputs=model.input,
                  outputs=model.output,
                  name='outputs_of_' + model.name)
    # Convenience adapters: accept single tensors as well as lists.
    if not isinstance(start_tensors, list):
        start_tensors = [start_tensors]
    if input_layers is None:
        input_layers = [
            Input(shape=K.int_shape(x)[1:], dtype=K.dtype(x))
            for x in start_tensors
        ]
    elif not isinstance(input_layers, list):
        # Fixed: the source checked `start_tensors` here, so a single
        # (non-list) `input_layers` argument was never wrapped.
        input_layers = [input_layers]
    # The core trick: override the model's inputs.
    model.inputs = start_tensors
    model._input_layers = [x._keras_history[0] for x in input_layers]
    # Convenience: unwrap a singleton list for the caller.
    if len(input_layers) == 1:
        input_layers = input_layers[0]
    # Collect only the layers reachable from the new inputs, walking nodes
    # from shallow to deep (adapted from Model.run_internal_graph).
    layers, tensor_map = [], set()
    for x in model.inputs:
        # Reconstructed: the source lost this loop body (and the one for
        # output tensors below), which seeds/extends the computed-tensor set.
        tensor_map.add(str(id(x)))
    depth_keys = list(model._nodes_by_depth.keys())
    depth_keys.sort(reverse=True)
    for depth in depth_keys:
        nodes = model._nodes_by_depth[depth]
        for node in nodes:
            # A node is computable only once ALL its input tensors are.
            n = 0
            for x in node.input_tensors:
                if str(id(x)) in tensor_map:
                    n += 1
            if n == len(node.input_tensors):
                if node.outbound_layer not in layers:
                    layers.append(node.outbound_layer)
                for x in node.output_tensors:
                    tensor_map.add(str(id(x)))
    model._layers = layers  # keep only the layers actually used
    # Compute the outputs by calling the patched model on the new inputs.
    outputs = model(input_layers)
    return input_layers, outputs

from keras.applications.resnet50 import ResNet50
model = ResNet50(weights='imagenet')

# Usage: cut ResNet50 open at the two bn3a branch outputs.
# `x` is the new Input placeholder(s), `y` the recomputed final outputs.
x, y = get_outputs_of(
    model,
    [
        model.get_layer('bn3a_branch2c').output,
        model.get_layer('bn3a_branch1').output,
    ]
)

# Model from the cut point through to the original outputs.
model2 = Model(x, y)

# Excerpt: the three key lines of the trick — override the model's input
# tensors and input layers, then call it on the replacement inputs.
model.inputs = start_tensors
model._input_layers = [x._keras_history[0] for x in input_layers]
outputs = model(input_layers)

# Bare reference (no-op at runtime; inline code from the surrounding prose).
# `_input_layers` is a private Keras attribute — this relies on internals.
model._input_layers

Keras是最让人赏心悦目的深度学习框架，至少到目前为止，就模型代码的可读性而言，没有之一。可能读者会提到PyTorch，诚然PyTorch也有不少可取之处，但就可读性而言，我认为是比不上Keras的。

https://kexue.fm/archives/6985

《科学空间FAQ》