1. ValueError: Unknown layer: Attention
# Problem code
model_atten_lstm = load_model('model.h5')
# Error message:
ValueError: Unknown layer: Attention
This error occurs because the saved model contains a custom layer (or some other custom class or function); such objects have to be handed to the loading mechanism through the custom_objects argument. The fix:
from keras.models import load_model
# Suppose your model contains an instance of the custom Attention layer
model = load_model('model.h5', custom_objects={'Attention': myAttention})
# Two things to note here:
# 1. The key 'Attention' must match the layer type that model.summary() prints for the attention_1 layer
#    (a way to read this class name directly from the .h5 file is sketched after the example below)
model.summary()
Output:
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
input_1 (InputLayer)         (None, 120)               0
_________________________________________________________________
embedding_1 (Embedding)      (None, 120, 100)          6731200
_________________________________________________________________
lstm_1 (LSTM)                (None, 120, 300)          481200
_________________________________________________________________
dropout_1 (Dropout)          (None, 120, 300)          0
_________________________________________________________________
attention_1 (Attention)      (None, 300)               420
_________________________________________________________________
dense_1 (Dense)              (None, 256)               77056
_________________________________________________________________
dropout_2 (Dropout)          (None, 256)               0
_________________________________________________________________
batch_normalization_1 (Batch (None, 256)               1024
_________________________________________________________________
dense_2 (Dense)              (None, 2)                 514
=================================================================
# 2. myAttention is an object instantiated from the custom Attention class
# For example:
from attention import Attention
myAttention = Attention()  # if __init__ has required arguments, pass them here (e.g. step_dim=120, see section 2)
model = load_model('model.h5', custom_objects={'Attention': myAttention})
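To double-check which class name the saved model expects as the custom_objects key, the architecture JSON stored in the HDF5 file can be inspected directly. A minimal sketch (the exact layout of the config varies a little between Keras versions):

import json
import h5py

with h5py.File('model.h5', 'r') as f:
    # Keras stores the architecture as a JSON string in the 'model_config' attribute
    config = json.loads(f.attrs['model_config'])

# for a functional Model, the layers sit under config['config']['layers']
for layer in config['config']['layers']:
    print(layer['class_name'], layer['config']['name'])   # e.g. Attention attention_1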
More approaches are described in the Keras guide on saving models, in the section on handling custom layers (and other custom objects) in saved models.
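One of the approaches described there is keras.utils.CustomObjectScope, which registers the custom class for the duration of a with block instead of passing custom_objects on every call. A minimal sketch, assuming the Attention class is importable (passing the class itself, rather than an instance, also works here):

from keras.models import load_model
from keras.utils import CustomObjectScope
from attention import Attention

# every deserialization call inside this scope can resolve the name 'Attention'
with CustomObjectScope({'Attention': Attention}):
    model = load_model('model.h5')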
2. TypeError: __init__() missing 1 required positional argument: 'step_dim'
Problem code:
# Definition of the custom Attention layer class (attention.py)
from keras import backend as K
from keras.engine.topology import Layer
from keras import initializers, regularizers, constraints

class Attention(Layer):
    def __init__(self, step_dim,
                 W_regularizer=None, b_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):
        self.supports_masking = True
        self.init = initializers.get('glorot_uniform')
        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)
        self.bias = bias
        self.step_dim = step_dim
        self.features_dim = 0
        super(Attention, self).__init__(**kwargs)
        # super().__init__(**kwargs)

    def build(self, input_shape):
        assert len(input_shape) == 3
        # weight vector over the feature dimension (the shape= keyword keeps this
        # compatible with both older and newer add_weight signatures)
        self.W = self.add_weight(shape=(input_shape[-1],),
                                 initializer=self.init,
                                 name='{}_W'.format(self.name),
                                 regularizer=self.W_regularizer,
                                 constraint=self.W_constraint)
        self.features_dim = input_shape[-1]
        if self.bias:
            # one bias per time step
            self.b = self.add_weight(shape=(input_shape[1],),
                                     initializer='zero',
                                     name='{}_b'.format(self.name),
                                     regularizer=self.b_regularizer,
                                     constraint=self.b_constraint)
        else:
            self.b = None
        self.built = True

    def compute_mask(self, input, input_mask=None):
        # the attention layer consumes the mask; do not pass it on
        return None

    def call(self, x, mask=None):
        features_dim = self.features_dim
        step_dim = self.step_dim
        # attention scores: one scalar per time step
        eij = K.reshape(K.dot(K.reshape(x, (-1, features_dim)),
                              K.reshape(self.W, (features_dim, 1))), (-1, step_dim))
        if self.bias:
            eij += self.b
        eij = K.tanh(eij)
        a = K.exp(eij)
        if mask is not None:
            # zero out masked time steps before normalizing
            a *= K.cast(mask, K.floatx())
        a /= K.cast(K.sum(a, axis=1, keepdims=True) + K.epsilon(), K.floatx())
        a = K.expand_dims(a)
        # weighted sum over the time dimension
        weighted_input = x * a
        return K.sum(weighted_input, axis=1)

    def compute_output_shape(self, input_shape):
        return input_shape[0], self.features_dim
# Code that raises the error
myAttention = Attention(step_dim=120)  # this line does not raise an error
model_atten_lstm = load_model('model.h5', custom_objects={'Attention': myAttention})  # this line raises the error
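For reference, a model with the architecture shown in the summary of section 1 could have been built and saved roughly like this (a sketch only: the layer sizes follow the summary, while the dropout rates and activations are assumptions):

from keras.layers import Input, Embedding, LSTM, Dropout, Dense, BatchNormalization
from keras.models import Model
from attention import Attention

inp = Input(shape=(120,))                            # input_1
x = Embedding(input_dim=67312, output_dim=100)(inp)  # embedding_1 (67312 * 100 = 6731200 params)
x = LSTM(300, return_sequences=True)(x)              # lstm_1
x = Dropout(0.3)(x)                                  # dropout_1 (rate is an assumption)
x = Attention(step_dim=120)(x)                       # attention_1, step_dim = sequence length
x = Dense(256, activation='relu')(x)                 # dense_1 (activation is an assumption)
x = Dropout(0.3)(x)                                  # dropout_2
x = BatchNormalization()(x)                          # batch_normalization_1
out = Dense(2, activation='softmax')(x)              # dense_2
model = Model(inputs=inp, outputs=out)
model.save('model.h5')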
Solution approach:
A very similar error has been reported for a custom DePool2D layer, and the fix there was to give the required constructor argument a default value:
TypeError: __init__() missing 1 required positional argument: 'pool2d_layer'
# change __init__ of DePool2D
def __init__(self, pool2d_layer=MaxPooling2D, *args, **kwargs):
    self._pool2d_layer = pool2d_layer
    super().__init__(*args, **kwargs)
By analogy, the problem most likely lies in Attention's __init__(): load_model rebuilds the layer from the config saved inside the .h5 file, and since get_config() was never overridden, that config contains no step_dim, so __init__() is called without it. Giving step_dim a default value works around this, so the Attention class in attention.py is modified to:
class Attention(Layer):
    # Only __init__ is modified.
    # For now the default value of step_dim has to match the value actually used,
    # otherwise loading fails with:
    # ValueError: Dimensions must be equal, but are 10 and 120 for 'attention_1/add' (op: 'Add') with input shapes: [?,10], [120].
    # That is, if the model was built with step_dim=120, the default here must also be 120.
    # I have not found a more elegant workaround yet.
    def __init__(self, step_dim=120,
                 W_regularizer=None, b_regularizer=None,
                 W_constraint=None, b_constraint=None,
                 bias=True, **kwargs):
        self.supports_masking = True
        self.init = initializers.get('glorot_uniform')
        self.W_regularizer = regularizers.get(W_regularizer)
        self.b_regularizer = regularizers.get(b_regularizer)
        self.W_constraint = constraints.get(W_constraint)
        self.b_constraint = constraints.get(b_constraint)
        self.bias = bias
        self.step_dim = step_dim
        self.features_dim = 0
        super(Attention, self).__init__(**kwargs)
        # super().__init__(**kwargs)

    # the rest of the class is unchanged
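A cleaner long-term alternative (not part of the workaround above, just a sketch of the approach recommended by the Keras serialization docs) is to override get_config() so that step_dim is written into the saved model itself. Note that this only helps for models saved after the change; an .h5 file saved with the old class still has no step_dim in its config and still needs the default-value workaround.

class Attention(Layer):
    # ... __init__, build, compute_mask, call, compute_output_shape unchanged ...

    def get_config(self):
        # record the extra constructor arguments so that load_model can
        # rebuild the layer: __init__ then receives step_dim from the config
        config = super(Attention, self).get_config()
        config.update({'step_dim': self.step_dim, 'bias': self.bias})
        return config

After re-saving the model with this class, load_model('model.h5', custom_objects={'Attention': Attention}) reconstructs the attention_1 layer without needing any default value for step_dim.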