'NoneType' object has no attribute '_inbound_nodes'

I have code to forecast a time series using an attention mechanism.

Here's what I've got so far, but I'm getting an error.

import tensorflow as tf
import keras
from keras import backend as K
from keras.models import Sequential, Model
from keras.layers import (Layer, Input, Conv1D, BatchNormalization, MaxPooling1D,
                          Dropout, TimeDistributed, Flatten, RepeatVector, LSTM, Dense)

def dot_product(x, kernel):
    # Dot product that works on both backends: on TensorFlow the kernel is
    # expanded so K.dot can contract over the last axis of x
    if K.backend() == 'tensorflow':
        return K.squeeze(K.dot(x, K.expand_dims(kernel)), axis=-1)
    else:
        return K.dot(x, kernel)

class Attention(Layer):

    def __init__(self,
                 W_regularizer=None, u_regularizer=None, b_regularizer=None,
                 W_constraint=None, u_constraint=None, b_constraint=None,
                 bias=True, **kwargs):
        
        self.supports_masking = True
        self.init = tf.keras.initializers.get('glorot_uniform')

        self.W_regularizer = tf.keras.regularizers.get(W_regularizer)
        self.u_regularizer = tf.keras.regularizers.get(u_regularizer)
        self.b_regularizer = tf.keras.regularizers.get(b_regularizer)

        self.W_constraint = tf.keras.constraints.get(W_constraint)
        self.u_constraint = tf.keras.constraints.get(u_constraint)
        self.b_constraint = tf.keras.constraints.get(b_constraint)

        self.bias = bias
        super(Attention, self).__init__(**kwargs)

    def build(self, input_shape):
        # Expect 3D input: (batch, timesteps, features)
        assert len(input_shape) == 3

        self.W = self.add_weight(shape=(input_shape[-2], input_shape[-1],),
                                 initializer=self.init,
                                 name='{}_W'.format(self.name),
                                 regularizer=self.W_regularizer,
                                 constraint=self.W_constraint)
        if self.bias:
            self.b = self.add_weight(shape=(input_shape[-2], input_shape[-1],),
                                     initializer='zero',
                                     name='{}_b'.format(self.name),
                                     regularizer=self.b_regularizer,
                                     constraint=self.b_constraint)

        # u is the learned context vector the transformed timesteps are scored against
        self.u = self.add_weight(shape=(input_shape[-1],),
                                 initializer=self.init,
                                 name='{}_u'.format(self.name),
                                 regularizer=self.u_regularizer,
                                 constraint=self.u_constraint)

        super(Attention, self).build(input_shape)

    def compute_mask(self, input, input_mask=None):
        # The mask stops here: the time axis is summed out in call()
        return None

    def call(self, x, mask=None):
        # Score each timestep
        uit = dot_product(x, self.W)

        if self.bias:
            uit += self.b

        uit = K.tanh(uit)
        ait = dot_product(uit, self.u)

        # Softmax over the time axis, computed manually so the mask can be applied
        a = K.exp(ait)

        if mask is not None:
            # Zero out attention weights on padded timesteps
            a *= K.cast(mask, K.floatx())

        # K.epsilon() avoids division by zero when all timesteps are masked
        a /= K.cast(K.sum(a, axis=1, keepdims=True) + K.epsilon(), K.floatx())

        a = K.expand_dims(a)
        weighted_input = x * a
        # Sum over time: the output is a 2D tensor (batch, features)
        return K.sum(weighted_input, axis=1)


n_timesteps= 2
n_features = 1
n_outputs = 1

model_name = 'With_attention'
path_checkpoint = "model_checkpoint.h5"
# define parameters
verbose, epochs, batch_size = 1, 200, 170

callback = keras.callbacks.EarlyStopping(monitor='loss', patience=3)

modelckpt_callback = keras.callbacks.ModelCheckpoint(
    monitor="val_loss",
    filepath=path_checkpoint,
    save_weights_only=True,
    save_best_only=True,
)
# define model
model = Sequential()
main_input = Input(shape=(n_timesteps, n_features), name='main_input')
con1 = Conv1D(filters=128, kernel_size=3, activation='relu', padding='same')(main_input)
bat1 = BatchNormalization()(con1)
max1 = MaxPooling1D(pool_size=3, padding='same')(bat1)
dro1 = Dropout(0.4)(max1)
con2 = Conv1D(filters=64, kernel_size=3, activation='relu', padding='same')(max1)
bat2 = BatchNormalization()(con2)
max2 = MaxPooling1D(pool_size=3, padding='same')(bat2)
dro2 = Dropout(0.2)(max2)
con_f1 = TimeDistributed(Flatten())(dro2)

model.add(RepeatVector(n_outputs))
x1 = LSTM(64, activation='tanh', return_sequences=True)(con_f1)
#model.add(Dropout(0.2))
attention_mul = Attention()(x1)
#attention_mul=tf.reshape(attention_mul, [-1, 1, 64])
attention_mul = tf.expand_dims(attention_mul, axis=1)
t2 = TimeDistributed(Dense(64, activation='tanh'))(attention_mul)
t1 = TimeDistributed(Dense(1, activation='tanh'))(t2)
model = Model(input = main_input , output = t1)
model.compile(loss="mse", optimizer=keras.optimizers.Adam(lr=3e-6), metrics=[keras.metrics.RootMeanSquaredError()])
# fit network
model.summary()
history = model.fit([x_train], y_train, epochs=epochs, batch_size=250, validation_split=0.2,
                    verbose=verbose, callbacks=[callback, modelckpt_callback])

This is the error I'm getting:

C:\Users\HBK\Anaconda3\lib\site-packages\ipykernel_launcher.py:51: UserWarning: Update your `Model` call to the Keras 2 API: `Model(inputs=Tensor("ma..., outputs=Tensor("ti...)`
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-39-a96d7b49ecfc> in <module>
     49 # t2 = TimeDistributed(Dense(64, activation='tanh'))(x4)
     50 # t1 = TimeDistributed(Dense(1, activation='tanh'))(t2)
---> 51 model = Model(input = main_input , output = t1)
     52 #model.compile(loss="mse", optimizer=keras.optimizers.Adam(lr=0.00001), metrics=["accuracy"])
     53 model.compile(loss="mse", optimizer=keras.optimizers.Adam(lr=3e-6), metrics=[keras.metrics.RootMeanSquaredError()])

~\Anaconda3\lib\site-packages\keras\legacy\interfaces.py in wrapper(*args, **kwargs)
     89                 warnings.warn('Update your `' + object_name + '` call to the ' +
     90                               'Keras 2 API: ' + signature, stacklevel=2)
---> 91             return func(*args, **kwargs)
     92         wrapper._original_function = func
     93         return wrapper

~\Anaconda3\lib\site-packages\keras\engine\network.py in __init__(self, *args, **kwargs)
     92                 'inputs' in kwargs and 'outputs' in kwargs):
     93             # Graph network
---> 94             self._init_graph_network(*args, **kwargs)
     95         else:
     96             # Subclassed network

~\Anaconda3\lib\site-packages\keras\engine\network.py in _init_graph_network(self, inputs, outputs, name, **kwargs)
    239         # Keep track of the network's nodes and layers.
    240         nodes, nodes_by_depth, layers, layers_by_depth = _map_graph_network(
--> 241             self.inputs, self.outputs)
    242         self._network_nodes = nodes
    243         self._nodes_by_depth = nodes_by_depth

~\Anaconda3\lib\site-packages\keras\engine\network.py in _map_graph_network(inputs, outputs)
   1432                   layer=layer,
   1433                   node_index=node_index,
-> 1434                   tensor_index=tensor_index)
   1435 
   1436     for node in reversed(nodes_in_decreasing_depth):

~\Anaconda3\lib\site-packages\keras\engine\network.py in build_map(tensor, finished_nodes, nodes_in_progress, layer, node_index, tensor_index)
   1419             tensor_index = node.tensor_indices[i]
   1420             build_map(x, finished_nodes, nodes_in_progress, layer,
-> 1421                       node_index, tensor_index)
   1422 
   1423         finished_nodes.add(node)

~\Anaconda3\lib\site-packages\keras\engine\network.py in build_map(tensor, finished_nodes, nodes_in_progress, layer, node_index, tensor_index)
   1419             tensor_index = node.tensor_indices[i]
   1420             build_map(x, finished_nodes, nodes_in_progress, layer,
-> 1421                       node_index, tensor_index)
   1422 
   1423         finished_nodes.add(node)

~\Anaconda3\lib\site-packages\keras\engine\network.py in build_map(tensor, finished_nodes, nodes_in_progress, layer, node_index, tensor_index)
   1391             ValueError: if a cycle is detected.
   1392         """
-> 1393         node = layer._inbound_nodes[node_index]
   1394 
   1395         # Prevent cycles.

AttributeError: 'NoneType' object has no attribute '_inbound_nodes'

The error points at this line:

model = Model(input = main_input , output = t1)

After the attention mechanism I have a 2D tensor, so I reshaped it with

attention_mul = tf.expand_dims(attention_mul, axis=1)

to get a 3D tensor, since TimeDistributed layers need 3D input.
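
For example, the shape change I am relying on (the batch size of 8 here is just illustrative; the 64 features come from the LSTM above):

import tensorflow as tf

x = tf.zeros((8, 64))          # 2D output of the Attention layer: (batch, features)
y = tf.expand_dims(x, axis=1)  # 3D: (8, 1, 64), which is what TimeDistributed expects
print(y.shape)                 # (8, 1, 64)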



Solution 1:[1]

You should wrap tf.expand_dims() in a Keras Lambda layer. Calling a raw TensorFlow op on a Keras tensor returns a plain tensor with no Keras layer history, so when Model() traces the graph from outputs back to inputs it finds no _inbound_nodes for that step and raises the AttributeError above.

Convert:

attention_mul = tf.expand_dims(attention_mul, axis=1)

To:

attention_mul = Lambda(lambda x: tf.expand_dims(x, axis=1))(attention_mul)
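
Applied to the model in the question, the tail of the network would look like the sketch below (layer names x1 and main_input are taken from the question; it also switches to the inputs=/outputs= keywords that the UserWarning in the traceback asks for):

import tensorflow as tf
from keras.layers import Lambda, TimeDistributed, Dense
from keras.models import Model

attention_mul = Attention()(x1)
# The Lambda layer registers the op in the Keras graph, so Model() can trace
# _inbound_nodes through it
attention_mul = Lambda(lambda t: tf.expand_dims(t, axis=1))(attention_mul)
t2 = TimeDistributed(Dense(64, activation='tanh'))(attention_mul)
t1 = TimeDistributed(Dense(1, activation='tanh'))(t2)
model = Model(inputs=main_input, outputs=t1)

The same rule applies to any raw tf.* call inside a functional model in standalone Keras 2; recent versions of tf.keras wrap such ops in a layer automatically, but standalone Keras does not.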

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution 1: BrokenBenchmark