Python ValueError: A merge layer should be called on a list of inputs. Add()

Tags: python, keras, deep-residual-networks

I want to add a third component to the last activation function, so I created an Add() layer to sum all of the X0 values. But when the Add() layer is added, the following error occurs:

raise ValueError('A merge layer should be called '
ValueError: A merge layer should be called on a list of inputs.

Comment: What do you mean by "all the X0 values"? You are not keeping them in a list, so they are lost. And what is the "third component" made of "all the X0 values"?
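Background on the error: a Keras merge layer such as Add() expects to be called on a list of at least two input tensors. A one-element list like [X0] is effectively treated as a single tensor when the layer is built, which is exactly what raises this ValueError. A minimal sketch (the names and shapes below are made up for illustration):

from keras.layers import Input, Add

a = Input(shape=(32, 32, 16))
b = Input(shape=(32, 32, 16))

# x = Add()([a])    # one-element list -> ValueError: A merge layer
#                   # should be called on a list of inputs.
x = Add()([a, b])   # works: Add() sums two tensors of identical shape

The full code from the question: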
# import the necessary packages
import keras
from keras.initializers import glorot_uniform
from keras.layers import AveragePooling2D, Input, Add
from keras.models import Model
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.layers.core import Activation
from keras.layers.core import Flatten
from keras.layers.core import Dropout
from keras.layers.core import Dense


class SmallerVGGNet:
    @staticmethod
    def build(width, height, depth, classes, finalact):

        X1 = Input(shape=(height, width, depth))

        # CONV => RELU => POOL
        X = Conv2D(16, (3, 3), padding="same", strides=(1, 1), name="con_layer1")(X1)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)
        X = MaxPooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        X = Conv2D(32, (3, 3), padding="same", strides=(2, 2), name="con_layer2")(X)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = Conv2D(32, (3, 3), padding="same", strides=(1, 1), name="con_layer3")(X)
        X = Activation("relu")(X)
        X = BatchNormalization(axis=3)(X)

        X = MaxPooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        # First component
        X0 = Conv2D(256, (5, 5), strides=(1, 1), padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
        X0 = BatchNormalization(axis=3)(X0)
        X0 = Activation("relu")(X0)

        # (CONV => RELU) * 2 => POOL
        X = Conv2D(64, (3, 3), padding="same", strides=(2, 2), name="con_layer4")(X0)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = Conv2D(64, (3, 3), padding="same", strides=(1, 1), name="con_layer5")(X)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = AveragePooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        # Second Component
        X0 = Conv2D(512, (5, 5), strides=(1, 1), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
        X0 = BatchNormalization(axis=3)(X0)
        X0 = Activation("relu")(X0)

        # (CONV => RELU) * 2 => POOL
        X = Conv2D(128, (3, 3), padding="same", strides=(2, 2), name="con_layer6")(X0)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = Conv2D(128, (3, 3), padding="same", strides=(1, 1), name="con_layer7")(X)
        X = BatchNormalization(axis=3)(X)
        X = Activation("relu")(X)

        X = MaxPooling2D(pool_size=(3, 3), strides=(1, 1))(X)

        # Third Component
        X0 = Conv2D(1024, (7, 7), strides=(2, 2), padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
        X0 = BatchNormalization(axis=3)(X0)
        X0 = Dense(128, activation="relu")(X0)
        X0 = Activation("relu")(X0)

        # The ValueError is raised here: Add() gets a one-element list,
        # but a merge layer needs at least two input tensors
        X = Add()([X0])
        # Note: Flatten is applied to the raw input X1, bypassing the
        # layers built above
        X = Flatten()(X1)
        X = BatchNormalization()(X)
        X = Dropout(0.5)(X)
        output = Dense(classes, activation=finalact)(X)

        model = Model(inputs=[X1], outputs=output)

        print(model.summary())
        return model
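One way to resolve the error, sketched below on the assumption that a residual-style merge is intended: give Add() a list of two tensors with identical shapes. The X_shortcut branch here is hypothetical, and the Dense(128) inside the third component is dropped because it changes the channel count, so the two branches would no longer match:

        # Third component, kept at 1024 channels so it can be merged
        X0 = Conv2D(1024, (7, 7), strides=(2, 2), padding='valid',
                    kernel_initializer=glorot_uniform(seed=0))(X)
        X0 = BatchNormalization(axis=3)(X0)
        X0 = Activation("relu")(X0)

        # Hypothetical shortcut branch: project X with the same kernel,
        # stride, and filter count so both tensors end up the same shape
        X_shortcut = Conv2D(1024, (7, 7), strides=(2, 2), padding='valid',
                            kernel_initializer=glorot_uniform(seed=0))(X)

        X = Add()([X0, X_shortcut])  # a list of two same-shaped tensors
        X = Flatten()(X)             # flatten the merged tensor, not X1
        X = BatchNormalization()(X)
        X = Dropout(0.5)(X)
        output = Dense(classes, activation=finalact)(X)

If the intent really is to sum all the X0 values from the three components, they would first have to be collected in a Python list and projected to a common shape (for example with 1x1 convolutions and matching strides) before passing that list to Add().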