Warning: file_get_contents(/data/phpspider/zhask/data//catemap/7/user-interface/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Python 如何在CNN模型中添加用户定义的激活函数,而不是keras中的内置函数?_Python_Tensorflow_Keras_Cnn_Activation Function - Fatal编程技术网

Python 如何在CNN模型中添加用户定义的激活函数,而不是keras中的内置函数?

Python 如何在CNN模型中添加用户定义的激活函数,而不是keras中的内置函数?,python,tensorflow,keras,cnn,activation-function,Python,Tensorflow,Keras,Cnn,Activation Function,而不是给出sigmoidx=1/1+exp-x的sigmoid 我想要一个激活函数mish,比如mishx=x*tanhsoftplusx 我想把它用作 conv_layer1=Conv3Dfilters=8,kernel_size=3,3,5,activation='mish'input_层像这样你可能需要这个 # custom activation function def mish(x): return tf.keras.layers.Lambda(lambda x: x*tf.

我不想使用 sigmoid(x) = 1/(1+exp(-x)) 这样的 sigmoid 激活函数,而是想要一个 mish 激活函数,即 mish(x) = x * tanh(softplus(x)),并像这样使用它:conv_layer1 = Conv3D(filters=8, kernel_size=(3, 3, 5), activation='mish')(input_layer)

你可能需要这个

# custom activation function
def mish(x):
    """Mish activation: x * tanh(softplus(x)).

    Fix: the original computed softplus as tf.math.log(1 + tf.exp(x)),
    which overflows to inf for large positive x; tf.math.softplus is the
    numerically stable equivalent and returns the same values otherwise.
    The Lambda wrapper is kept so this stays a drop-in replacement.
    """
    return tf.keras.layers.Lambda(lambda t: t * tf.tanh(tf.math.softplus(t)))(x)

# Pass the callable itself (not a string name) as the activation.
Dense(hidden_units, activation=mish)
你可能需要这个

# custom activation function
def mish(x):
    """Mish activation: x * tanh(softplus(x)).

    Fix: the original computed softplus as tf.math.log(1 + tf.exp(x)),
    which overflows to inf for large positive x; tf.math.softplus is the
    numerically stable equivalent and returns the same values otherwise.
    The Lambda wrapper is kept so this stays a drop-in replacement.
    """
    return tf.keras.layers.Lambda(lambda t: t * tf.tanh(tf.math.softplus(t)))(x)

# Pass the callable itself (not a string name) as the activation.
Dense(hidden_units, activation=mish)

您只需创建一个函数,并在参数activation下将其作为可调用函数传递给您的模型。下面是函数:

def mish(inputs):
    """Mish activation: inputs * tanh(softplus(inputs)).

    Plain tensor function, so it can be passed directly as a Keras
    ``activation=`` argument.
    """
    return tf.multiply(tf.nn.tanh(tf.nn.softplus(inputs)), inputs)
您可以将其作为激活放在一个层中:

# CNN for 28x28x1 inputs: two conv/pool stages, flatten, then a Dense
# head whose 64-unit hidden layer uses the custom `mish` callable.
# NOTE(review): this snippet assumes Conv2D/MaxPool2D/Flatten/Dense/
# Dropout were imported from tf.keras.layers — confirm in the full file.
model = tf.keras.Sequential([
            Conv2D(filters=16, kernel_size=(3, 3), strides=(1, 1), 
                   input_shape=(28, 28, 1), activation='relu'),
            MaxPool2D(pool_size=(2, 2)),
            Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), 
                   activation='relu'),
            MaxPool2D(pool_size=(2, 2)),
            Flatten(),
            Dense(64, activation=mish), # here: custom activation passed as a callable
            Dropout(5e-1),              # 50% dropout
            Dense(10, activation='softmax')
])
以下是培训:

import tensorflow as tf
from tensorflow import keras
import numpy as np

# Load the MNIST digits: uint8 images of shape (N, 28, 28) plus labels.
(xtrain, ytrain), (xtest, ytest) = keras.datasets.mnist.load_data()

# Scale pixels into [0, 1] and cast to float32 for the model.
xtrain = np.float32(xtrain/255)
xtest = np.float32(xtest/255)

# Cast labels to int32 so tf.one_hot accepts them in pre_process.
ytrain = np.int32(ytrain)
ytest = np.int32(ytest)

def pre_process(inputs, targets):
    """Add a trailing channel axis and one-hot encode the labels.

    Fix: the pixel data entering this pipeline is already scaled to
    [0, 1] (``xtrain = np.float32(xtrain/255)`` above), so the original
    ``tf.divide(inputs, 255)`` here normalized a second time, squashing
    inputs into [0, 1/255]. The redundant division is removed.
    """
    inputs = tf.expand_dims(inputs, -1)      # (28, 28) -> (28, 28, 1)
    targets = tf.one_hot(targets, depth=10)  # 10 MNIST classes
    return inputs, targets

# Build tf.data pipelines: take a subset, shuffle it (buffer equals the
# subset size, so the shuffle covers everything taken), batch by 8, then
# apply pre_process per batch.
train_data = tf.data.Dataset.from_tensor_slices((xtrain, ytrain)).\
    take(10_000).shuffle(10_000).batch(8).map(pre_process)
test_data = tf.data.Dataset.from_tensor_slices((xtest, ytest)).\
    take(1_000).shuffle(1_000).batch(8).map(pre_process)

def mish(inputs):
    """Mish activation: inputs * tanh(softplus(inputs))."""
    gate = tf.nn.tanh(tf.nn.softplus(inputs))
    return tf.multiply(gate, inputs)

# CNN for 28x28x1 MNIST: two conv/pool stages, flatten, then a Dense
# head whose 64-unit hidden layer uses the custom `mish` callable.
model = tf.keras.Sequential([
            tf.keras.layers.Conv2D(filters=16, kernel_size=(3, 3), strides=(1, 1), 
                   input_shape=(28, 28, 1), activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=(2, 2)),
            tf.keras.layers.Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), 
                   activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=(2, 2)),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(64, activation=mish), 
            tf.keras.layers.Dropout(5e-1),
            tf.keras.layers.Dense(10, activation='softmax')])

# categorical_crossentropy matches the one-hot targets from pre_process.
model.compile(loss='categorical_crossentropy', optimizer='adam')

history = model.fit(train_data, validation_data=test_data, epochs=10)

您只需创建一个函数,并在参数activation下将其作为可调用函数传递给您的模型。下面是函数:

def mish(inputs):
    """Mish activation: inputs * tanh(softplus(inputs))."""
    return tf.multiply(tf.nn.tanh(tf.nn.softplus(inputs)), inputs)
您可以将其作为激活放在一个层中:

# CNN for 28x28x1 inputs: two conv/pool stages, flatten, then a Dense
# head whose 64-unit hidden layer uses the custom `mish` callable.
# NOTE(review): this snippet assumes Conv2D/MaxPool2D/Flatten/Dense/
# Dropout were imported from tf.keras.layers — confirm in the full file.
model = tf.keras.Sequential([
            Conv2D(filters=16, kernel_size=(3, 3), strides=(1, 1), 
                   input_shape=(28, 28, 1), activation='relu'),
            MaxPool2D(pool_size=(2, 2)),
            Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), 
                   activation='relu'),
            MaxPool2D(pool_size=(2, 2)),
            Flatten(),
            Dense(64, activation=mish), # here: custom activation passed as a callable
            Dropout(5e-1),              # 50% dropout
            Dense(10, activation='softmax')
])
以下是培训:

import tensorflow as tf
from tensorflow import keras
import numpy as np

# Load the MNIST digits: uint8 images of shape (N, 28, 28) plus labels.
(xtrain, ytrain), (xtest, ytest) = keras.datasets.mnist.load_data()

# Scale pixels into [0, 1] and cast to float32 for the model.
xtrain = np.float32(xtrain/255)
xtest = np.float32(xtest/255)

# Cast labels to int32 so tf.one_hot accepts them in pre_process.
ytrain = np.int32(ytrain)
ytest = np.int32(ytest)

def pre_process(inputs, targets):
    """Add a trailing channel axis and one-hot encode the labels.

    Fix: the pixel data entering this pipeline is already scaled to
    [0, 1] (``xtrain = np.float32(xtrain/255)`` above), so the original
    ``tf.divide(inputs, 255)`` here normalized a second time, squashing
    inputs into [0, 1/255]. The redundant division is removed.
    """
    inputs = tf.expand_dims(inputs, -1)      # (28, 28) -> (28, 28, 1)
    targets = tf.one_hot(targets, depth=10)  # 10 MNIST classes
    return inputs, targets

# Build tf.data pipelines: take a subset, shuffle it (buffer equals the
# subset size, so the shuffle covers everything taken), batch by 8, then
# apply pre_process per batch.
train_data = tf.data.Dataset.from_tensor_slices((xtrain, ytrain)).\
    take(10_000).shuffle(10_000).batch(8).map(pre_process)
test_data = tf.data.Dataset.from_tensor_slices((xtest, ytest)).\
    take(1_000).shuffle(1_000).batch(8).map(pre_process)

def mish(inputs):
    """Mish activation: inputs * tanh(softplus(inputs))."""
    gate = tf.nn.tanh(tf.nn.softplus(inputs))
    return tf.multiply(gate, inputs)

# CNN for 28x28x1 MNIST: two conv/pool stages, flatten, then a Dense
# head whose 64-unit hidden layer uses the custom `mish` callable.
model = tf.keras.Sequential([
            tf.keras.layers.Conv2D(filters=16, kernel_size=(3, 3), strides=(1, 1), 
                   input_shape=(28, 28, 1), activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=(2, 2)),
            tf.keras.layers.Conv2D(filters=32, kernel_size=(3, 3), strides=(1, 1), 
                   activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=(2, 2)),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(64, activation=mish), 
            tf.keras.layers.Dropout(5e-1),
            tf.keras.layers.Dense(10, activation='softmax')])

# categorical_crossentropy matches the one-hot targets from pre_process.
model.compile(loss='categorical_crossentropy', optimizer='adam')

history = model.fit(train_data, validation_data=test_data, epochs=10)

Mish 已经在 TensorFlow Addons 中提供(tfa.activations.mish),因此您不需要将其定义为自定义层。有关更多详细信息,请参阅 TensorFlow Addons 的激活函数文档。

Mish 已在 TensorFlow Addons 中提供(tfa.activations.mish),因此您不需要将其定义为自定义层。有关更多详细信息,请参阅 TensorFlow Addons 的激活函数文档。

我喜欢你的答案的一个方面是全面性。太棒了-非常感谢。我能够测试你的答案,而且它也很有效。我喜欢你答案的一个方面是全面性。太棒了-非常感谢。我能够测试你的答案,它也能工作。