Python Keras + TensorFlow Model Optimization: TypeError: clone_model() got an unexpected keyword argument 'clone_function'


I am trying out TensorFlow Model Optimization in order to prune a simple neural network. Here is my code:

from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib.pyplot as plt

fashion_mnist = keras.datasets.fashion_mnist

(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()

class_names = ['T-shirt/top', 'Trouser', 'Pullover', 'Dress', 'Coat',
               'Sandal', 'Shirt', 'Sneaker', 'Bag', 'Ankle boot']

train_images = train_images / 255.0
test_images = test_images / 255.0

model = keras.Sequential([
    keras.layers.Flatten(input_shape=(28, 28)),
    keras.layers.Dense(128, activation=tf.nn.relu),
    keras.layers.Dense(10, activation=tf.nn.softmax)
])

import tensorflow_model_optimization as tfmot


pruning_schedule = tfmot.sparsity.keras.PolynomialDecay(
                        initial_sparsity=0.0, final_sparsity=0.5,
                        begin_step=2000, end_step=4000)

model_for_pruning = tfmot.sparsity.keras.prune_low_magnitude(model, pruning_schedule=pruning_schedule)


model_for_pruning.compile(optimizer='adam',
          loss='sparse_categorical_crossentropy',
          metrics=['accuracy'])

from tensorflow.keras.callbacks import TensorBoard


tensorboard = TensorBoard(log_dir=r'D:\Python\logs', histogram_freq=0,
                          write_graph=True, write_images=True)

model_for_pruning.fit(train_images, train_labels, epochs=5, callbacks=[tensorboard])


#tensorboard --logdir D:\Python\logs 
I get the following error:

File "<ipython-input-1-8f75575649d2>", line 52, in <module>
    model_for_pruning = tfmot.sparsity.keras.prune_low_magnitude(model, pruning_schedule=pruning_schedule)

  File "C:\Users\Rubens\Anaconda3\lib\site-packages\tensorflow_model_optimization\python\core\sparsity\keras\prune.py", line 152, in prune_low_magnitude
    to_prune, input_tensors=None, clone_function=_add_pruning_wrapper)

TypeError: clone_model() got an unexpected keyword argument 'clone_function'
This is the end of the file prune.py in TensorFlow Model Optimization (note clone_function=_strip_pruning_wrapper):

  def _strip_pruning_wrapper(layer):
    if isinstance(layer, pruning_wrapper.PruneLowMagnitude):
      # The _batch_input_shape attribute in the first layer makes a Sequential
      # model to be built. This makes sure that when we remove the wrapper from
      # the first layer the model's built state preserves.
      if not hasattr(layer.layer, '_batch_input_shape') and hasattr(
          layer, '_batch_input_shape'):
        layer.layer._batch_input_shape = layer._batch_input_shape
      return layer.layer
    return layer

  return keras.models.clone_model(
      model, input_tensors=None, clone_function=_strip_pruning_wrapper)

All of the included libraries are up to date. Any ideas on how to overcome this error?


Thanks in advance.

I found the answer. There is a tricky workaround: besides changing the code to the following:

from tensorflow_model_optimization.sparsity import keras as sparsity

l = tf.keras.layers  # alias used for the plain (unwrapped) layers below

# input_shape, num_classes and end_step still have to be defined for your data,
# e.g. end_step = np.ceil(1.0 * num_train_samples / batch_size).astype(np.int32) * epochs

pruning_params = {
      'pruning_schedule': sparsity.PolynomialDecay(initial_sparsity=0.50,
                                                   final_sparsity=0.90,
                                                   begin_step=3,
                                                   end_step=end_step,
                                                   frequency=100)
}

pruned_model = tf.keras.Sequential([
    sparsity.prune_low_magnitude(
        l.Conv2D(32, 5, padding='same', activation='relu'),
        input_shape=input_shape,
        **pruning_params),
    l.MaxPooling2D((2, 2), (2, 2), padding='same'),
    l.BatchNormalization(),
    sparsity.prune_low_magnitude(
        l.Conv2D(64, 5, padding='same', activation='relu'), **pruning_params),
    l.MaxPooling2D((2, 2), (2, 2), padding='same'),
    l.Flatten(),
    sparsity.prune_low_magnitude(l.Dense(1024, activation='relu'),
                                 **pruning_params),
    l.Dropout(0.4),
    sparsity.prune_low_magnitude(l.Dense(num_classes, activation='softmax'),
                                 **pruning_params)
])
... I also had to restart the Jupyter kernel to get rid of further errors, such as Conv2D object has no attribute 'kernel', as reported on GitHub.
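
For completeness, a minimal training sketch, assuming the same sparsity alias as above and training data reshaped to match input_shape (this part is not from my original code): the UpdatePruningStep callback is what actually advances the pruning schedule, and strip_pruning removes the wrappers afterwards.

pruned_model.compile(optimizer='adam',
                     loss='sparse_categorical_crossentropy',
                     metrics=['accuracy'])

# UpdatePruningStep advances the pruning schedule on every training step;
# fitting a pruned model without it raises an error.
callbacks = [sparsity.UpdatePruningStep()]

pruned_model.fit(train_images, train_labels, epochs=5, callbacks=callbacks)

# Strip the pruning wrappers before saving or exporting the trained model.
final_model = sparsity.strip_pruning(pruned_model)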

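
As for the root cause: the TypeError usually means that the installed TensorFlow's tf.keras.models.clone_model does not yet accept a clone_function keyword (it only appeared in newer TensorFlow releases), while tensorflow-model-optimization already passes it. A quick check, as a sketch:

import inspect
import tensorflow as tf

print(tf.__version__)
# If 'clone_function' does not show up in this signature, the installed TensorFlow
# is too old for the installed tensorflow-model-optimization package; upgrading
# (e.g. pip install -U tensorflow tensorflow-model-optimization) should make the
# original prune_low_magnitude(model, ...) call work without the per-layer workaround.
print(inspect.signature(tf.keras.models.clone_model))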