Tensorflow 如何为TFX服务创建模型配置文件?

Tensorflow 如何为TFX服务创建模型配置文件?,tensorflow,tensorflow-serving,tfx,Tensorflow,Tensorflow Serving,Tfx,我保存了一组从tf.keras.layers.Layer继承的自定义模型实例。我想用TFX服务为他们服务,这需要我有一个model\u config文件 我想知道如何根据这本书来创造这个。现在我有下面的代码,我相信这更多的是关于我自己的砖块,而不是我应该做什么 model_server_config = model_server_config_pb2.ModelServerConfig() #Create a config to add to the list of served mo

我保存了一组从
tf.keras.layers.Layer
继承的自定义模型实例。我想用TFX服务为他们服务,这需要我有一个
model_config
文件

我想知道如何根据这本书来创造这个。现在我有下面的代码,我相信这更多的是关于我自己的砖块,而不是我应该做什么

# Build a TFX Serving model-config file (text-proto ModelServerConfig)
# listing one entry per saved model.
model_server_config = model_server_config_pb2.ModelServerConfig()

# List of served-model entries to be attached to the server config.
config_list = model_server_config_pb2.ModelConfigList()

# NOTE(review): the loop length comes from `trainable_unit_name` but the
# values come from `name` — presumably these are parallel lists; confirm.
for i in range(len(trainable_unit_name)):
    model_name = name[i]
    base_path = "/models/{}".format(name[i])

    one_config = config_list.config.add()
    one_config.name = model_name
    one_config.base_path = base_path
    one_config.model_platform = "tensorflow"

model_server_config.model_config_list.MergeFrom(config_list)

# str() of a ModelServerConfig already renders the required
# "model_config_list { ... }" wrapper in protobuf text format, so no
# manual string concatenation is needed (the old hand-wrapping was fragile).
with open(C.CONF_FILEPATH, 'w') as f:
    f.write(str(model_server_config))

链接中提供了有关Tensorflow服务配置的完整信息

您的问题的答案,即如何为
服务
创建
模型配置文件
,可在此找到

有关PB文件的更多信息,请参阅此

提供下面的
Github代码
,以防上面的代码不起作用:

syntax = "proto3";

package tensorflow.serving;
option cc_enable_arenas = true;

import "google/protobuf/any.proto";
import "tensorflow_serving/config/logging_config.proto";
import "tensorflow_serving/sources/storage_path/file_system_storage_path_source.proto";

// The type of model.
// DEPRECATED (b/31336131): use ModelConfig.model_platform instead.
enum ModelType {
  MODEL_TYPE_UNSPECIFIED = 0 [deprecated = true];
  TENSORFLOW = 1 [deprecated = true];
  OTHER = 2 [deprecated = true];
};

// Common configuration for loading a model being served.
message ModelConfig {
  // Name of the model.
  string name = 1;

  // Alternate names by which the model may be requested,
  // in addition to 'name'.
  repeated string alias = 9;

  // Base path on the filesystem under which model versions are stored.
  string base_path = 2;

  // Type of model.
  // TODO(b/31336131): DEPRECATED. Please use 'model_platform' instead.
  ModelType model_type = 3 [deprecated = true];

  // Type of model (e.g. "tensorflow").
  //
  // (This cannot be changed once a model is in serving.)
  string model_platform = 4;

  // Field number 5 was previously used; kept reserved to prevent reuse.
  reserved 5;

  // Version policy for the model indicating which version(s) of the model to
  // load and make available for serving simultaneously.
  // The default option is to serve only the latest version of the model.
  //
  // (This can be changed once a model is in serving.)
  FileSystemStoragePathSourceConfig.ServableVersionPolicy model_version_policy =
      7;

  // String labels to associate with versions of the model, allowing inference
  // queries to refer to versions by label instead of number. Multiple labels
  // can map to the same version, but not vice-versa.
  map<string, int64> version_labels = 8;

  // Configures logging requests and responses, to the model.
  //
  // (This can be changed once a model is in serving.)
  LoggingConfig logging_config = 6;
}

// Static list of models to be loaded for serving.
message ModelConfigList {
  // One entry per model to serve.
  repeated ModelConfig config = 1;
}

// ModelServer config.
message ModelServerConfig {
  // ModelServer takes either a static file-based model config list or an Any
  // proto representing custom model config that is fetched dynamically at
  // runtime (through network RPC, custom service, etc.).
  oneof config {
    // Static, file-based list of models to serve.
    ModelConfigList model_config_list = 1;
    // Custom model config, resolved dynamically at runtime.
    google.protobuf.Any custom_model_config = 2;
  }
}
syntax=“proto3”;
package tensorflow.serving;
选项cc_enable_arenas=true;
导入“google/protobuf/any.proto”;
导入“tensorflow_serving/config/logging_config.proto”;
导入 "tensorflow_serving/sources/storage_path/file_system_storage_path_source.proto";
//模型的类型。
枚举模型类型{
模型_类型_未指定=0[已弃用=true];
TENSORFLOW=1[已弃用=true];
其他=2[已弃用=真];
};
//加载正在服务的模型的通用配置。
消息模型配置{
//模型的名称。
字符串名称=1;
重复字符串别名=9;
字符串基路径=2;
//模型类型。
//TODO(b/31336131):已弃用。请改用“model_platform”。
ModelType model_type=3[已弃用=true];
//模型类型(例如“tensorflow”)。
//
//(一旦模型投入使用,则无法更改此选项。)
串模型_平台=4;
保留5个;
//模型的版本策略，指示要加载哪些模型版本
//并同时提供服务。
//默认选项是仅提供模型的最新版本。
//
//(这可以在模型投入使用后更改。)
FileSystemStoragePathSourceConfig.ServableVersionPolicy model_version_policy = 7;
//与模型版本关联的字符串标签,允许推断
//按标签而不是数字引用版本的查询。多个标签
//可以映射到同一版本，反之则不行。
map<string, int64> version_labels = 8;
//配置对模型的日志记录请求和响应。
//
//(这可以在模型投入使用后更改。)
LoggingConfig logging_config=6;
}
//要加载以提供服务的模型的静态列表。
消息模型配置列表{
重复的ModelConfig=1;
}
//ModelServer配置。
消息模型服务器配置{
//ModelServer采用基于静态文件的模型配置列表或任意
//proto表示在上动态获取的自定义模型配置
//运行时(通过网络RPC、自定义服务等)。
配置之一{
模型配置列表模型配置列表=1;
google.protobuf.Any自定义_model_config=2;
}
}