
Python: Unable to save the universal-sentence-encoder-lite TensorFlow Hub model with TensorFlow 1.15


After training data with the universal-sentence-encoder-lite/2 hub module (https://tfhub.dev/google/universal-sentence-encoder-lite/2), I am unable to save the model.

I have the model saved locally:
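For context, hub.Module accepts either the tfhub.dev URL or a local directory containing the extracted TF1 Hub module (tfhub_module.pb, saved_model.pb, variables/ and assets/). A minimal sketch using the asker's own paths:

import tensorflow_hub as hub

# Load straight from tfhub.dev...
liteModule = hub.Module("https://tfhub.dev/google/universal-sentence-encoder-lite/2")
# ...or from the local directory the module was extracted into.
liteModule = hub.Module("C:/Users/771556/Downloads/ModelLiteUSE")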

path ="C:/Users/771556/Downloads/ModelLiteUSE"  #url ="https://tfhub.dev/google/universal-sentence-encoder-lite/2"  liteModule = hub.Module(path)

input_placeholder = tf.sparse_placeholder(tf.int64, shape=[None, None])  encodings = liteModule(  inputs=dict(  values=input_placeholder.values,  indices=input_placeholder.indices,  dense_shape=input_placeholder.dense_shape))

with tf.Session() as sess:  spm_path = sess.run(liteModule(signature="spm_path"))

sp = spm.SentencePieceProcessor()  sp.Load(spm_path)  print("SentencePiece model loaded at {}.".format(spm_path))

def process_to_IDs_in_sparse_format(sp, sentences):    ids = [sp.EncodeAsIds(x) for x in sentences]  max_len = max(len(x) for x in ids)  dense_shape=(len(ids), max_len)  values=[item for sublist in ids for item in sublist]  indices=[[row,col] for row in range(len(ids)) for col in range(len(ids[row]))]  return (values, indices, dense_shape)

def embed(input):  values, indices, dense_shape = process_to_IDs_in_sparse_format(sp, input)  # Reduce logging output.  logging.set_verbosity(logging.ERROR)  with tf.Session() as session:  session.run([tf.global_variables_initializer(), tf.tables_initializer()])  message_embeddings = session.run(  encodings,  feed_dict={input_placeholder.values: values,  input_placeholder.indices: indices,  input_placeholder.dense_shape: dense_shape})  return message_embeddings  
tf.saved_model.save(TrainModel,'D:/liteTrainmodel')
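For reference, the (values, indices, dense_shape) triple that feeds input_placeholder can be sanity-checked without TensorFlow. The stub tokenizer below is hypothetical (the real ids come from the module's SentencePiece vocabulary) and only illustrates the layout:

class FakeSP:
    # Stand-in for SentencePieceProcessor: fake token ids, real ragged shape.
    def EncodeAsIds(self, text):
        return [len(w) for w in text.split()]

values, indices, dense_shape = process_to_IDs_in_sparse_format(FakeSP(), ["hello world", "hi"])
print(values)       # [5, 5, 2]
print(indices)      # [[0, 0], [0, 1], [1, 0]]
print(dense_shape)  # (2, 2): two sentences, the longer one has two tokens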
Training the data:

TrainModel = embed(file_data.text)
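Note that embed() runs the graph and returns a NumPy array, so TrainModel holds the computed embeddings, not a model. If the goal is just to persist those embeddings, a plain NumPy save is enough (a minimal sketch; the .npy path is illustrative):

import numpy as np

# For this module the result is a float32 array of shape (num_sentences, 512).
np.save("D:/liteTrainmodel_embeddings.npy", TrainModel)
restored = np.load("D:/liteTrainmodel_embeddings.npy")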
Saving the model:

path ="C:/Users/771556/Downloads/ModelLiteUSE"  #url ="https://tfhub.dev/google/universal-sentence-encoder-lite/2"  liteModule = hub.Module(path)

input_placeholder = tf.sparse_placeholder(tf.int64, shape=[None, None])  encodings = liteModule(  inputs=dict(  values=input_placeholder.values,  indices=input_placeholder.indices,  dense_shape=input_placeholder.dense_shape))

with tf.Session() as sess:  spm_path = sess.run(liteModule(signature="spm_path"))

sp = spm.SentencePieceProcessor()  sp.Load(spm_path)  print("SentencePiece model loaded at {}.".format(spm_path))

def process_to_IDs_in_sparse_format(sp, sentences):    ids = [sp.EncodeAsIds(x) for x in sentences]  max_len = max(len(x) for x in ids)  dense_shape=(len(ids), max_len)  values=[item for sublist in ids for item in sublist]  indices=[[row,col] for row in range(len(ids)) for col in range(len(ids[row]))]  return (values, indices, dense_shape)

def embed(input):  values, indices, dense_shape = process_to_IDs_in_sparse_format(sp, input)  # Reduce logging output.  logging.set_verbosity(logging.ERROR)  with tf.Session() as session:  session.run([tf.global_variables_initializer(), tf.tables_initializer()])  message_embeddings = session.run(  encodings,  feed_dict={input_placeholder.values: values,  input_placeholder.indices: indices,  input_placeholder.dense_shape: dense_shape})  return message_embeddings  
tf.saved_model.save(TrainModel, 'D:/liteTrainmodel')
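This is the failing call: tf.saved_model.save expects a trackable object (a tf.Module, Keras model, etc.), while TrainModel is a NumPy array. With TF 1.15 and hub.Module, the graph built above can instead be exported from a live session with tf.saved_model.simple_save. A sketch under those assumptions (the export directory must not already exist):

import tensorflow as tf

with tf.Session() as session:
    session.run([tf.global_variables_initializer(), tf.tables_initializer()])
    # Writes the current graph and variable values as a TF1 SavedModel.
    tf.saved_model.simple_save(
        session,
        "D:/liteTrainmodel",                      # must be a fresh directory
        inputs={"sentences": input_placeholder},  # SparseTensor inputs are accepted
        outputs={"embeddings": encodings},
        legacy_init_op=tf.tables_initializer())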
Error:


Please format your question properly.

How do I do that? If you know, please share the code. Please find the Colab url:

@ZayedRais, I have looked at your Colab and there is no such model in it. You can define a model and then try to save it. If you don't want to define a model, can you explain what exactly you are trying to achieve, so that we can help you? Thanks.