
Python: Unsuccessful TensorSliceReader constructor


I am trying to make a chatbot in Python for a school project. I am building it by following this video: . When I try to run the code I get an error. Here is the error message I receive:

Traceback (most recent call last):
  File "main.py", line 88, in <module>
    model.load(".\model.tflearn")
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\models\dnn.py", line 308, in load
    self.trainer.restore(model_file, weights_only, **optargs)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\helpers\trainer.py", line 490, in restore
    self.restorer.restore(self.session, model_file)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1686, in restore
    {self.saver_def.filename_tensor_name: save_path})
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 895, in run
    run_metadata_ptr)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 1128, in _run
    feed_dict_tensor, options, run_metadata)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 1344, in _do_run
    options, run_metadata)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\client\session.py", line 1363, in _do_call
    raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.NotFoundError: Unsuccessful TensorSliceReader constructor: Failed to find any matching files for C:\Users\Sachu\Documents\chatbot\model.tflearn
         [[Node: save_1/RestoreV2_2 = RestoreV2[dtypes=[DT_FLOAT], _device="/job:localhost/replica:0/task:0/device:CPU:0"](_arg_save_1/Const_0_0, save_1/RestoreV2_2/tensor_names, save_1/RestoreV2_2/shape_and_slices)]]

Caused by op 'save_1/RestoreV2_2', defined at:
  File "main.py", line 80, in <module>
    model = tflearn.DNN(net)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\models\dnn.py", line 65, in __init__
    best_val_accuracy=best_val_accuracy)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tflearn\helpers\trainer.py", line 147, in __init__
    allow_empty=True)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1239, in __init__
    self.build()
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1248, in build
    self._build(self._filename, build_save=True, build_restore=True)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 1284, in _build
    build_save=build_save, build_restore=build_restore)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 765, in _build_internal
    restore_sequentially, reshape)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 428, in _AddRestoreOps
    tensors = self.restore_op(filename_tensor, saveable, preferred_shard)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\training\saver.py", line 268, in restore_op
    [spec.tensor.dtype])[0])
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\ops\gen_io_ops.py", line 1113, in restore_v2
    shape_and_slices=shape_and_slices, dtypes=dtypes, name=name)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\framework\op_def_library.py", line 787, in _apply_op_helper
    op_def=op_def)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\framework\ops.py", line 3160, in create_op
    op_def=op_def)
  File "C:\Users\Sachu\AppData\Local\Programs\Python\Python36\lib\site-packages\tensorflow\python\framework\ops.py", line 1625, in __init__
    self._traceback = self._graph._extract_stack()  # pylint: disable=protected-access

NotFoundError (see above for traceback): Unsuccessful TensorSliceReader constructor: Failed to find any matching files for C:\Users\Sachu\Documents\chatbot\model.tflearn
         [[Node: save_1/RestoreV2_2 = RestoreV2[dtypes=[DT_FLOAT], _device="/job:localhost/replica:0/task:0/device:CPU:0"](_arg_save_1/Const_0_0, save_1/RestoreV2_2/tensor_names, save_1/RestoreV2_2/shape_and_slices)]]

Please help me with this. Thank you in advance.

Does this file exist? C:\Users\Sachu\Documents\chatbot\model.tflearn

I checked, but it is not there.

That makes more sense, thanks, but do you know how to get the model.tflearn file?

Part 3 of the tutorial is about training the NN and building the model.tflearn file from the result...
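As the comments point out, the NotFoundError simply means the model.tflearn checkpoint does not exist yet: the model has to be trained and saved once (part 3 of the tutorial) before model.load() can restore it. A minimal sketch of that pattern, assuming the same net, training and output arrays as in the code below; the check on the .index file is an assumption about the checkpoint filenames TensorFlow writes when saving:

import os

model = tflearn.DNN(net)

# Assumption: model.save("model.tflearn") writes checkpoint files such as
# model.tflearn.index / .meta / .data-*; only try to load if one is present.
if os.path.exists("model.tflearn.index"):
    model.load("model.tflearn")
else:
    model.fit(training, output, n_epoch=1000, batch_size=8, show_metric=True)
    model.save("model.tflearn")

For reference, the full code from the question follows.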
import nltk
from nltk.stem.lancaster import LancasterStemmer
stemmer = LancasterStemmer()

import numpy
import tflearn
import tensorflow
import random
import json
import pickle

nltk.download('punkt')

with open("intents.json") as file:
    data = json.load(file)

try:
    # "rb" (read-binary) is needed here; opening with "wb" would truncate
    # data.pickle before pickle.load could read it
    with open("data.pickle", "rb") as f:
        words, labels, training, output = pickle.load(f)
except:
    words = []
    labels = []
    docs_x = []
    docs_y = []

    for intent in data["intents"]:
        for pattern in intent["patterns"]:
            wrds = nltk.word_tokenize(pattern)
            words.extend(wrds)
            docs_x.append(wrds)
            docs_y.append(intent["tag"])

        if intent["tag"] not in labels:
            labels.append(intent["tag"])

    words = [stemmer.stem(w.lower()) for w in words if w != "?"]
    words = sorted(list(set(words)))

    labels = sorted(labels)

    training = []
    output = []

    out_empty = [0 for _ in range(len(labels))]

    for x, doc in enumerate(docs_x):
        bag = []

        wrds = [stemmer.stem(w.lower()) for w in doc]

        for w in words:
            if w in wrds:
                bag.append(1)
            else:
                bag.append(0)

        output_row = out_empty[:]
        output_row[labels.index(docs_y[x])] = 1

        training.append(bag)
        output.append(output_row)


    training = numpy.array(training)
    output = numpy.array(output)

    with open("data.pickle", "wb") as f:
        pickle.dump((words, labels, training, output), f)

tensorflow.reset_default_graph()

net = tflearn.input_data(shape=[None, len(training[0])])
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, 8)
net = tflearn.fully_connected(net, len(output[0]), activation="softmax")
net = tflearn.regression(net)

model = tflearn.DNN(net)
'''
try:
    model.load("model.tflearn")
except:
    model.fit(training, output, n_epoch=1000, batch_size=8, show_metric=True)
    model.save("model.tflearn")
'''
model.load(".\model.tflearn")
model.fit(training, output, n_epoch=1000, batch_size=8, show_metric=True)
model.save("model.tflearn")

def bag_of_words(s, words):
    bag = [0 for _ in range(len(words))]

    s_words = nltk.word_tokenize(s)
    s_words = [stemmer.stem(word.lower()) for word in s_words]

    for se in s_words:
        for i, w in enumerate(words):
            if w == se:
                bag[i] = 1

    return numpy.array(bag)


def chat():
    print("Start talking with the bot (type quit to stop)!")
    while True:
        inp = input("You: ")
        if inp.lower() == "quit":
            break

        results = model.predict([bag_of_words(inp, words)])
        results_index = numpy.argmax(results)
        tag = labels[results_index]

        for tg in data["intents"]:
            if tg['tag'] == tag:
                responses = tg['responses']

        print(random.choice(responses))

chat()