Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/python/339.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Python 如何在流程之间正确共享Manager指令_Python_Dictionary_Python Multiprocessing_Multiprocessing Manager - Fatal编程技术网

Python 如何在流程之间正确共享Manager指令

Python 如何在流程之间正确共享Manager指令,python,dictionary,python-multiprocessing,multiprocessing-manager,Python,Dictionary,Python Multiprocessing,Multiprocessing Manager,我想做的是在Process的子类之间共享一个字典,当一个进程更新字典时,另一个进程被通知使用它。下面的代码说明了这一点,其中MyProducer开始填充字典,并且在每次迭代中触发一个事件来通知MyConsumer处理字典。除了MyConsumer中的字典为空的部分之外,其他部分都可以正常工作 from multiprocessing import Process, Manager, Event class MyProducer(Process): increment = 0 d

我想做的是在
Process
的子类之间共享一个字典,当一个进程更新字典时,另一个进程被通知使用它。下面的代码说明了这一点,其中
MyProducer
开始填充字典,并且在每次迭代中触发一个事件来通知
MyConsumer
处理字典。除了
MyConsumer
中的字典为空的部分之外,其他部分都可以正常工作

from multiprocessing import Process, Manager, Event

class MyProducer(Process):
    increment = 0
    def __init__(self, dictionary, event):
        Process.__init__(self)
        self.dictionary = dictionary
        self.event = event
    
    def run(self):
        while self.increment < 20:
            self.dictionary[self.increment]=self.increment+10
            self.increment = self.increment + 1
            print("From producer: ", self.dictionary)
            self.event.set()
            while self.event.is_set() is True:
                increment = self.increment
                increment = increment + 1
        
class MyConsumer(Process):
    def __init__(self, dictionary, event):
        Process.__init__(self)
        self.dictionary = dictionary
        self.event = event
        
    
    def run(self):
        while True:
            self.event.wait()
            print("From consumer: ", self.dictionary)
            self.event.clear()
            

            
if __name__ == "__main__":

    with Manager() as manager:
        state_dict = manager.dict()
        state_ready = Event()
        producerprocess = MyProducer(state_dict, state_ready)
        consumerprocess = MyConsumer(state_dict, state_ready)
        producerprocess.start()
        consumerprocess.start()    
来自多处理导入进程、管理器、事件的

类别MyProducer(过程):
增量=0
定义初始化(自我、字典、事件):
进程。初始化(自)
self.dictionary=字典
self.event=事件
def运行(自):
当自增量<20时:
self.dictionary[self.increment]=self.increment+10
自增量=自增量+1
打印(“来自制作人:”,self.dictionary)
self.event.set()
self.event.is_set()为True时:
增量=自增量
增量=增量+1
类消费者(流程):
定义初始化(自我、字典、事件):
进程。初始化(自)
self.dictionary=字典
self.event=事件
def运行(自):
尽管如此:
self.event.wait()
打印(“来自消费者:”,self.dictionary)
self.event.clear()
如果名称=“\uuuuu main\uuuuuuuu”:
使用Manager()作为管理器:
state_dict=manager.dict()
state_ready=Event()
producerprocess=MyProducer(state\u dict,state\u ready)
consumerprocess=MyConsumer(state\u dict,state\u ready)
producerprocess.start()
consumerprocess.start()用户进程
输出是

Process MyProducer-2:
Traceback (most recent call last):
  File "/usr/lib/python3.8/multiprocessing/managers.py", line 827, in _callmethod
    conn = self._tls.connection
AttributeError: 'ForkAwareLocal' object has no attribute 'connection'

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/lib/python3.8/multiprocessing/process.py", line 315, in _bootstrap
    self.run()
  File "main.py", line 13, in run
    self.dictionary[self.increment]=self.increment+10
  File "<string>", line 2, in __setitem__
  File "/usr/lib/python3.8/multiprocessing/managers.py", line 831, in _callmethod
    self._connect()
  File "/usr/lib/python3.8/multiprocessing/managers.py", line 818, in _connect
    conn = self._Client(self._token.address, authkey=self._authkey)
  File "/usr/lib/python3.8/multiprocessing/connection.py", line 502, in Client
    c = SocketClient(address)
  File "/usr/lib/python3.8/multiprocessing/connection.py", line 630, in SocketClient
    s.connect(address)
FileNotFoundError: [Errno 2] No such file or directory

处理MyProducer-2:
回溯(最近一次呼叫最后一次):
文件“/usr/lib/python3.8/multiprocessing/managers.py”,第827行,在调用方法中
连接=自连接
AttributeError:“ForkAwareLocal”对象没有属性“connection”
在处理上述异常期间,发生了另一个异常:
回溯(最近一次呼叫最后一次):
文件“/usr/lib/python3.8/multiprocessing/process.py”,第315行,在引导程序中
self.run()
文件“main.py”,第13行,在运行中
self.dictionary[self.increment]=self.increment+10
文件“”,第2行,在集合项中__
文件“/usr/lib/python3.8/multiprocessing/managers.py”,第831行,在调用方法中
self._connect()
文件“/usr/lib/python3.8/multiprocessing/managers.py”,第818行,在_connect中
conn=self.\u客户端(self.\u token.address,authkey=self.\u authkey)
客户端中第502行的文件“/usr/lib/python3.8/multiprocessing/connection.py”
c=SocketClient(地址)
SocketClient中的文件“/usr/lib/python3.8/multiprocessing/connection.py”,第630行
s、 连接(地址)
FileNotFoundError:[Errno 2]没有这样的文件或目录
更新 我的目的是理解为什么字典不能处理流程子类。我知道你能在网上找到的所有有效案例。实际上我有一个很好的解决方案,用queue替换dict,我想了解dict为什么不工作

from multiprocessing import Process, Queue, Event

class MyProducer(Process):
    increment = 0
    def __init__(self, queue, event):
        Process.__init__(self)
        self.queue = queue
        self.event = event
    
    def run(self):
        while self.increment < 20:
            self.queue.put([self.increment,self.increment+10])
            self.increment = self.increment + 1
            print("From producer: ", self.queue.qsize())
            self.event.set()
            while self.event.is_set() is True:
                increment = self.increment
                increment = increment + 1
        
class MyConsumer(Process):
    def __init__(self, queue, event):
        Process.__init__(self)
        self.queue = queue
        self.event = event
        
    def run(self):
        while True:
            self.event.wait()
            print("From consumer: ", self.queue.qsize())
            self.event.clear()
            

if __name__ == "__main__":
  state_queue = Queue()
  state_ready = Event()
  producerprocess = MyProducer(state_queue, state_ready)
  consumerprocess = MyConsumer(state_queue, state_ready)
  producerprocess.start()
  consumerprocess.start()  

来自多处理导入进程、队列、事件的

类别MyProducer(过程):
增量=0
定义初始化(自我、队列、事件):
进程。初始化(自)
self.queue=队列
self.event=事件
def运行(自):
当自增量<20时:
self.queue.put([self.increment,self.increment+10])
自增量=自增量+1
打印(“来自生产者:,self.queue.qsize())
self.event.set()
self.event.is_set()为True时:
增量=自增量
增量=增量+1
类消费者(流程):
定义初始化(自我、队列、事件):
进程。初始化(自)
self.queue=队列
self.event=事件
def运行(自):
尽管如此:
self.event.wait()
打印(“来自消费者:,self.queue.qsize())
self.event.clear()
如果名称=“\uuuuu main\uuuuuuuu”:
state_queue=queue()
state_ready=Event()
producerprocess=MyProducer(状态为队列,状态为就绪)
consumerprocess=MyConsumer(状态\队列,状态\就绪)
producerprocess.start()
consumerprocess.start()用户进程

仅供参考,我看到这个更简单的程序也会导致类似的死亡:

from multiprocessing import Process, Manager, Event

class MyProducer(Process):
    def __init__(self, value, event):
        Process.__init__(self)
        self.val = value
        self.event = event

    def run(self):
        print("at producer start", self.val.value)
        self.val.value = 42
        self.event.set()

class MyConsumer(Process):
    def __init__(self, value, event):
        Process.__init__(self)
        self.val = value
        self.event = event

    def run(self):
        self.event.wait()
        print("From consumer: ", self.val.value)
                        
if __name__ == "__main__":
    with Manager() as manager:
        state_value = manager.Value('i', 666)
        state_ready = Event()
        producerprocess = MyProducer(state_value, state_ready)
        consumerprocess = MyConsumer(state_value, state_ready)
        producerprocess.start()
        consumerprocess.start()
这意味着,从
管理器
获取的任何类型的对象,如果作为属性附加到对象,mp必须在工作进程中“通过魔法”构造,则都不会有效地重构。连接到
管理器
服务器进程所需的信息似乎丢失了(无论是Linux-y系统上的套接字还是Windows上的命名管道)

您可以提交一个bug报告,但在此之前除了重写代码以不使用管理器或将管理器对象显式地传递给函数之外,没有什么可以做的

一个bug报告可以有两种解决方案:(1)使它“工作”;或者,(2)更改代码以在尝试创建此类对象时引发异常

另一种可能性(未尝试):如果您仅在Linux上运行,您可以跳过
\uuuu name\uuuu==“\uuuuu main\uuuuu”
测试,并希望
管理器
连接信息能够存活
fork()

编辑 我在Python项目的跟踪器上打开了一个问题,在这里:

变通办法 在处理Python问题报告中的内容时,这里的“问题”似乎不是关于如何设置的问题,而是在您的代码中忽略了干净地关闭工作人员的需要。只需在代码末尾添加这一行(您关心的
dict
版本):

这就足够了,现在在我的盒子上(Win10Python3.8.5),它会产生您期望的输出。然而,它挂在前面
    producerprocess.join()
import multiprocessing as mp

P, C, F = 1, 2, 4 # bit flags for state values

# Unusual synchronization appears to be wanted here:
# After a producer makes a mutation, it must not make another
# before the consumer acts on it.  So we'll say we're in state
# P when the producer is allowed to mutate, and in state C
# when there's a mutation for the consumer to process.  Another
# state - F (for "finished") - tells the consumer it's time to
# quit. The producer stops on its own when it gets tired of
# mutating ;-)
class State:
    def __init__(self):
        # Initial state is empty - everyone is blocked.
        # Note that we do our own locking around the shared
        # memory, via the condition variable's mutex, so
        # it would be pure waste for the Value to have
        # its own lock too.
        self.state = mp.Value('B', 0, lock=False)
        self.changed = mp.Condition()

    # Wait for state to change to one of the states in the
    # flag mask `what`.  Return the bit flag of the state
    # that succeeded.
    def waitfor(self, what):
        with self.changed:
            while not (self.state.value & what):
                self.changed.wait()
            return self.state.value

    # Force state to (bit flag) `what`, and notify waiters
    # to wake up and see whether it's the state they're
    # waiting for.
    def setwhat(self, what):
        with self.changed:
            self.state.value = what
            self.changed.notify_all()

class Base(mp.Process):
    def __init__(self, dictionary, state):
        super().__init__()
        self.dictionary = dictionary
        self.state = state

class MyProducer(Base):
    def __init__(self, *args):
        super().__init__(*args)
        self.increment = 0

    def run(self):
        while self.increment < 20:
            self.state.waitfor(P)
            self.dictionary[self.increment] = self.increment + 10
            self.state.setwhat(C)
            # Whether the producer or the consumer prints the dict
            # first isn't forced - and, indeed, they can both print at
            # the same time, producing garbled output.  Move the
            # print() above the setwhat(C) to force the producer
            # to print first, if desired.
            print("From producer: ", self.dictionary)
            self.increment += 1

class MyConsumer(Base):
    def run(self):
        while self.state.waitfor(C | F) != F:
            print("From consumer: ", self.dictionary)
            self.state.setwhat(P)

def main():
    with mp.Manager() as manager:
        state_dict = manager.dict()
        state_state = State()
        producerprocess = MyProducer(state_dict, state_state)
        consumerprocess = MyConsumer(state_dict, state_state)
        producerprocess.start()
        consumerprocess.start()

        # The producer is blocked waiting for state P, and the
        # consumer is blocked waiting for state C (or F). The
        # loop here counts down 5 seconds, so you can verify
        # by eyeball that the waits aren't "busy" (they consume
        # essentially no CPU cycles).
        import time
        for i in reversed(range(5)):
            time.sleep(1)
            print(i)

        state_state.setwhat(P) # tell the producer to start!
        producerprocess.join() # and wait for it to finish
        # wait for the consumer to finish eating the last mutation
        state_state.waitfor(P)
        # tell the consumer we're all done
        state_state.setwhat(F)
        consumerprocess.join()    

if __name__ == "__main__":
    main()