Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/python/301.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
使用Python时,多处理无法将LDAP对象共享给子进程_Python_Multiprocessing_Python Ldap_Python Multiprocessing - Fatal编程技术网

使用Python时,多处理无法将LDAP对象共享给子进程

我将有一个子进程发送 LDAP 查询,另一个子进程检索响应。在两个进程之间共享 LDAP 对象时出现了问题。有人能告诉我一个解决办法吗?

我将有一个子进程发送LDAP查询,另一个子进程检索响应。 在两个进程之间共享LDAP对象存在问题。有人能告诉我一个解决办法吗

import ldap
from multiprocessing import Process


def send_ldap(ldap_conn):
    # Placeholder sender: just echo the connection object it received.
    print(ldap_conn)

def receive_ldap(ldap_conn):
    # Placeholder receiver: just echo the connection object it received.
    print(ldap_conn)

def main():
    """Spawn one child process to send LDAP queries and one to receive responses.

    NOTE(review): the object returned by ldap.initialize() is not picklable
    (it holds thread locks internally), so on Windows — where Process
    arguments are pickled to the child — start() raises PicklingError.
    A picklable ReconnectLDAPObject subclass is the workaround.
    """
    ldap_conn = ldap.initialize('ldap://abc:12345')
    ldap_sender = Process(target=send_ldap, args=(ldap_conn,))
    ldap_receiver = Process(target=receive_ldap, args=(ldap_conn,))
    # Bug fix: the receiver was never started, and neither child was
    # joined, so main() could return while children were still running.
    ldap_sender.start()
    ldap_receiver.start()
    ldap_sender.join()
    ldap_receiver.join()


if __name__ == '__main__':
    main()
pickle模块出现错误:

    Traceback (most recent call last):
  File "t.py", line 22, in <module>
    main()
  File "t.py", line 16, in main
    ldap_sender.start()
  File "c:\python27\lib\multiprocessing\process.py", line 130, in start
    self._popen = Popen(self)
  File "c:\python27\lib\multiprocessing\forking.py", line 277, in __init__
    dump(process_obj, to_child, HIGHEST_PROTOCOL)
  File "c:\python27\lib\multiprocessing\forking.py", line 199, in dump
    ForkingPickler(file, protocol).dump(obj)
  File "c:\python27\lib\pickle.py", line 224, in dump
    self.save(obj)
  File "c:\python27\lib\pickle.py", line 331, in save
    self.save_reduce(obj=obj, *rv)
  File "c:\python27\lib\pickle.py", line 419, in save_reduce
    save(state)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 649, in save_dict
    self._batch_setitems(obj.iteritems())
  File "c:\python27\lib\pickle.py", line 681, in _batch_setitems
    save(v)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 548, in save_tuple
    save(element)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 725, in save_inst
    save(stuff)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 649, in save_dict
    self._batch_setitems(obj.iteritems())
  File "c:\python27\lib\pickle.py", line 681, in _batch_setitems
    save(v)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 725, in save_inst
    save(stuff)
  File "c:\python27\lib\pickle.py", line 286, in save
    f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 649, in save_dict
    self._batch_setitems(obj.iteritems())
  File "c:\python27\lib\pickle.py", line 681, in _batch_setitems
    save(v)
  File "c:\python27\lib\pickle.py", line 331, in save
    self.save_reduce(obj=obj, *rv)
Traceback (most recent call last):
  File "c:\python27\lib\pickle.py", line 396, in save_reduce
  File "<string>", line 1, in <module>
      File "c:\python27\lib\multiprocessing\forking.py", line 381, in main
save(cls)
      File "c:\python27\lib\pickle.py", line 286, in save
self = load(from_parent)
      File "c:\python27\lib\pickle.py", line 1378, in load
f(self, obj) # Call unbound method with explicit self
  File "c:\python27\lib\pickle.py", line 748, in save_global
    return Unpickler(file).load()
  File "c:\python27\lib\pickle.py", line 858, in load
    (obj, module, name))
pickle.PicklingError: Can't pickle <type 'thread.lock'>: it's not found as thread.lock
dispatch[key](self)
  File "c:\python27\lib\pickle.py", line 880, in load_eof
    raise EOFError
EOFError
回溯(最近一次呼叫最后一次):
文件“t.py”,第22行,在
main()
文件“t.py”,第16行,主
ldap_sender.start()
文件“c:\python27\lib\multiprocessing\process.py”,第130行,在开始处
self.\u popen=popen(self)
文件“c:\python27\lib\multiprocessing\forking.py”,第277行,在\uuu init中__
转储(进程对象、到子进程、最高\u协议)
文件“c:\python27\lib\multiprocessing\forking.py”,第199行,在转储中
ForkingPickler(文件、协议).dump(obj)
文件“c:\python27\lib\pickle.py”,第224行,位于转储文件中
自我保存(obj)
文件“c:\python27\lib\pickle.py”,第331行,保存
自我保存(obj=obj,*rv)
文件“c:\python27\lib\pickle.py”,第419行,在save\u reduce中
保存(状态)
文件“c:\python27\lib\pickle.py”,第286行,保存
f(self,obj)#用显式self调用未绑定方法
保存目录中第649行的文件“c:\python27\lib\pickle.py”
self.\u batch\u setitems(obj.iteritems())
文件“c:\python27\lib\pickle.py”,第681行,在批处理设置项中
保存(v)
文件“c:\python27\lib\pickle.py”,第286行,保存
f(self,obj)#用显式self调用未绑定方法
文件“c:\python27\lib\pickle.py”,第548行,在save\u tuple中
保存(元素)
文件“c:\python27\lib\pickle.py”,第286行,保存
f(self,obj)#用显式self调用未绑定方法
文件“c:\python27\lib\pickle.py”,第725行,在save\u inst中
保存(东西)
文件“c:\python27\lib\pickle.py”,第286行,保存
f(self,obj)#用显式self调用未绑定方法
保存目录中第649行的文件“c:\python27\lib\pickle.py”
self.\u batch\u setitems(obj.iteritems())
文件“c:\python27\lib\pickle.py”,第681行,在批处理设置项中
保存(v)
文件“c:\python27\lib\pickle.py”,第286行,保存
f(self,obj)#用显式self调用未绑定方法
文件“c:\python27\lib\pickle.py”,第725行,在save\u inst中
保存(东西)
文件“c:\python27\lib\pickle.py”,第286行,保存
f(self,obj)#用显式self调用未绑定方法
保存目录中第649行的文件“c:\python27\lib\pickle.py”
self.\u batch\u setitems(obj.iteritems())
文件“c:\python27\lib\pickle.py”,第681行,在批处理设置项中
保存(v)
文件“c:\python27\lib\pickle.py”,第331行,保存
自我保存(obj=obj,*rv)
回溯(最近一次呼叫最后一次):
文件“c:\python27\lib\pickle.py”,第396行,在save\u reduce中
文件“”,第1行,在
文件“c:\python27\lib\multiprocessing\forking.py”,第381行,在main中
保存(cls)
文件“c:\python27\lib\pickle.py”,第286行,保存
self=加载(从父级)
加载文件“c:\python27\lib\pickle.py”,第1378行
f(self,obj)#用显式self调用未绑定方法
文件“c:\python27\lib\pickle.py”,第748行,在save\u global中
返回Unpickler(file.load())
加载文件“c:\python27\lib\pickle.py”,第858行
(对象、模块、名称))
pickle.PicklingError:无法 pickle <type 'thread.lock'>:未能将其作为 thread.lock 找到
调度[键](自身)
文件“c:\python27\lib\pickle.py”,第880行,在load\u eof中
raise EOFError
EOFError

问题在于
ldap_conn
对象不可 pickle 序列化,而在 Windows 中要在进程之间传递它就必须能被 pickle。它不可序列化,是因为其内部使用了一些不可 pickle 的
thread.lock
对象。python-ldap 库实际上声称提供了一个应当可以 pickle 的对象(ReconnectLDAPObject),但它存在缺陷,同样无法 pickle。不过,我们可以通过将其子类化来修复这个 bug(它在 pickle 之前漏掉了移除一个内部锁):


现在,对象将被pickle处理好,这意味着它应该可以用于
多处理
目的。

我还报告了
ReconnectLDAPObject
不可 pickle 的问题。我遇到了类似的问题,只不过涉及的是我自己定义的自定义类而不是 LDAP 对象。我的类包含一组需要子进程访问的不同数据对象,其中包括作为双线程操作一部分的 multiprocessing.Lock。有什么解决办法吗?我能否把句柄传给每个需要访问的对象?(我其实不太明白这里为什么/如何用到 pickle,我也不熟悉 LDAP)
from ldap.ldapobject import ReconnectLDAPObject
from multiprocessing import Process
import ldap

class PicklableLDAPObject(ReconnectLDAPObject):
    """ReconnectLDAPObject whose instances survive pickling.

    ReconnectLDAPObject is meant to be picklable, but its internal
    ``_reconnect_lock`` (a thread lock) is not, so pickling an instance
    fails.  This subclass drops the lock from the pickled state and
    recreates a fresh one when the object is unpickled, which makes the
    connection object usable as a ``multiprocessing.Process`` argument
    (required on Windows, where arguments are pickled to the child).
    """

    def __getstate__(self):
        # Start from the parent's state dict, then remove the one entry
        # that cannot be pickled.
        d = ReconnectLDAPObject.__getstate__(self)
        del d['_reconnect_lock']
        return d

    def __setstate__(self, d):
        # Recreate the lock BEFORE delegating to the parent, since the
        # parent's __setstate__ may rely on the lock being present.
        self._reconnect_lock = ldap.LDAPLock(desc='reconnect lock within %s' % (repr(self)))
        ReconnectLDAPObject.__setstate__(self, d)

def send_ldap(ldap_conn):
    # Demo sender: print the (now picklable) connection it was given.
    print(ldap_conn)

def receive_ldap(ldap_conn):
    # Demo receiver: print the (now picklable) connection it was given.
    print(ldap_conn)

def main():
    """Hand a picklable LDAP connection to a sender and a receiver process.

    PicklableLDAPObject can cross the process boundary on Windows,
    unlike the plain object returned by ldap.initialize().
    """
    ldap_conn = PicklableLDAPObject('ldap://abc:12345')
    ldap_sender = Process(target=send_ldap, args=(ldap_conn,))
    ldap_receiver = Process(target=receive_ldap, args=(ldap_conn,))
    # Bug fix: start the receiver too, and join both children so main()
    # does not return while they are still running.
    ldap_sender.start()
    ldap_receiver.start()
    ldap_sender.join()
    ldap_receiver.join()