Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/python/286.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
循环中的python多线程_Python_Multithreading_For Loop_Proxy_Python Requests - Fatal编程技术网

循环中的python多线程

循环中的python多线程,python,multithreading,for-loop,proxy,python-requests,Python,Multithreading,For Loop,Proxy,Python Requests,我想我对线程是如何工作的有一个非常基本的理解,但是因为我不知道那么多,所以我不能理解这个。我想有一个大约10个线程的池限制,但棘手的是我不知道如何让它逐行读取 proxies = { 'http': 'http://123.10.210.213:9999', 'https': 'http://123.10.210.213:9999' } def create_proxy_lst(txt): print(""" #############################

我想我对线程是如何工作的有一个非常基本的理解,但是因为我不知道那么多,所以我不能理解这个。我想有一个大约10个线程的池限制,但棘手的是我不知道如何让它逐行读取

# Default proxy mapping passed to requests: both plain-HTTP and HTTPS
# traffic are routed through the same HTTP proxy endpoint.
_PROXY_ENDPOINT = 'http://123.10.210.213:9999'
proxies = dict.fromkeys(('http', 'https'), _PROXY_ENDPOINT)


def create_proxy_lst(txt):
    """Read proxy addresses, one per line, from the file *txt*.

    Prints the column banner used by the checker output, then returns
    the list of proxies with line endings removed.

    :param txt: path of the text file holding one proxy per line
    :return: list of proxy strings in file order
    """
    print("""
########################################
#       WORKING    |      NOT WORKING  #
########################################
        """)
    with open(txt) as f:
        # rstrip('\r\n') also handles Windows CRLF line endings; the
        # original strip('\n') left a trailing '\r' on such files,
        # which breaks the proxy URL later on.
        return [line.rstrip('\r\n') for line in f]


def check_proxy(website="https://google.com/"):
    """Test every proxy from uncheckedproxys.txt against *website*.

    A proxy counts as working only when the request succeeds with HTTP
    200; request failures AND non-200 responses count as not working.
    Progress is shown via the Windows ``title`` console command.

    :param website: URL used to probe each proxy
    """
    working = 0
    not_working = 0
    lst = create_proxy_lst("uncheckedproxys.txt")
    total_count = len(lst)
    for total, proxy in enumerate(lst, start=1):
        ok = False
        try:
            # Build a per-proxy mapping instead of mutating the
            # module-level `proxies` dict (the original clobbered the
            # shared default for every other user of that dict).
            proxy_map = {
                "http": "http://" + proxy,
                "https": "http://" + proxy,
            }
            r = requests.get(website, timeout=1, proxies=proxy_map)
            ok = r.status_code == 200
        except requests.RequestException:
            # Only network/proxy errors mean "not working"; programming
            # errors should surface instead of being swallowed.
            ok = False
        if ok:
            print("%s" % proxy)
            working += 1
            mark = "✔"
        else:
            # Original bug: a response with a non-200 status was not
            # counted at all, so the totals never added up.
            print("\t\t    %s" % proxy)
            not_working += 1
            mark = "✖"
        os.system("title Working: %d\t     Not working %d     %s    Total:   %d/%d"
                  % (working, not_working, mark, total, total_count))

将代理放入,然后启动10个线程从队列中读取代理

就你而言:

from queue import Queue, Empty  # "Queue" was the Python 2 module name
from threading import Thread, Lock
import os

import requests

WEBSITE = "https://google.com/"

# Shared result counters; guarded by a lock because 10 workers update them.
counter_lock = Lock()
working = 0
not_working = 0


def worker(proxy_queue, total=0):
    """Pull proxies off *proxy_queue* until it is empty and test each one.

    :param proxy_queue: queue.Queue of proxy strings ("host:port")
    :param total: overall proxy count, used only for the progress display
    """
    global working, not_working
    while True:
        try:
            # get_nowait() avoids the empty()/get() race of the original:
            # with several workers, empty() could report False and another
            # thread could then drain the queue before get(), blocking forever.
            proxy = proxy_queue.get_nowait()
        except Empty:
            return
        proxies = {
            "http": "http://" + proxy,
            "https": "http://" + proxy,
        }
        try:
            # The original referenced an undefined `website`; use the
            # module-level constant instead.
            r = requests.get(WEBSITE, timeout=1, proxies=proxies)
            ok = r.status_code == 200
        except requests.RequestException:
            ok = False
        with counter_lock:
            # The original reset working/not_working/total to 0 on every
            # iteration, so the tallies never grew past 1.
            if ok:
                working += 1
                print("%s" % proxy)
                mark = "✔"
            else:
                not_working += 1
                print("\t\t    %s" % proxy)
                mark = "✖"
            done = working + not_working
            os.system("title Working: %d\t     Not working %d     %s    Total:   %d/%d"
                      % (working, not_working, mark, done, total))


if __name__ == '__main__':
    # Build a queue
    proxy_queue = Queue()

    # Put these proxies into the queue
    with open("uncheckedproxys.txt") as f:
        for line in f:
            proxy_queue.put(line.strip())
    total = proxy_queue.qsize()

    # Create thread pool.  `args` must be a tuple: the original passed
    # args=proxy_queue, which raises TypeError when Thread unpacks it.
    thread_pool = [Thread(target=worker, args=(proxy_queue, total))
                   for _ in range(10)]

    # Start threads, then wait for them so the script doesn't exit early.
    for thread in thread_pool:
        thread.start()
    for thread in thread_pool:
        thread.join()

将代理放入,然后启动10个线程从队列中读取代理

就你而言:

from queue import Queue, Empty  # "Queue" was the Python 2 module name
from threading import Thread, Lock
import os

import requests

WEBSITE = "https://google.com/"

# Shared result counters; guarded by a lock because 10 workers update them.
counter_lock = Lock()
working = 0
not_working = 0


def worker(proxy_queue, total=0):
    """Pull proxies off *proxy_queue* until it is empty and test each one.

    :param proxy_queue: queue.Queue of proxy strings ("host:port")
    :param total: overall proxy count, used only for the progress display
    """
    global working, not_working
    while True:
        try:
            # get_nowait() avoids the empty()/get() race of the original:
            # with several workers, empty() could report False and another
            # thread could then drain the queue before get(), blocking forever.
            proxy = proxy_queue.get_nowait()
        except Empty:
            return
        proxies = {
            "http": "http://" + proxy,
            "https": "http://" + proxy,
        }
        try:
            # The original referenced an undefined `website`; use the
            # module-level constant instead.
            r = requests.get(WEBSITE, timeout=1, proxies=proxies)
            ok = r.status_code == 200
        except requests.RequestException:
            ok = False
        with counter_lock:
            # The original reset working/not_working/total to 0 on every
            # iteration, so the tallies never grew past 1.
            if ok:
                working += 1
                print("%s" % proxy)
                mark = "✔"
            else:
                not_working += 1
                print("\t\t    %s" % proxy)
                mark = "✖"
            done = working + not_working
            os.system("title Working: %d\t     Not working %d     %s    Total:   %d/%d"
                      % (working, not_working, mark, done, total))


if __name__ == '__main__':
    # Build a queue
    proxy_queue = Queue()

    # Put these proxies into the queue
    with open("uncheckedproxys.txt") as f:
        for line in f:
            proxy_queue.put(line.strip())
    total = proxy_queue.qsize()

    # Create thread pool.  `args` must be a tuple: the original passed
    # args=proxy_queue, which raises TypeError when Thread unpacks it.
    thread_pool = [Thread(target=worker, args=(proxy_queue, total))
                   for _ in range(10)]

    # Start threads, then wait for them so the script doesn't exit early.
    for thread in thread_pool:
        thread.start()
    for thread in thread_pool:
        thread.join()