
Python Multiprocessing

Author: 人生苦短啊 | Published 2018-09-05 11:22

1. Computing Fibonacci with multiprocessing

For CPU-bound work, multiprocessing beats multithreading: because of the GIL, only one thread can execute Python bytecode at a time, whereas each process gets its own interpreter. Comparing the two with a recursive Fibonacci function, the multiprocess version is far faster (about 19 s versus 39 s with two workers).

# Multithreaded version
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

def fib(n):
    if n <= 2:
        return 1
    return fib(n-1) + fib(n-2)

if __name__ == "__main__":
    with ThreadPoolExecutor(2) as executor:
        start_time = time.time()
        all_task = [executor.submit(fib, num) for num in range(38)]      # ~39s
        for future in as_completed(all_task):
            data = future.result()
            print("exe result: {}".format(data))

        print("last time is: {}".format(time.time() - start_time))
# Multiprocess version
import time
from concurrent.futures import ProcessPoolExecutor, as_completed


def fib(n):
    if n <= 2:
        return 1
    return fib(n-1) + fib(n-2)

if __name__ == "__main__":
    with ProcessPoolExecutor(2) as executor:
        start_time = time.time()
        all_task = [executor.submit(fib, num) for num in range(38)]      # ~19s
        for future in as_completed(all_task):
            data = future.result()
            print("exe result: {}".format(data))

        print("last time is: {}".format(time.time() - start_time))

2. Using a process pool

# Multiprocess programming with multiprocessing.Pool
import multiprocessing
import time

def get_html(n):
    time.sleep(n)
    print("sub_progress success")
    return n

if __name__ == "__main__":
pool = multiprocessing.Pool(multiprocessing.cpu_count())
 #使用线程池**********************************************************
    result = pool.apply_async(get_html, args=(3,))
    #等待所有任务完成
    pool.close()
    pool.join()

    print(result.get())   # sub_progress success
                          #3
# ********************************************************************
# imap 会按顺序打印
    for result in pool.imap(get_html, [1,5,3]):
        print("{} sleep success".format(result))
# sub_progress success
# 1 sleep success
# sub_progress success
# sub_progress success
# 5 sleep success
# 3 sleep success
# ********************************************************************
# imap_unordered 按时间顺序打印
    for result in pool.imap_unordered(get_html, [1,5,3]):
        print("{} sleep success".format(result))

# sub_progress success
# 1 sleep success
# sub_progress success
# 3 sleep success
# sub_progress success
# 5 sleep success
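Pool.map is another common entry point: it blocks until every task has finished and hands back the results in the same order as the inputs. A minimal self-contained sketch reusing the get_html worker (this snippet is an addition, not part of the original example):

import multiprocessing
import time

def get_html(n):
    time.sleep(n)
    return n

if __name__ == "__main__":
    # Pool supports the with-statement since Python 3.3
    with multiprocessing.Pool(2) as pool:
        # map blocks until all tasks are done and preserves input order
        results = pool.map(get_html, [2, 1])
        print(results)   # [2, 1]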

3. Using a Queue between processes

A queue shared between processes must be the Queue from multiprocessing; the thread-oriented queue.Queue does not work across process boundaries.

import time
from multiprocessing import Process, Queue, Manager, Pool, Pipe
def producer(queue):
    queue.put("a")
    time.sleep(2)

def consumer(queue):
    time.sleep(2)
    data = queue.get()
    print(data)

if __name__=="__main__":
    queue = Queue(10)
    my_producer = Process(target=producer, args=(queue,))
    my_consumer = Process(target=consumer, args=(queue,))
    my_producer.start()
    my_consumer.start()
    my_producer.join()
    my_consumer.join()  # prints: a
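One related caveat (an addition, not from the original post): a multiprocessing.Queue cannot be handed to the workers of a Pool, because it may only be shared through inheritance; with a pool you would use Manager().Queue() instead. A minimal sketch, assuming the same producer/consumer pattern:

import time
from multiprocessing import Manager, Pool

def producer(queue):
    queue.put("a")
    time.sleep(2)

def consumer(queue):
    time.sleep(2)
    print(queue.get())

if __name__ == "__main__":
    # a Manager().Queue() proxy can be pickled and passed to pool workers,
    # while a plain multiprocessing.Queue would fail here
    queue = Manager().Queue(10)
    pool = Pool(2)
    pool.apply_async(producer, args=(queue,))
    pool.apply_async(consumer, args=(queue,))
    pool.close()
    pool.join()   # prints: a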

4. Shared variables do not work across processes

# Shared global variables do not work in multiprocess programming
# (they do work with threads, which share one address space)
import time
from multiprocessing import Process, Queue, Manager, Pool, Pipe

def producer(a):
    a += 100
    time.sleep(2)

def consumer(a):
    time.sleep(2)
    print(a)

if __name__ == "__main__":
    a = 1
    my_producer = Process(target=producer, args=(a,))
    my_consumer = Process(target=consumer, args=(a,))
    my_producer.start()
    my_consumer.start()
    my_producer.join()
    my_consumer.join()
    # prints 1: each process works on its own copy of a, so the producer's change is not visible
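When a simple number really does need to be shared between processes, the standard library offers multiprocessing.Value (and Array), which live in shared memory. This is an addition to the original post, just a minimal sketch:

import time
from multiprocessing import Process, Value

def producer(a):
    with a.get_lock():    # the Value carries its own lock for safe updates
        a.value += 100
    time.sleep(2)

def consumer(a):
    time.sleep(2)
    print(a.value)

if __name__ == "__main__":
    a = Value("i", 1)     # "i" means C int, initial value 1
    my_producer = Process(target=producer, args=(a,))
    my_consumer = Process(target=consumer, args=(a,))
    my_producer.start()
    my_consumer.start()
    my_producer.join()
    my_consumer.join()    # prints: 101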

5. Inter-process communication via Pipe

# Pipe is faster than Queue (a Queue is built on top of a pipe plus locks)

from multiprocessing import Process, Pipe

def producer(pipe):
    pipe.send("fz")


def consumer(pipe):
    print(pipe.recv())


if __name__ == "__main__":
    receive_pipe, send_pipe = Pipe()
    # a Pipe has exactly two ends, so it only connects two processes
    my_producer = Process(target=producer, args=(send_pipe,))
    my_consumer = Process(target=consumer, args=(receive_pipe,))

    my_producer.start()
    my_consumer.start()
    my_producer.join()
    my_consumer.join()
    # fz
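By default the two connection objects returned by Pipe() are duplex, so either end can both send and receive. A small sketch of a request/reply exchange (an addition, not from the original post):

from multiprocessing import Process, Pipe

def worker(conn):
    name = conn.recv()            # receive the request from the parent
    conn.send("hello " + name)    # reply over the same connection

if __name__ == "__main__":
    parent_conn, child_conn = Pipe()    # duplex by default
    p = Process(target=worker, args=(child_conn,))
    p.start()
    parent_conn.send("fz")
    print(parent_conn.recv())     # hello fz
    p.join()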

6. Two processes operating on one dict

Besides dict, multiprocessing.Manager exposes shared versions of most common Python types (list, Queue, Lock, Value, and so on) to choose from.

import time
from multiprocessing import Process, Queue, Manager, Pool, Pipe

def add_data(p_dict, key, value):
    p_dict[key] = value

if __name__=="__main__":
    progress_dict = Manager().dict()

    first_progress = Process(target=add_data, args=(progress_dict, "fz",21))
    second_progress = Process(target=add_data, args=(progress_dict, "xk",19))

    first_progress.start()
    second_progress.start()
    first_progress.join()
    second_progress.join()
    print(progress_dict)
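A Manager dict proxy makes a single assignment like p_dict[key] = value process-safe, but a read-modify-write (for example incrementing a counter) still needs a lock. A minimal sketch using Manager().Lock(); this is an addition to the original example:

from multiprocessing import Process, Manager

def add_many(p_dict, lock, n):
    for _ in range(n):
        with lock:    # protect the read-modify-write on the shared dict
            p_dict["count"] = p_dict.get("count", 0) + 1

if __name__ == "__main__":
    manager = Manager()
    progress_dict = manager.dict()
    lock = manager.Lock()

    workers = [Process(target=add_many, args=(progress_dict, lock, 1000))
               for _ in range(2)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
    print(progress_dict["count"])   # 2000 with the lock; often less without it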
