Multithreading, Multiprocessing, and Coroutines (repost)


Author: 言午日尧耳总 | Published 2022-04-05 17:33


    Multithreading

    1. Define a function
    def my_func(a, b):
        # placeholder for the real work
        do_something(a, b)
    
    2. Create a thread
    import threading
    
    t = threading.Thread(target=my_func, args=(a, b))
    
    3. Start the thread
    t.start()
    
    4. Wait for it to finish (a combined runnable sketch follows these steps)
    t.join()
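
    Putting the four steps together, a minimal runnable sketch (the worker just sleeps to stand in for real work, and the argument values are arbitrary):
    import threading
    import time


    def my_func(a, b):
        # Stand-in for real work, e.g. downloading a page
        time.sleep(1)
        print(f"finished with a={a}, b={b}")


    # Create several threads, start them all, then wait for all of them
    threads = [threading.Thread(target=my_func, args=(i, i * 2)) for i in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()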
    

    Queues

    • queue.Queue is thread-safe, so it can be shared between threads without extra locking; a producer/consumer sketch follows below
    import queue
    
    q = queue.Queue()
    
    # Put and get items
    q.put(item)  # item can be any Python object
    item = q.get()
    
    # Inspect queue state
    q.qsize()
    q.empty()
    q.full()
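
    A common pattern is a producer/consumer pair sharing one Queue between two threads; a minimal sketch (producer and consumer are hypothetical names):
    import queue
    import threading
    import time

    q = queue.Queue()


    def producer():
        # Hypothetical producer: pushes five items
        for i in range(5):
            q.put(i)
            time.sleep(0.1)


    def consumer():
        # Hypothetical consumer: pulls the same five items
        for _ in range(5):
            item = q.get()  # blocks until an item is available
            print(f"consumed {item}")


    t1 = threading.Thread(target=producer)
    t2 = threading.Thread(target=consumer)
    t1.start()
    t2.start()
    t1.join()
    t2.join()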
    

    Thread safety (Lock)

    1. try-finally
    import threading
    
    lock = threading.Lock()
    lock.acquire()
    try:
        # do something
    finally:
        lock.release()
    
    2. with (a worked counter example follows below)
    import threading
    
    lock = threading.Lock()
    with lock:
        # do something
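
    A worked example, assuming four threads increment one shared counter; without the lock the read-add-write steps of += can interleave between threads and lose updates:
    import threading

    lock = threading.Lock()
    count = 0


    def add_many():
        global count
        for _ in range(100_000):
            with lock:
                count += 1


    threads = [threading.Thread(target=add_many) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(count)  # always 400000 with the lock; may be smaller without it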
    

    Thread pool

    1. map: results are returned in the same order as the inputs
    from concurrent.futures import ThreadPoolExecutor
    
    arg_list = []
    
    with ThreadPoolExecutor() as pool:
        results = pool.map(my_func, arg_list)
    
        for result in results:
            print(result)
    
    2. submit: futures can be consumed in input order, or in completion order via as_completed (a concrete runnable sketch follows this list)
    from concurrent.futures import ThreadPoolExecutor, as_completed
    
    arg_list = []
    
    with ThreadPoolExecutor() as pool:
        futures = [pool.submit(my_func, arg) for arg in arg_list]
    
        # In input order
        for future in futures:
            print(future.result())
    
        # In completion order
        for future in as_completed(futures):
            print(future.result())
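
    The fragments above use a placeholder my_func and an empty arg_list; a concrete runnable sketch with a hypothetical sleep-based task:
    import time
    from concurrent.futures import ThreadPoolExecutor, as_completed


    def fetch(n):
        # Hypothetical I/O-bound task: earlier tasks sleep longer
        time.sleep((5 - n) * 0.1)
        return f"task {n} finished"


    with ThreadPoolExecutor(max_workers=3) as pool:
        # map: results come back in input order
        for result in pool.map(fetch, range(5)):
            print(result)

        # submit + as_completed: results come back as tasks finish
        futures = [pool.submit(fetch, n) for n in range(5)]
        for future in as_completed(futures):
            print(future.result())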
    

    Using a thread pool in Flask

    import time
    from concurrent.futures import ThreadPoolExecutor
    from flask import Flask
    
    app = Flask(__name__)
    pool = ThreadPoolExecutor()
    
    
    def do_1():
        time.sleep(1)
        return 'do_1'
    
    
    def do_2():
        time.sleep(1)
        return 'do_2'
    
    
    def do_3():
        time.sleep(1)
        return 'do_3'
    
    
    @app.route("/")
    def index():
        result_1 = pool.submit(do_1)
        result_2 = pool.submit(do_2)
        result_3 = pool.submit(do_3)
        return {
            '1': result_1.result(),
            '2': result_2.result(),
            '3': result_3.result(),
        }
    
    
    if __name__ == "__main__":
        app.run()
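
    Because the three do_* calls are submitted to the pool and run concurrently, the request returns in roughly 1 second instead of the 3 seconds the calls would take sequentially.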
    

    Multiprocessing

    [Figure: multiprocessing.png — screenshot from the 蚂蚁学Python Bilibili video at 03:00]

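    Process pools expose the same concurrent.futures interface as thread pools; a minimal non-Flask sketch of running a CPU-bound function across processes (is_prime and the sample numbers are illustrative):
    from concurrent.futures import ProcessPoolExecutor


    def is_prime(n):
        # Hypothetical CPU-bound task
        if n < 2:
            return False
        return all(n % i for i in range(2, int(n ** 0.5) + 1))


    if __name__ == "__main__":
        numbers = [112272535095293, 112582705942171, 115280095190773]
        with ProcessPoolExecutor() as pool:
            for n, prime in zip(numbers, pool.map(is_prime, numbers)):
                print(n, prime)
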
    Using a process pool in Flask

    import time
    from concurrent.futures import ProcessPoolExecutor
    from flask import Flask
    
    app = Flask(__name__)
    
    
    def do_1():
        time.sleep(1)
        return 'do_1'
    
    
    def do_2():
        time.sleep(1)
        return 'do_2'
    
    
    def do_3():
        time.sleep(1)
        return 'do_3'
    
    
    @app.route("/")
    def index():
        result_1 = pool.submit(do_1)
        result_2 = pool.submit(do_2)
        result_3 = pool.submit(do_3)
        return {
            '1': result_1.result(),
            '2': result_2.result(),
            '3': result_3.result(),
        }
    
    
    if __name__ == "__main__":
        # Create the process pool under the main guard so that worker
        # processes are only spawned when the script runs as the main module
        pool = ProcessPoolExecutor()
        app.run()
    

    Coroutines: asyncio and await

    import asyncio
    import aiohttp
    
    loop = asyncio.get_event_loop()
    
    
    async def get_url(url):
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                result = await resp.text()
                print(f"url:{url},{len(result)}")
    
    
    urls = [f"https://www.cnblogs.com/#p{page}" for page in range(1, 50 + 1)]
    
    tasks = [loop.create_task(get_url(url)) for url in urls]
    
    loop.run_until_complete(asyncio.wait(tasks))
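
    On Python 3.7+ the same crawl is usually written with asyncio.run and asyncio.gather instead of driving the loop by hand; a roughly equivalent sketch:
    import asyncio
    import aiohttp


    async def get_url(session, url):
        async with session.get(url) as resp:
            result = await resp.text()
            print(f"url:{url},{len(result)}")


    async def main():
        urls = [f"https://www.cnblogs.com/#p{page}" for page in range(1, 50 + 1)]
        # Share one ClientSession across all the requests
        async with aiohttp.ClientSession() as session:
            await asyncio.gather(*(get_url(session, url) for url in urls))


    asyncio.run(main())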
    
    

    Limiting asyncio concurrency

    1. try-finally
    import asyncio
    sem = asyncio.Semaphore(10)
    
    # inside a coroutine:
    await sem.acquire()
    try:
        # do something
    finally:
        sem.release()
    
    2. async with
    import asyncio
    sem = asyncio.Semaphore(10)
    
    # inside a coroutine:
    async with sem:
        # do something
    
    • Example
    import asyncio
    import aiohttp
    
    loop = asyncio.get_event_loop()
    
    # Limit concurrency to 10
    semaphore = asyncio.Semaphore(10)
    
    
    async def get_url(url):
        async with semaphore:
            async with aiohttp.ClientSession() as session:
                async with session.get(url) as resp:
                    result = await resp.text()
                    print(f"url:{url},{len(result)}")
    
    
    urls = [f"https://www.cnblogs.com/#p{page}" for page in range(1, 50 + 1)]
    
    tasks = [loop.create_task(get_url(url)) for url in urls]
    
    loop.run_until_complete(asyncio.wait(tasks))
    
    
