Python学习_进程multiprocessing & 多进程 & 协程

进程的简单用法:

#!/usr/bin/env python
# -*- coding:utf-8 -*-
from multiprocessing import Process
import time


# 多进程 multiprocessing 并非是python的一个模块,而是python中多进程管理的一个包,
# 在学习的时候可以与threading这个模块作类比
def func(name, delay=1):
    """Print "hello <name>" three times, sleeping *delay* seconds before each.

    Args:
        name: who to greet.
        delay: seconds to sleep before each greeting. The original hard-coded
            time.sleep(1000), almost certainly a seconds/milliseconds mix-up
            that stalled the demo for ~17 minutes per print; the default of 1
            restores the intended pacing, and the constant is now a parameter
            so callers/tests can skip the wait entirely.
    """
    for _ in range(3):
        time.sleep(delay)
        print("hello", name)


if __name__ == "__main__":
    # Start two worker processes; the parent continues immediately, so
    # "2222222" usually prints before any greeting from the children.
    for person in ("guolei", "xiaming"):
        worker = Process(target=func, args=(person,))
        # worker.daemon = True  # analogous to threading's setDaemon()
        worker.start()
        # worker.join()
    print("2222222")

 

进程间的数据是无法共享的:

#!/usr/bin/env python
# -*- coding:utf-8 -*-
import multiprocessing
import threading

# Module-level list. Threads would all share this one object, but each
# child *process* works on its own copy of module state.
LI = []


def append_to_li(item, p_name):
    """Append *item* to LI and print which list object this process observes.

    The printed id() demonstrates that every process sees an independent LI,
    so appends made in one process are invisible to the others.
    """
    LI.append(item)
    print(p_name, LI, id(LI))


if __name__ == '__main__':
    for idx in range(10):
        # With Process, each child mutates its own copy of LI — no sharing.
        worker = multiprocessing.Process(
            target=append_to_li, args=(idx, "p_name" + str(idx),)
        )
        # With Thread, every worker would share the same LI object.
        # worker = threading.Thread(target=append_to_li, args=(idx, "p_name" + str(idx),))
        worker.start()
    print("end ", LI, id(LI))

 

使用Array 和 Manager 可以实现进程之间的通讯:

#!/usr/bin/env python
# -*- coding:utf-8 -*-
from multiprocessing import Process, Array, Manager
import time


def Foo(i, temp):
    """Write 100 + i into slot *i* of the shared array and print every entry.

    *temp* is expected to be an indexable sequence (the demo passes a
    multiprocessing.Array), so writes here are visible to other processes.
    """
    temp[i] = 100 + i
    pos = 0
    while pos < len(temp):
        print(i, "----->", temp[pos])
        pos += 1


def Foo2(i, dic):
    """Record 100 + i under key *i* in *dic* and print all current values.

    The demo passes a Manager().dict() proxy, so the write is visible
    across processes.
    """
    value = 100 + i
    dic[i] = value
    print(dic.values())


if __name__ == '__main__':
    # Shared ctypes array of four signed ints ("i" typecode); writes made by
    # the children are visible in the parent.
    temp = Array("i", [11, 22, 33, 44])
    for worker_id in range(2):
        child = Process(target=Foo, args=(worker_id, temp,))
        child.start()
    # NOTE(review): there is no join() before reading, so the values printed
    # below race with the children's writes — presumably deliberate for this demo.
    for value in temp:
        print(value)

    # Manager-based alternative: a manager process hosts a shared dict proxy.
    # m = Manager()
    # dic = m.dict()
    #
    # for i in range(2):
    #     p = Process(target=Foo2, args=(i, dic,))
    #     p.start()
    #     # join() is needed here, because otherwise the parent may exit
    #     # while the children are still running
    #     # p.join()
    # time.sleep(10)

 

Python的 multiprocessing 模块自带进程池功能,可直接使用:

#!/usr/bin/env python
# -*- coding:utf-8 -*-
from multiprocessing import Pool
import time


# 进程池demo
def foo(i, delay=1):
    """Simulate a slow pool task: sleep, then return the decorated input.

    Args:
        i: string prefix for the result — the demo passes "process_N". Note
           a non-str argument would raise TypeError on the concatenation
           below (the commented apply_async(..., args=(1,)) call would fail).
        delay: seconds of simulated work; parameterized (default 1 preserves
           the original behavior) so tests can skip the wait.

    Returns:
        i concatenated with " hello".
    """
    time.sleep(delay)
    return i + " hello"


def bar(arg):
    """Pool callback: invoked in the parent process with a worker's return value."""
    print(arg)


if __name__ == '__main__':
    # Pool of 5 worker processes.
    pool = Pool(5)

    # Synchronous single-task variant:
    # print(pool.apply_async(func=foo, args=(1,)).get())

    for task_id in range(10):
        # apply() runs tasks one at a time — it blocks (internally joins the worker).
        # pool.apply(foo, (str(task_id),))
        # apply_async() submits tasks concurrently (workers are daemonized) and
        # runs *callback* in the parent when each task finishes.
        pool.apply_async(func=foo, args=("process_" + str(task_id),), callback=bar)
        print("11111111111111111111111")

    print("end")
    # pool.close()
    # pool.join()  # wait for pooled tasks to finish; without this the program exits immediately

 

协程gevent简单用法:

#!/usr/bin/env python
# -*- coding:utf-8 -*-
from greenlet import greenlet
from gevent import monkey;

monkey.patch_all()  # 这一句必须要,猴子补丁
import gevent
import requests


def foo(url):
    """Fetch *url* with requests and report how many bytes the body contained."""
    print("Get: %s" % url)
    body = requests.get(url).text
    print("%d bytes received from %s" % (len(body), url))


# 协程可以一次发出多个请求,适用于需要等待的网络I/O操作
# Coroutines let several requests wait concurrently — a good fit for
# network I/O, where most of the time is spent blocked.
gevent.joinall([
    gevent.spawn(foo, site)
    for site in (
        "https://www.python.org/",
        "https://www.yahoo.com/",
        "https://github.com/",
    )
])

# gevent basics (high-level coroutine module):
# def foo():
#     print("11111111111")
#     gevent.sleep(0)
#     print("22222222222")
#
#
# def bar():
#     print("33333333333")
#     gevent.sleep(0)
#     print("44444444444")
#
#
# gevent.joinall([
#     gevent.spawn(foo),
#     gevent.spawn(bar),
# ])

# greenlet module: low level and weak — normally you use the higher-level
# gevent instead (gevent is built on top of greenlet).
# def test1():
#     print(12)
#     gr2.switch()
#     print(34)
#     gr2.switch()
#
#
# def test2():
#     print(56)
#     gr1.switch()
#     print(78)
#
#
# gr1 = greenlet(test1)
# gr2 = greenlet(test2)
#
# gr1.switch()
 

posted @ 2019-03-21 17:19  錦衣夜行  阅读(172)  评论(0编辑  收藏  举报