Coroutines -- annotated code

# 1. Coroutines
# 2. How do we switch execution between two functions?

# def func1():
#     print(1)
#     yield
#     print(3)
#     yield
#
# def func2():
#     g = func1()
#     next(g)
#     print(2)
#     next(g)
#     print(4)
#
# func2()

# def consumer():
#     while True:
#         n = yield
#         print('consumed one baozi %s' % n)
#
# def producer():
#     g = consumer()
#     next(g)
#     for i in range(10):
#         print('produced baozi %s' % i)
#         g.send(i)
#
# producer()
# import time
# from greenlet import greenlet  # module for switching between tasks within a single thread
# def eat1():
#     print('eat drumstick 1')
#     g2.switch()
#     time.sleep(5)
#     print('eat chicken wing 2')
#     g2.switch()
#
# def eat2():
#     print('eat dumpling 1')
#     g1.switch()
#     time.sleep(3)
#     print('white cut chicken')
#
# g1 = greenlet(eat1)
# g2 = greenlet(eat2)
# g1.switch()
# gevent wraps the greenlet module internally
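Since gevent wraps greenlet, the two-eater example above can also be written with gevent. A minimal sketch, assuming gevent is installed; the names simply mirror the greenlet version, and gevent.sleep is the cooperative sleep, so the switching happens automatically at the sleep points instead of through explicit switch() calls:

import gevent

def eat1():
    print('eat drumstick 1')
    gevent.sleep(5)              # cooperative sleep: control jumps to eat2 here
    print('eat chicken wing 2')

def eat2():
    print('eat dumpling 1')
    gevent.sleep(3)              # control comes back only after the wait is over
    print('white cut chicken')

g1 = gevent.spawn(eat1)
g2 = gevent.spawn(eat2)
gevent.joinall([g1, g2])         # finishes in about 5 seconds, not 5 + 3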



# # serial execution
# import time
# def consumer(res):
#     '''task 1: receive and process the data'''
#     pass
#
# def producer():
#     '''task 2: produce the data'''
#     res = []
#     for i in range(100000000):
#         res.append(i)
#     return res
#
# start = time.time()
# # run serially
# res = producer()
# consumer(res)  # writing it as consumer(producer()) would lower execution efficiency
# stop = time.time()
# print(stop - start)  # 1.5536692142486572
#
#
#
# # yield-based concurrent execution
# import time
# def consumer():
#     '''task 1: receive and process the data'''
#     while True:
#         x = yield
#
# def producer():
#     '''task 2: produce the data'''
#     g = consumer()
#     next(g)
#     for i in range(100000000):
#         g.send(i)
#
# start = time.time()
# # yield saves state, so the two tasks switch back and forth -- the effect of concurrency
# # PS: if you add a print to each task, you can clearly see their output alternating, i.e. they run concurrently.
# producer()
#
# stop = time.time()
# print(stop - start)

# Switching execution back and forth between pieces of code actually lowers efficiency on its own:
# the switching by itself cannot skip the IO time.

# Only when the same program contains IO, and we switch away while waiting on it, does switching raise efficiency a lot.
# Neither yield nor greenlet can skip the IO time when they switch.
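A minimal sketch of that last point, assuming greenlet is installed: the two greenlets still hand control back and forth, but each blocking time.sleep runs to completion before the switch happens, so the total elapsed time is the sum of the sleeps. gevent only avoids this (in the comparison below) because monkey.patch_all() swaps blocking calls such as time.sleep and socket operations for cooperative ones.

import time
from greenlet import greenlet

def t1():
    time.sleep(1)   # blocking: nothing else runs during this second
    g2.switch()

def t2():
    time.sleep(1)   # also blocking: only starts after t1's sleep has finished

g1 = greenlet(t1)
g2 = greenlet(t2)

start = time.time()
g1.switch()                   # runs t1, then t2, then control returns to the main greenlet
print(time.time() - start)    # roughly 2 seconds -- the switching saved nothing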

Efficiency comparison

from gevent import monkey;monkey.patch_all()
import time  # time socket urllib requests
import gevent  # based on greenlet; on top of switching between tasks, gevent also skips the IO wait

def task(args):
    time.sleep(1)
    print(args)

def sync_func():  # synchronous
    for i in range(10):
        task(i)

def async_func():  # asynchronous
    g_l = []
    for i in range(10):
        g_l.append(gevent.spawn(task, i))  # pass the argument to the coroutine task
    gevent.joinall(g_l)

start = time.time()
sync_func()
print(time.time() - start)

start = time.time()
async_func()
print(time.time() - start)
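# With the patched time.sleep, the ten 1-second waits overlap: the synchronous loop takes roughly 10 seconds, the gevent version roughly 1 second.
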
Crawling web pages
from gevent import monkey;monkey.patch_all()
import time
import gevent
import requests

# crawl web pages
# 10 pages
# have coroutine functions launch the 10 crawl tasks
def get_url(url):
    res = requests.get(url)
    print(url, res.status_code, len(res.text))

url_lst = [
    'http://www.sohu.com',
    'http://www.baidu.com',
    'http://www.qq.com',
    'http://www.python.org',
    'http://www.cnblogs.com',
    'http://www.mi.com',
    'http://www.apache.org',
    'https://www.taobao.com',
    'http://www.360.com',
    'http://www.7daysinn.cn/'
]

start = time.time()
for url in url_lst:
    get_url(url)
print(time.time() - start)
Crawling web pages: efficiency
from gevent import monkey;monkey.patch_all()
import time
import gevent
import requests

# crawl web pages
# 10 pages
# coroutine functions launch the 10 crawl tasks
def get_url(url):
    res = requests.get(url)
    print(url, res.status_code, len(res.text))

url_lst = [
    'http://www.sohu.com',
    'http://www.baidu.com',
    'http://www.qq.com',
    'http://www.python.org',
    'http://www.cnblogs.com',
    'http://www.mi.com',
    'http://www.apache.org',
    'https://www.taobao.com',
    'http://www.360.com',
    'http://www.7daysinn.cn/'
]

g_lst = []
start = time.time()
for url in url_lst:
    g = gevent.spawn(get_url, url)
    g_lst.append(g)
gevent.joinall(g_lst)
print(time.time() - start)
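# The spawned requests overlap while each one waits on the network, so the total time is roughly that of the slowest page rather than the sum of all ten, unlike the serial loop above.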
