Single-Threaded Multi-Task Asynchronous Crawler

The script below downloads two pages concurrently from a single thread. The commented-out version of get_request shows why plain requests is not enough: requests.get() blocks, so even inside a coroutine the tasks would run one after another. aiohttp makes the request itself awaitable, letting the event loop switch to another task while one waits on the network.

import asyncio
import time

import aiohttp
import requests  # only needed by the commented-out blocking version below

start = time.time()
urls = [
    'http://127.0.0.1:5000/bobo',
    'http://127.0.0.1:5000/jay'
]

# Blocking version: requests.get() never yields to the event loop,
# so these coroutines would effectively run one after another.
# async def get_request(url):
#     page_text = requests.get(url).text
#     return page_text
async def get_request(url):
    # session.get() is awaitable, so the event loop can run other
    # tasks while this request is waiting on network I/O
    async with aiohttp.ClientSession() as session:
        async with session.get(url=url) as response:
            page_text = await response.text()
            print(page_text)
    return page_text
tasks = []
for url in urls:
    c = get_request(url)             # calling a coroutine function only creates a coroutine object
    task = asyncio.ensure_future(c)  # wrap it in a Task so the loop can schedule it
    tasks.append(task)

loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait(tasks))  # drive all tasks to completion concurrently

print(time.time() - start)
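
The two URLs point at a local test server that the post does not include. Below is a minimal sketch of one, assuming Flask and an artificial two-second delay per route so the concurrency is visible; the delay, the response bodies, and Flask itself are assumptions here, only the route names /bobo and /jay come from the urls list.

from flask import Flask
import time

app = Flask(__name__)

@app.route('/bobo')
def bobo():
    time.sleep(2)  # simulated slow endpoint (assumed delay)
    return 'Hello bobo'

@app.route('/jay')
def jay():
    time.sleep(2)
    return 'Hello jay'

if __name__ == '__main__':
    app.run()

Under these assumptions, the blocking requests version would need about four seconds for the two URLs, while the aiohttp version above finishes in roughly two.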

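On Python 3.7+, the ensure_future / get_event_loop / run_until_complete boilerplate is usually replaced with asyncio.run and asyncio.gather, and one ClientSession can be shared across all requests. A sketch of the same crawler in that newer style (same behavior, different API):

import asyncio
import time

import aiohttp

urls = [
    'http://127.0.0.1:5000/bobo',
    'http://127.0.0.1:5000/jay',
]

async def get_request(session, url):
    async with session.get(url) as response:
        page_text = await response.text()
        print(page_text)
        return page_text

async def main():
    # one shared session is cheaper than opening one per task
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(get_request(session, url) for url in urls))

start = time.time()
asyncio.run(main())
print(time.time() - start)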