Python async crawler: writing the results to a database

This example uses aiohttp to issue the HTTP requests concurrently and then writes the collected JSON results to the database in one batch. The database helpers assume an aiomysql-style connection pool exposed as db.pool.

import aiohttp
import asyncio



async def execute_many(db, sql, *parameters, **kwparameters):
    # Run a batch statement on a connection borrowed from the pool.
    async with db.pool.acquire() as conn:
        async with conn.cursor() as cur:
            try:
                await cur.executemany(sql, parameters or kwparameters)
            except Exception:
                # Reconnect if the connection went stale, then retry once.
                await conn.ping()
                await cur.executemany(sql, parameters or kwparameters)
            await conn.commit()
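execute_many assumes a db object that exposes an aiomysql connection pool as db.pool, but the original post never defines it. A minimal sketch of such a wrapper (the DB class name and the create factory are invented for illustration) could look like this:

import aiomysql

class DB:
    # Thin wrapper that holds an aiomysql connection pool as .pool.
    def __init__(self, pool):
        self.pool = pool

    @classmethod
    async def create(cls, **conn_kwargs):
        # conn_kwargs: host, port, user, password, db, and so on.
        pool = await aiomysql.create_pool(**conn_kwargs)
        return cls(pool)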



async def get_res_json(url, **kwargs):
    # Fetch one URL and decode the response body as JSON.
    # A fresh session is opened per call; see the shared-session variant below.
    async with aiohttp.ClientSession() as session:
        async with session.get(url, **kwargs) as response:
            data = await response.json()
            return data
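get_res_json opens a new ClientSession for every call. For many concurrent requests, aiohttp recommends sharing one session so that connections are reused; a variant that accepts a caller-provided session (a sketch, not from the original post) looks like this:

async def get_res_json_shared(session, url, **kwargs):
    # The caller owns the session, so all requests share one connection pool.
    async with session.get(url, **kwargs) as response:
        return await response.json()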

async def write_db(db):
    # Schedule one request per id; each task starts running immediately.
    tasks = []
    id_list = range(1, 50)
    for id in id_list:
        url = "http://www.baidu.com/player/{}".format(id)
        tasks.append(asyncio.create_task(get_res_json(url)))

    # Wait for every request and collect the results in order.
    result = await asyncio.gather(*tasks)
    await execute_many(db, "INSERT INTO ...", *result)
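The INSERT statement is elided in the original. For reference, executemany expects one parameter tuple per row, matching the %s placeholders in the SQL; a hypothetical call (the table and column names are invented for illustration) would look like:

async def demo(db):
    # Each tuple becomes one inserted row.
    rows = [(1, "alice"), (2, "bob")]
    await execute_many(db, "INSERT INTO players (id, name) VALUES (%s, %s)", *rows)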




if __name__ == "__main__":
    db = ...  # set up the database object here; it must expose db.pool (see the sketch below)
    asyncio.run(write_db(db))
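A complete entry point replacing the placeholder above could look like the following sketch. The DB.create factory comes from the hypothetical wrapper shown earlier, and every connection setting here is a placeholder, not a value from the original post.

async def main():
    # Placeholder credentials; replace with your own settings.
    db = await DB.create(host="127.0.0.1", port=3306, user="root",
                         password="secret", db="crawler")
    try:
        await write_db(db)
    finally:
        # Close the pool cleanly once all inserts have finished.
        db.pool.close()
        await db.pool.wait_closed()

if __name__ == "__main__":
    asyncio.run(main())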




