An async (coroutine) crawler that downloads a whole novel

# URL notes
# chapter catalog (chapter ids)
# http://dushu.baidu.com/api/pc/getCatalog?data={book_id:4306063500}

# chapter 1 content
# http://dushu.baidu.com/api/pc/getChapterContent?data={book_id:4306063500,cid:4306063500|11348571,need_bookinfo:1}
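The data query parameter is just a JSON object serialized to a string, and its cid field joins the book id and the chapter id with a |. A minimal sketch of assembling the chapter-content URL from the ids noted above (11348571 is the first-chapter id; all values here come from the notes):

# sketch: build a getChapterContent URL for one chapter
import json

bid = "4306063500"        # book id from the notes above
chapter_id = "11348571"   # first-chapter id from the notes above
data = json.dumps({
    "book_id": bid,
    "cid": f"{bid}|{chapter_id}",   # "<book_id>|<chapter_id>"
    "need_bookinfo": 1,
})
print(f"http://dushu.baidu.com/api/pc/getChapterContent?data={data}")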

import requests
import aiohttp
import asyncio
import json
import aiofiles

'''
Steps:
1. Synchronous step: fetch the catalog with every chapter title.
2. Asynchronous step: download each chapter's content concurrently.
'''
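In outline, step 1 is one blocking request that builds the chapter list, and step 2 is one coroutine per chapter run concurrently with asyncio.gather, which is what the two functions below implement. A minimal runnable sketch of just that shape, with placeholder names and asyncio.sleep standing in for the real network and file I/O:

# sketch of the sync-then-async shape (all names here are placeholders)
def list_chapters_stub():
    # stand-in for the blocking catalog request (step 1)
    return [f"chapter_{i}" for i in range(3)]

async def download_stub(title):
    # stand-in for the aiohttp request + aiofiles write (step 2)
    await asyncio.sleep(0.1)
    print("finished", title)

async def demo():
    await asyncio.gather(*(download_stub(t) for t in list_chapters_stub()))

# asyncio.run(demo())  # all three "downloads" finish after roughly 0.1 s, not 0.3 s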


async def downwenzhang(cid, bid, title):
    # build the request URL: the API takes a JSON string in the "data" query parameter
    data = json.dumps({
        'book_id': bid,
        'cid': f"{bid}|{cid}",   # "<book_id>|<chapter_id>"
        'need_bookinfo': 1
    })
    url = f'http://dushu.baidu.com/api/pc/getChapterContent?data={data}'
    # fetch the chapter content asynchronously
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            dic = await resp.json()
            # write the chapter text to "<title>.txt" with non-blocking file I/O
            async with aiofiles.open(title + ".txt", mode='w', encoding='utf-8') as f:
                await f.write(dic['data']['novel']['content'])
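downwenzhang can be smoke-tested on a single chapter before crawling the whole catalog; 11348571 is the first-chapter id from the notes at the top, and the title argument is just a placeholder filename:

# one-off smoke test for the downloader (left commented out so it doesn't run with the full crawl)
# asyncio.run(downwenzhang(cid="11348571", bid="4306063500", title="chapter_01"))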

async def getCatalog(url, bid):
    # step 1: fetch the chapter catalog with an ordinary blocking request
    resp = requests.get(url)
    txt = resp.json()
    # print(txt)  # uncomment to inspect the raw catalog JSON
    # step 2: create one download coroutine per chapter and run them concurrently
    tasks = []
    for item in txt["data"]["novel"]["items"]:
        title = item["title"]
        cid = item["cid"]
        tasks.append(downwenzhang(cid, bid, title))
    # asyncio.wait() no longer accepts bare coroutines (Python 3.11+), so use gather instead
    await asyncio.gather(*tasks)


if __name__ == '__main__':
    bid = "4306063500"
    url = f'http://dushu.baidu.com/api/pc/getCatalog?data={{"book_id":"{bid}"}}'
    asyncio.run(getCatalog(url, bid))