PythonBaby

Web Scraper: Domestic COVID-19 Epidemic Data Statistics

The script below fetches DXY's epidemic page (ncov.dxy.cn), extracts the JSON data embedded in its <script> tags, and exports the per-province and per-city statistics to an Excel workbook with openpyxl.

import requests
import re
import json
from openpyxl import Workbook

def export(page_source, id='getAreaStat'):
    # The page embeds its data as a JSON array inside a <script id="..."> tag;
    # grab that tag, then the JSON array inside it (re.S in case it spans lines)
    re_script = re.search(f'script id="{id}".+?/script', page_source, re.S)
    if not re_script:
        return
    str_json = re.search(r'\[.+\]', re_script.group(), re.S).group()
    items = json.loads(str_json)
    # Uncomment to keep a copy of the raw JSON:
    # with open(f'{id}.json', 'w', encoding='utf8') as f:
    #     f.write(str_json)
    if items:
        wb = Workbook()
        ws0 = wb.active
        ws0.title = '全国'  # national summary sheet: one row per province
        ws0.append([str(k) for k, v in items[0].items() if k != 'cities'])
        for item in items:
            # one sheet per province, listing its cities
            ws = wb.create_sheet(item['provinceName'])
            if item['cities']:
                headers = list(item['cities'][0].keys())
                ws.append(headers)
                for city in item['cities']:
                    ws.append(list(city.values()))
            ws0.append([str(v) for k, v in item.items() if k != 'cities'])
        wb.save(f'{id}.xlsx')
        wb.close()

def main():
    # browser-like request headers (some sites reject requests without a User-Agent)
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Connection': 'keep-alive',
        'Host': 'ncov.dxy.cn',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36',
    }
    # fetch the page with browser-like headers and let requests pick the right encoding
    res = requests.get('https://ncov.dxy.cn/ncovh5/view/pneumonia', headers=headers)
    res.encoding = res.apparent_encoding
    export(res.text, 'getAreaStat')        # current per-province / per-city snapshot
    export(res.text, 'fetchRecentStatV2')  # recent statistics dataset

if __name__ == '__main__':
    main()
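
If a single flat table is preferred over one sheet per province, the same getAreaStat items can also be flattened with pandas.json_normalize. This is only a minimal sketch, assuming the structure export() above already relies on (each item carries provinceName plus a nested cities list); pandas is an extra dependency not used by the script.

import pandas as pd

def to_city_table(items):
    # record_path walks into each province's 'cities' list; meta keeps the
    # parent provinceName on every city row
    return pd.json_normalize(items, record_path='cities', meta=['provinceName'])

# usage sketch, reusing the items parsed inside export():
# df = to_city_table(items)
# df.to_excel('getAreaStat_flat.xlsx', index=False)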
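
To sanity-check the generated workbook (the output file name follows the id argument, so getAreaStat.xlsx for the first call), a minimal read-back with openpyxl looks like this:

from openpyxl import load_workbook

# the first sheet is the national summary ('全国'); the rest are one per province
wb = load_workbook('getAreaStat.xlsx', read_only=True)
print(wb.sheetnames[:5])
print(wb['全国'].max_row, 'rows in the summary sheet')
wb.close()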

 
