Python Crawler Practice: Scraping 163 (NetEase) Epidemic Data into a Liaoning Epidemic Map
# encoding=utf-8
import datetime
import json

import requests
from fake_useragent import UserAgent
from pyecharts import options as opts
from pyecharts.charts import Map

ua = UserAgent()
headers = {'User-Agent': ua.random}
url = "https://c.m.163.com/ug/api/wuhan/app/index/feiyan-data-list?t=1580469818264"


# Fetch the epidemic data
def geturl(url):
    try:
        response = requests.get(url, headers=headers)
        print(response.status_code)
        if response.status_code == 200:
            content_field = json.loads(response.text)
            return content_field['data']['list']
        else:
            print('Response code: ' + str(response.status_code))
            return None
    except Exception as e:
        print('Problem fetching this page!', e)
        return None


# Draw the epidemic map (a Liaoning province map; maptype='china' would
# instead show the whole country at province level)
def makemap(province_distribution):
    # Use yesterday's date in the title; date arithmetic with datetime
    # avoids the month-boundary bug of int("%Y%m%d") - 1
    yesterday = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%Y%m%d")
    title = yesterday + "辽宁疫情地图"
    liaoning_map = Map()
    liaoning_map.add(title, data_pair=list(province_distribution.items()),
                     maptype="辽宁", is_roam=True)
    liaoning_map.set_global_opts(
        title_opts=opts.TitleOpts(title=title),
        visualmap_opts=opts.VisualMapOpts(
            max_=200,
            is_piecewise=True,
            # Piecewise ranges and colours; adjust the cut-offs to the data.
            # The top piece has no "max", so values above 16 are still coloured.
            pieces=[
                {"min": 16, "label": ">=16", "color": "#780707"},
                {"max": 15, "min": 11, "label": "11-15", "color": "#8A0808"},
                {"max": 10, "min": 9, "label": "9-10", "color": "#B40404"},
                {"max": 8, "min": 7, "label": "7-8", "color": "#CD1111"},
                {"max": 6, "min": 5, "label": "5-6", "color": "#DF0101"},
                {"max": 4, "min": 3, "label": "3-4", "color": "#F68181"},
                {"max": 2, "min": 1, "label": "1-2", "color": "#F5A9A9"},
                {"max": 0, "min": 0, "label": "0", "color": "#FFFFFF"},
            ],
        ),
    )
    liaoning_map.render('辽宁疫情地图.html')


# Build the {city: confirmed count} dict for this province
def makedict(data_list):
    dict1 = {}
    for item in data_list:
        # Match on any field whose value is the province name, so the
        # exact key name in the API response does not matter.
        for k, v in item.items():
            if v == "辽宁":
                # Append "市" so city names match pyecharts' map labels
                dict1[item['name'] + '市'] = int(item["confirm"])
    return dict1


if __name__ == '__main__':
    list_data = geturl(url)
    print(list_data)
    if list_data:
        dict_data = makedict(list_data)
        print("----- separator -----")
        print(dict_data)
        makemap(dict_data)
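Before running, install the dependencies with pip install requests fake-useragent pyecharts; depending on the pyecharts version, the province map assets are either fetched automatically when the HTML renders or may need to be installed separately. If you want to check the parsing logic without hitting the 163 endpoint, makedict can be exercised with a hand-made list, as in the minimal sketch below. The records only mimic the fields the script actually reads ('name', 'confirm', and some field whose value is "辽宁"); the 'province' key is a hypothetical stand-in, not the real API key name, which is fine because makedict matches on values rather than keys.

    # Run after the functions above are defined (e.g. in the same session).
    # Hypothetical sample records, not real API output.
    sample_list = [
        {"name": "沈阳", "province": "辽宁", "confirm": "12"},
        {"name": "大连", "province": "辽宁", "confirm": "8"},
        {"name": "武汉", "province": "湖北", "confirm": "100"},
    ]

    print(makedict(sample_list))
    # Expected output: {'沈阳市': 12, '大连市': 8}

The record for 武汉 is there only to show that cities outside Liaoning are filtered out.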