【Python Web Scraping】Study Notes -- Making POST Requests (Cookie Anti-Scraping)
This example sends a POST request to Baidu Translate's v2transapi endpoint with urllib. When the interface uses Cookie-based anti-scraping, the Cookie header is what matters, so keep it (refreshed from your own browser session) as the situation requires.

import urllib.request
import urllib.parse
import json

url = "https://fanyi.baidu.com/v2transapi?from=en&to=zh"

# For anti-scraping, the Cookie is the key header; keep it as the situation requires
headers = {
    'Cookie': 'BIDUPSID=B31F5F9983DA808DF774AF4E8234E368; PSTM=1609043037; __yjs_duid=1_d9b208ce4d03f407cb27ba1eb23c3d451620136684497; BAIDUID=BCD6CBDE11877C7667C1B11964ED9609:FG=1; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; MCITY=-131%3A; BDUSS=XphWEsycEQwVk9xRHBNVGQ4fjJVdExuVnV2MnAzaTEybTBWRzFJZDFDa2pLS3RoRVFBQUFBJCQAAAAAAAAAAAEAAADkIPTTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACObg2Ejm4NhM; BDUSS_BFESS=XphWEsycEQwVk9xRHBNVGQ4fjJVdExuVnV2MnAzaTEybTBWRzFJZDFDa2pLS3RoRVFBQUFBJCQAAAAAAAAAAAEAAADkIPTTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACObg2Ejm4NhM; delPer=0; PSINO=7; BDRCVFR[feWj1Vr5u3D]=I67x6TjHwwYf0; H_PS_PSSID=34837_34945_34441_34068_31660_34712_34584_34518_34705_34917_34815_26350_34971; BA_HECTOR=al8ga50l01818l85p71goa28b0q; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; __yjs_st=2_ZjEzNzkyODk0ZjFlZmZlOTQ0MWNlMjIyN2YxYTUwZGJiYWM2NWNjYTU5ZWU5MjA3ZjA1MGQwYTExMDU1N2FlYWYyODhiNDdiMjZiMjIwYjA1NGI4YTZhZGM3ZjgwYmY5YzdiZjBhOTUzZWQ3ZWM4YzUzNDM3NzMwMmI0ZWU2ZTBkM2QzNjEwODcxMDA4YTQ1ZDBlMzg0NDA4Yzg1MTJiYTRlODRjM2NmZTMxZWIyMzNjMjMzMmZlYWZjODMwYmUyYTY0MmZlYzM3NjI4MGY0ZDdmYjFlNzFhYTU3MzJkMzRmNTYzNTNhYWU1Zjg2ZTc2NDhmMDE1OWUwNmJhYTg2YV83XzBjYWRkYzhj; ab_sr=1.0.1_ZmE4NDgwYTRiYzc2OWE3MGQxMzUzMDM2MDc1MDc0YmM0MTk1ZjhiNDU4NzgzZWRjNDYzYTE1MjE0YjJlNWU3NTFkMDNmYTMyMTJkMjU1ODBkNmE2YWQ4MjZkZThjMjhhNjYzNjczMjZlNWZiMmY2Zjg3YzVkMDhkNTA1OWY3M2YyOWNjNWUzNTBkMGYwMTI4MmE2ZTg2YWQwYTJhYTNjMmFhZjczZmI3YzA1ZGNlYTAwYTBiMDAwYmZjMTgzMTFj; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1636108556,1636108565; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1636108565'
}

# Form parameters captured from the browser's request to v2transapi
data = {
    'from': 'en',
    'to': 'zh',
    'query': 'love',
    'simple_means_flag': '3',
    'sign': '198772.518981',
    'token': '2329c7301da5db601b7c0b683f605c77',
    'domain': 'common'
}

# POST parameters must be URL-encoded and then converted to bytes with encode()
data = urllib.parse.urlencode(data).encode('utf-8')

# Customize the request object
request = urllib.request.Request(url=url, data=data, headers=headers)

# Simulate a browser sending the request to the server
response = urllib.request.urlopen(request)

# Read the response data
content = response.read().decode('utf-8')

# Parse the JSON response and print it
obj = json.loads(content)
print(obj)
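For comparison, here is a minimal sketch of the same request using the third-party requests library (not part of the original notes; assumes `pip install requests`). requests URL-encodes the form dictionary and converts it to bytes for you, so the manual urlencode/encode step disappears. The Cookie, sign, and token values are the session-bound ones shown above and will need to be refreshed from your own browser for the call to succeed.

import requests

url = "https://fanyi.baidu.com/v2transapi?from=en&to=zh"

# Reuse the same Cookie string and form parameters as in the urllib example above
headers = {'Cookie': '...the Cookie string shown above...'}
data = {
    'from': 'en',
    'to': 'zh',
    'query': 'love',
    'simple_means_flag': '3',
    'sign': '198772.518981',
    'token': '2329c7301da5db601b7c0b683f605c77',
    'domain': 'common'
}

# requests handles URL encoding and byte conversion of the POST body automatically
response = requests.post(url, data=data, headers=headers)
print(response.json())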