Web Crawling: Advanced Usage of requests
1. File Upload
import requests

files = {'file': open('favicon.ico', 'rb')}
r = requests.post("http://httpbin.org/post", files=files)
print(r.text)
{
  "args": {},
  "data": "",
  "files": {
    "file": "data:application/octet-stream;base64,AAAAAA...="
  },
  "form": {},
  "headers": {
    "Accept": "*/*",
    "Accept-Encoding": "gzip, deflate",
    "Content-Length": "6665",
    "Content-Type": "multipart/form-data; boundary=809f80b1a2974132b133ade1a8e8e058",
    "Host": "httpbin.org",
    "User-Agent": "python-requests/2.10.0"
  },
  "json": null,
  "origin": "60.207.237.16",
  "url": "http://httpbin.org/post"
}
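If you want to control the filename and MIME type that the server sees, each value in files can also be a tuple. The following is a minimal sketch; the filename and content type are chosen only for illustration:

import requests

# (filename, file object, content type); the values here are illustrative
files = {'file': ('favicon.ico', open('favicon.ico', 'rb'), 'image/x-icon')}
r = requests.post("http://httpbin.org/post", files=files)
print(r.status_code)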
2. Cookies
import requests

r = requests.get("https://www.baidu.com")
print(r.cookies)
for key, value in r.cookies.items():
    print(key + '=' + value)

<RequestsCookieJar[<Cookie BDORZ=27315 for .baidu.com/>, <Cookie __bsi=13533594356813414194_00_14_N_N_2_0303_C02F_N_N_N_0 for .www.baidu.com/>]>
BDORZ=27315
__bsi=13533594356813414194_00_14_N_N_2_0303_C02F_N_N_N_0
Cookies can also be used to keep a login state. For example, copy the Cookie string from a logged-in zhihu.com session, build a RequestsCookieJar from it, and pass it along with the request:

import requests

cookies = 'q_c1=31653b264a074fc9a57816d1ea93ed8b|1474273938000|1474273938000; d_c0="AGDAs254kAqPTr6NW1U3XTLFzKhMPQ6H_nc=|1474273938"; __utmv=51854390.100-1|2=registration_date=20130902=1^3=entry_date=20130902=1;a_t="2.0AACAfbwdAAAXAAAAso0QWAAAgH28HQAAAGDAs254kAoXAAAAYQJVTQ4FCVgA360us8BAklzLYNEHUd6kmHtRQX5a6hiZxKCynnycerLQ3gIkoJLOCQ==";z_c0=Mi4wQUFDQWZid2RBQUFBWU1DemJuaVFDaGNBQUFCaEFsVk5EZ1VKV0FEZnJTNnp3RUNTWE10ZzBRZFIzcVNZZTFGQmZn|1474887858|64b4d4234a21de774c42c837fe0b672fdb5763b0'
jar = requests.cookies.RequestsCookieJar()
headers = {
    'Host': 'www.zhihu.com',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36'
}
for cookie in cookies.split(';'):
    key, value = cookie.split('=', 1)
    jar.set(key, value)
r = requests.get("http://www.zhihu.com", cookies=jar, headers=headers)
print(r.text)
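Alternatively, the copied cookie string can be sent as a Cookie request header directly, without building a jar. This is a minimal sketch; the cookie value is just a placeholder to be replaced with the string copied from your own logged-in browser session:

import requests

headers = {
    'Cookie': 'q_c1=...; d_c0=...; z_c0=...',  # placeholder; paste your own cookie string here
    'Host': 'www.zhihu.com',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36'
}
r = requests.get('http://www.zhihu.com', headers=headers)
print(r.text)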
3. Session Maintenance
import requests

s = requests.Session()
s.get('http://httpbin.org/cookies/set/number/123456789')
r = s.get('http://httpbin.org/cookies')
print(r.text)
{
  "cookies": {
    "number": "123456789"
  }
}
Running the same two calls without a Session, by contrast, does not carry the cookie over:

import requests

requests.get('http://httpbin.org/cookies/set/number/123456789')
r = requests.get('http://httpbin.org/cookies')
print(r.text)

Here the second request prints an empty "cookies" object, because the two calls are completely independent and the cookie set by the first is not sent with the second.
4. SSL Certificate Verification
requests verifies SSL certificates by default; if a site's certificate is not trusted by the local CA bundle, a request like the following raises an SSLError:

import requests

response = requests.get('https://www.12306.cn')
print(response.status_code)
Passing verify=False skips certificate verification, although a warning recommending that you verify certificates is still printed:

import requests

response = requests.get('https://www.12306.cn', verify=False)
print(response.status_code)
The warning can be silenced by disabling urllib3 warnings:

import requests
from requests.packages import urllib3

urllib3.disable_warnings()
response = requests.get('https://www.12306.cn', verify=False)
print(response.status_code)
You can also supply a local client certificate, either as a single file or as a tuple of the certificate and key paths:

import requests

response = requests.get('https://www.12306.cn', cert=('/path/server.crt', '/path/key'))
print(response.status_code)
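verify does not have to be a boolean: it can also be the path to a CA bundle that should be trusted, which is useful for sites signed by a private CA. This is a minimal sketch; the bundle path is only an example:

import requests

# Path to a trusted CA bundle; this path is only an example
response = requests.get('https://www.12306.cn', verify='/path/to/ca-bundle.crt')
print(response.status_code)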
5. Proxy Settings
import requests

proxies = {
    "http": "http://10.10.1.10:3128",
    "https": "http://10.10.1.10:1080",
}
requests.get("https://www.taobao.com", proxies=proxies)
Of course, this example will probably not run as-is, because the proxy above is likely invalid; replace it with a working proxy of your own to try it out.
If the proxy requires HTTP Basic Auth, you can set it with a URL of the form http://user:password@host:port, as in the following example:
import requests

proxies = {
    "http": "http://user:password@10.10.1.10:3128/",
}
requests.get("https://www.taobao.com", proxies=proxies)
In addition to basic HTTP proxies, requests also supports proxies that use the SOCKS protocol.
First, install the SOCKS dependency:
pip3 install 'requests[socks]'
Then you can use a SOCKS proxy, as in the following example:
import requests

proxies = {
    'http': 'socks5://user:password@host:port',
    'https': 'socks5://user:password@host:port'
}
requests.get("https://www.taobao.com", proxies=proxies)
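If DNS resolution should also go through the proxy, the socks5h scheme can be used in place of socks5. This is a minimal sketch, assuming the requests[socks] extra installed above; host, port, and credentials are placeholders:

import requests

# socks5h:// asks the proxy to resolve hostnames as well; values are placeholders
proxies = {
    'http': 'socks5h://user:password@host:port',
    'https': 'socks5h://user:password@host:port'
}
requests.get("https://www.taobao.com", proxies=proxies)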
6. Timeout Settings
import requests

r = requests.get("https://www.taobao.com", timeout=1)
print(r.status_code)
In fact, a request goes through two phases: connect and read. A single timeout value like the one set above is applied to both the connect timeout and the read timeout.
If you want to specify them separately, you can pass a tuple of (connect timeout, read timeout):
r = requests.get('https://www.taobao.com', timeout=(5, 30))  # (connect timeout, read timeout)
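If you would rather wait indefinitely, pass timeout=None or simply omit the parameter. A timeout can also be handled explicitly; here is a minimal sketch using the same taobao.com URL, with illustrative limit values:

import requests

try:
    # 5-second connect limit, 30-second read limit; the values are only examples
    r = requests.get('https://www.taobao.com', timeout=(5, 30))
    print(r.status_code)
except requests.exceptions.Timeout:
    # Raised when either the connect or the read phase exceeds its limit
    print('request timed out')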
7. Authentication
For a site that logs users in through a form, you can simply POST the credentials to the login URL:

import requests

r = requests.post('http://localhost/Admin/Public/login.html',
                  data={'uid': 'admin', 'pwd': 'admin123'})
print(r.status_code)
print(r.text)
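If a site uses HTTP Basic Auth instead of a login form, requests supports it through the auth parameter. The following is a minimal sketch; the URL and credentials are placeholders:

import requests
from requests.auth import HTTPBasicAuth

# URL and credentials are placeholders for a site protected by HTTP Basic Auth
r = requests.get('http://localhost:5000/admin', auth=HTTPBasicAuth('admin', 'admin123'))
print(r.status_code)

Passing a plain tuple such as auth=('admin', 'admin123') is equivalent.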
8. Prepared Request
from requests import Request, Session

url = 'http://httpbin.org/post'
data = {
    'name': 'germey'
}
headers = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36'
}
s = Session()
req = Request('POST', url, data=data, headers=headers)
prepped = s.prepare_request(req)
r = s.send(prepped)
print(r.text)
Here we import Request and construct a Request object with the url, data, and headers parameters. We then call the Session's prepare_request() method to convert it into a PreparedRequest object, and finally call send() to send it. The output is as follows:
{
  "args": {},
  "data": "",
  "files": {},
  "form": {
    "name": "germey"
  },
  "headers": {
    "Accept": "*/*",
    "Accept-Encoding": "gzip, deflate",
    "Connection": "close",
    "Content-Length": "11",
    "Content-Type": "application/x-www-form-urlencoded",
    "Host": "httpbin.org",
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116 Safari/537.36"
  },
  "json": null,
  "origin": "182.32.203.166",
  "url": "http://httpbin.org/post"
}