# Python 爬虫：多用户代理 (multi user-agent crawler demo)

import re
import urllib.request
import random
from docx import Document
# Target Word document (python-docx) that collects the scraped jokes.
document = Document()
document.add_heading('糗事百科',1)
# Pool of browser User-Agent strings; one is picked at random per request
# (see ua()) so successive requests do not all present the same client.
uapools = [
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',
    'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
    'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0)',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; TencentTraveler 4.0)',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)',
    'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; 360SE)',
    # FIX: the original entry embedded the literal header name
    # "User-Agent:" inside the value, which would have sent the header
    # as "User-Agent: User-Agent:Mozilla/5.0 ...". Prefix removed.
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36',
]
def ua(uapools):
    """Pick a random User-Agent from *uapools* and install a global
    urllib opener that sends it; later urlopen() calls use this header."""
    chosen = random.choice(uapools)
    print(chosen)
    opener = urllib.request.build_opener()
    opener.addheaders = [('User-Agent', chosen)]
    urllib.request.install_opener(opener)
# Scrape listing pages 1..12 and append each joke to the document.
# NOTE(review): the site and its HTML layout may have changed since this
# script was written — the URL and pattern are kept from the original.
# Hoisted out of the loop: compile the extraction pattern once, not per page.
joke_pat = re.compile('<div class="content">.*?<span>(.*?)</span>.*?</div>', re.S)
for page in range(1, 13):
    ua(uapools)  # rotate the User-Agent before each page fetch
    url = 'https://www.qiushibaike.com/8hr/page/' + str(page) + '/'
    data = urllib.request.urlopen(url).read().decode()
    for joke in joke_pat.findall(data):
        try:
            document.add_paragraph(joke)
            print(joke)
            document.add_paragraph('- - ' * 10)
        except ValueError:
            # python-docx raises ValueError for characters it cannot
            # store (e.g. control chars); skip the snippet, don't abort.
            pass
document.save("D:\\data\\糗事百科段子2.docx")
# posted @ 2017-11-22 19:30  沧海一粒水  阅读(197)  评论(0)  [blog footer, kept as comment]