【Crawler】Project: scraping Tmall "Huawei phone" product reviews with selenium and requests
Using selenium
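The selenium version drives a real Chrome session: it patches the navigator.webdriver flag through the Chrome DevTools Protocol so Taobao's bot detection does not flag the browser as automated, logs in with human-like random pauses, and exposes the session cookies for later reuse.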
from selenium.webdriver import Chrome,ChromeOptions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
import pymongo
import time
import random
class GetCookies():
    def GetCookies(self):
        username = "ho"
        password = "hong5"
        # login_url = "https://login.tmall.com/"
        login_url = "https://login.taobao.com/member/login.jhtml?redirectURL=http%3A%2F%2Flist.tmall.com%2Fsearch_product.htm%3Fq%3D%25E8%258B%25B9%25E6%259E%259C%26type%3Dp%26vmarket%3D%26spm%3D875.7931836%252FB.a2227oh.d100%26from%3Dmallfp..pc_1_searchbutton&uuid=9b1b940679de3c3820589302ff75920b"
        driver.get(login_url)
        # wait until the login form has rendered
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, '//*[@id="fm-login-id"]')))
        # type the username and password with random pauses to look human
        login_name = driver.find_element(By.XPATH, '//*[@id="fm-login-id"]')
        login_name.click()
        login_name.send_keys(username)
        time.sleep(random.randrange(5, 7))
        login_passwd = driver.find_element(By.XPATH, '//*[@id="fm-login-password"]')
        login_passwd.click()
        login_passwd.send_keys(password)
        time.sleep(random.randrange(5, 7))
        driver.find_element(By.XPATH, '//*[@id="login-form"]/div[4]/button').click()
        search_url = "https://detail.tmall.com/item.htm?spm=a220m.1000858.1000725.1.193a74a7Gjoc16&id=656168531109&skuId=4902176242110&user_id=1917047079&cat_id=2&is_b=1&rn=96d6ce4c6e59b759d99176e5933c5e1f"
        driver.get(search_url)
        # hand the logged-in session's cookies back to the caller
        return driver.get_cookies()
class TamllComment():
    def GetCommentData(self):
        goods_url = "https://detail.tmall.com/item.htm?spm=a220m.1000858.1000725.1.193a74a7Gjoc16&id=656168531109&skuId=4902176242110&user_id=1917047079&cat_id=2&is_b=1&rn=96d6ce4c6e59b759d99176e5933c5e1f"
        driver.get(goods_url)
        username = "honey5730"
        password = "hong12345"
        # the product page redirects to the login form; log in the same way as above
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, '//*[@id="fm-login-id"]')))
        login_name = driver.find_element(By.XPATH, '//*[@id="fm-login-id"]')
        login_name.click()
        login_name.send_keys(username)
        time.sleep(random.randrange(5, 7))
        login_passwd = driver.find_element(By.XPATH, '//*[@id="fm-login-password"]')
        login_passwd.click()
        login_passwd.send_keys(password)
        time.sleep(random.randrange(5, 7))
        driver.find_element(By.XPATH, '//*[@id="login-form"]/div[4]/button').click()

    def SaveAsMongo(self):
        pass
if __name__ == '__main__':
    options = ChromeOptions()
    options.add_argument(
        'user-agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36"')
    driver = Chrome(options=options)
    # mask the navigator.webdriver flag before any page script runs,
    # so the site's bot detection does not see an automated browser
    driver.execute_cdp_cmd("Page.addScriptToEvaluateOnNewDocument", {
        "source": """
        Object.defineProperty(navigator, 'webdriver', {
            get: () => undefined
        })
        """
    })
    # fetch fresh cookies...
    # cookies = GetCookies().GetCookies()
    # ...or reuse a previously captured set (remaining values truncated in the original listing)
    cookies = [{'domain': '.taobao.com', 'expiry': 1653840540, 'httpOnly': False, 'name': 'tfstk', 'path': '/', 'secure': False, 'value': 'cQnABVw_LQA0_Bw-LqLoflTjbiphayBYstNOXLwWsq2FZdsOfs2mxDCKIEwaTSpR.'}, {'domain': '.taobao.com', 'expiry': 2269008541, 'httpOnly': False, 'name': 'cna', 'path': '/', 'sameSite': 'None', 'secure': True, 'value': ...}]
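With the cookie list in hand (whether returned by GetCookies().GetCookies() or pasted in as above), a later run can skip the login flow entirely by replaying the cookies into a fresh session. A minimal sketch, assuming driver and cookies are the objects from the listing above; the shortened product URL built from the item id is an assumption:

    # replay previously captured cookies instead of logging in again
    driver.get("https://www.taobao.com")  # must be on the cookie domain before add_cookie
    for cookie in cookies:
        cookie.pop("sameSite", None)  # some chromedriver builds reject this key
        driver.add_cookie(cookie)
    driver.get("https://detail.tmall.com/item.htm?id=656168531109")  # now loads logged in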
Using requests
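The requests version skips the browser entirely and calls Tmall's review endpoint (rate.tmall.com/list_detail_rate.htm) directly, reusing a cookie string captured from a logged-in browser session. The endpoint answers with JSONP, i.e. JSON wrapped in a jsonp128(...) callback, which parse_comment_data() strips before decoding.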
import requests
from fake_useragent import UserAgent
import time
import random
import json
import redis
import openpyxl
def get_comment_data(start_page, end_page):
    url = "https://rate.tmall.com/list_detail_rate.htm?"
    headers = {
        'user-agent': UserAgent().ie,
        # session cookie captured from a logged-in browser (truncated in the original listing)
        'cookie': 'miid=4159704271564039423; cna=PMcSGlbGqDMCAXBvBSW1loSM; lid=honey5; t=d33712a517f185cc4bc07f7e794e1c6a; tracknick=honey5; .',
        'referer': 'https://detail.tmall.com/item.htm?spm=a220m.1000858.1000725.1.47d174a7SwNZMX&id=656168531109&skuId=4902176242110&areaId=350100&user_id=1917047079&cat_id=2&is_b=1&rn=2a13cc7d543f8f0ff8e7d9492fc4d3b9'
    }
    while start_page <= end_page:
        params = {
            'itemId': '656168531109',
            'sellerId': '1917047079',
            'order': '3',
            'currentPage': start_page
        }
        source = requests.get(url, headers=headers, params=params).text
        # print(source)
        # parse this page of reviews
        parse_comment_data(source)
        # with open('iphone%d.txt' % start_page, 'w', encoding='utf-8') as file:
        #     file.write(source)
        # pause between pages to avoid triggering rate limiting
        time.sleep(random.randint(5, 8))
        start_page += 1
def parse_comment_data(source):
    # strip the JSONP wrapper "jsonp128(...)" without deleting ")" characters
    # that may appear inside the review text itself
    comment_data = source.strip().replace("\n", "")
    if comment_data.startswith("jsonp128(") and comment_data.endswith(")"):
        comment_data = comment_data[len("jsonp128("):-1]
    comment_data = json.loads(comment_data)
    for data in comment_data["rateDetail"]["rateList"]:
        # reviewer's display name
        username = data['displayUserNick']
        # purchased SKU (model/colour/capacity)
        goods_type = data['auctionSku']
        # review text
        content = data['rateContent']
        # review date
        date = data['rateDate']
        # follow-up review and its date, if any
        try:
            add_content = data['appendComment']['content']
            add_content_date = data['appendComment']['commentTime']
        except (KeyError, TypeError):
            add_content = ""
            add_content_date = ""
        print(username, goods_type, content, date, add_content, add_content_date)
        datalist.append([username, goods_type, content, date, add_content, add_content_date])
def save_as_redis(datalist):
    client = redis.Redis(host="localhost", port=6379, decode_responses=True, db=0)
    for data in datalist:
        data_dict = dict(zip(colnames, data))
        # redis list entries must be flat strings, so serialise each row as JSON
        client.rpush('Tmall_iphone', json.dumps(data_dict, ensure_ascii=False))
    client.close()

def save_as_excel():
    wb = openpyxl.Workbook()
    ws = wb.active
    ws.append(colnames)  # header row
    for data in datalist:
        ws.append(data)
    wb.save('Tmall_iphone.xlsx')
    wb.close()
if __name__ == '__main__':
    datalist = []
    # column headers: username, SKU, review, date, follow-up review, follow-up date
    colnames = ['用户名', '商品类型', '评论内容', '日期', '追评', '追评日期']
    # scrape pages 1-7 of the reviews
    get_comment_data(1, 7)
    print(datalist)
    save_as_excel()
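Note that save_as_redis() is defined but never called from __main__; call it alongside (or instead of) save_as_excel() to persist the rows in Redis. Each row then sits in the Tmall_iphone list as a JSON string, so reading the data back is symmetric. A minimal sketch, assuming a local Redis on the default port:

    import json
    import redis

    client = redis.Redis(host="localhost", port=6379, decode_responses=True, db=0)
    # walk the whole list and decode each stored review row
    for raw in client.lrange('Tmall_iphone', 0, -1):
        print(json.loads(raw))
    client.close()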