from bs4 import BeautifulSoup
import requests

## Crawl pufangwang (the Putian housing listings site at www.0594.com)
class down(object):
    def __init__(self):
        self.calls = []   # text blocks scraped from each listing detail page
        self.urls = []    # detail-page URLs collected from the index pages
        self.nums = 0     # total number of <a> links found

    def get_down_urls(self):
        """Collect detail-page URLs from the two search-result index pages."""
        self.urls = []
        htmls = ["http://www.0594.com/list-117---1_90-90_120-2----3361---1.html?pb=&od=&hasphoto=1&ft=0",
                 "http://www.0594.com/list-117---1_90-90_120-2----3361---2.html?pb=&od=&hasphoto=1&ft=0"]
        for html in htmls:
            req = requests.get(html)
            bea = BeautifulSoup(req.text, "html.parser")
            house_list = bea.find_all("div", class_="houseList")
            # re-parse the matched blocks and pull every <a> link out of them
            links = BeautifulSoup(str(house_list), "html.parser").find_all("a")
            self.nums += len(links)
            for each in links:
                self.urls.append(each.get("href"))
        # set() alone returns a new object; reassign so duplicates are actually dropped
        self.urls = list(set(self.urls))
    def get_down_data(self, straget):
        """Scrape the information text blocks from one listing detail page."""
        self.calls = []
        html = straget                                # detail-page URL
        req = requests.get(html)                      # fetch the page
        bea = BeautifulSoup(req.text, "html.parser")  # parse the response
        allss = bea.find_all("div", class_="inforTxt")
        # re-parse the matched blocks and collect the text of every <dl> entry
        dls = BeautifulSoup(str(allss), "html.parser").find_all("dl")
        for each in dls:
            self.calls.append(each.text.replace("\n", ""))
        print(self.calls)

ds = down()
ds.get_down_urls()
dss = list(set(ds.urls))  # deduplicate the collected detail-page URLs
for url in dss:
    ds.get_down_data(url)
# "As the years race past with the hours and the will drains away with the days,
# one at last withers away, out of touch with the world, grieving in a shabby hut;
# what good will regret be then?"  (Zhuge Liang, "Admonition to My Son")