Scraping Wuhu (芜湖) housing prices from Anjuke (安居客)
I forget where I found this code through a Baidu search; posting it here to study how it works.
import requests
from bs4 import BeautifulSoup
import time

headers = {'User-Agent': 'Baiduspider'}
total = []

def get_loupan(url):
    try:
        res = requests.get(url, headers=headers)
        soup = BeautifulSoup(res.text, 'html.parser')
        # listing names
        titles = soup.find_all('span', class_='items-name')
        title = list(map(lambda x: x.text, titles))
        # full address text
        dizhis = soup.find_all('span', class_='list-map')
        dizhi = list(map(lambda x: x.text, dizhis))
        # district: the second \xa0-separated field of the address
        diqus = soup.find_all('span', class_='list-map')
        diqu = list(map(lambda x: x.text.split('\xa0')[1], diqus))
        # raw layout-and-area text
        mianjis_quan = soup.find_all('a', class_='huxing')
        mianji_quan = list(map(lambda x: x.text, mianjis_quan))
        # area only: the last tab-separated field
        mianjis = soup.find_all('a', class_='huxing')
        mianji = list(map(lambda x: x.text.split('\t')[-1].strip(), mianjis))
        # price, taken from the <p> inside the favor-pos link
        jiages = soup.find_all('a', class_='favor-pos')
        jiage = list(map(lambda x: x.p.text, jiages))
        for tit, dz, dq, mianq, mianj, jg in zip(title, dizhi, diqu, mianji_quan, mianji, jiage):
            info = {'标题': tit,
                    '地址': dz,
                    '地区': dq,
                    '面积(全)': mianq,
                    '面积': mianj,
                    '价格': jg}
            total.append(info)
    except Exception as e:
        print(e)
    return total

if __name__ == '__main__':
    for i in range(1, 21):
        url = 'https://wuh.fang.anjuke.com/loupan/all/p{}/'.format(i)
        get_loupan(url)
        print('第{}页抓取完毕'.format(i))
        time.sleep(1)
    import pandas as pd
    df = pd.DataFrame(total)
    df.to_excel('安居客.xls')
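The selectors (items-name, list-map, huxing, favor-pos) are tied to Anjuke's current page markup and stop matching whenever the layout changes, so it is worth test-running a single page before looping over all 20. Below is a minimal sketch of such a check, assuming the get_loupan function above; it also writes .xlsx instead of .xls, since recent pandas versions dropped xlwt support for the old .xls format.

# Single-page sanity check (assumes get_loupan and total from the script above)
import pandas as pd

sample = get_loupan('https://wuh.fang.anjuke.com/loupan/all/p1/')
print(len(sample), 'records parsed')   # 0 usually means the class names no longer match
print(sample[:3])                      # inspect the first few parsed dicts

# Newer pandas cannot write .xls (xlwt support was removed); .xlsx via openpyxl works
pd.DataFrame(sample).to_excel('安居客.xlsx', index=False)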