python爬虫浏览器伪装和设置代理ip
程序员文章站
2023-12-30 18:59:58
...
1.python爬虫浏览器伪装
# Masquerade as a desktop browser by attaching a spoofed User-Agent header.
import urllib.request

# Header tuple identifying the request as Chrome on Windows 7.
ua_header = ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")

# Build an opener carrying the spoofed header and install it globally,
# so every subsequent urllib.request.urlopen() call sends it.
ua_opener = urllib.request.build_opener()
ua_opener.addheaders = [ua_header]
urllib.request.install_opener(ua_opener)

# Fetch and decode the page, ignoring undecodable bytes.
# NOTE(review): `url` must be defined earlier in the surrounding program.
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
2.python使用代理ip
# Route requests through a randomly chosen HTTP proxy.
import random
import urllib.request

# Candidate proxy ip:port strings; one is picked at random per run.
iplist = ['219.223.251.173:3128', '203.174.112.13:3128', '122.72.18.34:80']

# BUG FIX: the original called urllib.request.ProxyHandle — no such name,
# the class is ProxyHandler — and indexed with random.randint(0, len(iplist)),
# whose upper bound is INCLUSIVE and could raise IndexError. random.choice
# picks an element safely and idiomatically. (random was also never imported
# in this snippet.)
proxy = urllib.request.ProxyHandler({'http': random.choice(iplist)})

# BUG FIX: urllib.request.HTTPHandle does not exist, and handlers must be
# instantiated; pass an HTTPHandler() instance alongside the proxy handler.
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler())

# Install globally so plain urlopen() calls are proxied.
urllib.request.install_opener(opener)

# Fetch and decode the page, ignoring undecodable bytes.
# NOTE(review): `url` must be defined earlier in the surrounding program.
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
3.python同时设置代理ip和浏览器模拟
'''
Fetch a page through a randomly selected HTTP proxy while also spoofing
the browser User-Agent header, then print the response body.
'''
import urllib.request
import random

url = 'http://www.whatismyip.com.tw'

# Candidate proxy ip:port strings; one is picked at random per run.
iplist = ['219.223.251.173:3128', '203.174.112.13:3128', '122.72.18.34:80']

# BUG FIX: the original used iplist[random.randint(0, len(iplist))];
# randint's upper bound is INCLUSIVE, so len(iplist) is a possible index
# and would raise IndexError. random.choice selects an element correctly.
proxy_support = urllib.request.ProxyHandler({'http': random.choice(iplist)})
opener = urllib.request.build_opener(proxy_support)

# Spoof the User-Agent so the request looks like Firefox on Ubuntu.
opener.addheaders = [('User-Agent', 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:58.0) Gecko/20100101 Firefox/58.0')]

# Open the URL via this opener directly (not installed globally here),
# then read and decode the body.
response = opener.open(url)
html = response.read().decode('utf-8')
print(html)