Python3实现的爬虫爬取数据并存入mysql数据库操作示例
程序员文章站
2022-05-29 18:52:11
本文实例讲述了python3实现的爬虫爬取数据并存入mysql数据库操作。分享给大家供大家参考,具体如下:
爬一个电脑客户端的订单。罗总推荐,抓包工具用的是httpanalyzerstdv7,与chrome自带的f12类似。
本文实例讲述了python3实现的爬虫爬取数据并存入mysql数据库操作。分享给大家供大家参考,具体如下:
爬一个电脑客户端的订单。罗总推荐,抓包工具用的是httpanalyzerstdv7,与chrome自带的f12类似。客户端有接单大厅,罗列所有订单的简要信息。当单子被接了,就不存在了。我要做的是新出订单就爬取记录到我的数据库zyc里。
设置每10s爬一次。
抓包工具页面如图:
首先是爬虫,先找到数据存储的页面,再用正则爬出。
# -*- coding:utf-8 -*-
import re
import requests
import pymysql  # MySQL driver for Python 3 (Python 2 used MySQLdb)
import datetime
import time


def getresults():
    """Scrape the order hall and each order's detail page.

    Returns:
        A list with one inner list per order, holding the field values
        captured by the regexes below (in order), or None when a request
        fails.

    NOTE(review): access_token / sign / timestamp query parameters are
    hard-coded from an old packet capture and will expire — confirm
    against a fresh capture before relying on this.
    """
    requests.adapters.default_retries = 5  # retry transient connection errors
    # One regex per field of the order-detail JSON, matched positionally.
    # reg[4] ("game_area") captures three groups per match and is
    # flattened specially below.
    reg = [r'"id":(.*?),',
           r'"order_no":"(.*?)",',
           r'"order_title":"(.*?)",',
           r'"publish_desc":"(.*?)",',
           r'"game_area":"(.*?)\\/(.*?)\\/(.*?)",',
           r'"order_current":"(.*?)",',
           r'"order_content":"(.*?)",',
           r'"order_hours":(.*?),',
           r'"order_price":"(.*?)",',
           r'"add_price":"(.*?)",',
           r'"safe_money":"(.*?)",',
           r'"speed_money":"(.*?)",',
           r'"order_status_desc":"(.*?)",',
           r'"order_lock_desc":"(.*?)",',
           r'"cancel_type_desc":"(.*?)",',
           r'"kf_status_desc":"(.*?)",',
           r'"is_show_pwd":(.*?),',
           r'"game_pwd":"(.*?)",',
           r'"game_account":"(.*?)",',
           r'"game_actor":"(.*?)",',
           r'"left_hours":"(.*?)",',
           r'"created_at":"(.*?)",',
           r'"account_id":"(.*?)",',
           r'"mobile":"(.*?)",',
           r'"contact":"(.*?)",',
           r'"qq":"(.*?)"},']
    results = []
    try:
        for page in range(1, 2):  # hall pages to scan
            proxy = {'http': '61.135.155.82:443'}  # HTTP proxy ip; likely stale
            # Fetch one page of the order hall.
            # Fixed vs. the original paste: "×tamp" (an HTML "&times;"
            # entity-corruption) is restored to "&timestamp", and the
            # "%20http/1.1" tail copied from the capture's request line
            # is dropped from the URL.
            hall_url = ('https://www.dianjingbaozi.com/api/dailian/soldier/hall'
                        '?access_token=3ef3abbea1f6cf16b2420eb962cf1c9a'
                        '&dan_end=&dan_start=&game_id=2&kw=&order=price_desc'
                        '&page=%d' % page +
                        '&pagesize=30&price_end=0&price_start=0'
                        '&server_code=000200000000'
                        '&sign=ca19072ea0acb55a2ed2486d6ff6c5256c7a0773'
                        '&timestamp=1511235791&type=public&type_id=')
            response = requests.get(hall_url, proxies=proxy)
            # Decode to str — the original commented this line out and then
            # passed the Response object itself to re.findall (TypeError).
            html = response.content.decode('utf-8')
            # Order numbers drive the detail-page URLs.
            order_nos = re.findall(r'"order_no":"(.*?)","game_area"', html)
            for order_no in order_nos:
                detail_url = ('http://www.lpergame.com/api/dailian/order/detail'
                              '?access_token=eb547a14bad97e1ee5d835b32cb83ff1'
                              '&order_no=' + order_no +
                              '&sign=c9b503c0e4e8786c2945dc0dca0fabfa1ca4a870'
                              '&timestamp=1511146154')
                html_order = requests.get(detail_url,
                                          proxies=proxy).content.decode('utf-8')
                outcome_reg = []
                for i, pattern in enumerate(reg):  # collect every field of this order
                    outcome = re.findall(pattern, html_order)
                    if i == 4:
                        # game_area matches are 3-tuples; flatten each one.
                        for groups in outcome:
                            outcome_reg.extend(groups)
                    else:
                        outcome_reg.extend(outcome)
                results.append(outcome_reg)
        return results
    except Exception:  # narrowed from bare except: no longer eats KeyboardInterrupt
        time.sleep(5)  # back off — the API rejects overly frequent requests
        print("失败")
        return None
根据爬虫结果建表,这里变量名要准确。并且要设置唯一索引,使每次爬的只有新订单入库。
def mysql_create(): mysql_host = '' mysql_db = 'zyc' mysql_user = 'zyc' mysql_password = '' mysql_port = 3306 db = pymysql.connect(host=mysql_host, port=mysql_port, user=mysql_user, password=mysql_password, db=mysql_db,charset='utf8') # 连接数据库编码注意是utf8,不然中文结果输出会乱码 sql_create = "create table dumplings (id char(10),order_no char(50),order_title varchar(265),publish_desc varchar(265),game_name varchar(265),"\ "game_area varchar(265),game_area_distinct varchar(265),order_current varchar(3908),order_content varchar(3908),order_hours char(10)," \ "order_price float(10),add_price float(10),safe_money float(10),speed_money float(10),order_status_desc varchar(265),"\ "order_lock_desc varchar(265),cancel_type_desc varchar(265),kf_status_desc varchar(265),is_show_pwd tinyint,game_pwd char(50),"\ "game_account varchar(265),game_actor varchar(265),left_hours varchar(265),created_at varchar(265),account_id char(50),"\ "mobile varchar(265),mobile2 varchar(265),contact varchar(265),contact2 varchar(265),qq varchar(265),"\ "primary key (`id`),unique key `no`(`order_no`))engine=innodb auto_increment=12 default charset=utf8" sql_key="create unique index id on dumplings(id)" cursor = db.cursor() cursor.execute("drop table if exists dumplings") cursor.execute(sql_create)# 执行sql语句 cursor.execute(sql_key) db.close() # 关闭数据库连
把数据导入mysql,注意编码和字段之间的匹配。
def intomysql(results): mysql_host = '' mysql_db = 'zyc' mysql_user = 'zyc' mysql_password = '' mysql_port = 3306 db = pymysql.connect(host=mysql_host, port=mysql_port, user=mysql_user, password=mysql_password, db=mysql_db,charset='utf8') # 连接数据库编码注意是utf8,不然中文结果输出会乱码 cursor = db.cursor() for j in range(len(results)): try: sql = "insert into dumplings(id,order_no,order_title,publish_desc ,game_name," \ "game_area,game_area_distinct,order_current,order_content,order_hours," \ "order_price,add_price,safe_money,speed_money,order_status_desc," \ "order_lock_desc,cancel_type_desc,kf_status_desc,is_show_pwd,game_pwd," \ "game_account,game_actor,left_hours,created_at,account_id," \ "mobile,mobile2,contact,contact2,qq) values (" for i in range(len(results[j])): sql = sql + "'" + results[j][i] + "'," sql = sql[:-1] + ")" sql = sql.encode('utf-8') cursor.execute(sql) db.commit() except:pass db.close()
每十秒运行一次。
mysql_create() i=0 while true: results = getresults() intomysql(results) i=i+1 print("爬虫次数:",i) time.sleep(10)
结果如图:
更多关于python相关内容可查看本站专题:《python socket编程技巧总结》、《python正则表达式用法总结》、《python数据结构与算法教程》、《python函数使用技巧总结》、《python字符串操作技巧汇总》、《python+mysql数据库程序设计入门教程》及《python常见数据库操作技巧汇总》
希望本文所述对大家python程序设计有所帮助。
上一篇: 论团结