网络爬虫2
程序员文章站
2022-05-04 11:42:42
...
import MySQLdb
import urllib
import webbrowser as web
import json
conn=MySQLdb.connect(host="localhost",user="root",passwd="sf123456",port=3306,charset="utf8")
cur=conn.cursor()
#cur.execute('create database if not exists stock_db')
conn.select_db("db_stock")
for i in range(1,4):
print i
url="http://q.10jqka.com.cn/interface/stock/fl/zdf/desc/"+str(i)+"/hsb/quote"
content=urllib.urlopen(url).read()
open("E:\\data\\stock\\stock_b.json","w").write(content)
#web.open_new_tab("E:\\data\\stock\\stock0617.json")
f=file("E:\\data\\stock\\stock_b.json")
s=json.load(f)
length=len(s['data'])
for i in range(0,length):
sql = "INSERT INTO stock_information_b(cje,cjl,hsl,jk,jlr,rtime,stockcode,\
stockid,stockname,zde,zdf,zdj,zgj,zs,zxj)values('%s','%s','%s','%s',\
'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')"%(s['data'][i]['cje'],s['data'][i]['cjl'],\
s['data'][i]['hsl'],s['data'][i]['jk'],s['data'][i]['jlr'],s['data'][i]['rtime'],\
s['data'][i]['stockcode'],s['data'][i]['stockid'],s['data'][i]['stockname'],s['data'][i]['zde'],\
s['data'][i]['zdf'],s['data'][i]['zdj'],s['data'][i]['zgj'],s['data'][i]['zs'],s['data'][i]['zxj']
)
cur.execute(sql)
conn.commit()
print "hello"
# except:
# print e
# conn.rollback()
conn.close()
# Download the Hang Seng constituent page from 10jqka, save it locally,
# then open the saved copy in the default web browser.
import urllib
import webbrowser as web

url = "http://q.10jqka.com.cn/hk/hzcf/"
content = urllib.urlopen(url).read()

# Close the file before handing it to the browser; the original used
# open(...).write(content), which leaves the handle open and risks the
# browser seeing a partially flushed file.
page_file = open("E:\\data\\stock\\hengSengIndex0623.html", "w")
try:
    page_file.write(content)
finally:
    page_file.close()

web.open_new_tab("E:\\data\\stock\\hengSengIndex0623.html")
转载于:https://blog.51cto.com/10226243/1664918