Inserting 10,000 rows into MySQL with Python multiprocessing, multithreading, and coroutines
The examples below insert 10,000 rows into MySQL, splitting the ID range across workers using a process pool and gevent coroutines.
Using the ProcessPoolExecutor process pool from concurrent.futures
import time
import pymysql
from concurrent.futures import ProcessPoolExecutor

def data_handler(urls):
    # Each worker process opens its own connection and inserts the half-open range [start, stop).
    conn = pymysql.connect(host='172.18.3.204', user='root', password='xinwei',
                           database='btree', charset='utf8')
    cursor = conn.cursor()
    for i in range(urls[0], urls[1]):
        sql = 'insert into aaa(sid,name,email) values(%s,%s,concat(%s,"hael","@163"));'
        cursor.execute(sql, [i, "root", i])
        conn.commit()
    cursor.close()
    conn.close()

def run():
    # Half-open ranges covering ids 1..10000 with no gaps or overlaps
    urls = [(1, 2001), (2001, 5001), (5001, 8001), (8001, 10001)]
    with ProcessPoolExecutor() as executor:
        # ProcessPoolExecutor's map() accepts an iterable of arguments and yields results in order
        executor.map(data_handler, urls)

if __name__ == '__main__':
    start_time = time.time()
    run()
    stop_time = time.time()
    print('run time is %s' % (stop_time - start_time))
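Using a ThreadPoolExecutor thread pool
The title also mentions multithreading, so here is a minimal thread-based sketch, assuming the same aaa table and connection details as the process-pool example and simply swapping in concurrent.futures.ThreadPoolExecutor. Because each insert mostly waits on network I/O to the MySQL server, threads can overlap that waiting even with the GIL.

import time
import pymysql
from concurrent.futures import ThreadPoolExecutor

def data_handler(urls):
    # One connection per thread; a pymysql connection should not be shared across threads.
    conn = pymysql.connect(host='172.18.3.204', user='root', password='xinwei',
                           database='btree', charset='utf8')
    cursor = conn.cursor()
    for i in range(urls[0], urls[1]):
        sql = 'insert into aaa(sid,name,email) values(%s,%s,concat(%s,"hael","@163"));'
        cursor.execute(sql, [i, "root", i])
        conn.commit()
    cursor.close()
    conn.close()

if __name__ == '__main__':
    urls = [(1, 2001), (2001, 5001), (5001, 8001), (8001, 10001)]
    start_time = time.time()
    with ThreadPoolExecutor(max_workers=4) as executor:
        executor.map(data_handler, urls)
    stop_time = time.time()
    print('run time is %s' % (stop_time - start_time))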
Running with gevent coroutines
from gevent import monkey; monkey.patch_all()
import gevent
import time
import pymysql

def data_handler(anum, num):
    # Each greenlet opens its own connection and inserts the half-open range [anum, num).
    conn = pymysql.connect(host='172.18.3.204', user='root', password='xinwei',
                           database='btree', charset='utf8')
    cursor = conn.cursor()
    for i in range(anum, num):
        sql = 'insert into aaa(sid,name,email) values(%s,%s,concat(%s,"hael","@163"));'
        cursor.execute(sql, [i, "root", i])
        conn.commit()
    cursor.close()
    conn.close()

start_time = time.time()
gevent.joinall([
    gevent.spawn(data_handler, 1, 2001),
    gevent.spawn(data_handler, 2001, 5001),
    gevent.spawn(data_handler, 5001, 8001),
    gevent.spawn(data_handler, 8001, 10001),
])
stop_time = time.time()
print('run time is %s' % (stop_time - start_time))
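gevent works here because monkey.patch_all() replaces the blocking socket calls in the standard library, and PyMySQL is pure Python, so each greenlet yields to the others while it waits on the MySQL server. Whatever concurrency model you pick, committing after every row forces a transaction flush per insert (with InnoDB's default settings); committing once per batch usually saves more time than the concurrency itself. A minimal single-connection sketch, assuming the same aaa table and credentials:

import pymysql

conn = pymysql.connect(host='172.18.3.204', user='root', password='xinwei',
                       database='btree', charset='utf8')
cursor = conn.cursor()
sql = 'insert into aaa(sid,name,email) values(%s,%s,concat(%s,"hael","@163"));'
for i in range(1, 10001):
    cursor.execute(sql, [i, "root", i])
    if i % 1000 == 0:
        conn.commit()  # commit once per 1,000 rows instead of once per row
conn.commit()          # commit any remaining rows
cursor.close()
conn.close()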