JDBC: multiple threads sharing one Connection / PreparedStatement
I'm building a data-download job that works like this: given n fund ids, it polls an API to download the details for each id and writes them into our own database. Since n > 10000, I download and insert with multiple threads. Now the problem: when I run 200 threads at once, calling the API is fine, but for the database inserts I can think of two ways to handle the Connection:
Option 1: all 200 threads share a single Connection and PreparedStatement. My reasoning is that every thread performs exactly the same insert with the same SQL; the only thing that differs is the parameter values. (You might ask whether this is thread-safe. In my tests it never threw an exception, which is itself one of my questions; please explain.) Another consideration is that repeatedly opening and closing connections has its own memory and resource cost.
Option 2: manage the connections with a pool, so each thread gets its own Connection. (This is obviously what most people would reach for first, but there is a catch: if I start 200 threads and the pool, say, only holds 50 connections, then 150 threads sit waiting, which pushes up the total time the whole job takes to finish.)
I'd appreciate it if someone could explain the pros and cons of these two approaches. A rough sketch of what I mean by option 2 follows.
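To make option 2 concrete, here is a rough sketch of what I have in mind (not my real code; the JDBC URL, credentials, table name and the counts are just placeholders): a fixed set of 50 connections held in a BlockingQueue as a minimal hand-rolled pool, and 200 tasks submitted to a worker pool of the same size, so every running task has its own connection.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class PooledInsertSketch {

    public static void main(String[] args) throws Exception {
        int poolSize = 50; // assumed pool size
        // hand-rolled "pool": pre-open the connections and hand them out through a queue
        final BlockingQueue<Connection> pool = new ArrayBlockingQueue<Connection>(poolSize);
        for (int i = 0; i < poolSize; i++) {
            pool.put(DriverManager.getConnection("jdbc:mysql://localhost/test", "user", "pass"));
        }

        // 200 tasks submitted to a worker pool the same size as the connection pool
        ExecutorService workers = Executors.newFixedThreadPool(poolSize);
        for (int t = 0; t < 200; t++) {
            workers.submit(new Runnable() {
                public void run() {
                    Connection con = null;
                    try {
                        con = pool.take(); // borrow a connection (blocks if none is free)
                        PreparedStatement ps = con.prepareStatement("insert into t_demo(id) values(?)");
                        try {
                            ps.setString(1, "some-id");
                            ps.executeUpdate();
                        } finally {
                            ps.close(); // the statement is per task, so it can be closed here
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    } finally {
                        if (con != null) {
                            pool.offer(con); // give the connection back
                        }
                    }
                }
            });
        }

        workers.shutdown();
        workers.awaitTermination(60, TimeUnit.MINUTES);
        for (Connection c : pool) {
            c.close();
        }
    }
}

With this setup the extra work waits as queued tasks rather than as 150 blocked threads, although the total wall-clock time is still bounded by the 50 connections.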
What I actually ran first shares only the Connection; each thread still creates its own PreparedStatement. The code looks like this:
public class DownloadTask implements Runnable {

    private final Connection con;

    // the other fields (idsList, sqlString, jdbcUtil, log, countdown, i, ...) and
    // getMonitorEntity() are omitted here; the full class is shown further down
    public DownloadTask(Connection con) {
        this.con = con;
    }

    public void run() {
        // Long downloadLong = System.currentTimeMillis();
        List<MonitorEntity> monitorEntitys = new ArrayList<MonitorEntity>();
        // download the details for each id via the API
        for (String id : idsList) {
            MonitorEntity entity = getMonitorEntity(id);
            monitorEntitys.add(entity);
        }
        // System.out.println("i ="+i+" download api over "+(System.currentTimeMillis()-downloadLong)/1000+" s");
        Long timeLong = System.currentTimeMillis();
        Connection connection = con;
        PreparedStatement ps = null;
        try {
            // connection.setAutoCommit(false);
            ps = connection.prepareStatement(sqlString);
            for (MonitorEntity entity : monitorEntitys) {
                ps.setString(1, entity.getApi_name());
                ps.setString(2, entity.getId());
                ps.setString(3, entity.getId_type());
                ps.setString(4, entity.getRequest_url());
                ps.setInt(5, entity.getHttp_code());
                ps.setInt(6, entity.getRes_code());
                ps.setInt(7, entity.getRes_time());
                ps.setString(8, entity.getRes_msg());
                ps.setString(9, entity.getResult());
                ps.setString(10, entity.getLast_update_time());
                ps.setString(11, entity.getRequest_url());
                ps.setInt(12, entity.getHttp_code());
                ps.setInt(13, entity.getRes_code());
                ps.setInt(14, entity.getRes_time());
                ps.setString(15, entity.getRes_msg());
                ps.setString(16, entity.getResult());
                ps.setString(17, entity.getLast_update_time());
                ps.addBatch();
            }
            ps.executeBatch();
            // connection.commit();
        } catch (Exception e) {
            log.error("insert or update database error", e);
        } finally {
            jdbcUtil.closePreparedStatement(ps);
        }
        System.out.println("i ="+i+" insertOrUpdate database over "+(System.currentTimeMillis()-timeLong)/1000+" s");
        countdown.countDown();
    }
}
The class implements Runnable, its constructor takes a Connection parameter, and the main thread passes the shared Connection in when it creates each worker thread.
With only the Connection shared, this is quite slow: some threads finish their insert in under a second, but the later a thread runs, the longer it takes. My personal guess is that a single Connection can only hand out a limited number of PreparedStatements, so the later threads have to wait for the earlier ones to finish and release theirs before they can create a new one. I'm not sure whether that explanation is correct; one way I could check it is sketched below.
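Sketch of that check (not code I ran; the URL and credentials are placeholders): keep creating PreparedStatements on a single Connection without closing them and see whether the driver ever refuses.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.List;

public class StatementLimitCheck {
    public static void main(String[] args) throws Exception {
        Connection con = DriverManager.getConnection("jdbc:mysql://localhost/test", "user", "pass");
        List<PreparedStatement> open = new ArrayList<PreparedStatement>();
        for (int i = 0; i < 1000; i++) {
            // if a per-connection statement limit existed, this is where it would fail
            open.add(con.prepareStatement("select 1"));
        }
        System.out.println("created " + open.size() + " statements on one connection");
        for (PreparedStatement ps : open) {
            ps.close();
        }
        con.close();
    }
}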
Then I thought: if that's the cause, why not simply pass the PreparedStatement itself in as a constructor parameter and see whether that is faster?
The code is as follows:
public class DownloadTask implements Runnable {

    static Log log = LogFactory.getLog(DownloadTask.class);

    private final JDBCUtil jdbcUtil;
    private final Connection con;
    private final List<String> idsList;
    private final String url_head;
    private final String url_param;
    private final String sqlString;
    private final String api_name;
    private final String id_type;
    CountDownLatch countdown;
    private final PreparedStatement ps;
    private final int i;

    static SimpleDateFormat sf = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");

    public DownloadTask(JDBCUtil jdbcUtil, Connection con,
            List<String> idsList, String url_head, String url_param,
            String sqlString, String api_name, String id_type, CountDownLatch countdown, int i, PreparedStatement ps) {
        this.jdbcUtil = jdbcUtil;
        this.con = con;
        this.idsList = idsList;
        this.url_head = url_head;
        this.url_param = url_param;
        this.sqlString = sqlString;
        this.api_name = api_name;
        this.id_type = id_type;
        this.countdown = countdown;
        this.i = i;
        this.ps = ps;
    }

    private MonitorEntity getMonitorEntity(String id) {
        MonitorEntity entity = new MonitorEntity();
        String url = url_head + id + url_param;
        Long startTimeLong = System.currentTimeMillis();
        String[] responseString;
        try {
            responseString = DownloadData.download(url);
        } catch (Exception e) {
            log.error("download error url=" + url, e);
            return null;
        }
        Long endTimeLong = System.currentTimeMillis();
        Long restimeLong = endTimeLong - startTimeLong;
        entity.setId(id);
        entity.setId_type(id_type);
        entity.setApi_name(api_name);
        entity.setRequest_url(url);
        entity.setHttp_code(Integer.parseInt(responseString[0]));
        entity.setRes_time(restimeLong.intValue());
        String resultString = responseString[1];
        try {
            JSONObject jsonObject = JSONObject.fromObject(resultString);
            JSONObject statusJsonObject = jsonObject.getJSONObject("status");
            entity.setRes_code(statusJsonObject.getInt("code"));
            entity.setRes_msg(statusJsonObject.getString("message"));
            entity.setResult(jsonObject.getJSONObject("data").toString());
        } catch (Exception e) {
            log.error("analysis jsonobject error ", e);
            entity.setRes_code(-1);
            entity.setRes_msg("analysis jsonobject error");
            entity.setResult("error");
        }
        entity.setLast_update_time(sf.format(new Date()));
        return entity;
    }

    public void run() {
        Long timeLong = System.currentTimeMillis();
        // Long downloadLong = System.currentTimeMillis();
        List<MonitorEntity> monitorEntitys = new ArrayList<MonitorEntity>();
        for (String id : idsList) {
            MonitorEntity entity = getMonitorEntity(id);
            monitorEntitys.add(entity);
        }
        // System.out.println("i ="+i+" download api over "+(System.currentTimeMillis()-downloadLong)/1000+" s");
        // Connection connection = jdbcUtil.getConnection();
        // PreparedStatement ps = null;
        try {
            // connection.setAutoCommit(false);
            // ps = connection.prepareStatement(sqlString);
            for (MonitorEntity entity : monitorEntitys) {
                ps.setString(1, entity.getApi_name());
                ps.setString(2, entity.getId());
                ps.setString(3, entity.getId_type());
                ps.setString(4, entity.getRequest_url());
                ps.setInt(5, entity.getHttp_code());
                ps.setInt(6, entity.getRes_code());
                ps.setInt(7, entity.getRes_time());
                ps.setString(8, entity.getRes_msg());
                ps.setString(9, entity.getResult());
                ps.setString(10, entity.getLast_update_time());
                ps.setString(11, entity.getRequest_url());
                ps.setInt(12, entity.getHttp_code());
                ps.setInt(13, entity.getRes_code());
                ps.setInt(14, entity.getRes_time());
                ps.setString(15, entity.getRes_msg());
                ps.setString(16, entity.getResult());
                ps.setString(17, entity.getLast_update_time());
                ps.addBatch();
            }
            ps.executeBatch();
            // connection.commit();
        } catch (Exception e) {
            log.error("insert or update database error", e);
        } finally {
            // jdbcUtil.closePreparedStatement(ps); // closing is handled by the main thread
            // jdbcUtil.closeConnection(connection); // closing is handled by the main thread
        }
        System.out.println("i ="+i+" insertOrUpdate database over "+(System.currentTimeMillis()-timeLong)/1000+" s");
        countdown.countDown();
    }
}
In my tests the insert phase became roughly twice as fast, and every thread's insert time was about the same. (With the per-connection approach, some threads end up slower because they have to wait for the Connection they need.)
Please explain: does sharing one PreparedStatement across threads cause thread-safety problems? In theory it should, yet in my runs it didn't. There is also a hidden question: when several threads set parameters on the same PreparedStatement, can the values get mixed up, for example thread A's values ending up in the row thread B adds? In any case, no exception was ever reported. A sketch of where I think the race window sits is below.
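As I understand the risk, one thread's setXxx() calls could interleave with another's before either of them reaches addBatch(). If the shared statement were to be made safe, I assume the whole set / addBatch / executeBatch sequence would have to be serialized on one lock, roughly like the sketch below (it assumes the ps field and MonitorEntity from my class above; it is not the code I actually ran, and it would also remove any parallelism from the insert step):

// Sketch only: serialize every use of the shared statement on the statement object
// itself. Without this lock, thread A's setString(...) calls can interleave with
// thread B's, so A's values could end up in the row that B adds to the batch.
private void addBatchLocked(List<MonitorEntity> monitorEntitys) throws SQLException {
    synchronized (ps) {
        for (MonitorEntity entity : monitorEntitys) {
            ps.setString(1, entity.getApi_name());
            ps.setString(2, entity.getId());
            ps.setString(3, entity.getId_type());
            // ... parameters 4 to 17 exactly as in run() above ...
            ps.addBatch();
        }
        ps.executeBatch();
    }
}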
The main-thread code is as follows:
package com.morningstar.api;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.morningstar.api.ApiConfig;
import com.morningstar.api.util.JDBCParameterCallBack;
import com.morningstar.api.util.JDBCUtil;

public class Application {

    static Log log = LogFactory.getLog(Application.class);

    private static String sqlString = "(api_name,id,id_type,request_url,http_code,state_code,response_time,state_msg,result,last_update) "
            + "value(?,?,?,?,?,?,?,?,?,?)"
            + " on duplicate key update request_url=?,http_code=?,state_code=?,response_time=?,"
            + "state_msg=?,result=?,last_update=?";

    public static void main(String[] args) throws Exception {
        ApiConfig apiConfig = new ApiConfig();
        String url_param = "?username=" + apiConfig.getUsername()
                + "&password=" + apiConfig.getPassword() + "&format=json";
        // map of id lists, keyed by id type
        Map<String, List<String>> idTypeMap = apiConfig.getIdsMap();
        // number of tasks
        int taskCount = apiConfig.getTask_count();
        JDBCUtil jdbcUtil = new JDBCUtil(apiConfig.getJdbc_url(),
                apiConfig.getJdbc_name(), apiConfig.getJdbc_pass(),
                apiConfig.getJdbc_driver(), apiConfig.getMax_conn(),
                apiConfig.getMin_conn());
        Connection con = null;
        // list of API names
        List<String> apiNames = apiConfig.getApi_names();
        // CREATE TABLE sql fragment
        String createTableSql = apiConfig.getCreateTableSql();
        Connection connection = jdbcUtil.getConnection();
        for (String apiname : apiNames) {
            String url_head = apiConfig.getApi_url() + apiname + "/";
            jdbcUtil.insertOrUpdate("CREATE TABLE IF NOT EXISTS t_monitor_" + apiname + createTableSql);
            String sql = "insert t_monitor_" + apiname + sqlString;
            PreparedStatement ps = null;
            ps = connection.prepareStatement(sql);
            for (String idtype : idTypeMap.keySet()) {
                Long starttimeLong = System.currentTimeMillis();
                List<String> idsList = idTypeMap.get(idtype);
                log.info("start download " + apiname + " " + idtype + " list size:"
                        + idsList.size());
                int number = idsList.size() / taskCount;
                int rem = idsList.size() % taskCount;
                if (rem != 0) {
                    taskCount++;
                }
                CountDownLatch countdown = new CountDownLatch(taskCount);
                for (int i = 0; i < taskCount; i++) {
                    int startIndex = i * number;
                    int endIndex = (i + 1) * number > idsList.size() ? idsList.size() : (i + 1) * number;
                    List<String> ids = idsList.subList(startIndex, endIndex);
                    DownloadTask downloadTask = new DownloadTask(jdbcUtil, con,
                            ids, url_head + idtype + "/", url_param, sql,
                            apiname, idtype, countdown, i, ps);
                    Thread thread = new Thread(downloadTask);
                    thread.start();
                }
                try {
                    long timeout = 60;
                    countdown.await(timeout, TimeUnit.MINUTES);
                } catch (InterruptedException e) {
                    log.error("thread is interrupted", e);
                }
                Long endtimeLong = System.currentTimeMillis();
                log.info("end download " + apiname + " " + idtype + " list size:" + idsList.size()
                        + " over time:" + (endtimeLong - starttimeLong) / 1000
                        + "s");
            }
            jdbcUtil.closePreparedStatement(ps);
        }
        jdbcUtil.closeConnection(connection);
    }
}