Sqoop API
logger.info("start to export data");
// 开始时间
Long beginTime = System.currentTimeMillis();
try{
System.setProperty("HADOOP_USER_NAME","hddtmn");
Configuration conf = new Configuration();
conf.set("fs.default.name", HDFS);
//conf.set("hadoop.job.ugi", "hadoop,hadoop");
conf.set("mapred.job.tracker", JOB_TRACKER);
//运行sqoop程序需要额外jar支持
addTmpJar(jarDir,conf);
//addTmpJar("file:/D:/hadoop/sqoop-1.4.5.jar",conf);
String[] arg = new String[1];
ExportTool exporter = new ExportTool();
Sqoop sqoop = new Sqoop(exporter);
sqoop.setConf(conf);
//多个文件执行
FileSystem fs = FileSystem.get(URI.create(HDFS), conf);
Path path = new Path(hdfsDir);
FileStatus[] FileStatus = fs.listStatus(path);
for (FileStatus fileStatus : FileStatus) {
String type = fileStatus.isDir() ? "dir" : "file";
String permission = fileStatus.getPermission().toString();
short replication = fileStatus.getReplication();
String group = fileStatus.getGroup();
long len = fileStatus.getLen();
String filepath = fileStatus.getPath().toString();
logger.info(type + "\t" + replication + "\t"+ permission + "\t" + group + "\t" + len + "\t" + filepath + "\t");
if(filepath.contains("part-r")){
List<String> para = getSqoopPara(filepath, tableName, columns, splitStr);
arg = para.toArray(new String[0]);
Sqoop.runSqoop(sqoop, arg);
}
}
}catch(Exception e){
e.printStackTrace();
logger.info(e.getMessage());
}
// 结束时间
Long endTime = System.currentTimeMillis();
logger.info("总共:" + (endTime - beginTime) / 1000 + "秒");// 计算时间