HBase: Basic BulkLoad


To keep the MapReduce architecture clear, both the Map and the Reduce stage are retained, which also leaves room for later extension. Note: when writing HFiles, qualifiers must be written in sorted order.
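The job reads plain-text input, one record per line, with nine tab-separated columns in the order ID, A_ACCOUNT_ID, A_TRX_ID, P_ID, P_TRXORDER_ID, P_FRP_ID, O_PRODUCTCAT, O_RECEIVER_ID, O_REQUESTID (the qualifier list in the Reducer below). A made-up sample record, with columns separated by tabs:

1001	A001	T001	P001	PT001	FRP01	CAT01	R001	REQ001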

Mapper:

import com.google.common.base.Strings;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import yeepay.util.HBaseUtil;

import java.io.IOException;

public class LoadMapper extends Mapper<LongWritable, Text, ImmutableBytesWritable, Text> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {

        String line = value.toString();
        if (Strings.isNullOrEmpty(line)) {
            return;
        }

        // Each record must carry exactly nine tab-separated columns.
        String[] arr = line.split("\t", 9);
        if (arr.length != 9) {
            throw new RuntimeException("line.split() did not yield 9 fields: " + line);
        }

        // The source article is truncated at this point. A plausible completion:
        // key the record by its ID column (the original presumably derived the
        // rowkey via yeepay.util.HBaseUtil) and pass the whole line through.
        byte[] rowkey = Bytes.toBytes(arr[0]);
        context.write(new ImmutableBytesWritable(rowkey), value);
    }
}

Reducer:

import com.google.common.base.Splitter;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;

public class LoadReducer extends Reducer<ImmutableBytesWritable, Text, ImmutableBytesWritable, KeyValue> {

    // Column qualifiers, in input order: fields[i] names the i-th
    // tab-separated column of the input line.
    final static String[] fields = new String[]{
            "ID",
            "A_ACCOUNT_ID",
            "A_TRX_ID",
            "P_ID",
            "P_TRXORDER_ID",
            "P_FRP_ID",
            "O_PRODUCTCAT",
            "O_RECEIVER_ID",
            "O_REQUESTID"
    };

    @Override
    public void reduce(ImmutableBytesWritable rowkey, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        try {
            // One line per rowkey, so only the first value is consumed.
            String vs = values.iterator().next().toString();

            Iterator<String> iterator = Splitter.on("\t").limit(9).split(vs).iterator();

            // Values must be written in qualifier order; a TreeMap keeps the
            // qualifiers sorted lexicographically.
            Map<String, String> map = new TreeMap<String, String>();
            int i = 0;
            while (iterator.hasNext()) {
                map.put(fields[i++], iterator.next());
            }

            for (Map.Entry<String, String> entry : map.entrySet()) {
                // Timestamp 0L kept from the original; any subsequent write to
                // the same cell will shadow these values.
                KeyValue kv = new KeyValue(rowkey.copyBytes(), Bytes.toBytes("f"),
                        Bytes.toBytes(entry.getKey()), 0L, Bytes.toBytes(entry.getValue()));
                context.write(rowkey, kv);
            }
        } catch (Exception e) {
            // The original created this exception without throwing it.
            throw new RuntimeException(e);
        }
    }
}
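The TreeMap is what satisfies the ordering note at the top of the article: the HFile writer behind HFileOutputFormat requires the KeyValues of a row to arrive sorted, and rejects any key that is not lexically larger than its predecessor, so the qualifiers are emitted in lexicographic order.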
Job & BulkLoad:
package yeepay.load;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import yeepay.util.HdfsUtil;
import yeepay.util.YeepayConstant;

import java.util.Date;

public abstract class AbstractJobBulkLoad {
    public static Configuration conf = HBaseConfiguration.create();

    public void run(String[] args) throws Exception {
        if (args.length < 2) {
            throw new IllegalArgumentException("usage: <hdfs input path> <hbase table name>");
        }
        // The source article is truncated from here on; the rest of this method
        // is a plausible reconstruction of a standard bulk-load driver (the
        // argument layout and staging path are assumptions).
        String tableName = args[1];
        Job job = new Job(conf, "bulkload_" + tableName + "_" + new Date().getTime());
        job.setJarByClass(AbstractJobBulkLoad.class);
        job.setMapperClass(LoadMapper.class);
        job.setReducerClass(LoadReducer.class);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setInputFormatClass(TextInputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));

        // Stage HFiles in a temp directory; configureIncrementalLoad sets up
        // the partitioner so output is split and sorted to match the target
        // table's regions.
        Path hfileDir = new Path("/tmp/bulkload_" + new Date().getTime());
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(hfileDir)) {
            fs.delete(hfileDir, true);
        }
        FileOutputFormat.setOutputPath(job, hfileDir);
        HTable table = new HTable(conf, tableName);
        HFileOutputFormat.configureIncrementalLoad(job, table);

        if (!job.waitForCompletion(true)) {
            throw new RuntimeException("bulk load job failed");
        }
        // Hand the generated HFiles over to the region servers.
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        loader.doBulkLoad(hfileDir, table);
    }
}
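A concrete job then only has to supply its arguments and call run. A minimal hypothetical driver (class name and paths are illustrative, not from the original):

public class TrxBulkLoad extends AbstractJobBulkLoad {

    public static void main(String[] args) throws Exception {
        // e.g. args = { "/user/yeepay/trx.tsv", "TRX" }
        new TrxBulkLoad().run(args);
    }
}

If the HFiles are generated but the final hand-off fails, they can still be loaded afterwards with HBase's bundled LoadIncrementalHFiles (completebulkload) tool, pointed at the same staging directory and table.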