/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.IOException;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.BadTsvLineException;
import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.ParsedLine;
import org.apache.hadoop.hbase.util.Bytes;

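// Not part of the original class: a minimal sketch of how a custom mapper like this one is
// typically wired into an ImportTsv run. It assumes ImportTsv's standard configuration keys
// (MAPPER_CONF_KEY for the mapper class, COLUMNS_CONF_KEY for the column spec) and the
// HBASE_ATTRIBUTES_KEY column marker; the table name, column spec, and input path are
// illustrative placeholders only.
//
//   Configuration conf = HBaseConfiguration.create();
//   conf.set(ImportTsv.MAPPER_CONF_KEY, TsvImporterCustomTestMapperForOprAttr.class.getName());
//   conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,HBASE_ATTRIBUTES_KEY,d:c1");
//   Job job = ImportTsv.createSubmittableJob(conf, new String[] { "testTable", "/path/to/input" });
//   job.waitForCompletion(true);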

/**
 * Mapper used by the ImportTsv operation-attribute tests: it builds the KeyValue for each
 * column just as {@link TsvImporterMapper} does and, in addition, extracts the operation
 * attributes from the parsed line and sets them on the {@link Put}.
 */
public class TsvImporterCustomTestMapperForOprAttr extends TsvImporterMapper {
  @Override
  protected void populatePut(byte[] lineBytes, ParsedLine parsed, Put put, int i)
      throws BadTsvLineException, IOException {
    // Build the KeyValue for this column exactly as the base mapper would.
    KeyValue kv = new KeyValue(lineBytes, parsed.getRowKeyOffset(), parsed.getRowKeyLength(),
        parser.getFamily(i), 0, parser.getFamily(i).length, parser.getQualifier(i), 0,
        parser.getQualifier(i).length, ts, KeyValue.Type.Put, lineBytes, parsed.getColumnOffset(i),
        parsed.getColumnLength(i));
    // Copy each "key<separator>value" operation attribute from the parsed line onto the Put.
    if (parsed.getIndividualAttributes() != null) {
      String[] attributes = parsed.getIndividualAttributes();
      for (String attr : attributes) {
        String[] split = attr.split(ImportTsv.DEFAULT_ATTRIBUTES_SEPERATOR);
        if (split == null || split.length <= 1 || split[0].isEmpty() || split[1].isEmpty()) {
          throw new BadTsvLineException("Invalid attributes separator specified: " + attr);
        }
        put.setAttribute(split[0], Bytes.toBytes(split[1]));
      }
    }
    put.add(kv);
  }
}