package org.apache.hadoop.hbase.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.regionserver.StoreFile;

import java.io.IOException;
/**
 * Utility class for creating HFiles in tests.
 */
public class HFileTestUtil {

  /**
   * Creates an HFile with the given number of rows, with keys distributed
   * between the given start key and end key (both inclusive). Every cell is
   * written to the given column family and qualifier, reusing the row key
   * as the cell value.
   */
  public static void createHFile(
      Configuration configuration,
      FileSystem fs, Path path,
      byte[] family, byte[] qualifier,
      byte[] startKey, byte[] endKey, int numRows) throws IOException
  {
    HFileContext meta = new HFileContextBuilder().build();
    HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
        .withPath(fs, path)
        .withFileContext(meta)
        .create();
    long now = System.currentTimeMillis();
    try {
      // Ask for numRows - 2 intermediate split points; iterateOnSplits also
      // yields the start and end keys themselves, so numRows keys are
      // written in total.
      for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, numRows - 2)) {
        // Reuse the row key as the cell value.
        KeyValue kv = new KeyValue(key, family, qualifier, now, key);
        writer.append(kv);
      }
    } finally {
      // Stamp the file with a bulk-load timestamp in its file info so it is
      // recognized as a bulk-loaded HFile.
      writer.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY,
          Bytes.toBytes(System.currentTimeMillis()));
      writer.close();
    }
  }
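
  // Usage sketch (illustrative only; the path, family "f", and qualifier "q"
  // below are hypothetical): writes a 1000-row HFile whose keys are spread
  // evenly between "aaaa" and "zzzz", suitable for bulk-load tests.
  //
  //   Configuration conf = HBaseConfiguration.create();
  //   FileSystem fs = FileSystem.get(conf);
  //   HFileTestUtil.createHFile(conf, fs, new Path("/tmp/test_hfile"),
  //       Bytes.toBytes("f"), Bytes.toBytes("q"),
  //       Bytes.toBytes("aaaa"), Bytes.toBytes("zzzz"), 1000);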
}