/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputFilesFilter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(LargeTests.class)
public class TestImportTsv implements Configurable {

  protected static final Log LOG = LogFactory.getLog(TestImportTsv.class);
  protected static final String NAME = TestImportTsv.class.getSimpleName();
  protected static HBaseTestingUtility util = new HBaseTestingUtility();

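  /**
   * Delete the temporary test data directory after running doMROnTableTest.
   * Boolean; default is true (see doMROnTableTest).
   */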
  protected static final String DELETE_AFTER_LOAD_CONF = NAME + ".deleteAfterLoad";
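
  /**
   * Force use of the combiner in doMROnTableTest. Boolean; default is true
   * (see doMROnTableTest).
   */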
  protected static final String FORCE_COMBINER_CONF = NAME + ".forceCombiner";

  private final String FAMILY = "FAM";

  public Configuration getConf() {
    return util.getConfiguration();
  }

  public void setConf(Configuration conf) {
    throw new IllegalArgumentException("setConf not supported");
  }

  @BeforeClass
  public static void provisionCluster() throws Exception {
    util.startMiniCluster();
    util.startMiniMapReduceCluster();
  }

  @AfterClass
  public static void releaseCluster() throws Exception {
    util.shutdownMiniMapReduceCluster();
    util.shutdownMiniCluster();
  }

  @Test
  public void testMROnTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

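    // Prepare the arguments required for the test.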
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 1);
    util.deleteTable(table);
  }

  @Test
  public void testMROnTableWithTimestamp() throws Exception {
    String table = "test-" + UUID.randomUUID();

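    // Prepare the arguments required for the test.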
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY
            + "=HBASE_ROW_KEY,HBASE_TS_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        table
    };
    String data = "KEY,1234,VALUE1,VALUE2\n";

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, data, args, 1);
    util.deleteTable(table);
  }

  @Test
  public void testMROnTableWithCustomMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();

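    // Prepare the arguments required for the test.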
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapper",
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

  @Test
  public void testBulkOutputWithoutAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

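    // Prepare the arguments required for the test.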
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

  @Test
  public void testBulkOutputWithAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();

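    // Prepare the arguments required for the test.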
    Path hfiles = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
        table
    };

    util.createTable(table, FAMILY);
    doMROnTableTest(util, FAMILY, null, args, 3);
    util.deleteTable(table);
  }

  @Test
  public void testJobConfigurationsWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String inputFile = "InputFile1.csv";
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=,",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table,
        inputFile
    };
    GenericOptionsParser opts = new GenericOptionsParser(util.getConfiguration(), args);
    args = opts.getRemainingArgs();
    Job job = ImportTsv.createSubmittableJob(util.getConfiguration(), args);
    assertEquals(TsvImporterTextMapper.class, job.getMapperClass());
    assertEquals(TextSortReducer.class, job.getReducerClass());
    assertEquals(Text.class, job.getMapOutputValueClass());
  }

  @Test
  public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
    String table = "test-" + UUID.randomUUID();
    Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table), "hfiles");
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY
            + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
        table
    };
    String data = "KEY\u001bVALUE4\u001bVALUE8\n";
    doMROnTableTest(util, FAMILY, data, args, 4);
  }

  @Test(expected = TableNotFoundException.class)
  public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String[] args = new String[] { table, "/inputFile" };

    Configuration conf = new Configuration(util.getConfiguration());
    conf.set(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A");
    conf.set(ImportTsv.BULK_OUTPUT_CONF_KEY, "/output");
    conf.set(ImportTsv.CREATE_TABLE_CONF_KEY, "no");
    ImportTsv.createSubmittableJob(conf, args);
  }

  @Test(expected = TableNotFoundException.class)
  public void testMRWithoutAnExistingTable() throws Exception {
    String table = "test-" + UUID.randomUUID();
    String[] args = new String[] { table, "/inputFile" };

    Configuration conf = new Configuration(util.getConfiguration());
    ImportTsv.createSubmittableJob(conf, args);
  }

  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
      String data, String[] args) throws Exception {
    return doMROnTableTest(util, family, data, args, 1);
  }

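  /**
   * Run an ImportTsv job and perform basic validation on the results. Returns
   * the ImportTsv Tool instance so that other tests can inspect it for further
   * validation as necessary. This method is static to ensure non-reliance on
   * the instance's util/conf facilities.
   * @param args Any arguments to pass BEFORE the input file path is appended.
   * @param valueMultiplier Expected multiplier of the values written to the table.
   * @return The Tool instance used to run the test.
   */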
  protected static Tool doMROnTableTest(HBaseTestingUtility util, String family,
      String data, String[] args, int valueMultiplier) throws Exception {
    String table = args[args.length - 1];
    Configuration conf = new Configuration(util.getConfiguration());

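    // Populate the input file with test data.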
    FileSystem fs = FileSystem.get(conf);
    Path inputPath = fs.makeQualified(new Path(util.getDataTestDirOnTestFS(table), "input.dat"));
    FSDataOutputStream op = fs.create(inputPath, true);
    if (data == null) {
      data = "KEY\u001bVALUE1\u001bVALUE2\n";
    }
    op.write(Bytes.toBytes(data));
    op.close();
    LOG.debug(String.format("Wrote test data to file: %s", inputPath));

    if (conf.getBoolean(FORCE_COMBINER_CONF, true)) {
      LOG.debug("Forcing combiner.");
      conf.setInt("min.num.spills.for.combine", 1);
    }

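    // Run the import.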
    List<String> argv = new ArrayList<String>(Arrays.asList(args));
    argv.add(inputPath.toString());
    Tool tool = new ImportTsv();
    LOG.debug("Running ImportTsv with arguments: " + argv);
    assertEquals(0, ToolRunner.run(conf, tool, argv.toArray(new String[argv.size()])));

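    // Perform basic validation. If the args included ImportTsv.BULK_OUTPUT_CONF_KEY,
    // validate the presence of HFiles; otherwise, validate the data in the table.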
    boolean createdHFiles = false;
    String outputPath = null;
    for (String arg : argv) {
      if (arg.contains(ImportTsv.BULK_OUTPUT_CONF_KEY)) {
        createdHFiles = true;
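        // Split '-Dkey=value' on '=' and keep the value.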
        outputPath = arg.split("=")[1];
        break;
      }
    }

    if (createdHFiles) {
      validateHFiles(fs, outputPath, family);
    } else {
      validateTable(conf, table, family, valueMultiplier);
    }

    if (conf.getBoolean(DELETE_AFTER_LOAD_CONF, true)) {
      LOG.debug("Deleting test subdirectory");
      util.cleanupDataTestDirOnTestFS(table);
    }
    return tool;
  }

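  /**
   * Confirm ImportTsv by scanning the data written to the online table.
   */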
  private static void validateTable(Configuration conf, String tableName,
      String family, int valueMultiplier) throws IOException {

    LOG.debug("Validating table.");
    HTable table = new HTable(conf, tableName);
    boolean verified = false;
    long pause = conf.getLong("hbase.client.pause", 5 * 1000);
    int numRetries = conf.getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 5);
    for (int i = 0; i < numRetries; i++) {
      try {
        Scan scan = new Scan();
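        // Scan the entire family.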
        scan.addFamily(Bytes.toBytes(family));
        ResultScanner resScanner = table.getScanner(scan);
        for (Result res : resScanner) {
          assertEquals(2, res.size());
          List<Cell> kvs = res.listCells();
          assertTrue(CellUtil.matchingRow(kvs.get(0), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingRow(kvs.get(1), Bytes.toBytes("KEY")));
          assertTrue(CellUtil.matchingValue(kvs.get(0), Bytes.toBytes("VALUE" + valueMultiplier)));
          assertTrue(CellUtil.matchingValue(kvs.get(1), Bytes.toBytes("VALUE" + (2 * valueMultiplier))));
        }
        verified = true;
        break;
      } catch (NullPointerException e) {
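        // A NullPointerException means the data was not yet available;
        // fall through to pause and retry.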
      }
      try {
        Thread.sleep(pause);
      } catch (InterruptedException e) {
        // Ignore the interrupt and proceed to the next retry attempt.
      }
    }
    table.close();
    assertTrue(verified);
  }

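  /**
   * Confirm ImportTsv by validating the HFiles written to the filesystem.
   */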
  private static void validateHFiles(FileSystem fs, String outputPath, String family)
      throws IOException {

    LOG.debug("Validating HFiles.");
    Set<String> configFamilies = new HashSet<String>();
    configFamilies.add(family);
    Set<String> foundFamilies = new HashSet<String>();
    for (FileStatus cfStatus : fs.listStatus(new Path(outputPath), new OutputFilesFilter())) {
      String[] elements = cfStatus.getPath().toString().split(Path.SEPARATOR);
      String cf = elements[elements.length - 1];
      foundFamilies.add(cf);
      assertTrue(
          String.format(
              "HFile output contains a column family (%s) not present in input families (%s)",
              cf, configFamilies),
          configFamilies.contains(cf));
      for (FileStatus hfile : fs.listStatus(cfStatus.getPath())) {
        assertTrue(
            String.format("HFile %s appears to contain no data.", hfile.getPath()),
            hfile.getLen() > 0);
      }
    }
  }
}