/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.TreeMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HFileTestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

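/**
 * Test cases for the "load" half of the HFileOutputFormat bulk load functionality,
 * exercised through {@link LoadIncrementalHFiles} against a mini cluster.
 */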
@Category(LargeTests.class)
public class TestLoadIncrementalHFiles {
  @Rule
  public TestName tn = new TestName();

  private static final byte[] QUALIFIER = Bytes.toBytes("myqual");
  private static final byte[] FAMILY = Bytes.toBytes("myfam");
  private static final String NAMESPACE = "bulkNS";

  static final String EXPECTED_MSG_FOR_NON_EXISTING_FAMILY = "Unmatched family names found";
  static final int MAX_FILES_PER_REGION_PER_FAMILY = 4;

  private static final byte[][] SPLIT_KEYS = new byte[][] {
    Bytes.toBytes("ddd"),
    Bytes.toBytes("ppp")
  };

  static HBaseTestingUtility util = new HBaseTestingUtility();

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, "");
    util.getConfiguration().setInt(
      LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY,
      MAX_FILES_PER_REGION_PER_FAMILY);
    // Use a codec that ships tags with RPC responses so that tag-related tests can verify them.
    util.getConfiguration().set(HConstants.RPC_CODEC_CONF_KEY,
      KeyValueCodecWithTags.class.getCanonicalName());
    util.startMiniCluster();

    setupNamespace();
  }

  protected static void setupNamespace() throws Exception {
    util.getHBaseAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build());
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    util.shutdownMiniCluster();
  }

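  /**
   * Load two HFiles with disjoint, in-order key ranges into a table created on demand.
   */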
  @Test(timeout = 60000)
  public void testSimpleLoad() throws Exception {
    runTest("testSimpleLoad", BloomType.NONE,
        new byte[][][] {
          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") },
          new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
        });
  }

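  /**
   * Load two HFiles whose key ranges are intended to cross region boundaries
   * ('aaaa' through 'zzz').
   */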
  @Test(timeout = 60000)
  public void testRegionCrossingLoad() throws Exception {
    runTest("testRegionCrossingLoad", BloomType.NONE,
        new byte[][][] {
          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
        });
  }

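  /**
   * Same load as testRegionCrossingLoad, but the column family uses a ROW bloom filter.
   */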
  @Test(timeout = 60000)
  public void testRegionCrossingRowBloom() throws Exception {
    runTest("testRegionCrossingLoadRowBloom", BloomType.ROW,
        new byte[][][] {
          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
        });
  }

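  /**
   * Same load as testRegionCrossingLoad, but the column family uses a ROWCOL bloom filter.
   */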
  @Test(timeout = 60000)
  public void testRegionCrossingRowColBloom() throws Exception {
    runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL,
        new byte[][][] {
          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
        });
  }

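  /**
   * The table is pre-split at aaa/fff/jjj/ppp/uuu/zzz while the HFiles cover aaaa-lll and
   * mmm-zzz, so each HFile has to be split before it can be loaded.
   */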
  @Test(timeout = 60000)
  public void testSimpleHFileSplit() throws Exception {
    runTest("testHFileSplit", BloomType.NONE,
        new byte[][] {
          Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
          Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
        },
        new byte[][][] {
          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("lll") },
          new byte[][]{ Bytes.toBytes("mmm"), Bytes.toBytes("zzz") },
        }
    );
  }

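  /**
   * Same pre-split table as testSimpleHFileSplit, but with HFiles that also cross region
   * boundaries (aaaa-eee and fff-zzz); delegates to the BloomType-parameterized helper below.
   */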
  @Test(timeout = 60000)
  public void testRegionCrossingHFileSplit() throws Exception {
    testRegionCrossingHFileSplit(BloomType.NONE);
  }

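  /**
   * ROW bloom filter variant of testRegionCrossingHFileSplit.
   */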
  @Test(timeout = 60000)
  public void testRegionCrossingHFileSplitRowBloom() throws Exception {
    testRegionCrossingHFileSplit(BloomType.ROW);
  }

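  /**
   * ROWCOL bloom filter variant of testRegionCrossingHFileSplit.
   */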
  @Test(timeout = 60000)
  public void testRegionCrossingHFileSplitRowColBloom() throws Exception {
    testRegionCrossingHFileSplit(BloomType.ROWCOL);
  }

  private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception {
    runTest("testHFileSplit" + bloomType + "Bloom", bloomType,
        new byte[][] {
          Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
          Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
        },
        new byte[][][] {
          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
        }
    );
  }

  private HTableDescriptor buildHTD(TableName tableName, BloomType bloomType) {
    HTableDescriptor htd = new HTableDescriptor(tableName);
    HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
    familyDesc.setBloomFilterType(bloomType);
    htd.addFamily(familyDesc);
    return htd;
  }

  private void runTest(String testName, BloomType bloomType,
      byte[][][] hfileRanges) throws Exception {
    runTest(testName, bloomType, null, hfileRanges);
  }

  private void runTest(String testName, BloomType bloomType,
      byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception {
    final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName);
    final boolean preCreateTable = tableSplitKeys != null;

    // Run the test bulkloading the table to the default namespace.
    final TableName TABLE_WITHOUT_NS = TableName.valueOf(TABLE_NAME);
    runTest(testName, TABLE_WITHOUT_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges);

    // Run the test bulkloading the table to the specified namespace.
    final TableName TABLE_WITH_NS = TableName.valueOf(Bytes.toBytes(NAMESPACE), TABLE_NAME);
    runTest(testName, TABLE_WITH_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges);
  }

  private void runTest(String testName, TableName tableName, BloomType bloomType,
      boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception {
    HTableDescriptor htd = buildHTD(tableName, bloomType);
    runTest(testName, htd, bloomType, preCreateTable, tableSplitKeys, hfileRanges);
  }

  private void runTest(String testName, HTableDescriptor htd, BloomType bloomType,
      boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception {
    // Run the load twice: once through a managed HTable and once through an unmanaged connection.
    for (boolean managed : new boolean[] { true, false }) {
      Path dir = util.getDataTestDirOnTestFS(testName);
      FileSystem fs = util.getTestFileSystem();
      dir = dir.makeQualified(fs);
      Path familyDir = new Path(dir, Bytes.toString(FAMILY));

      int hfileIdx = 0;
      for (byte[][] range : hfileRanges) {
        byte[] from = range[0];
        byte[] to = range[1];
        HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
            + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000);
      }
      int expectedRows = hfileIdx * 1000;

      if (preCreateTable) {
        util.getHBaseAdmin().createTable(htd, tableSplitKeys);
      }

      final TableName tableName = htd.getTableName();
      if (!util.getHBaseAdmin().tableExists(tableName)) {
        util.getHBaseAdmin().createTable(htd);
      }
      LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());

      if (managed) {
        try (HTable table = new HTable(util.getConfiguration(), tableName)) {
          loader.doBulkLoad(dir, table);
          assertEquals(expectedRows, util.countRows(table));
        }
      } else {
        try (Connection conn = ConnectionFactory.createConnection(util.getConfiguration());
            HTable table = (HTable) conn.getTable(tableName)) {
          loader.doBulkLoad(dir, table);
        }
      }

      // Verify that the bulk load staging directory has been cleaned up.
      Path stagingBasePath = SecureBulkLoadUtil.getBaseStagingDir(util.getConfiguration());
      if (fs.exists(stagingBasePath)) {
        FileStatus[] files = fs.listStatus(stagingBasePath);
        for (FileStatus file : files) {
          assertTrue("Folder=" + file.getPath() + " is not cleaned up.",
              !"DONOTERASE".equals(file.getPath().getName()));
        }
      }

      util.deleteTable(tableName);
    }
  }

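  /**
   * Verify that cell tags survive a bulk load that has to split the HFile across a pre-split
   * table. Relies on the KeyValueCodecWithTags RPC codec configured in setUpBeforeClass() so
   * that tags are shipped back to the client for verification.
   */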
  @Test(timeout = 60000)
  public void testTagsSurviveBulkLoadSplit() throws Exception {
    Path dir = util.getDataTestDirOnTestFS(tn.getMethodName());
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs);
    Path familyDir = new Path(dir, Bytes.toString(FAMILY));

    byte[][] tableSplitKeys = new byte[][] {
      Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"),
      Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"),
    };

    byte[] from = Bytes.toBytes("ddd");
    byte[] to = Bytes.toBytes("ooo");
    HFileTestUtil.createHFileWithTags(util.getConfiguration(), fs,
        new Path(familyDir, tn.getMethodName() + "_hfile"),
        FAMILY, QUALIFIER, from, to, 1000);
    int expectedRows = 1000;

    TableName tableName = TableName.valueOf(tn.getMethodName());
    HTableDescriptor htd = buildHTD(tableName, BloomType.NONE);
    util.getHBaseAdmin().createTable(htd, tableSplitKeys);

    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
    String[] args = { dir.toString(), tableName.toString() };
    loader.run(args);

    Table table = util.getConnection().getTable(tableName);
    try {
      assertEquals(expectedRows, util.countRows(table));
      HFileTestUtil.verifyTags(table);
    } finally {
      table.close();
    }

    util.deleteTable(tableName);
  }

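  /**
   * Loading into a table whose only column family does not match the family directory of the
   * HFiles should fail with an IOException that names the unmatched family.
   */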
  @Test(timeout = 60000)
  public void testNonexistentColumnFamilyLoad() throws Exception {
    String testName = "testNonexistentColumnFamilyLoad";
    byte[][][] hFileRanges = new byte[][][] {
      new byte[][]{ Bytes.toBytes("aaa"), Bytes.toBytes("ccc") },
      new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
    };

    final byte[] TABLE = Bytes.toBytes("mytable_" + testName);
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE));

    // Upper-case the real family name on purpose to simulate a column family name mismatch.
    HColumnDescriptor family =
        new HColumnDescriptor(Bytes.toBytes(Bytes.toString(FAMILY).toUpperCase()));
    htd.addFamily(family);

    try {
      runTest(testName, htd, BloomType.NONE, true, SPLIT_KEYS, hFileRanges);
      fail("Loading into table with non-existent family should have failed");
    } catch (Exception e) {
      assertTrue("IOException expected", e instanceof IOException);

      String errMsg = e.getMessage();
      assertTrue("Incorrect exception message, expected message: ["
          + EXPECTED_MSG_FOR_NON_EXISTING_FAMILY + "], current message: [" + errMsg + "]",
          errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY));
    }
  }

  @Test(timeout = 60000)
  public void testNonHfileFolderWithUnmatchedFamilyName() throws Exception {
    testNonHfileFolder("testNonHfileFolderWithUnmatchedFamilyName", true);
  }

  @Test(timeout = 60000)
  public void testNonHfileFolder() throws Exception {
    testNonHfileFolder("testNonHfileFolder", false);
  }

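  /**
   * Write an HFile plus a random non-HFile into the family directory, and a non-family folder
   * ("_logs") alongside it; the loader should ignore everything that is not a loadable HFile.
   */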
  private void testNonHfileFolder(String tableName, boolean preCreateTable) throws Exception {
    Path dir = util.getDataTestDirOnTestFS(tableName);
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs);

    Path familyDir = new Path(dir, Bytes.toString(FAMILY));
    HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_0"),
        FAMILY, QUALIFIER, Bytes.toBytes("begin"), Bytes.toBytes("end"), 500);
    createRandomDataFile(fs, new Path(familyDir, "012356789"), 16 * 1024);

    final String NON_FAMILY_FOLDER = "_logs";
    Path nonFamilyDir = new Path(dir, NON_FAMILY_FOLDER);
    fs.mkdirs(nonFamilyDir);
    fs.mkdirs(new Path(nonFamilyDir, "non-file"));
    createRandomDataFile(fs, new Path(nonFamilyDir, "012356789"), 16 * 1024);

    Table table = null;
    try {
      if (preCreateTable) {
        table = util.createTable(TableName.valueOf(tableName), FAMILY);
      } else {
        table = util.getConnection().getTable(TableName.valueOf(tableName));
      }

      final String[] args = { dir.toString(), tableName };
      new LoadIncrementalHFiles(util.getConfiguration()).run(args);
      assertEquals(500, util.countRows(table));
    } finally {
      if (table != null) {
        table.close();
      }
      fs.delete(dir, true);
    }
  }

  private static void createRandomDataFile(FileSystem fs, Path path, int size)
      throws IOException {
    FSDataOutputStream stream = fs.create(path);
    try {
      byte[] data = new byte[1024];
      for (int i = 0; i < data.length; ++i) {
        data[i] = (byte) (i & 0xff);
      }
      while (size >= data.length) {
        stream.write(data, 0, data.length);
        size -= data.length;
      }
      if (size > 0) {
        stream.write(data, 0, size);
      }
    } finally {
      stream.close();
    }
  }

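  /**
   * Write a single HFile spanning aaa-zzz, split it at 'ggg', and verify that the bottom and
   * top halves together still contain all 1000 rows.
   */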
  @Test(timeout = 60000)
  public void testSplitStoreFile() throws IOException {
    Path dir = util.getDataTestDirOnTestFS("testSplitHFile");
    FileSystem fs = util.getTestFileSystem();
    Path testIn = new Path(dir, "testhfile");
    HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
    HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER,
        Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);

    Path bottomOut = new Path(dir, "bottom.out");
    Path topOut = new Path(dir, "top.out");

    LoadIncrementalHFiles.splitStoreFile(
        util.getConfiguration(), testIn,
        familyDesc, Bytes.toBytes("ggg"),
        bottomOut,
        topOut);

    int rowCount = verifyHFile(bottomOut);
    rowCount += verifyHFile(topOut);
    assertEquals(1000, rowCount);
  }

  @Test
  public void testSplitStoreFileWithNoneToNone() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
  }

  @Test
  public void testSplitStoreFileWithEncodedToEncoded() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF);
  }

  @Test
  public void testSplitStoreFileWithEncodedToNone() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE);
  }

  @Test
  public void testSplitStoreFileWithNoneToEncoded() throws IOException {
    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF);
  }

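  /**
   * Write an HFile with one data block encoding, split it for a column family configured with
   * another encoding, and verify that no rows are lost in either half.
   */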
  private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
      DataBlockEncoding cfEncoding) throws IOException {
    Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
    FileSystem fs = util.getTestFileSystem();
    Path testIn = new Path(dir, "testhfile");
    HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
    familyDesc.setDataBlockEncoding(cfEncoding);
    HFileTestUtil.createHFileWithDataBlockEncoding(
        util.getConfiguration(), fs, testIn, bulkloadEncoding,
        FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);

    Path bottomOut = new Path(dir, "bottom.out");
    Path topOut = new Path(dir, "top.out");

    LoadIncrementalHFiles.splitStoreFile(
        util.getConfiguration(), testIn,
        familyDesc, Bytes.toBytes("ggg"),
        bottomOut,
        topOut);

    int rowCount = verifyHFile(bottomOut);
    rowCount += verifyHFile(topOut);
    assertEquals(1000, rowCount);
  }

  private int verifyHFile(Path p) throws IOException {
    Configuration conf = util.getConfiguration();
    HFile.Reader reader = HFile.createReader(
        p.getFileSystem(conf), p, new CacheConfig(conf), conf);
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, false);
    scanner.seekTo();
    int count = 0;
    do {
      count++;
    } while (scanner.next());
    assertTrue(count > 0);
    reader.close();
    return count;
  }

  // Track interval coverage for inferBoundaries(): +1 at a range's start key, -1 at its end key.
  private void addStartEndKeysForTest(TreeMap<byte[], Integer> map, byte[] first, byte[] last) {
    Integer value = map.containsKey(first) ? map.get(first) : 0;
    map.put(first, value + 1);

    value = map.containsKey(last) ? map.get(last) : 0;
    map.put(last, value - 1);
  }

  @Test
  public void testInferBoundaries() {
    TreeMap<byte[], Integer> map = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);

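    /*
     * The ranges added below overlap into four groups:
     *   [a,e] [c,i] [g,k]           -> a..k
     *   [m,q] [o,p]                 -> m..q
     *   [r,s] [s,t]                 -> r..t
     *   [u,w] [v,x]                 -> u..x
     * so the inferred split boundaries are expected to be (m, r, u).
     */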
    String first;
    String last;

    first = "a"; last = "e";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "r"; last = "s";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "o"; last = "p";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "g"; last = "k";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "v"; last = "x";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "c"; last = "i";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "m"; last = "q";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "s"; last = "t";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    first = "u"; last = "w";
    addStartEndKeysForTest(map, Bytes.toBytes(first), Bytes.toBytes(last));

    byte[][] keysArray = LoadIncrementalHFiles.inferBoundaries(map);
    byte[][] compare = new byte[3][];
    compare[0] = Bytes.toBytes("m");
    compare[1] = Bytes.toBytes("r");
    compare[2] = Bytes.toBytes("u");

    assertEquals(3, keysArray.length);

    for (int row = 0; row < keysArray.length; row++) {
      assertArrayEquals(compare[row], keysArray[row]);
    }
  }

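  /**
   * Loading more than MAX_FILES_PER_REGION_PER_FAMILY HFiles for a single family of a single
   * region should be rejected.
   */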
  @Test(timeout = 60000)
  public void testLoadTooManyHFiles() throws Exception {
    Path dir = util.getDataTestDirOnTestFS("testLoadTooManyHFiles");
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs);
    Path familyDir = new Path(dir, Bytes.toString(FAMILY));

    byte[] from = Bytes.toBytes("begin");
    byte[] to = Bytes.toBytes("end");
    for (int i = 0; i <= MAX_FILES_PER_REGION_PER_FAMILY; i++) {
      HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_"
          + i), FAMILY, QUALIFIER, from, to, 1000);
    }

    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
    String[] args = { dir.toString(), "mytable_testLoadTooManyHFiles" };
    try {
      loader.run(args);
      fail("Bulk loading too many files should fail");
    } catch (IOException ie) {
      assertTrue(ie.getMessage().contains("Trying to load more than "
          + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles"));
    }
  }

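  /**
   * With "create table" set to "no", loading into a table that does not exist should throw
   * TableNotFoundException.
   */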
  @Test(expected = TableNotFoundException.class)
  public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
    Configuration conf = util.getConfiguration();
    conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    String[] args = { "directory", "nonExistingTable" };
    loader.run(args);
  }

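  /**
   * Bulk load into a table whose column family name starts with an underscore; the loader must
   * not mistake the "_cf" family directory for an internal (non-family) folder.
   */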
  @Test(timeout = 60000)
  public void testTableWithCFNameStartWithUnderScore() throws Exception {
    Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore");
    FileSystem fs = util.getTestFileSystem();
    dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
    String family = "_cf";
    Path familyDir = new Path(dir, family);

    byte[] from = Bytes.toBytes("begin");
    byte[] to = Bytes.toBytes("end");
    Configuration conf = util.getConfiguration();
    String tableName = "mytable_cfNameStartWithUnderScore";
    Table table = util.createTable(TableName.valueOf(tableName), family);
    HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family),
        QUALIFIER, from, to, 1000);

    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    String[] args = { dir.toString(), tableName };
    try {
      loader.run(args);
      assertEquals(1000, util.countRows(table));
    } finally {
      if (null != table) {
        table.close();
      }
    }
  }
}