RAT (Release Audit Tool) results

This document contains the results of a RAT (Release Audit Tool) scan of the source tree.

*****************************************************
Summary
-------
Generated at: 2015-02-12T17:27:14-05:00
Notes: 5
Binaries: 16
Archives: 0
Standards: 1507

Apache Licensed: 1433
Generated Documents: 0

JavaDocs are generated, so a license header is optional for them
Generated files do not require license headers

74 Unknown Licenses

*******************************

Unapproved licenses:

  .git/config
  .git/logs/HEAD
  .git/logs/refs/heads/1.3-linux-maint
  .git/logs/refs/heads/dal
  .git/description
  .git/info/exclude
  .git/packed-refs
  .git/HEAD
  .git/refs/heads/1.3-linux-maint
  .git/refs/heads/dal
  .git/refs/remotes/origin/HEAD
  .git/hooks/pre-rebase.sample
  .git/hooks/post-update.sample
  .git/hooks/post-receive.sample
  .git/hooks/prepare-commit-msg.sample
  .git/hooks/commit-msg.sample
  .git/hooks/pre-commit.sample
  .git/hooks/applypatch-msg.sample
  .git/hooks/update.sample
  .git/hooks/post-commit.sample
  .git/hooks/pre-applypatch.sample
  .gitignore
  HDP-CHANGES.txt
  CHANGES.txt
  conf/regionservers
  conf/log4j.properties
  conf/hadoop-metrics.properties
  .arcconfig
  src/site/resources/css/freebsd_docbook.css
  src/site/resources/images/hbase_logo.svg
  src/site/resources/images/big_h_logo.svg
  src/packages/deb/hbase.control/conffile
  src/test/java/org/apache/hadoop/hbase/client/InstantSchemaChangeTestBase.java
  src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
  src/test/java/org/apache/hadoop/hbase/regionserver/TestHBase7051.java
  src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
  src/main/avro/hbase.avpr
  src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java
  src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java
  src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
  src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDeleteType.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java
  src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTimeRange.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
  src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
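
Most of the paths above are git metadata (the .git/ internals) or
machine-generated sources (the Thrift and protobuf output under
generated/ directories), which per the notes in the summary do not
require license headers. Of the 1507 files checked against the
standard, 1433 carry Apache License headers, which leaves these 74.
A minimal triage sketch in Python, assuming the list above has been
saved to a hypothetical rat-unapproved.txt with one path per line:

    # Keep only unapproved paths that may genuinely need a header.
    # Paths under .git/ are repository metadata, and paths containing
    # /generated/ are machine-generated (Thrift/protobuf), so per the
    # summary notes neither group needs a license header.
    def needs_attention(path: str) -> bool:
        p = path.strip()
        return bool(p) and not p.startswith(".git/") and "/generated/" not in p

    # "rat-unapproved.txt" is a hypothetical saved copy of the list above.
    with open("rat-unapproved.txt") as fh:
        todo = [line.strip() for line in fh if needs_attention(line)]

    # Leaves the genuine review candidates, e.g. CHANGES.txt,
    # conf/log4j.properties, src/main/avro/hbase.avpr, ...
    print("\n".join(todo))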

*******************************

Archives:

*****************************************************
  Files with Apache License headers will be marked AL
  Binary files (which do not require AL headers) will be marked B
  Compressed archives will be marked A
  Notices, licenses, etc. will be marked N
  Files with unknown or unapproved licenses will be marked !?????
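
The per-file listing follows. As a cross-check against the summary
counts, a minimal tally sketch in Python, assuming this report has
been saved to a hypothetical rat-report.txt:

    import collections
    import re

    # Status column of the listing: AL, B, A, N, or a !????? run for
    # unknown/unapproved licenses, followed by the file path.
    MARKER = re.compile(r"^\s*(AL|B|A|N|!\?+)\s+\S")

    counts = collections.Counter()
    with open("rat-report.txt") as fh:  # hypothetical saved copy
        for line in fh:
            m = MARKER.match(line)
            if m:
                tag = "unknown" if m.group(1).startswith("!") else m.group(1)
                counts[tag] += 1

    # Should agree with the summary: 1433 AL, 16 B, 5 N, 74 unknown.
    print(dict(counts))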
 !????? .git/config
 !????? .git/logs/HEAD
 !????? .git/logs/refs/heads/1.3-linux-maint
 !????? .git/logs/refs/heads/dal
 !????? .git/description
  B     .git/index
 !????? .git/info/exclude
  B     .git/objects/pack/pack-00e1a15ccf425324153e9fb94e6e612de0f65d07.idx
  B     .git/objects/pack/pack-00e1a15ccf425324153e9fb94e6e612de0f65d07.pack
 !????? .git/packed-refs
 !????? .git/HEAD
 !????? .git/refs/heads/1.3-linux-maint
 !????? .git/refs/heads/dal
 !????? .git/refs/remotes/origin/HEAD
 !????? .git/hooks/pre-rebase.sample
 !????? .git/hooks/post-update.sample
 !????? .git/hooks/post-receive.sample
 !????? .git/hooks/prepare-commit-msg.sample
 !????? .git/hooks/commit-msg.sample
 !????? .git/hooks/pre-commit.sample
 !????? .git/hooks/applypatch-msg.sample
 !????? .git/hooks/update.sample
 !????? .git/hooks/post-commit.sample
 !????? .git/hooks/pre-applypatch.sample
  N     NOTICE.txt
  AL    security/src/test/resources/hbase-site.xml
  AL    security/src/test/java/org/apache/hadoop/hbase/security/token/TestZKSecretWatcher.java
  AL    security/src/test/java/org/apache/hadoop/hbase/security/token/TestTokenAuthentication.java
  AL    security/src/test/java/org/apache/hadoop/hbase/security/access/TestZKPermissionsWatcher.java
  AL    security/src/test/java/org/apache/hadoop/hbase/security/access/SecureTestUtil.java
  AL    security/src/test/java/org/apache/hadoop/hbase/security/access/TestTablePermissions.java
  AL    security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
  AL    security/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessControlFilter.java
  AL    security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java
  AL    security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/HBasePolicyProvider.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcClient.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationKey.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationProtocol.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/AccessDeniedException.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlLists.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/AccessControllerProtocol.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadProtocol.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/access/UserPermission.java
  AL    security/src/main/java/org/apache/hadoop/hbase/security/HBaseMultiRealmUserAuthentication.java
  AL    security/src/main/java/org/apache/hadoop/hbase/ipc/SecureConnectionHeader.java
  AL    security/src/main/java/org/apache/hadoop/hbase/ipc/SecureRpcEngine.java
  AL    security/src/main/java/org/apache/hadoop/hbase/ipc/SecureServer.java
  AL    security/src/main/java/org/apache/hadoop/hbase/ipc/SecureClient.java
 !????? .gitignore
  AL    pom.xml
  AL    dev-support/hbasetests.sh
  AL    dev-support/test-patch.properties
  AL    dev-support/smart-apply-patch.sh
  AL    dev-support/findHangingTest.sh
  AL    dev-support/test-patch.sh
  AL    dev-support/test-util.sh
  N     README.txt
  AL    bin/hbase-jruby
  AL    bin/master-backup.sh
  AL    bin/hbase-daemon.sh
  AL    bin/start-hbase.sh
  AL    bin/local-master-backup.sh
  AL    bin/hirb.rb
  AL    bin/rolling-restart.sh
  AL    bin/regionservers.sh
  AL    bin/local-regionservers.sh
  AL    bin/region_mover.rb
  AL    bin/hbase-daemons.sh
  AL    bin/replication/copy_tables_desc.rb
  AL    bin/hbase-config.sh
  AL    bin/region_status.rb
  AL    bin/hbase
  AL    bin/zookeepers.sh
  AL    bin/stop-hbase.sh
  AL    bin/graceful_stop.sh
  AL    bin/get-active-master.rb
 !????? HDP-CHANGES.txt
 !????? CHANGES.txt
  AL    pom.xml.versionsBackup
  AL    conf/hbase-site.xml
 !????? conf/regionservers
 !????? conf/log4j.properties
  AL    conf/hbase-env.sh
  AL    conf/hbase-policy.xml
 !????? conf/hadoop-metrics.properties
  N     LICENSE.txt
 !????? .arcconfig
  AL    src/site/resources/css/site.css
 !????? src/site/resources/css/freebsd_docbook.css
  AL    src/site/resources/doap_Hbase.rdf
  B     src/site/resources/images/favicon.ico
  B     src/site/resources/images/hadoop-logo.jpg
 !????? src/site/resources/images/hbase_logo.svg
  B     src/site/resources/images/hbase_logo.png
  B     src/site/resources/images/big_h_logo.png
 !????? src/site/resources/images/big_h_logo.svg
  B     src/site/resources/images/replication_overview.png
  B     src/site/resources/images/hfile.png
  B     src/site/resources/images/hfilev2.png
  B     src/site/resources/images/architecture.gif
  AL    src/site/site.xml
  AL    src/site/xdoc/sponsors.xml
  AL    src/site/xdoc/old_news.xml
  AL    src/site/xdoc/metrics.xml
  AL    src/site/xdoc/acid-semantics.xml
  AL    src/site/xdoc/cygwin.xml
  AL    src/site/xdoc/replication.xml
  AL    src/site/xdoc/bulk-loads.xml
  AL    src/site/xdoc/index.xml
  AL    src/site/xdoc/resources.xml
  AL    src/site/xdoc/pseudo-distributed.xml
  AL    src/site/site.vm
  AL    src/docbkx/community.xml
  AL    src/docbkx/customization.xsl
  AL    src/docbkx/external_apis.xml
  AL    src/docbkx/upgrading.xml
  AL    src/docbkx/configuration.xml
  AL    src/docbkx/troubleshooting.xml
  AL    src/docbkx/book.xml
  AL    src/docbkx/performance.xml
  AL    src/docbkx/developer.xml
  AL    src/docbkx/ops_mgt.xml
  AL    src/docbkx/case_studies.xml
  AL    src/docbkx/zookeeper.xml
  AL    src/docbkx/shell.xml
  AL    src/docbkx/security.xml
  AL    src/docbkx/getting_started.xml
  AL    src/docbkx/preface.xml
  AL    src/examples/thrift/Makefile
  N     src/examples/thrift/README.txt
  AL    src/examples/thrift/DemoClient.py
  AL    src/examples/thrift/DemoClient.rb
  AL    src/examples/thrift/DemoClient.php
  AL    src/examples/thrift/DemoClient.cpp
  AL    src/examples/thrift/DemoClient.pl
  AL    src/examples/thrift/DemoClient.java
  AL    src/examples/thrift2/DemoClient.py
  AL    src/examples/thrift2/DemoClient.java
  N     src/examples/README.txt
  AL    src/examples/mapreduce/index-builder-setup.rb
  AL    src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/SampleUploader.java
  AL    src/examples/mapreduce/org/apache/hadoop/hbase/mapreduce/IndexBuilder.java
  AL    src/examples/healthcheck/healthcheck.sh
  AL    src/packages/build.xml
  AL    src/packages/conf-pseudo/hbase-site.xml
  AL    src/packages/deb/conf-pseudo.control/conffile
  AL    src/packages/deb/conf-pseudo.control/prerm
  AL    src/packages/deb/conf-pseudo.control/control
  AL    src/packages/deb/conf-pseudo.control/postinst
  AL    src/packages/deb/hbase.control/preinst
 !????? src/packages/deb/hbase.control/conffile
  AL    src/packages/deb/hbase.control/prerm
  AL    src/packages/deb/hbase.control/control
  AL    src/packages/deb/hbase.control/postinst
  AL    src/packages/deb/hbase.control/postrm
  AL    src/packages/deb/init.d/hbase-regionserver
  AL    src/packages/deb/init.d/hbase-master
  AL    src/packages/update-hbase-env.sh
  AL    src/packages/rpm/init.d/hbase-regionserver
  AL    src/packages/rpm/init.d/hbase-master
  AL    src/packages/rpm/spec/hbase.spec
  AL    src/packages/rpm/spec/conf-pseudo.spec
  AL    src/test/resources/hbase-site.xml
  AL    src/test/resources/log4j.properties
  B     src/test/resources/org/apache/hadoop/hbase/io/hfile/8e8ab58dcf39412da19833fcd8f687ac
  AL    src/test/resources/org/apache/hadoop/hbase/PerformanceEvaluation_Counter.properties
  AL    src/test/resources/mapred-queues.xml
  B     src/test/data/hbase-4388-root.dir.tgz
  AL    src/test/java/org/apache/hadoop/hbase/TestHRegionLocation.java
  AL    src/test/java/org/apache/hadoop/hbase/MiniHBaseCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/TestLocalHBaseCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/TestHDFSBlocksDistribution.java
  AL    src/test/java/org/apache/hadoop/hbase/constraint/CheckConfigurationConstraint.java
  AL    src/test/java/org/apache/hadoop/hbase/constraint/RuntimeFailConstraint.java
  AL    src/test/java/org/apache/hadoop/hbase/constraint/TestConstraints.java
  AL    src/test/java/org/apache/hadoop/hbase/constraint/AllFailConstraint.java
  AL    src/test/java/org/apache/hadoop/hbase/constraint/WorksConstraint.java
  AL    src/test/java/org/apache/hadoop/hbase/constraint/AllPassConstraint.java
  AL    src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
  AL    src/test/java/org/apache/hadoop/hbase/TimestampTestBase.java
  AL    src/test/java/org/apache/hadoop/hbase/IntegrationTests.java
  AL    src/test/java/org/apache/hadoop/hbase/migration/TestMigrationFrom090To092.java
  AL    src/test/java/org/apache/hadoop/hbase/ClusterManager.java
  AL    src/test/java/org/apache/hadoop/hbase/security/TestUser.java
  AL    src/test/java/org/apache/hadoop/hbase/executor/TestExecutorService.java
  AL    src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
  AL    src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
  AL    src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
  AL    src/test/java/org/apache/hadoop/hbase/TestClassFinder.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotLogSplitter.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreFlushSnapshotFromClient.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestWALReferenceTask.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestFlushSnapshotFromClient.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotTask.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestReferenceRegionHFilesTask.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestCopyRecoveredEditsTask.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestExportSnapshot.java
  AL    src/test/java/org/apache/hadoop/hbase/snapshot/TestSnapshotDescriptionUtils.java
  AL    src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java
  AL    src/test/java/org/apache/hadoop/hbase/IntegrationTestingUtility.java
  AL    src/test/java/org/apache/hadoop/hbase/thrift2/TestThriftHBaseServiceHandler.java
  AL    src/test/java/org/apache/hadoop/hbase/io/encoding/TestDataBlockEncoders.java
  AL    src/test/java/org/apache/hadoop/hbase/io/encoding/TestLoadAndSwitchEncodeOnDisk.java
  AL    src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
  AL    src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
  AL    src/test/java/org/apache/hadoop/hbase/io/encoding/RedundantKVGenerator.java
  AL    src/test/java/org/apache/hadoop/hbase/io/encoding/TestBufferedDataBlockEncoder.java
  AL    src/test/java/org/apache/hadoop/hbase/io/encoding/TestUpgradeFromHFileV1ToEncoding.java
  AL    src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/RandomSeek.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheColumnFamilySummary.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/RandomDistribution.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileInlineToRootChunkConversion.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/CacheTestUtils.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/NanoTimer.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileReaderV1.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFilePerformance.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestFixedFileTrailer.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/slab/TestSlabCache.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/slab/TestSlab.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/slab/TestSingleSizeCache.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestSeekTo.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestChecksum.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestLruBlockCache.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlock.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestReseekTo.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFile.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileSeek.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileWriterV2.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockCompatibility.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java
  AL    src/test/java/org/apache/hadoop/hbase/io/hfile/KeySampler.java
  AL    src/test/java/org/apache/hadoop/hbase/io/TestImmutableBytesWritable.java
  AL    src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
  AL    src/test/java/org/apache/hadoop/hbase/io/TestHbaseObjectWritable.java
  AL    src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
  AL    src/test/java/org/apache/hadoop/hbase/avro/TestAvroServer.java
  AL    src/test/java/org/apache/hadoop/hbase/avro/TestAvroUtil.java
  AL    src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
  AL    src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
  AL    src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
  AL    src/test/java/org/apache/hadoop/hbase/ResourceCheckerJUnitRule.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMasterStatusServlet.java
  AL    src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDescriptorModification.java
  AL    src/test/java/org/apache/hadoop/hbase/master/handler/TestTableDeleteFamilyHandler.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
  AL    src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotFileCache.java
  AL    src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotLogCleaner.java
  AL    src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotManager.java
  AL    src/test/java/org/apache/hadoop/hbase/master/snapshot/TestSnapshotHFileCleaner.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestActiveMasterManager.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestHMasterRPCException.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMasterRestartAfterDisablingTable.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMXBean.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMaster.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestOpenedRegionHandler.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestRestartCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/master/Mocking.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestAssignmentManager.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMasterZKSessionRecovery.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMasterShutdown.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestRollingRestart.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMasterFailover.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestMasterFileSystem.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestDeadServer.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestClockSkewDetection.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
  AL    src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
  AL    src/test/java/org/apache/hadoop/hbase/master/cleaner/TestLogsCleaner.java
  AL    src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileCleaner.java
  AL    src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
  AL    src/test/java/org/apache/hadoop/hbase/master/cleaner/TestCleanerChore.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestDistributedLogSplitting.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestDefaultLoadBalancer.java
  AL    src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java
  AL    src/test/java/org/apache/hadoop/hbase/TestHTableDescriptor.java
  AL    src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestWithChaosMonkey.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestMultiParallel.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestGet.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestAttributes.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestHConnection.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestShell.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestCloneSnapshotFromClient.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestMetaMigrationRemovingHTD.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestMetaScanner.java
 !????? src/test/java/org/apache/hadoop/hbase/client/InstantSchemaChangeTestBase.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestOperation.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestScan.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestResult.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestHTableUtil.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestTimestampsFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestConnectionUtils.java
  AL    src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdmin.java
  AL    src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestHTablePool.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestHCM.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestSnapshotsFromAdmin.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestScannerTimeout.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestPutDotHas.java
  AL    src/test/java/org/apache/hadoop/hbase/client/TestAdmin.java
  AL    src/test/java/org/apache/hadoop/hbase/TestNodeHealthCheckChore.java
  AL    src/test/java/org/apache/hadoop/hbase/TestMultiVersions.java
  AL    src/test/java/org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
  AL    src/test/java/org/apache/hadoop/hbase/MapFilePerformanceEvaluation.java
  AL    src/test/java/org/apache/hadoop/hbase/metrics/TestExponentiallyDecayingSample.java
  AL    src/test/java/org/apache/hadoop/hbase/metrics/TestMetricsMBeanBase.java
  AL    src/test/java/org/apache/hadoop/hbase/metrics/TestExactCounterMetric.java
  AL    src/test/java/org/apache/hadoop/hbase/metrics/TestMetricsHistogram.java
  AL    src/test/java/org/apache/hadoop/hbase/LargeTests.java
  AL    src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
  AL    src/test/java/org/apache/hadoop/hbase/IngestIntegrationTestBase.java
  AL    src/test/java/org/apache/hadoop/hbase/TestInfoServers.java
  AL    src/test/java/org/apache/hadoop/hbase/TestHBaseTestingUtility.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/CreateRandomStoreFile.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestCloseRegionHandler.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/handler/TestOpenRegionHandler.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionWithCoprocessor.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestGetClosestAtOrBefore.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSelection.java
 !????? src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueSkipListSet.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMXBean.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestScanner.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/CheckedArchivingHFileCleaner.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestRpcMetrics.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStore.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestParallelPut.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueScanFixture.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaConfigured.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/metrics/TestSchemaMetrics.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestColumnSeeking.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreScanner.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplitCompressed.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedSequenceFileLogWriter.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLRUDictionary.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestKeyValueCompression.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogFiltering.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollingNoCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplayCompressed.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCompressor.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtilsForTests.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/HLogPerformanceEvaluation.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRollAbort.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogBench.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogMethods.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestQueryMatcher.java
 !????? src/test/java/org/apache/hadoop/hbase/regionserver/TestHBase7051.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/KeyValueScanFixture.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMiniBatchOperationInProgress.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestWideScanner.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestScanDeleteTracker.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestKeyValueHeap.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestRSStatusServlet.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestRSKilledWhenMasterInitializing.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiColumnScanner.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/EncodedSeekPerformanceTest.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWithBloomError.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/OOMERegionServer.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionSplitPolicy.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMultiVersionConsistencyControl.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransaction.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestFSErrorsExposed.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestScanWildcardColumnTracker.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMasterAddressManager.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionBusyWait.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoad.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestKeepDeletes.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestAtomicOperation.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileBlockCacheSummary.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionOnCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestExplicitColumnTracker.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestMinVersions.java
  AL    src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitLogWorker.java
  AL    src/test/java/org/apache/hadoop/hbase/IntegrationTestsDriver.java
  AL    src/test/java/org/apache/hadoop/hbase/SmallTests.java
  AL    src/test/java/org/apache/hadoop/hbase/util/ProcessBasedLocalHBaseCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestByteBloomFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestDefaultEnvironmentEdge.java
  AL    src/test/java/org/apache/hadoop/hbase/util/RestartMetaTest.java
  AL    src/test/java/org/apache/hadoop/hbase/util/LoadTestKVGenerator.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestRootPath.java
  AL    src/test/java/org/apache/hadoop/hbase/util/MultiThreadedReader.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
  AL    src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
  AL    src/test/java/org/apache/hadoop/hbase/util/hbck/HbckTestingUtil.java
  AL    src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildOverlap.java
  AL    src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildHole.java
  AL    src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
  AL    src/test/java/org/apache/hadoop/hbase/util/hbck/TestOfflineMetaRebuildBase.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestThreads.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestIncrementingEnvironmentEdge.java
  AL    src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestPoolMap.java
  AL    src/test/java/org/apache/hadoop/hbase/util/MockServer.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
  AL    src/test/java/org/apache/hadoop/hbase/util/EnvironmentEdgeManagerTestHelper.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckComparator.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadParallel.java
  AL    src/test/java/org/apache/hadoop/hbase/util/StoppableImplementation.java
  AL    src/test/java/org/apache/hadoop/hbase/util/MockRegionServerServices.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestFSTableDescriptors.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
  AL    src/test/java/org/apache/hadoop/hbase/util/ChaosMonkey.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestLoadTestKVGenerator.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsck.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestKeying.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestSizeBasedThrottler.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestBase64.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestFSVisitor.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java
  AL    src/test/java/org/apache/hadoop/hbase/util/TestEnvironmentEdgeManager.java
  AL    src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
  AL    src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
  AL    src/test/java/org/apache/hadoop/hbase/ClassTestFinder.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/MapreduceTestingShim.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableSplit.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestSimpleTotalOrderPartitioner.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TestJarFinder.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TsvImporterCustomTestMapper.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/NMapInputFormat.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
  AL    src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportTsv.java
  AL    src/test/java/org/apache/hadoop/hbase/TestSerialization.java
  AL    src/test/java/org/apache/hadoop/hbase/TestFullLogReconstruction.java
  AL    src/test/java/org/apache/hadoop/hbase/TestCompare.java
  AL    src/test/java/org/apache/hadoop/hbase/KeyValueTestUtil.java
  AL    src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
  AL    src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
  AL    src/test/java/org/apache/hadoop/hbase/errorhandling/TestTimeoutExceptionInjector.java
  AL    src/test/java/org/apache/hadoop/hbase/IntegrationTestDataIngestSlowDeterministic.java
  AL    src/test/java/org/apache/hadoop/hbase/ipc/TestDelayedRpc.java
  AL    src/test/java/org/apache/hadoop/hbase/ipc/TestPBOnWritableRpc.java
  AL    src/test/java/org/apache/hadoop/hbase/ipc/TestProtocolExtension.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestTableResource.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestStatusResource.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteAdmin.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestRowResource.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestTableInfoModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterStatusModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestRowModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestStorageClusterVersionModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestCellSetModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestCellModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestScannerModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestTableListModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/model/TestColumnSchemaModel.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/PerformanceEvaluation.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithFilters.java
  AL    src/test/java/org/apache/hadoop/hbase/rest/HBaseRESTTestingUtility.java
  AL    src/test/java/org/apache/hadoop/hbase/TestCheckTestClasses.java
  AL    src/test/java/org/apache/hadoop/hbase/PerformanceEvaluationCommons.java
  AL    src/test/java/org/apache/hadoop/hbase/MediumTests.java
  AL    src/test/java/org/apache/hadoop/hbase/TestKeyValue.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationProtocol.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithRemove.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterCoprocessorExceptionWithAbort.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestZooKeeperScanPolicyObserver.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestBulkDeleteProtocol.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/ColumnAggregationEndpoint.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorInterface.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestBigDecimalColumnInterpreter.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/GenericEndpoint.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestMasterObserver.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithRemove.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/GenericProtocol.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/SampleRegionWALObserver.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
  AL    src/test/java/org/apache/hadoop/hbase/coprocessor/SimpleRegionObserver.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestColumnCountGetFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java
  AL    src/test/java/org/apache/hadoop/hbase/TestClusterBootOrder.java
 !????? src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestReplicationBase.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/ReplicationSourceDummy.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestReplicationQueueFailover.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSink.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestReplicationQueueFailoverCompressed.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSource.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestMasterReplication.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestMultiSlaveReplication.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestReplicationDisableInactivePeer.java
  AL    src/test/java/org/apache/hadoop/hbase/replication/TestReplicationZookeeper.java
  AL    src/test/java/org/apache/hadoop/hbase/TestGlobalMemStoreSize.java
  AL    src/test/java/org/apache/hadoop/hbase/TestDrainingServer.java
  AL    src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
  AL    src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
  AL    src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
  AL    src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
  AL    src/test/java/org/apache/hadoop/hbase/ClassFinder.java
  AL    src/test/java/org/apache/hadoop/hbase/HBaseCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java
  AL    src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java
  AL    src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java
  AL    src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java
  AL    src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java
  AL    src/test/java/org/apache/hadoop/hbase/TestHServerAddress.java
  AL    src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditorNoCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/catalog/TestCatalogTracker.java
  AL    src/test/java/org/apache/hadoop/hbase/catalog/TestMetaReaderEditor.java
  AL    src/test/java/org/apache/hadoop/hbase/ResourceChecker.java
  AL    src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java
  AL    src/test/java/org/apache/hadoop/hbase/TestRegionRebalancing.java
  AL    src/test/java/org/apache/hadoop/hbase/TestServerName.java
  AL    src/test/java/org/apache/hadoop/hbase/monitoring/TestTaskMonitor.java
  AL    src/test/java/org/apache/hadoop/hbase/monitoring/TestMemoryBoundedLogMessageBuffer.java
  AL    src/test/java/org/apache/hadoop/hbase/HServerLoad092.java
  AL    src/test/java/org/apache/hadoop/hbase/TestHServerInfo.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestHQuorumPeer.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTableReadOnly.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestRecoverableZooKeeper.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperMainServerArg.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKTable.java
  AL    src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperNodeTracker.java
  AL    src/test/java/org/apache/hadoop/hbase/DistributedHBaseCluster.java
  AL    src/test/java/org/apache/hadoop/hbase/TestAcidGuarantees.java
  AL    src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
  AL    src/test/ruby/shell/shell_test.rb
  AL    src/test/ruby/shell/formatter_test.rb
  AL    src/test/ruby/shell/commands_test.rb
  AL    src/test/ruby/tests_runner.rb
  AL    src/test/ruby/test_helper.rb
  AL    src/test/ruby/hbase/table_test.rb
  AL    src/test/ruby/hbase/hbase_test.rb
  AL    src/test/ruby/hbase/admin_test.rb
  AL    src/main/resources/hbase-webapps/thrift/thrift.jsp
  AL    src/main/resources/hbase-webapps/thrift/index.html
  AL    src/main/resources/hbase-webapps/master/master.jsp
  AL    src/main/resources/hbase-webapps/master/table.jsp
  AL    src/main/resources/hbase-webapps/master/tablesDetailed.jsp
  AL    src/main/resources/hbase-webapps/master/zk.jsp
  AL    src/main/resources/hbase-webapps/master/index.html
  B     src/main/resources/hbase-webapps/static/favicon.ico
  AL    src/main/resources/hbase-webapps/static/hbase.css
  B     src/main/resources/hbase-webapps/static/hbase_logo.png
  B     src/main/resources/hbase-webapps/static/hbase_logo_med.gif
  AL    src/main/resources/hbase-webapps/regionserver/regionserver.jsp
  AL    src/main/resources/hbase-webapps/regionserver/index.html
  AL    src/main/resources/hbase-webapps/rest/rest.jsp
  AL    src/main/resources/hbase-webapps/rest/index.html
  AL    src/main/resources/hbase-default.xml
  AL    src/main/resources/org/apache/hadoop/hbase/thrift/Hbase.thrift
  AL    src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift
  AL    src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
  AL    src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/CellSetMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/CellMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/StorageClusterStatusMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ColumnSchemaMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/TableSchemaMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/VersionMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/ScannerMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/TableInfoMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/protobuf/TableListMessage.proto
  AL    src/main/resources/org/apache/hadoop/hbase/rest/XMLSchema.xsd
  AL    src/main/protobuf/hbase.proto
  AL    src/main/protobuf/ErrorHandling.proto
 !????? src/main/avro/hbase.avpr
  AL    src/main/python/hbase/merge_conf.py
  AL    src/main/jamon/org/apache/hadoop/hbase/tmpl/master/AssignmentManagerStatusTmpl.jamon
  AL    src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
  AL    src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
  AL    src/main/jamon/org/apache/hadoop/hbase/tmpl/common/TaskMonitorTmpl.jamon
  AL    src/main/javadoc/overview.html
  AL    src/main/javadoc/org/apache/hadoop/hbase/thrift/doc-files/Hbase.html
  AL    src/main/javadoc/org/apache/hadoop/hbase/thrift/doc-files/style.css
  AL    src/main/javadoc/org/apache/hadoop/hbase/thrift/doc-files/index.html
  AL    src/main/javadoc/org/apache/hadoop/hbase/thrift/package.html
  AL    src/main/javadoc/org/apache/hadoop/hbase/io/hfile/package.html
  AL    src/main/javadoc/org/apache/hadoop/hbase/ipc/package.html
  AL    src/main/javadoc/org/apache/hadoop/hbase/replication/package.html
  AL    src/main/java/org/apache/hadoop/hbase/HealthReport.java
  AL    src/main/java/org/apache/hadoop/hbase/HServerInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
  AL    src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java
  AL    src/main/java/org/apache/hadoop/hbase/constraint/BaseConstraint.java
  AL    src/main/java/org/apache/hadoop/hbase/constraint/Constraints.java
  AL    src/main/java/org/apache/hadoop/hbase/constraint/package-info.java
  AL    src/main/java/org/apache/hadoop/hbase/constraint/ConstraintException.java
  AL    src/main/java/org/apache/hadoop/hbase/migration/HRegionInfo090x.java
  AL    src/main/java/org/apache/hadoop/hbase/HealthChecker.java
  AL    src/main/java/org/apache/hadoop/hbase/security/TokenInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/security/KerberosInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/security/User.java
  AL    src/main/java/org/apache/hadoop/hbase/executor/ExecutorService.java
  AL    src/main/java/org/apache/hadoop/hbase/executor/RegionTransitionData.java
  AL    src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/HBaseIOException.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/ThriftServer.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/ThriftMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift/HbaseHandlerMetricsProxy.java
  AL    src/main/java/org/apache/hadoop/hbase/TableInfoMissingException.java
  AL    src/main/java/org/apache/hadoop/hbase/MasterNotRunningException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/UnknownSnapshotException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshotException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotReferenceUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/TablePartiallyOpenException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/HSnapshotDescription.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/ReferenceRegionHFilesTask.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotTask.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/TableInfoCopyTask.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/ReferenceServerWALsTask.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/TakeSnapshotUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotLogSplitter.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
  AL    src/main/java/org/apache/hadoop/hbase/snapshot/CopyRecoveredEditsTask.java
 !????? src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
 !????? src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java
  AL    src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/Server.java
  AL    src/main/java/org/apache/hadoop/hbase/NotServingRegionException.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
  AL    src/main/java/org/apache/hadoop/hbase/thrift2/package.html
  AL    src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDeleteType.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java
 !????? src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTimeRange.java
  AL    src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
  AL    src/main/java/org/apache/hadoop/hbase/io/DoubleOutputStream.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/CopyKeyDataBlockEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/FastDiffDeltaEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/EncoderBufferTooSmallException.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/PrefixKeyDeltaEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/CompressionState.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/encoding/DataBlockEncoding.java
  AL    src/main/java/org/apache/hadoop/hbase/io/HeapSize.java
  AL    src/main/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java
  AL    src/main/java/org/apache/hadoop/hbase/io/ImmutableBytesWritable.java
  AL    src/main/java/org/apache/hadoop/hbase/io/TimeRange.java
  AL    src/main/java/org/apache/hadoop/hbase/io/Reference.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/DoubleBlockCache.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/InlineBlockWriter.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV2.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/ReusableStreamGzipCodec.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/SimpleBlockCache.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/BlockWithScanInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileWriter.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabItemActionWatcher.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SingleSizeCache.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/slab/Slab.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/slab/SlabCache.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlockQueue.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoderImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/FixedFileTrailer.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/Compression.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV2.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/CachedBlock.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/CorruptHFileException.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/NoOpDataBlockEncoder.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/InvalidHFileException.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/AbstractHFileReader.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterV1.java
  AL    src/main/java/org/apache/hadoop/hbase/io/hfile/BoundedRangeFileInputStream.java
  AL    src/main/java/org/apache/hadoop/hbase/io/DataOutputOutputStream.java
  AL    src/main/java/org/apache/hadoop/hbase/io/FileLink.java
  AL    src/main/java/org/apache/hadoop/hbase/io/HFileLink.java
  AL    src/main/java/org/apache/hadoop/hbase/io/CodeToClassAndBack.java
  AL    src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
  AL    src/main/java/org/apache/hadoop/hbase/io/HLogLink.java
  AL    src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java
  AL    src/main/java/org/apache/hadoop/hbase/InvalidFamilyOperationException.java
  AL    src/main/java/org/apache/hadoop/hbase/avro/package.html
  AL    src/main/java/org/apache/hadoop/hbase/avro/AvroUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/avro/AvroServer.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableMap.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/HRegionPartitioner.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableInputFormatBase.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableReduce.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/RowCounter.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReader.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/IdentityTableReduce.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/Driver.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/package-info.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableMapReduceUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/mapred/TableSplit.java
  AL    src/main/java/org/apache/hadoop/hbase/master/LoadBalancerFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/TableAddFamilyHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/TableModifyFamilyHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/TotesHRegionInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/OpenedRegionHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/EnableTableHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/SplitRegionHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/ClosedRegionHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/CreateTableHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/ModifyTableHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/DisableTableHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/MetaServerShutdownHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/DeleteTableHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/handler/ServerShutdownHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/HMasterCommandLine.java
  AL    src/main/java/org/apache/hadoop/hbase/master/BulkAssigner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/CloneSnapshotHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/EnabledTableSnapshotHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/DisabledTableSnapshotHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotLogCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/TakeSnapshotHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/MasterSnapshotVerifier.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotFileCache.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/RestoreSnapshotHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/MXBean.java
  AL    src/main/java/org/apache/hadoop/hbase/master/HMaster.java
  AL    src/main/java/org/apache/hadoop/hbase/master/RegionPlan.java
  AL    src/main/java/org/apache/hadoop/hbase/master/metrics/MasterMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/master/metrics/MasterStatistics.java
  AL    src/main/java/org/apache/hadoop/hbase/master/MXBeanImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
  AL    src/main/java/org/apache/hadoop/hbase/master/UnAssignCallable.java
  AL    src/main/java/org/apache/hadoop/hbase/master/ActiveMasterManager.java
  AL    src/main/java/org/apache/hadoop/hbase/master/MasterFileSystem.java
  AL    src/main/java/org/apache/hadoop/hbase/master/MasterStatusServlet.java
  AL    src/main/java/org/apache/hadoop/hbase/master/ServerManager.java
  AL    src/main/java/org/apache/hadoop/hbase/master/ServerAndLoad.java
  AL    src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
  AL    src/main/java/org/apache/hadoop/hbase/master/BulkReOpen.java
  AL    src/main/java/org/apache/hadoop/hbase/master/DefaultLoadBalancer.java
  AL    src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
  AL    src/main/java/org/apache/hadoop/hbase/master/AssignCallable.java
  AL    src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/LogCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileLinkCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseLogCleanerDelegate.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/HFileCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveLogCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/BaseHFileCleanerDelegate.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/TimeToLiveHFileCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/master/cleaner/FileCleanerDelegate.java
  AL    src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
  AL    src/main/java/org/apache/hadoop/hbase/master/DeadServer.java
  AL    src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
  AL    src/main/java/org/apache/hadoop/hbase/master/MasterDumpServlet.java
  AL    src/main/java/org/apache/hadoop/hbase/master/CatalogJanitor.java
  AL    src/main/java/org/apache/hadoop/hbase/client/MultiPutResponse.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Get.java
  AL    src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HTableUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Operation.java
  AL    src/main/java/org/apache/hadoop/hbase/client/NoServerForRegionException.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Attributes.java
  AL    src/main/java/org/apache/hadoop/hbase/client/RegionOfflineException.java
  AL    src/main/java/org/apache/hadoop/hbase/client/OperationWithAttributes.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Append.java
  AL    src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
  AL    src/main/java/org/apache/hadoop/hbase/client/RowLock.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HConnection.java
  AL    src/main/java/org/apache/hadoop/hbase/client/metrics/ScanMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java
  AL    src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHRegionInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Increment.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Result.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
  AL    src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
  AL    src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedException.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Row.java
  AL    src/main/java/org/apache/hadoop/hbase/client/ResultScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/client/MultiPut.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HTableInterfaceFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HTableFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHTableDescriptor.java
  AL    src/main/java/org/apache/hadoop/hbase/client/MetaScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java
  AL    src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java
  AL    src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
  AL    src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
  AL    src/main/java/org/apache/hadoop/hbase/client/coprocessor/package-info.java
  AL    src/main/java/org/apache/hadoop/hbase/client/coprocessor/AggregationClient.java
  AL    src/main/java/org/apache/hadoop/hbase/client/coprocessor/LongColumnInterpreter.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Action.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Put.java
  AL    src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationAdmin.java
  AL    src/main/java/org/apache/hadoop/hbase/client/ClientScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/client/MultiAction.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Scan.java
  AL    src/main/java/org/apache/hadoop/hbase/client/ScannerCallable.java
  AL    src/main/java/org/apache/hadoop/hbase/client/ScannerTimeoutException.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HTable.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Mutation.java
  AL    src/main/java/org/apache/hadoop/hbase/client/package-info.java
  AL    src/main/java/org/apache/hadoop/hbase/client/Delete.java
  AL    src/main/java/org/apache/hadoop/hbase/client/UnmodifyableHColumnDescriptor.java
  AL    src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
  AL    src/main/java/org/apache/hadoop/hbase/client/AbstractClientScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
  AL    src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
  AL    src/main/java/org/apache/hadoop/hbase/HConstants.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/MetricsRate.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/histogram/MetricsHistogram.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/PersistentMetricsTimeVaryingRate.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/MetricsMBeanBase.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/HBaseInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/file/TimeStampingFileContext.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/ExactCounterMetric.java
  AL    src/main/java/org/apache/hadoop/hbase/metrics/MetricsString.java
  AL    src/main/java/org/apache/hadoop/hbase/HRegionLocation.java
  AL    src/main/java/org/apache/hadoop/hbase/VersionAnnotation.java
  AL    src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/HServerAddress.java
  AL    src/main/java/org/apache/hadoop/hbase/TableNotFoundException.java
  AL    src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MetaLogRoller.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ColumnTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRegionHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenMetaHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseRootHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/handler/CloseMetaHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRootHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/handler/OpenRegionHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionAlreadyInTransitionException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RSDumpServlet.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/RegionServerSnapshotManager.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/snapshot/FlushSnapshotSubprocedure.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MXBean.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/LruHashMap.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/InternalScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MultiVersionConsistencyControl.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/SplitRequest.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/NonLazyKeyValueScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionOpeningState.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/FlushRequester.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ScanType.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ScanQueryMatcher.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionMetricsStorage.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaConfigured.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/SchemaMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerDynamicStatistics.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/OperationMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/metrics/RegionServerStatistics.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/SplitTransaction.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/DebugPrint.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ExplicitColumnTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/LeaseException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MXBeanImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ColumnCount.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/OperationStatus.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/Leases.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerRunningException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/LastSequenceId.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/FailedLogCloseException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/CompressionContext.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/OrphanHLogAfterSplitException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogPrettyPrinter.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCoprocessorHost.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/LRUDictionary.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/Dictionary.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALActionsListener.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/DeleteTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerServices.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/KeyPrefixRegionSplitPolicy.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSinkService.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/WrongRegionException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/NoSuchColumnFamilyException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionSplitPolicy.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ScanWildcardColumnTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/LogRoller.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/Compactor.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/LeaseListener.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/Store.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/DelimitedKeyPrefixRegionSplitPolicy.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/InternalScan.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RSStatusServlet.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/GetClosestRowBeforeTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/IncreasingToUpperBoundRegionSplitPolicy.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/HRegionThriftServer.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerStoppedException.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/CompactionRequestor.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ScanDeleteTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlusher.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/CompactionTool.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerAccounting.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/MiniBatchOperationInProgress.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationSourceService.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueSkipListSet.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ReplicationService.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionRequest.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactSelection.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/compactions/CompactionProgress.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/ChangedReadersObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/regionserver/OnlineRegions.java
  AL    src/main/java/org/apache/hadoop/hbase/YouAreDeadException.java
  AL    src/main/java/org/apache/hadoop/hbase/DroppedSnapshotException.java
  AL    src/main/java/org/apache/hadoop/hbase/MasterAddressTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/HBaseConfiguration.java
  AL    src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
  AL    src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterBase.java
  AL    src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ByteBloomFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Addressing.java
  AL    src/main/java/org/apache/hadoop/hbase/util/MD5Hash.java
  AL    src/main/java/org/apache/hadoop/hbase/util/RegionSplitCalculator.java
  AL    src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/util/AbstractHBaseTool.java
  AL    src/main/java/org/apache/hadoop/hbase/util/HBaseConfTool.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Methods.java
  AL    src/main/java/org/apache/hadoop/hbase/util/HasThread.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Threads.java
  AL    src/main/java/org/apache/hadoop/hbase/util/FSHDFSUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/util/InfoServer.java
  AL    src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterWriter.java
  AL    src/main/java/org/apache/hadoop/hbase/util/HashedBytes.java
  AL    src/main/java/org/apache/hadoop/hbase/util/MurmurHash.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Merge.java
  AL    src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/util/JenkinsHash.java
  AL    src/main/java/org/apache/hadoop/hbase/util/FileSystemVersionException.java
  AL    src/main/java/org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ChecksumFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRepair.java
  AL    src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/util/hbck/HFileCorruptionChecker.java
  AL    src/main/java/org/apache/hadoop/hbase/util/hbck/TableIntegrityErrorHandlerImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/util/IdLock.java
  AL    src/main/java/org/apache/hadoop/hbase/util/BloomFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/util/JVMClusterUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/util/RetryCounterFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/util/EnvironmentEdgeManager.java
  AL    src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
  AL    src/main/java/org/apache/hadoop/hbase/util/MetaUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/util/EnvironmentEdge.java
  AL    src/main/java/org/apache/hadoop/hbase/util/FSVisitor.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Keying.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Bytes.java
  AL    src/main/java/org/apache/hadoop/hbase/util/VersionInfo.java
  AL    src/main/java/org/apache/hadoop/hbase/util/KeyRange.java
  AL    src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ClassSize.java
  AL    src/main/java/org/apache/hadoop/hbase/util/HMerge.java
  AL    src/main/java/org/apache/hadoop/hbase/util/CancelableProgressable.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Base64.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Writables.java
  AL    src/main/java/org/apache/hadoop/hbase/util/PairOfSameType.java
  AL    src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/util/DefaultEnvironmentEdge.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ByteBufferOutputStream.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ByteBufferUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Hash.java
  AL    src/main/java/org/apache/hadoop/hbase/util/BloomFilterWriter.java
  AL    src/main/java/org/apache/hadoop/hbase/util/CollectionBackedScanner.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Classes.java
  AL    src/main/java/org/apache/hadoop/hbase/util/SizeBasedThrottler.java
  AL    src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
  AL    src/main/java/org/apache/hadoop/hbase/util/HBaseFsckRepair.java
  AL    src/main/java/org/apache/hadoop/hbase/util/RetryCounter.java
  AL    src/main/java/org/apache/hadoop/hbase/util/JvmVersion.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Sleeper.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ManualEnvironmentEdge.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ChecksumType.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Objects.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Pair.java
  AL    src/main/java/org/apache/hadoop/hbase/util/SoftValueSortedMap.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ModifyRegionUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
  AL    src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
  AL    src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
  AL    src/main/java/org/apache/hadoop/hbase/util/GetJavaProperty.java
  AL    src/main/java/org/apache/hadoop/hbase/util/Strings.java
  AL    src/main/java/org/apache/hadoop/hbase/util/SortedCopyOnWriteSet.java
  AL    src/main/java/org/apache/hadoop/hbase/util/BloomFilterBase.java
  AL    src/main/java/org/apache/hadoop/hbase/util/ShutdownHookManager.java
  AL    src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
  AL    src/main/java/org/apache/hadoop/hbase/PleaseHoldException.java
  AL    src/main/java/org/apache/hadoop/hbase/ServerName.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/KeyValueSortReducer.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/SimpleTotalOrderPartitioner.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableMapper.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/ImportTsv.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/Export.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/HLogInputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapper.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputCommitter.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableOutputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableReducer.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/JarFinder.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/TotalOrderPartitioner.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/hadoopbackport/InputSampler.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/GroupingTableMapper.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReaderImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TsvImporterMapper.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/CopyTable.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/Driver.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/package-info.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/IdentityTableReducer.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/WALPlayer.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableMapReduceUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/TableSplit.java
  AL    src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
  AL    src/main/java/org/apache/hadoop/hbase/tool/Canary.java
  AL    src/main/java/org/apache/hadoop/hbase/UnknownRegionException.java
  AL    src/main/java/org/apache/hadoop/hbase/LocalHBaseCluster.java
  AL    src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutException.java
  AL    src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignException.java
  AL    src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
  AL    src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionListener.java
  AL    src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionSnare.java
  AL    src/main/java/org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
  AL    src/main/java/org/apache/hadoop/hbase/TableExistsException.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/RpcCallContext.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/Invocation.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/VersionedProtocol.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCStatistics.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/CallerDisconnectedException.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/Status.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/ServerNotRunningYetException.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HRegionInterface.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/ProtocolSignature.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/ResponseFlag.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPC.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/ConnectionHeader.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/WritableRpcEngine.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HMasterRegionInterface.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/MasterExecRPCInvoker.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/RequestContext.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/HMasterInterface.java
  AL    src/main/java/org/apache/hadoop/hbase/ipc/RpcEngine.java
  AL    src/main/java/org/apache/hadoop/hbase/DaemonThreadFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/SchemaResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/ExistsResource.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
 !????? src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/client/Cluster.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/client/RemoteAdmin.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/StorageClusterStatusResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/metrics/RESTStatistics.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/metrics/RESTMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/ColumnSchemaModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/TableInfoModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/TableRegionModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/VersionModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/TableListModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/TableModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/CellModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/model/RowModel.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/ProtobufMessageHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/Main.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/package.html
  AL    src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/provider/producer/PlainTextMessageBodyProducer.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/provider/producer/ProtobufMessageBodyProducer.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/provider/JAXBContextResolver.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/provider/consumer/ProtobufMessageBodyConsumer.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestWrapper.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/RESTServlet.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/StorageClusterVersionResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/Constants.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/VersionResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
  AL    src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java
  AL    src/main/java/org/apache/hadoop/hbase/BaseConfigurable.java
  AL    src/main/java/org/apache/hadoop/hbase/RegionException.java
  AL    src/main/java/org/apache/hadoop/hbase/TableNotDisabledException.java
  AL    src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
  AL    src/main/java/org/apache/hadoop/hbase/HDFSBlocksDistribution.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorClassLoader.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/SecureBulkLoadClient.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteProtocol.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteResponse.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/WALObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorException.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerCoprocessorEnvironment.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/MultiRowMutationProtocol.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/ObserverContext.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateProtocol.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/package-info.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
  AL    src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/WritableByteArrayComparable.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/FilterBase.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/IncompatibleFilterException.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/ParseConstants.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/Filter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/package-info.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
  AL    src/main/java/org/apache/hadoop/hbase/filter/InvalidRowFilterException.java
  AL    src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
  AL    src/main/java/org/apache/hadoop/hbase/Chore.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/ReplicationZookeeper.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/master/ReplicationLogCleaner.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSink.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationStatistics.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationHLogReaderManager.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceInterface.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSinkMetrics.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSourceManager.java
  AL    src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
  AL    src/main/java/org/apache/hadoop/hbase/Stoppable.java
  AL    src/main/java/org/apache/hadoop/hbase/UnknownScannerException.java
  AL    src/main/java/org/apache/hadoop/hbase/backup/HFileArchiver.java
  AL    src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
  AL    src/main/java/org/apache/hadoop/hbase/Coprocessor.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/Subprocedure.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/SubprocedureFactory.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMember.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinatorRpcs.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/Procedure.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/ProcedureMemberRpcs.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
  AL    src/main/java/org/apache/hadoop/hbase/procedure/ZKProcedureCoordinatorRpcs.java
  AL    src/main/java/org/apache/hadoop/hbase/Abortable.java
  AL    src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
  AL    src/main/java/org/apache/hadoop/hbase/HServerLoadWithSeqIds.java
  AL    src/main/java/org/apache/hadoop/hbase/catalog/CatalogTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/catalog/MetaMigrationRemovingHTD.java
  AL    src/main/java/org/apache/hadoop/hbase/catalog/MetaReader.java
  AL    src/main/java/org/apache/hadoop/hbase/catalog/RootLocationEditor.java
  AL    src/main/java/org/apache/hadoop/hbase/catalog/MetaEditor.java
  AL    src/main/java/org/apache/hadoop/hbase/HealthCheckChore.java
  AL    src/main/java/org/apache/hadoop/hbase/UnknownRowLockException.java
  AL    src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
  AL    src/main/java/org/apache/hadoop/hbase/HServerLoad.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/LogMonitoring.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/ThreadMonitoring.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/MonitoredTask.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/StateDumpServlet.java
  AL    src/main/java/org/apache/hadoop/hbase/monitoring/TaskMonitor.java
  AL    src/main/java/org/apache/hadoop/hbase/DoNotRetryIOException.java
  AL    src/main/java/org/apache/hadoop/hbase/ZooKeeperConnectionException.java
  AL    src/main/java/org/apache/hadoop/hbase/ClockOutOfSyncException.java
  AL    src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
  AL    src/main/java/org/apache/hadoop/hbase/RemoteExceptionHandler.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKAssign.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKServerTool.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKConfig.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperListener.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/DrainingServerTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/RegionServerTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/RootRegionTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTableReadOnly.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/MiniZooKeeperCluster.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/MetaNodeTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterStatusTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/HQuorumPeer.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServerArg.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKLeaderManager.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKSplitLog.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKUtil.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/RecoverableZooKeeper.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperNodeTracker.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ZKTable.java
  AL    src/main/java/org/apache/hadoop/hbase/zookeeper/ClusterId.java
  AL    src/main/java/org/apache/hadoop/hbase/TableNotEnabledException.java
  AL    src/main/java/org/apache/hadoop/hbase/EmptyWatcher.java
  AL    src/main/java/org/apache/hadoop/hbase/KeyValue.java
  AL    src/main/ruby/shell/formatter.rb
  AL    src/main/ruby/shell/commands.rb
  AL    src/main/ruby/shell/commands/list_peers.rb
  AL    src/main/ruby/shell/commands/disable_peer.rb
  AL    src/main/ruby/shell/commands/put.rb
  AL    src/main/ruby/shell/commands/user_permission.rb
  AL    src/main/ruby/shell/commands/disable.rb
  AL    src/main/ruby/shell/commands/add_peer.rb
  AL    src/main/ruby/shell/commands/major_compact.rb
  AL    src/main/ruby/shell/commands/show_filters.rb
  AL    src/main/ruby/shell/commands/whoami.rb
  AL    src/main/ruby/shell/commands/create.rb
  AL    src/main/ruby/shell/commands/incr.rb
  AL    src/main/ruby/shell/commands/start_replication.rb
  AL    src/main/ruby/shell/commands/snapshot.rb
  AL    src/main/ruby/shell/commands/grant.rb
  AL    src/main/ruby/shell/commands/get_counter.rb
  AL    src/main/ruby/shell/commands/split.rb
  AL    src/main/ruby/shell/commands/count.rb
  AL    src/main/ruby/shell/commands/unassign.rb
  AL    src/main/ruby/shell/commands/delete.rb
  AL    src/main/ruby/shell/commands/deleteall.rb
  AL    src/main/ruby/shell/commands/is_disabled.rb
  AL    src/main/ruby/shell/commands/truncate.rb
  AL    src/main/ruby/shell/commands/restore_snapshot.rb
  AL    src/main/ruby/shell/commands/hlog_roll.rb
  AL    src/main/ruby/shell/commands/drop_all.rb
  AL    src/main/ruby/shell/commands/stop_replication.rb
  AL    src/main/ruby/shell/commands/drop.rb
  AL    src/main/ruby/shell/commands/enable.rb
  AL    src/main/ruby/shell/commands/close_region.rb
  AL    src/main/ruby/shell/commands/revoke.rb
  AL    src/main/ruby/shell/commands/describe.rb
  AL    src/main/ruby/shell/commands/compact.rb
  AL    src/main/ruby/shell/commands/clone_snapshot.rb
  AL    src/main/ruby/shell/commands/disable_all.rb
  AL    src/main/ruby/shell/commands/is_enabled.rb
  AL    src/main/ruby/shell/commands/move.rb
  AL    src/main/ruby/shell/commands/remove_peer.rb
  AL    src/main/ruby/shell/commands/delete_snapshot.rb
  AL    src/main/ruby/shell/commands/enable_all.rb
  AL    src/main/ruby/shell/commands/alter.rb
  AL    src/main/ruby/shell/commands/version.rb
  AL    src/main/ruby/shell/commands/alter_status.rb
  AL    src/main/ruby/shell/commands/list_snapshots.rb
  AL    src/main/ruby/shell/commands/list.rb
  AL    src/main/ruby/shell/commands/enable_peer.rb
  AL    src/main/ruby/shell/commands/assign.rb
  AL    src/main/ruby/shell/commands/balance_switch.rb
  AL    src/main/ruby/shell/commands/get.rb
  AL    src/main/ruby/shell/commands/exists.rb
  AL    src/main/ruby/shell/commands/balancer.rb
  AL    src/main/ruby/shell/commands/status.rb
  AL    src/main/ruby/shell/commands/scan.rb
  AL    src/main/ruby/shell/commands/zk_dump.rb
  AL    src/main/ruby/shell/commands/alter_async.rb
  AL    src/main/ruby/shell/commands/flush.rb
  AL    src/main/ruby/irb/hirb.rb
  AL    src/main/ruby/shell.rb
  AL    src/main/ruby/hbase.rb
  AL    src/main/ruby/hbase/admin.rb
  AL    src/main/ruby/hbase/security.rb
  AL    src/main/ruby/hbase/replication_admin.rb
  AL    src/main/ruby/hbase/table.rb
  AL    src/main/ruby/hbase/hbase.rb
  AL    src/main/xslt/configuration_to_docbook_section.xsl
  AL    src/assembly/all.xml
  AL    src/saveVersion.sh
 
 *****************************************************
 Printing headers for files without AL header...
 
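 Note: RAT reports a file under "Unknown Licenses" when no recognizable
 license header appears near the top of the file. For a source file, a
 standard ASF header along the following lines is what clears a file from
 this list (shown here as a Java comment; RAT matches on the text, so the
 comment syntax varies by file type):

 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
  * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
  *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */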
 
 =======================================================================
 ==.git/config
 =======================================================================
[core]
	repositoryformatversion = 0
	filemode = true
	bare = false
	logallrefupdates = true
[remote "origin"]
	fetch = +refs/heads/*:refs/remotes/origin/*
	url = git@github.com:hortonworks/hbase.git
[branch "dal"]
	remote = origin
	merge = refs/heads/dal
[branch "1.3-linux-maint"]
	remote = origin
	merge = refs/heads/1.3-linux-maint

 =======================================================================
 ==.git/logs/HEAD
 =======================================================================
0000000000000000000000000000000000000000 6098e677cb74d9069af765c5a1a658c02fcee62e Jenkins <jenkins@hortonworks.com> 1423779729 -0500	clone: from git@github.com:hortonworks/hbase.git
6098e677cb74d9069af765c5a1a658c02fcee62e aa47b8e8ce2ab46ba817d83e8d98e5b8480e2830 Jenkins <jenkins@hortonworks.com> 1423779730 -0500	checkout: moving from dal to 1.3-linux-maint
aa47b8e8ce2ab46ba817d83e8d98e5b8480e2830 aa47b8e8ce2ab46ba817d83e8d98e5b8480e2830 Jenkins <jenkins@hortonworks.com> 1423779730 -0500	checkout: moving from 1.3-linux-maint to aa47b8e8ce2ab46ba817d83e8d98e5b8480e2830

 =======================================================================
 ==.git/logs/refs/heads/1.3-linux-maint
 =======================================================================
0000000000000000000000000000000000000000 aa47b8e8ce2ab46ba817d83e8d98e5b8480e2830 Jenkins <jenkins@hortonworks.com> 1423779730 -0500	branch: Created from refs/remotes/origin/1.3-linux-maint

 =======================================================================
 ==.git/logs/refs/heads/dal
 =======================================================================
0000000000000000000000000000000000000000 6098e677cb74d9069af765c5a1a658c02fcee62e Jenkins <jenkins@hortonworks.com> 1423779729 -0500	clone: from git@github.com:hortonworks/hbase.git

 =======================================================================
 ==.git/description
 =======================================================================
Unnamed repository; edit this file 'description' to name the repository.

 =======================================================================
 ==.git/info/exclude
 =======================================================================
# git ls-files --others --exclude-from=.git/info/exclude
# Lines that start with '#' are comments.
# For a project mostly in C, the following would be a good set of
# exclude patterns (uncomment them if you want to use them):
# *.[oa]
# *~

 =======================================================================
 ==.git/packed-refs
 =======================================================================
# pack-refs with: peeled 
1f7209ef57923261e4b815781db743a6a8165601 refs/tags/hdp-hbase-0.92-PREVIEW-4-HDP-1.0.13-PREVIEW-5-BUILD-12
7e0a478cf48eb45963cfd8bb80759d7d92b4d10b refs/tags/hbase-0.94.2-bigwheel
f6150c10e7e21793856bd22b308790e3726136fd refs/tags/hbase-0.92.15
35906b90ae656ceffd1e352da86d32e9bd4a06e0 refs/tags/bimota-1.2.0.21
^4e053e7966ef40cbc73a2508766c3448fac7edb1
d2ce01ff1442e2022c012bbd4cfbd0ee0e6e58aa refs/tags/bigwheel-alpha2-2.0.0.2.22
3b31f09996386b79b1c9acdafe34c0d1d600416d refs/tags/bigwheel-GA
bd0351e69c7001c9ab32866928ff3d1b75861ede refs/tags/bianchi-0.94.5.23
18e3e58ae6ca5ef5e9c60e3129a1089a8656f91d refs/tags/HDP-2.2.0.0
10cd2a45fee3184d1c32663488b40b7a5391bd81 refs/tags/HDP-2.1.7.1
688a8413b39e16446d383d0bc6dc89902c67a9c0 refs/tags/HDP-2.1.7.0
aed8a99b63d587ea7b43e6bfc99a020fe0f76852 refs/tags/HDP-2.1.5.0
060e67deb5c42821d78cc23940341f6dd394d43e refs/tags/HDP-2.1.4.0
d8cab907e9d10c5d01936174c11d894ba7a9f8c6 refs/tags/HDP-2.1.3.6
6134e072f6b176c569ef4c64eebfdc633f09f5c0 refs/tags/HDP-2.1.3.0
e7b81b02bc8ba00573e009241b8e15848ae8bfb7 refs/tags/HDP-2.1.2.4
ae36d22ceb7144c86ed6ba4693de94e6fdc597ba refs/tags/HDP-2.1.2.2
e7b81b02bc8ba00573e009241b8e15848ae8bfb7 refs/tags/HDP-2.1.2.0
f2961bb178325265977559a43a3c3f4b133ee7b2 refs/tags/HDP-2.1.10.0
e7b81b02bc8ba00573e009241b8e15848ae8bfb7 refs/tags/HDP-2.1.1.0
cf3f71e5014c66e85c10a244fa9a1e3c43cef077 refs/tags/HDP-2.0.6.1-102
cf3f71e5014c66e85c10a244fa9a1e3c43cef077 refs/tags/HDP-2.0.6.1-101
e6d7a56f72914d01e55c0478d74e5cfd3778f231 refs/tags/HDP-2.0.6.0-76
e6d7a56f72914d01e55c0478d74e5cfd3778f231 refs/tags/HDP-2.0.6.0-72
7f78b0ea2c9db0133387f0719a8b59366486691d refs/tags/HDP-2.0.5.0
1a62b76bde7776a433ee069ab47ee6270f8992d4 refs/tags/HDP-2.0.4.0
3b31f09996386b79b1c9acdafe34c0d1d600416d refs/tags/HDP-2.0.13.0
95f8cdce429f51d421a63c8f420c71c1033af78d refs/tags/HDP-2.0.12.0
cf3f71e5014c66e85c10a244fa9a1e3c43cef077 refs/tags/HDP-2.0.11.0
cf3f71e5014c66e85c10a244fa9a1e3c43cef077 refs/tags/HDP-2.0.10.0
3e6b66a3c1ef283793906de4ded8bbb0e1b1564b refs/tags/HDP-1.3.8.0
cdbb486a3ded28bfa680612a84ea481c4b4c970a refs/tags/HDP-1.3.7.0
1547c2f5b7b9edceab84ff8692164f3f94603bc7 refs/tags/HDP-1.3.3.4
1547c2f5b7b9edceab84ff8692164f3f94603bc7 refs/tags/HDP-1.3.3.2
1547c2f5b7b9edceab84ff8692164f3f94603bc7 refs/tags/HDP-1.3.3.1
1547c2f5b7b9edceab84ff8692164f3f94603bc7 refs/tags/HDP-1.3.3.0
410a7a1c151ca953553eae68aa84e2a9f0d6e4ca refs/tags/HDP-1.3.2.0-115
410a7a1c151ca953553eae68aa84e2a9f0d6e4ca refs/tags/HDP-1.3.2.0-111
410a7a1c151ca953553eae68aa84e2a9f0d6e4ca refs/tags/HDP-1.3.2.0-110
e448451cd1de3dbf0d98eb3ebb2ad7494a50ee9a refs/tags/HDP-1.3.0.0-96
a820d5f5bd42904819a3839e75896cae7cfbd5bc refs/tags/HDP-1.3.0.0-62
1a62b76bde7776a433ee069ab47ee6270f8992d4 refs/tags/HDP-1.3.0.0-107
45a198462366551b95f822930582d83307938503 refs/remotes/origin/stinger3-beta-0.96.0
e7b81b02bc8ba00573e009241b8e15848ae8bfb7 refs/remotes/origin/rolling-upgrade-poc
1cf4879c287f79b16307d3d376d1097116cd3abb refs/remotes/origin/rolling-upgrade
c4d7073318028c6aedfc4e3a2b38bdbf85123ac2 refs/remotes/origin/repclient
18e3e58ae6ca5ef5e9c60e3129a1089a8656f91d refs/remotes/origin/monarch-ref/champlain
41d850b30a14fddd86fafec1db24547ce4d49601 refs/remotes/origin/monarch-ref/2.2-maint
2a47d78a0ca5d43576ad868c5c28e2d7758146e0 refs/remotes/origin/monarch-ref/2.1-maint

 =======================================================================
 ==.git/HEAD
 =======================================================================
aa47b8e8ce2ab46ba817d83e8d98e5b8480e2830

 =======================================================================
 ==.git/refs/heads/1.3-linux-maint
 =======================================================================
aa47b8e8ce2ab46ba817d83e8d98e5b8480e2830

 =======================================================================
 ==.git/refs/heads/dal
 =======================================================================
6098e677cb74d9069af765c5a1a658c02fcee62e

 =======================================================================
 ==.git/refs/remotes/origin/HEAD
 =======================================================================
ref: refs/remotes/origin/dal

 =======================================================================
 ==.git/hooks/pre-rebase.sample
 =======================================================================
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.

publish=next
basebranch="$1"
if test "$#" = 2
then
	topic="refs/heads/$2"
else
	topic=`git symbolic-ref HEAD` ||
	exit 0 ;# we do not interrupt rebasing detached HEAD
fi

case "$topic" in
refs/heads/??/*)
	;;
*)
	exit 0 ;# we do not interrupt others.
	;;
esac

# Now we are dealing with a topic branch being rebased
# on top of master.  Is it OK to rebase it?

# Does the topic really exist?
git show-ref -q "$topic" || {
	echo >&2 "No such branch $topic"
	exit 1
}

# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
	echo >&2 "$topic is fully merged to master; better remove it."
	exit 1 ;# we could allow it, but there is no point.

 =======================================================================
 ==.git/hooks/post-update.sample
 =======================================================================
#!/bin/sh
#
# An example hook script to prepare a packed repository for use over
# dumb transports.
#
# To enable this hook, rename this file to "post-update".

exec git update-server-info

 =======================================================================
 ==.git/hooks/post-receive.sample
 =======================================================================
#!/bin/sh
#
# An example hook script for the "post-receive" event.
#
# The "post-receive" script is run after receive-pack has accepted a pack
# and the repository has been updated.  It is passed arguments in through
# stdin in the form
#  <oldrev> <newrev> <refname>
# For example:
#  aa453216d1b3e49e7f6f98441fa56946ddcd6a20 68f7abf4e6f922807889f52bc043ecd31b79f814 refs/heads/master
#
# see contrib/hooks/ for a sample, or uncomment the next line and
# rename the file to "post-receive".

#. /usr/share/git-core/contrib/hooks/post-receive-email

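 (Each hook sample in this directory is inert until renamed as its comments
 describe; for example, enabling this one amounts to:

   mv .git/hooks/post-receive.sample .git/hooks/post-receive

 after which receive-pack runs it on every accepted push.)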
 =======================================================================
 ==.git/hooks/prepare-commit-msg.sample
 =======================================================================
#!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source.  The hook's purpose is to edit the commit
# message file.  If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".

# This hook includes three examples.  The first comments out the
# "Conflicts:" part of a merge commit.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output.  It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited.  This is rarely a good idea.

case "$2,$3" in
  merge,)
    /usr/bin/perl -i.bak -ne 's/^/# /, s/^# #/#/ if /^Conflicts/ .. /#/; print' "$1" ;;

# ,|template,)
#   /usr/bin/perl -i.bak -pe '
#      print "\n" . `git diff --cached --name-status -r`
#	 if /^#/ && $first++ == 0' "$1" ;;

  *) ;;
esac

# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"

 =======================================================================
 ==.git/hooks/commit-msg.sample
 =======================================================================
#!/bin/sh
#
# An example hook script to check the commit log message.
# Called by "git commit" with one argument, the name of the file
# that has the commit message.  The hook should exit with non-zero
# status after issuing an appropriate message if it wants to stop the
# commit.  The hook is allowed to edit the commit message file.
#
# To enable this hook, rename this file to "commit-msg".

# Uncomment the below to add a Signed-off-by line to the message.
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
# hook is more suited to it.
#
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"

# This example catches duplicate Signed-off-by lines.

test "" = "$(grep '^Signed-off-by: ' "$1" |
	 sort | uniq -c | sed -e '/^[ 	]*1[ 	]/d')" || {
	echo >&2 Duplicate Signed-off-by lines.
	exit 1
}

 =======================================================================
 ==.git/hooks/pre-commit.sample
 =======================================================================
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git commit" with no arguments.  The hook should
# exit with non-zero status after issuing an appropriate message if
# it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-commit".

if git rev-parse --verify HEAD >/dev/null 2>&1
then
	against=HEAD
else
	# Initial commit: diff against an empty tree object
	against=4b825dc642cb6eb9a060e54bf8d69288fbee4904
fi

# If you want to allow non-ascii filenames set this variable to true.
allownonascii=$(git config hooks.allownonascii)

# Cross platform projects tend to avoid non-ascii filenames; prevent
# them from being added to the repository. We exploit the fact that the
# printable range starts at the space character and ends with tilde.
if [ "$allownonascii" != "true" ] &&
	# Note that the use of brackets around a tr range is ok here (it's
	# even required, for portability to Solaris 10's /usr/bin/tr), since
	# the square bracket bytes happen to fall in the designated range.
	test "$(git diff --cached --name-only --diff-filter=A -z $against |
	  LC_ALL=C tr -d '[ -~]\0')"
then
	echo "Error: Attempt to add a non-ascii file name."
	echo
	echo "This can cause problems if you want to work"
	echo "with people on other platforms."
	echo
	echo "To be portable it is advisable to rename the file ..."
	echo
	echo "If you know what you are doing you can disable this"
	echo "check using:"
	echo
	echo "  git config hooks.allownonascii true"
	echo
	exit 1
fi

exec git diff-index --check --cached $against --

 =======================================================================
 ==.git/hooks/applypatch-msg.sample
 =======================================================================
#!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit.  The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".

. git-sh-setup
test -x "$GIT_DIR/hooks/commit-msg" &&
	exec "$GIT_DIR/hooks/commit-msg" ${1+"$@"}
:

 =======================================================================
 ==.git/hooks/update.sample
 =======================================================================
#!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
#   This boolean sets whether unannotated tags will be allowed into the
#   repository.  By default they won't be.
# hooks.allowdeletetag
#   This boolean sets whether deleting tags will be allowed in the
#   repository.  By default they won't be.
# hooks.allowmodifytag
#   This boolean sets whether a tag may be modified after creation. By default
#   it won't be.
# hooks.allowdeletebranch
#   This boolean sets whether deleting branches will be allowed in the
#   repository.  By default they won't be.
# hooks.denycreatebranch
#   This boolean sets whether remotely creating branches will be denied
#   in the repository.  By default this is allowed.
#

# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"

# --- Safety check
if [ -z "$GIT_DIR" ]; then
	echo "Don't run this script from the command line." >&2
	echo " (if you want, you could supply GIT_DIR then run" >&2
	echo "  $0 <ref> <oldrev> <newrev>)" >&2
	exit 1
fi

if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
	echo "Usage: $0 <ref> <oldrev> <newrev>" >&2
	exit 1
fi

# --- Config
allowunannotated=$(git config --bool hooks.allowunannotated)
allowdeletebranch=$(git config --bool hooks.allowdeletebranch)
denycreatebranch=$(git config --bool hooks.denycreatebranch)
allowdeletetag=$(git config --bool hooks.allowdeletetag)
allowmodifytag=$(git config --bool hooks.allowmodifytag)

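 (The hooks.* values read above are ordinary git config keys; assuming a
 repository where this hook is enabled, allowing unannotated tags through
 would look like:

   git config hooks.allowunannotated true

 with the other booleans settable the same way.)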
 =======================================================================
 ==.git/hooks/post-commit.sample
 =======================================================================
#!/bin/sh
#
# An example hook script that is called after a successful
# commit is made.
#
# To enable this hook, rename this file to "post-commit".

: Nothing

 =======================================================================
 ==.git/hooks/pre-applypatch.sample
 =======================================================================
#!/bin/sh
#
# An example hook script to verify what is about to be committed
# by applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-applypatch".

. git-sh-setup
test -x "$GIT_DIR/hooks/pre-commit" &&
	exec "$GIT_DIR/hooks/pre-commit" ${1+"$@"}
:

 =======================================================================
 ==.gitignore
 =======================================================================
/.arc_jira_lib
/.classpath
/.externalToolBuilders
/.project
/.settings
/build
/.idea/
/logs
/target
*.iml
*.orig
*~

 =======================================================================
 ==HDP-CHANGES.txt
 =======================================================================
condorM30-0.94.6 is branched from hortonworks comanche-0.94.6 SHA1 855cf8f60d3595ea8a3ab89cb3e8f9362f07f1f4 @ git@github.com:hortonworks/hbase.git

BUG FIXES
  HBASE-8760 Possible loss of data in snapshot taken after region split
  HBASE-9303 Snapshot restore of table which splits after snapshot was taken encounters 'Region is not online'
  HBASE-9906 Restore snapshot fails to restore the meta edits sporadically

 =======================================================================
 ==CHANGES.txt
 =======================================================================
HBase Change Log

Release 0.94.6 - 3/14/2013
Sub-task

    [HBASE-7944] - Replication leaks file reader resource & not reset currentNbOperations

Bug

    [HBASE-6132] - ColumnCountGetFilter & PageFilter not working with FilterList
    [HBASE-6347] - -ROOT- and .META. are stale in table.jsp if they moved
    [HBASE-6748] - Endless recursive of deleteNode happened in SplitLogManager#DeleteAsyncCallback
    [HBASE-7111] - hbase zkcli will not start if the zookeeper server chosen to connect to is unavailable
    [HBASE-7153] - print gc option in hbase-env.sh affects hbase zkcli
    [HBASE-7507] - Make memstore flush be able to retry after exception
    [HBASE-7521] - fix HBASE-6060 (regions stuck in opening state) in 0.94
    [HBASE-7624] - Backport HBASE-5359 and HBASE-7596 to 0.94
    [HBASE-7671] - Flushing memstore again after last failure could cause data loss
    [HBASE-7700] - TestColumnSeeking is mathematically bound to fail
    [HBASE-7723] - Remove NameNode URI from ZK splitlogs
    [HBASE-7725] - Add ability to create custom compaction request
    [HBASE-7761] - MemStore.USEMSLAB_DEFAULT is false, hbase-default.xml says it's true
    [HBASE-7763] - Compactions not sorting based on size anymore.
    [HBASE-7768] - zkcluster in local mode not seeing configurations in hbase-{site|default}.xml
    [HBASE-7777] - HBCK check for lingering split parents should check for child regions
    [HBASE-7813] - Bug in BulkDeleteEndpoint kills entire rows on COLUMN/VERSION Deletes
    [HBASE-7814] - Port HBASE-6963 'unable to run hbck on a secure cluster' to 0.94
    [HBASE-7829] - zookeeper kerberos conf keytab and principal parameters interchanged
    [HBASE-7832] - Use User.getShortName() in FSUtils
    [HBASE-7833] - 0.94 does not compile with Hadoop-0.20.205 and 0.22.0
    [HBASE-7851] - Include the guava classes as a dependency for jobs using mapreduce.TableMapReduceUtil
    [HBASE-7866] - TestSplitTransactionOnCluster.testSplitBeforeSettingSplittingInZK failed 3 times in a row
    [HBASE-7867] - setPreallocSize is different with COMMENT in setupTestEnv in MiniZooKeeperCluster.java
    [HBASE-7869] - Provide way to not start LogSyncer thread
    [HBASE-7876] - Got exception when manually triggers a split on an empty region
    [HBASE-7883] - Update memstore size when removing the entries in append operation
    [HBASE-7884] - ByteBloomFilter's performance can be improved by avoiding multiplication when generating hash
    [HBASE-7913] - Secure Rest server should login before getting an instance of Rest servlet
    [HBASE-7914] - Port the fix of HBASE-6748 into 0.94 branch
    [HBASE-7915] - Secure ThriftServer needs to login before calling HBaseHandler
    [HBASE-7916] - HMaster uses wrong InetSocketAddress parameter to throw exception
    [HBASE-7919] - Wrong key is used in ServerManager#getServerConnection() to retrieve from Map serverConnections
    [HBASE-7920] - Move isFamilyEssential(byte[] name) out of Filter interface in 0.94
    [HBASE-7945] - Remove flaky TestCatalogTrackerOnCluster
    [HBASE-7986] - [REST] Make HTablePool size configurable
    [HBASE-7991] - Backport HBASE-6479 'HFileReaderV1 caching the same parent META block could cause server abort when splitting' to 0.94
    [HBASE-8007] - Adopt TestLoadAndVerify from BigTop
    [HBASE-8019] - Port HBASE-7779 '[snapshot 130201 merge] Fix TestMultiParallel' to 0.94
    [HBASE-8025] - zkcli fails when SERVER_GC_OPTS is enabled
    [HBASE-8040] - Race condition in AM after HBASE-7521 (only 0.94)

 =======================================================================
 ==conf/regionservers
 =======================================================================
localhost

 =======================================================================
 ==conf/log4j.properties
 =======================================================================
# Define some default values that can be overridden by system properties
hbase.root.logger=INFO,console
hbase.security.logger=INFO,console
hbase.log.dir=.
hbase.log.file=hbase.log

# Define the root logger to the system property "hbase.root.logger".
log4j.rootLogger=${hbase.root.logger}

# Logging Threshold
log4j.threshold=ALL

#
# Daily Rolling File Appender
#
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}

# Rollover at midnight
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd

# 30-day backup
#log4j.appender.DRFA.MaxBackupIndex=30
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout

# Pattern format: Date LogLevel LoggerName LogMessage
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n

# Debugging Pattern format
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n

#
# Security audit appender
#
hbase.security.log.file=SecurityAuth.audit
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
log4j.appender.DRFAS.File=${hbase.log.dir}/${hbase.security.log.file}
log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.category.SecurityLogger=${hbase.security.logger}
log4j.additivity.SecurityLogger=false
#log4j.logger.SecurityLogger.org.apache.hadoop.hbase.security.access.AccessController=TRACE

#
# Null Appender
#
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender

#
# console

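 (hbase.root.logger above defaults to INFO,console, so the DRFA rolling-file
 appender this file defines only takes effect when that property is
 overridden at JVM startup, e.g.:

   -Dhbase.root.logger=INFO,DRFA -Dhbase.log.dir=/var/log/hbase

 where /var/log/hbase is a placeholder path; HBase's launch scripts normally
 derive this from the HBASE_ROOT_LOGGER environment variable.)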
 =======================================================================
 ==conf/hadoop-metrics.properties
 =======================================================================
# See http://wiki.apache.org/hadoop/GangliaMetrics
# Make sure you know whether you are using ganglia 3.0 or 3.1.
# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
# And, yes, this file is named hadoop-metrics.properties rather than
# hbase-metrics.properties because we're leveraging the hadoop metrics
# package and hadoop-metrics.properties is a hardcoded name, at least
# for the moment.
#
# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
# GMETADHOST_IP is the hostname or IP address of the server on which the ganglia
# meta daemon (gmetad) service is running

# Configuration of the "hbase" context for NullContextWithUpdateThread
# NullContextWithUpdateThread is a null context which has a thread calling
# periodically when monitoring is started. This keeps the data sampled
# correctly.
hbase.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
hbase.period=10

# Configuration of the "hbase" context for file
# hbase.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# hbase.fileName=/tmp/metrics_hbase.log

# HBase-specific configuration to reset long-running stats (e.g. compactions)
# If this variable is left out, then the default is no expiration.
hbase.extendedperiod = 3600

# Configuration of the "hbase" context for ganglia
# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# hbase.period=10
# hbase.servers=GMETADHOST_IP:8649

# Configuration of the "jvm" context for null
jvm.class=org.apache.hadoop.metrics.spi.NullContextWithUpdateThread
jvm.period=10

# Configuration of the "jvm" context for file
# jvm.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# jvm.fileName=/tmp/metrics_jvm.log

# Configuration of the "jvm" context for ganglia
# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# jvm.period=10
# jvm.servers=GMETADHOST_IP:8649

# Configuration of the "rpc" context for null

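 (Per the commented templates above, switching the "hbase" context from the
 null context to Ganglia 3.1 reporting is a matter of swapping the class and
 pointing at the gmetad host; GMETADHOST_IP is the same placeholder the
 comments use:

   hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
   hbase.period=10
   hbase.servers=GMETADHOST_IP:8649
 )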
 =======================================================================
 ==.arcconfig
 =======================================================================
{
  "project_id" : "hbase",
  "conduit_uri" : "https://reviews.facebook.net/",
  "copyright_holder" : "Apache Software Foundation",
  "phutil_libraries" : {
    "arclib" : ".arc_jira_lib"
  },
  "arcanist_configuration" : "ArcJIRAConfiguration",
  "jira_project" : "HBASE",
  "jira_api_url" : "https://issues.apache.org/jira/si/"
}

 =======================================================================
 ==src/site/resources/css/freebsd_docbook.css
 =======================================================================
/*
 * Copyright (c) 2001, 2003, 2010 The FreeBSD Documentation Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: doc/share/misc/docbook.css,v 1.15 2010/03/20 04:15:01 hrs Exp $
 */

BODY ADDRESS {
	line-height: 1.3;
	margin: .6em 0;
}

BODY BLOCKQUOTE {
	margin-top: .75em;
	line-height: 1.5;
	margin-bottom: .75em;
}

HTML BODY {
	margin: 1em 8% 1em 10%;
	line-height: 1.2;
}

.LEGALNOTICE {
	font-size: small;
	font-variant: small-caps;
}

BODY DIV {

 =======================================================================
 ==src/site/resources/images/hbase_logo.svg
 =======================================================================
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 15.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->

<svg
   xmlns:dc="http://purl.org/dc/elements/1.1/"
   xmlns:cc="http://creativecommons.org/ns#"
   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
   xmlns:svg="http://www.w3.org/2000/svg"
   xmlns="http://www.w3.org/2000/svg"
   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
   version="1.1"
   id="Layer_1"
   x="0px"
   y="0px"
   width="792px"
   height="612px"
   viewBox="0 0 792 612"
   enable-background="new 0 0 792 612"
   xml:space="preserve"
   inkscape:version="0.48.4 r9939"
   sodipodi:docname="hbase_banner_logo.png"
   inkscape:export-filename="hbase_logo_filledin.png"
   inkscape:export-xdpi="90"
   inkscape:export-ydpi="90"><metadata
   id="metadata3285"><rdf:RDF><cc:Work
       rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
         rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs
   id="defs3283" /><sodipodi:namedview
   pagecolor="#ffffff"
   bordercolor="#666666"
   borderopacity="1"
   objecttolerance="10"
   gridtolerance="10"
   guidetolerance="10"
   inkscape:pageopacity="0"
   inkscape:pageshadow="2"
   inkscape:window-width="1131"
   inkscape:window-height="715"
   id="namedview3281"
   showgrid="false"
   inkscape:zoom="4.3628026"
   inkscape:cx="328.98554"
   inkscape:cy="299.51695"
   inkscape:window-x="752"
   inkscape:window-y="456"
   inkscape:window-maximized="0"
   inkscape:current-layer="Layer_1" />
<path
   d="m 233.586,371.672 -9.895,0 0,-51.583 9.895,0 0,51.583 z m -9.77344,-51.59213 -0.12156,-31.94487 9.895,0 -0.0405,31.98539 z m -0.12156,51.59213 -9.896,0 0,-32.117 -63.584,0 0,32.117 -19.466,0 0,-83.537 19.466,0 0,31.954 55.128,0 8.457,0 9.896,0 0,51.583 z m 0,-83.537 -9.896,0 0,31.98539 10.01756,-0.0405 z"

 =======================================================================
 ==src/site/resources/images/big_h_logo.svg
 =======================================================================
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Generator: Adobe Illustrator 15.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->

<svg
   xmlns:dc="http://purl.org/dc/elements/1.1/"
   xmlns:cc="http://creativecommons.org/ns#"
   xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
   xmlns:svg="http://www.w3.org/2000/svg"
   xmlns="http://www.w3.org/2000/svg"
   xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
   xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
   version="1.1"
   id="Layer_1"
   x="0px"
   y="0px"
   width="792px"
   height="612px"
   viewBox="0 0 792 612"
   enable-background="new 0 0 792 612"
   xml:space="preserve"
   inkscape:version="0.48.4 r9939"
   sodipodi:docname="big_h_same_font_hbase3_logo.png"
   inkscape:export-filename="big_h_bitmap.png"
   inkscape:export-xdpi="90"
   inkscape:export-ydpi="90"><metadata
   id="metadata3693"><rdf:RDF><cc:Work
       rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
         rdf:resource="http://purl.org/dc/dcmitype/StillImage" /><dc:title></dc:title></cc:Work></rdf:RDF></metadata><defs
   id="defs3691" /><sodipodi:namedview
   pagecolor="#000000"
   bordercolor="#666666"
   borderopacity="1"
   objecttolerance="10"
   gridtolerance="10"
   guidetolerance="10"
   inkscape:pageopacity="0"
   inkscape:pageshadow="2"
   inkscape:window-width="1440"
   inkscape:window-height="856"
   id="namedview3689"
   showgrid="false"
   inkscape:zoom="2.1814013"
   inkscape:cx="415.39305"
   inkscape:cy="415.72702"
   inkscape:window-x="1164"
   inkscape:window-y="22"
   inkscape:window-maximized="0"
   inkscape:current-layer="Layer_1" />



 =======================================================================
 ==src/packages/deb/hbase.control/conffile
 =======================================================================
/etc/hbase/hadoop-metrics.properties
/etc/hbase/hbase-env.sh
/etc/hbase/hbase-site.xml
/etc/hbase/log4j.properties
/etc/hbase/regionservers

 =======================================================================
 ==src/test/java/org/apache/hadoop/hbase/client/InstantSchemaChangeTestBase.java
 =======================================================================

 =======================================================================
 ==src/test/java/org/apache/hadoop/hbase/regionserver/NoOpScanPolicyObserver.java
 =======================================================================
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.NavigableSet;

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TestFromClientSideWithCoprocessor;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

/**
 * RegionObserver that just reimplements the default behavior,
 * in order to validate that all the necessary APIs for this are public.
 * This observer is also used in {@link TestFromClientSideWithCoprocessor} and
 * {@link TestCompactionWithCoprocessor} to make sure that a wide range
 * of functionality still behaves as expected.
 */
public class NoOpScanPolicyObserver extends BaseRegionObserver {
  /**
   * Reimplement the default behavior
   */
  @Override
  public InternalScanner preFlushScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
      Store store, KeyValueScanner memstoreScanner, InternalScanner s) throws IOException {
    Store.ScanInfo oldSI = store.getScanInfo();
    Store.ScanInfo scanInfo = new Store.ScanInfo(store.getFamily(), oldSI.getTtl(),
        oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
    Scan scan = new Scan();
    scan.setMaxVersions(oldSI.getMaxVersions());
    return new StoreScanner(store, scanInfo, scan, Collections.singletonList(memstoreScanner),
        ScanType.MINOR_COMPACT, store.getHRegion().getSmallestReadPoint(),
        HConstants.OLDEST_TIMESTAMP);
  }

  /**
   * Reimplement the default behavior
   */
  @Override
  public InternalScanner preCompactScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
      Store store, List<? extends KeyValueScanner> scanners, ScanType scanType, long earliestPutTs,
      InternalScanner s) throws IOException {
    // this demonstrates how to override the scanners default behavior
    Store.ScanInfo oldSI = store.getScanInfo();
    Store.ScanInfo scanInfo = new Store.ScanInfo(store.getFamily(), oldSI.getTtl(),
        oldSI.getTimeToPurgeDeletes(), oldSI.getComparator());
    Scan scan = new Scan();

 =======================================================================
 ==src/test/java/org/apache/hadoop/hbase/regionserver/TestHBase7051.java
 =======================================================================
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.MultithreadedTestUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.google.common.collect.Lists;

/**
 * Test of HBASE-7051; that checkAndPuts and puts behave atomically with respect to each other.
 * Rather than perform a bunch of trials to verify atomicity, this test recreates a race condition
 * that causes the test to fail if checkAndPut doesn't wait for outstanding put transactions
 * to complete.  It does this by invasively overriding HRegion functions to affect the timing of
 * the operations.
 */
@Category(SmallTests.class)
public class TestHBase7051 {

  private static CountDownLatch latch = new CountDownLatch(1);
  private enum TestStep {
    INIT,                  // initial put of 10 to set value of the cell
    PUT_STARTED,           // began doing a put of 50 to cell
    PUT_COMPLETED,         // put complete (released RowLock, but may not have advanced MVCC).

 =======================================================================
 ==src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
 =======================================================================
package org.apache.hadoop.hbase.replication;


import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.mapreduce.Job;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

@Category(LargeTests.class)
public class TestReplicationSmallTests extends TestReplicationBase {

  private static final Log LOG = LogFactory.getLog(TestReplicationSmallTests.class);

  /**
   * @throws java.lang.Exception
   */
  @Before
  public void setUp() throws Exception {
    htable1.setAutoFlush(true);
    // Starting and stopping replication can make us miss new logs;
    // rolling like this makes sure the most recent one gets added to the queue.
    for ( JVMClusterUtil.RegionServerThread r :
        utility1.getHBaseCluster().getRegionServerThreads()) {
      r.getRegionServer().getWAL().rollWriter();
    }
    utility1.truncateTable(tableName);
    // truncating the table will send one Delete per row to the slave cluster
    // in an async fashion, which is why we cannot just call truncateTable on
    // utility2 since late writes could make it to the slave in some way.
    // Instead, we truncate the first table and wait for all the Deletes to
    // make it to the slave.
    Scan scan = new Scan();
    int lastCount = 0;
    for (int i = 0; i < NB_RETRIES; i++) {
      if (i==NB_RETRIES-1) {
        fail("Waited too much time for truncate");
      }

 =======================================================================
 ==src/main/avro/hbase.avpr
 =======================================================================
{
  "protocol" : "HBase",
  "namespace" : "org.apache.hadoop.hbase.avro.generated",
  "types" : [ {
    "type" : "record",
    "name" : "AServerAddress",
    "fields" : [ {
      "name" : "hostname",
      "type" : "string"
    }, {
      "name" : "inetSocketAddress",
      "type" : "string"
    }, {
      "name" : "port",
      "type" : "int"
    } ]
  }, {
    "type" : "record",
    "name" : "ARegionLoad",
    "fields" : [ {
      "name" : "memStoreSizeMB",
      "type" : "int"
    }, {
      "name" : "name",
      "type" : "bytes"
    }, {
      "name" : "storefileIndexSizeMB",
      "type" : "int"
    }, {
      "name" : "storefiles",
      "type" : "int"
    }, {
      "name" : "storefileSizeMB",
      "type" : "int"
    }, {
      "name" : "stores",
      "type" : "int"
    } ]
  }, {
    "type" : "record",
    "name" : "AServerLoad",
    "fields" : [ {
      "name" : "load",
      "type" : "int"
    }, {
      "name" : "maxHeapMB",
      "type" : "int"
    }, {
      "name" : "memStoreSizeInMB",
      "type" : "int"

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TScan.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A Scan object is used to specify scanner parameters when opening a scanner.
 */
public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan");

  private static final org.apache.thrift.protocol.TField START_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("startRow", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField STOP_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("stopRow", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3);
  private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)4);
  private static final org.apache.thrift.protocol.TField CACHING_FIELD_DESC = new org.apache.thrift.protocol.TField("caching", org.apache.thrift.protocol.TType.I32, (short)5);
  private static final org.apache.thrift.protocol.TField FILTER_STRING_FIELD_DESC = new org.apache.thrift.protocol.TField("filterString", org.apache.thrift.protocol.TType.STRING, (short)6);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TScanStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TScanTupleSchemeFactory());
  }

  public ByteBuffer startRow; // optional
  public ByteBuffer stopRow; // optional

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TRegionInfo.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A TRegionInfo contains information about an HTable region.
 */
public class TRegionInfo implements org.apache.thrift.TBase<TRegionInfo, TRegionInfo._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRegionInfo");

  private static final org.apache.thrift.protocol.TField START_KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("startKey", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField END_KEY_FIELD_DESC = new org.apache.thrift.protocol.TField("endKey", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I64, (short)3);
  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)4);
  private static final org.apache.thrift.protocol.TField VERSION_FIELD_DESC = new org.apache.thrift.protocol.TField("version", org.apache.thrift.protocol.TType.BYTE, (short)5);
  private static final org.apache.thrift.protocol.TField SERVER_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("serverName", org.apache.thrift.protocol.TType.STRING, (short)6);
  private static final org.apache.thrift.protocol.TField PORT_FIELD_DESC = new org.apache.thrift.protocol.TField("port", org.apache.thrift.protocol.TType.I32, (short)7);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TRegionInfoStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TRegionInfoTupleSchemeFactory());
  }

  public ByteBuffer startKey; // required

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An IllegalArgument exception indicates an illegal or invalid
 * argument was passed into a procedure.
 */
public class IllegalArgument extends Exception implements org.apache.thrift.TBase<IllegalArgument, IllegalArgument._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IllegalArgument");

  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new IllegalArgumentStandardSchemeFactory());
    schemes.put(TupleScheme.class, new IllegalArgumentTupleSchemeFactory());
  }

  public String message; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MESSAGE((short)1, "message");


 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/AlreadyExists.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An AlreadyExists exception signals that a table with the specified
 * name already exists.
 */
public class AlreadyExists extends Exception implements org.apache.thrift.TBase<AlreadyExists, AlreadyExists._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyExists");

  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new AlreadyExistsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new AlreadyExistsTupleSchemeFactory());
  }

  public String message; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MESSAGE((short)1, "message");


 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TCell.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * TCell - Used to transport a cell value (byte[]) and the timestamp it was
 * stored with, together, as a result for get and getRow methods. This promotes
 * the timestamp of a cell to a first-class value, making it easy to take
 * note of temporal data. Cell is used all the way from HStore up to HTable.
 */
public class TCell implements org.apache.thrift.TBase<TCell, TCell._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCell");

  private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TCellStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TCellTupleSchemeFactory());
  }

  public ByteBuffer value; // required
  public long timestamp; // required
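
A minimal decoding sketch for reviewers, using only the two public fields declared above; the client call that produced the cell is assumed:

  // Hypothetical illustration: rendering a TCell's value and timestamp.
  static String describe(TCell cell) {
    byte[] bytes = new byte[cell.value.remaining()];
    cell.value.duplicate().get(bytes);  // copy without moving the buffer's position
    return new String(bytes, java.nio.charset.StandardCharsets.UTF_8) + " @ " + cell.timestamp;
  }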


 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TIncrement.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * For increments that are not incrementColumnValue
 * equivalents.
 */
public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement");

  private static final org.apache.thrift.protocol.TField TABLE_FIELD_DESC = new org.apache.thrift.protocol.TField("table", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("column", org.apache.thrift.protocol.TType.STRING, (short)3);
  private static final org.apache.thrift.protocol.TField AMMOUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("ammount", org.apache.thrift.protocol.TType.I64, (short)4);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TIncrementStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TIncrementTupleSchemeFactory());
  }

  public ByteBuffer table; // required
  public ByteBuffer row; // required
  public ByteBuffer column; // required

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/Hbase.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Hbase {

  public interface Iface {

    /**
     * Brings a table on-line (enables it)
     * 
     * @param tableName name of the table
     */
    public void enableTable(ByteBuffer tableName) throws IOError, org.apache.thrift.TException;

    /**
     * Disables a table (takes it off-line). If it is being served, the master
     * will tell the servers to stop serving it.
     * 
     * @param tableName name of the table
     */
    public void disableTable(ByteBuffer tableName) throws IOError, org.apache.thrift.TException;

    /**
     * @return true if table is on-line
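
For context, a connection-and-call sketch against this interface; the host, port, and protocol choice are conventional Thrift boilerplate (assumed, not part of the audited file), with the usual org.apache.thrift.transport and .protocol imports:

  // Hypothetical illustration: off-lining and re-enabling a table.
  TSocket sock = new TSocket("localhost", 9090);  // 9090: customary HBase Thrift port
  sock.open();
  Hbase.Client client = new Hbase.Client(new TBinaryProtocol(sock));
  ByteBuffer table = ByteBuffer.wrap("users".getBytes(java.nio.charset.StandardCharsets.UTF_8));
  try {
    client.disableTable(table);  // take the table off-line
    client.enableTable(table);   // bring it back on-line
  } catch (IOError e) {
    System.err.println("HBase error: " + e.message);
  } finally {
    sock.close();
  }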

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/BatchMutation.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A BatchMutation object is used to apply a number of Mutations to a single row.
 */
public class BatchMutation implements org.apache.thrift.TBase<BatchMutation, BatchMutation._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BatchMutation");

  private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField MUTATIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("mutations", org.apache.thrift.protocol.TType.LIST, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new BatchMutationStandardSchemeFactory());
    schemes.put(TupleScheme.class, new BatchMutationTupleSchemeFactory());
  }

  public ByteBuffer row; // required
  public List<Mutation> mutations; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    ROW((short)1, "row"),
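
A usage sketch for context; mutateRows' exact signature varies across HBase versions (some generations add an attributes map), so the final call is illustrative only:

  // Hypothetical illustration: two puts to one row, shipped as a batch.
  java.nio.charset.Charset utf8 = java.nio.charset.StandardCharsets.UTF_8;
  Mutation m1 = new Mutation();
  m1.column = ByteBuffer.wrap("f:a".getBytes(utf8));
  m1.value  = ByteBuffer.wrap("1".getBytes(utf8));
  Mutation m2 = new Mutation();
  m2.column = ByteBuffer.wrap("f:b".getBytes(utf8));
  m2.value  = ByteBuffer.wrap("2".getBytes(utf8));
  BatchMutation batch = new BatchMutation();
  batch.row = ByteBuffer.wrap("row1".getBytes(utf8));
  batch.mutations = java.util.Arrays.asList(m1, m2);
  client.mutateRows(table, java.util.Arrays.asList(batch));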

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/Mutation.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A Mutation object is used to either update or delete a column-value.
 */
public class Mutation implements org.apache.thrift.TBase<Mutation, Mutation._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Mutation");

  private static final org.apache.thrift.protocol.TField IS_DELETE_FIELD_DESC = new org.apache.thrift.protocol.TField("isDelete", org.apache.thrift.protocol.TType.BOOL, (short)1);
  private static final org.apache.thrift.protocol.TField COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("column", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)3);
  private static final org.apache.thrift.protocol.TField WRITE_TO_WAL_FIELD_DESC = new org.apache.thrift.protocol.TField("writeToWAL", org.apache.thrift.protocol.TType.BOOL, (short)4);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new MutationStandardSchemeFactory());
    schemes.put(TupleScheme.class, new MutationTupleSchemeFactory());
  }

  public boolean isDelete; // required
  public ByteBuffer column; // required
  public ByteBuffer value; // required
  public boolean writeToWAL; // required
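
The delete half of the comment above, sketched with only the public fields declared in this excerpt:

  // Hypothetical illustration: a delete marker for one column.
  Mutation del = new Mutation();
  del.isDelete = true;   // with this set, 'value' is ignored
  del.column = ByteBuffer.wrap("f:obsolete".getBytes(java.nio.charset.StandardCharsets.UTF_8));
  del.writeToWAL = true; // record the delete in the write-ahead log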

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/IOError.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An IOError exception signals that an error occurred communicating
 * with the HBase master or an HBase region server. Also used to return
 * more general HBase error conditions.
 */
public class IOError extends Exception implements org.apache.thrift.TBase<IOError, IOError._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IOError");

  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new IOErrorStandardSchemeFactory());
    schemes.put(TupleScheme.class, new IOErrorTupleSchemeFactory());
  }

  public String message; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MESSAGE((short)1, "message");

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/TRowResult.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Holds the row name and a map of columns to cells.
 */
public class TRowResult implements org.apache.thrift.TBase<TRowResult, TRowResult._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowResult");

  private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.MAP, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TRowResultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TRowResultTupleSchemeFactory());
  }

  public ByteBuffer row; // required
  public Map<ByteBuffer,TCell> columns; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    ROW((short)1, "row"),
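
A consumption sketch for context, using only the two public fields above; the getRow-style call that produced the result is assumed:

  // Hypothetical illustration: printing every column/cell in a row result.
  static void dump(TRowResult r) {
    for (java.util.Map.Entry<ByteBuffer, TCell> e : r.columns.entrySet()) {
      System.out.println(utf8(r.row) + "/" + utf8(e.getKey())
          + " = " + utf8(e.getValue().value) + " @ " + e.getValue().timestamp);
    }
  }
  static String utf8(ByteBuffer b) {
    return java.nio.charset.StandardCharsets.UTF_8.decode(b.duplicate()).toString();
  }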

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift/generated/ColumnDescriptor.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * An HColumnDescriptor contains information about a column family
 * such as the number of versions, compression settings, etc. It is
 * used as input when creating a table or adding a column.
 */
public class ColumnDescriptor implements org.apache.thrift.TBase<ColumnDescriptor, ColumnDescriptor._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnDescriptor");

  private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)2);
  private static final org.apache.thrift.protocol.TField COMPRESSION_FIELD_DESC = new org.apache.thrift.protocol.TField("compression", org.apache.thrift.protocol.TType.STRING, (short)3);
  private static final org.apache.thrift.protocol.TField IN_MEMORY_FIELD_DESC = new org.apache.thrift.protocol.TField("inMemory", org.apache.thrift.protocol.TType.BOOL, (short)4);
  private static final org.apache.thrift.protocol.TField BLOOM_FILTER_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("bloomFilterType", org.apache.thrift.protocol.TType.STRING, (short)5);
  private static final org.apache.thrift.protocol.TField BLOOM_FILTER_VECTOR_SIZE_FIELD_DESC = new org.apache.thrift.protocol.TField("bloomFilterVectorSize", org.apache.thrift.protocol.TType.I32, (short)6);
  private static final org.apache.thrift.protocol.TField BLOOM_FILTER_NB_HASHES_FIELD_DESC = new org.apache.thrift.protocol.TField("bloomFilterNbHashes", org.apache.thrift.protocol.TType.I32, (short)7);
  private static final org.apache.thrift.protocol.TField BLOCK_CACHE_ENABLED_FIELD_DESC = new org.apache.thrift.protocol.TField("blockCacheEnabled", org.apache.thrift.protocol.TType.BOOL, (short)8);
  private static final org.apache.thrift.protocol.TField TIME_TO_LIVE_FIELD_DESC = new org.apache.thrift.protocol.TField("timeToLive", org.apache.thrift.protocol.TType.I32, (short)9);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new ColumnDescriptorStandardSchemeFactory());
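
A population sketch for context; the field list is taken from the descriptors above, while the createTable call (and the -1 "no TTL" convention) are assumptions from the HBase Thrift IDL:

  // Hypothetical illustration: one column family, then table creation.
  ColumnDescriptor cf = new ColumnDescriptor();
  cf.name = ByteBuffer.wrap("f:".getBytes(java.nio.charset.StandardCharsets.UTF_8));
  cf.maxVersions = 3;
  cf.compression = "NONE";
  cf.inMemory = false;
  cf.blockCacheEnabled = true;
  cf.timeToLive = -1;  // assumed IDL default meaning "no expiry"
  client.createTable(table, java.util.Collections.singletonList(cf));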

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: hbase.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class HBaseProtos {
  private HBaseProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface SnapshotDescriptionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string name = 1;
    boolean hasName();
    String getName();
    
    // optional string table = 2;
    boolean hasTable();
    String getTable();
    
    // optional int64 creationTime = 3 [default = 0];
    boolean hasCreationTime();
    long getCreationTime();
    
    // optional .SnapshotDescription.Type type = 4 [default = FLUSH];
    boolean hasType();
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType();
    
    // optional int32 version = 5;
    boolean hasVersion();
    int getVersion();
  }
  public static final class SnapshotDescription extends
      com.google.protobuf.GeneratedMessage
      implements SnapshotDescriptionOrBuilder {
    // Use SnapshotDescription.newBuilder() to construct.
    private SnapshotDescription(Builder builder) {
      super(builder);
    }
    private SnapshotDescription(boolean noInit) {}
    
    private static final SnapshotDescription defaultInstance;
    public static SnapshotDescription getDefaultInstance() {
      return defaultInstance;
    }
    
    public SnapshotDescription getDefaultInstanceForType() {
      return defaultInstance;
    }
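
For context, a builder sketch following the comment above ("Use SnapshotDescription.newBuilder() to construct."); field names and defaults come from the .proto comments in this excerpt, and the snapshot name is illustrative:

  // Hypothetical illustration: assembling a snapshot descriptor.
  HBaseProtos.SnapshotDescription snap =
      HBaseProtos.SnapshotDescription.newBuilder()
          .setName("nightly-backup")                            // required string name = 1
          .setTable("users")                                    // optional string table = 2
          .setCreationTime(System.currentTimeMillis())          // optional int64 creationTime = 3
          .setType(HBaseProtos.SnapshotDescription.Type.FLUSH)  // default per field 4
          .build();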

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/protobuf/generated/ErrorHandlingProtos.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ErrorHandling.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class ErrorHandlingProtos {
  private ErrorHandlingProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface StackTraceElementMessageOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // optional string declaringClass = 1;
    boolean hasDeclaringClass();
    String getDeclaringClass();
    
    // optional string methodName = 2;
    boolean hasMethodName();
    String getMethodName();
    
    // optional string fileName = 3;
    boolean hasFileName();
    String getFileName();
    
    // optional int32 lineNumber = 4;
    boolean hasLineNumber();
    int getLineNumber();
  }
  public static final class StackTraceElementMessage extends
      com.google.protobuf.GeneratedMessage
      implements StackTraceElementMessageOrBuilder {
    // Use StackTraceElementMessage.newBuilder() to construct.
    private StackTraceElementMessage(Builder builder) {
      super(builder);
    }
    private StackTraceElementMessage(boolean noInit) {}
    
    private static final StackTraceElementMessage defaultInstance;
    public static StackTraceElementMessage getDefaultInstance() {
      return defaultInstance;
    }
    
    public StackTraceElementMessage getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor;

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TScan.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Any timestamps in the columns are ignored; use timeRange to select by timestamp.
 * Max versions defaults to 1.
 */
public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan");

  private static final org.apache.thrift.protocol.TField START_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("startRow", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField STOP_ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("stopRow", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)3);
  private static final org.apache.thrift.protocol.TField CACHING_FIELD_DESC = new org.apache.thrift.protocol.TField("caching", org.apache.thrift.protocol.TType.I32, (short)4);
  private static final org.apache.thrift.protocol.TField MAX_VERSIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxVersions", org.apache.thrift.protocol.TType.I32, (short)5);
  private static final org.apache.thrift.protocol.TField TIME_RANGE_FIELD_DESC = new org.apache.thrift.protocol.TField("timeRange", org.apache.thrift.protocol.TType.STRUCT, (short)6);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TScanStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TScanTupleSchemeFactory());
  }

  public ByteBuffer startRow; // optional
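
A setup sketch for context; the fluent setters are the Thrift compiler's usual output (not visible in this truncated excerpt), and TTimeRange's two-argument constructor is assumed from its required fields (see the TTimeRange.java excerpt later in this report):

  // Hypothetical illustration: a bounded scan filtered by time range.
  java.nio.charset.Charset utf8 = java.nio.charset.StandardCharsets.UTF_8;
  TScan scan = new TScan()
      .setStartRow(ByteBuffer.wrap("a".getBytes(utf8)))
      .setStopRow(ByteBuffer.wrap("m".getBytes(utf8)))
      .setCaching(100)  // rows fetched per round trip
      .setTimeRange(new TTimeRange(0L, System.currentTimeMillis()));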

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TGet.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Used to perform Get operations on a single row.
 * 
 * The scope can be further narrowed down by specifying a list of
 * columns or column families.
 * 
 * To get everything for a row, instantiate a Get object with just the row to get.
 * To further define the scope of what to get, you can add a timestamp or time range
 * with an optional maximum number of versions to return.
 * 
 * If you specify both a time range and a timestamp, the range is ignored.
 * Timestamps on TColumns are ignored.
 * 
 * TODO: Filter, Locks
 */
public class TGet implements org.apache.thrift.TBase<TGet, TGet._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGet");

  private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)2);
  private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3);
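
A sketch of the narrowing described above; required-field constructors and fluent setters are the Thrift compiler's usual output and are assumed here:

  // Hypothetical illustration: one column plus a time range (range wins over timestamp).
  java.nio.charset.Charset utf8 = java.nio.charset.StandardCharsets.UTF_8;
  TColumn col = new TColumn(ByteBuffer.wrap("f".getBytes(utf8)))
      .setQualifier(ByteBuffer.wrap("name".getBytes(utf8)));
  TGet get = new TGet(ByteBuffer.wrap("row1".getBytes(utf8)))
      .setColumns(java.util.Collections.singletonList(col))
      .setTimeRange(new TTimeRange(0L, Long.MAX_VALUE));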

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIncrement.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Used to perform Increment operations for a single row.
 * 
 * You can specify whether this Increment should be written
 * to the write-ahead log (WAL) or not. It defaults to true.
 */
public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement");

  private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)2);
  private static final org.apache.thrift.protocol.TField WRITE_TO_WAL_FIELD_DESC = new org.apache.thrift.protocol.TField("writeToWal", org.apache.thrift.protocol.TType.BOOL, (short)3);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TIncrementStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TIncrementTupleSchemeFactory());
  }

  public ByteBuffer row; // required
  public List<TColumnIncrement> columns; // required
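
An assembly sketch for context; TColumnIncrement's fields appear in its own excerpt later in this report, and the increment() client call is an assumption from the THBaseService interface:

  // Hypothetical illustration: +1 on a single counter cell, WAL enabled by default.
  java.nio.charset.Charset utf8 = java.nio.charset.StandardCharsets.UTF_8;
  TColumnIncrement ci = new TColumnIncrement();
  ci.family = ByteBuffer.wrap("f".getBytes(utf8));
  ci.qualifier = ByteBuffer.wrap("hits".getBytes(utf8));
  ci.setAmount(1L);  // setter keeps the optional primitive's isset bit in sync
  TIncrement inc = new TIncrement();
  inc.row = ByteBuffer.wrap("row1".getBytes(utf8));
  inc.columns = java.util.Collections.singletonList(ci);
  client.increment(table, inc);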

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumn.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Addresses a single cell or multiple cells
 * in an HBase table by column family and optionally
 * a column qualifier and timestamp.
 */
public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn");

  private static final org.apache.thrift.protocol.TField FAMILY_FIELD_DESC = new org.apache.thrift.protocol.TField("family", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField QUALIFIER_FIELD_DESC = new org.apache.thrift.protocol.TField("qualifier", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TColumnStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TColumnTupleSchemeFactory());
  }

  public ByteBuffer family; // required
  public ByteBuffer qualifier; // optional
  public long timestamp; // optional

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDelete.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Used to perform Delete operations on a single row.
 * 
 * The scope can be further narrowed down by specifying a list of
 * columns or column families as TColumns.
 * 
 * Specifying only a family in a TColumn will delete the whole family.
 * If a timestamp is specified, all versions with a timestamp less than
 * or equal to it will be deleted. If no timestamp is specified, the
 * current time will be used.
 * 
 * Specifying a family and a column qualifier in a TColumn will delete only
 * this qualifier. If a timestamp is specified, only versions equal
 * to this timestamp will be deleted. If no timestamp is specified, the
 * most recent version will be deleted. To delete all previous versions,
 * specify the DELETE_COLUMNS TDeleteType.
 * 
 * The top-level timestamp is only used if a complete row should be deleted
 * (i.e. no columns are passed). If it is specified, it works the same way
 * as if you had added a TColumn for every column family with this timestamp
 * (i.e. all versions older than or equal to it, in all column families, will be deleted)
 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIOError.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A TIOError exception signals that an error occurred communicating
 * with the HBase master or an HBase region server. Also used to return
 * more general HBase error conditions.
 */
public class TIOError extends Exception implements org.apache.thrift.TBase<TIOError, TIOError._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIOError");

  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TIOErrorStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TIOErrorTupleSchemeFactory());
  }

  public String message; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MESSAGE((short)1, "message");

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TDeleteType.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;


import java.util.Map;
import java.util.HashMap;
import org.apache.thrift.TEnum;

/**
 * Specify type of delete:
 *  - DELETE_COLUMN means exactly one version will be removed,
 *  - DELETE_COLUMNS means previous versions will also be removed.
 */
public enum TDeleteType implements org.apache.thrift.TEnum {
  DELETE_COLUMN(0),
  DELETE_COLUMNS(1);

  private final int value;

  private TDeleteType(int value) {
    this.value = value;
  }

  /**
   * Get the integer value of this enum value, as defined in the Thrift IDL.
   */
  public int getValue() {
    return value;
  }

  /**
   * Find the enum type by its integer value, as defined in the Thrift IDL.
   * @return null if the value is not found.
   */
  public static TDeleteType findByValue(int value) { 
    switch (value) {
      case 0:
        return DELETE_COLUMN;
      case 1:
        return DELETE_COLUMNS;
      default:
        return null;
    }
  }
}
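
Since this excerpt happens to show the whole enum, a small round-trip check illustrates its contract directly; nothing here is assumed beyond the code above:

  // Every constant maps back to itself through its IDL integer value.
  for (TDeleteType t : TDeleteType.values()) {
    assert TDeleteType.findByValue(t.getValue()) == t;
  }
  assert TDeleteType.findByValue(99) == null;  // unknown values yield null, per the javadoc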

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TResult.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * If no Result is found, row and columnValues will not be set.
 */
public class TResult implements org.apache.thrift.TBase<TResult, TResult._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TResult");

  private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField COLUMN_VALUES_FIELD_DESC = new org.apache.thrift.protocol.TField("columnValues", org.apache.thrift.protocol.TType.LIST, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TResultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TResultTupleSchemeFactory());
  }

  public ByteBuffer row; // optional
  public List<TColumnValue> columnValues; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    ROW((short)1, "row"),
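
The "not set" contract above, sketched as a null check on the public fields (the compiler also generates isSetRow()/isSetColumnValues() helpers, not visible here):

  // Hypothetical illustration: distinguishing a hit from a miss.
  static boolean found(TResult r) {
    return r.row != null && r.columnValues != null;
  }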

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TIllegalArgument.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A TIllegalArgument exception indicates that an illegal or invalid
 * argument was passed into a procedure.
 */
public class TIllegalArgument extends Exception implements org.apache.thrift.TBase<TIllegalArgument, TIllegalArgument._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIllegalArgument");

  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TIllegalArgumentStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TIllegalArgumentTupleSchemeFactory());
  }

  public String message; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MESSAGE((short)1, "message");


 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnValue.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Represents a single cell and its value.
 */
public class TColumnValue implements org.apache.thrift.TBase<TColumnValue, TColumnValue._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnValue");

  private static final org.apache.thrift.protocol.TField FAMILY_FIELD_DESC = new org.apache.thrift.protocol.TField("family", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField QUALIFIER_FIELD_DESC = new org.apache.thrift.protocol.TField("qualifier", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("value", org.apache.thrift.protocol.TType.STRING, (short)3);
  private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)4);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TColumnValueStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TColumnValueTupleSchemeFactory());
  }

  public ByteBuffer family; // required
  public ByteBuffer qualifier; // required
  public ByteBuffer value; // required
  public long timestamp; // optional

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/THBaseService.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class THBaseService {

  public interface Iface {

    /**
     * Test for the existence of columns in the table, as specified in the TGet.
     * 
     * @return true if the specified TGet matches one or more keys, false if not
     * 
     * @param table the table to check on
     * 
     * @param get the TGet to check for
     */
    public boolean exists(ByteBuffer table, TGet get) throws TIOError, org.apache.thrift.TException;

    /**
     * Method for getting data from a row.
     * 
     * If the row cannot be found, an empty Result is returned.
     * This can be checked by the empty field of the TResult
     * 
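
A probe-then-fetch sketch for context; client wiring mirrors the thrift1 example earlier in this report, and get() returning a TResult is an assumption consistent with the truncated javadoc above:

  // Hypothetical illustration: check existence, then read the row.
  THBaseService.Client client = new THBaseService.Client(new TBinaryProtocol(sock));
  TGet probe = new TGet(ByteBuffer.wrap("row1".getBytes(java.nio.charset.StandardCharsets.UTF_8)));
  if (client.exists(table, probe)) {
    TResult res = client.get(table, probe);
    System.out.println("columns returned: " + res.columnValues.size());
  }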

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TColumnIncrement.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Represents a single cell and the amount to increment it by.
 */
public class TColumnIncrement implements org.apache.thrift.TBase<TColumnIncrement, TColumnIncrement._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnIncrement");

  private static final org.apache.thrift.protocol.TField FAMILY_FIELD_DESC = new org.apache.thrift.protocol.TField("family", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField QUALIFIER_FIELD_DESC = new org.apache.thrift.protocol.TField("qualifier", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField AMOUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("amount", org.apache.thrift.protocol.TType.I64, (short)3);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TColumnIncrementStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TColumnIncrementTupleSchemeFactory());
  }

  public ByteBuffer family; // required
  public ByteBuffer qualifier; // required
  public long amount; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TPut.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Used to perform Put operations for a single row.
 * 
 * Add column values to this object and they will be written.
 * You can provide a default timestamp if the column values
 * don't have one. If you don't provide a default timestamp,
 * the current time is inserted.
 * 
 * You can also specify whether this Put should be written
 * to the write-ahead log (WAL) or not. It defaults to true.
 */
public class TPut implements org.apache.thrift.TBase<TPut, TPut._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPut");

  private static final org.apache.thrift.protocol.TField ROW_FIELD_DESC = new org.apache.thrift.protocol.TField("row", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField COLUMN_VALUES_FIELD_DESC = new org.apache.thrift.protocol.TField("columnValues", org.apache.thrift.protocol.TType.LIST, (short)2);
  private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)3);
  private static final org.apache.thrift.protocol.TField WRITE_TO_WAL_FIELD_DESC = new org.apache.thrift.protocol.TField("writeToWal", org.apache.thrift.protocol.TType.BOOL, (short)4);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
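
An assembly sketch of the behavior described above; TColumnValue's three required fields are shown in its excerpt earlier in this report, constructors over required fields are the Thrift compiler's usual output, and the put() client call is an assumption from the THBaseService interface:

  // Hypothetical illustration: a one-cell put with the WAL switch explicit.
  java.nio.charset.Charset utf8 = java.nio.charset.StandardCharsets.UTF_8;
  TColumnValue cv = new TColumnValue(
      ByteBuffer.wrap("f".getBytes(utf8)),
      ByteBuffer.wrap("name".getBytes(utf8)),
      ByteBuffer.wrap("alice".getBytes(utf8)));
  TPut put = new TPut(
      ByteBuffer.wrap("row1".getBytes(utf8)),
      java.util.Collections.singletonList(cv))
      .setWriteToWal(true);  // defaults to true, per the comment above
  client.put(table, put);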

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/thrift2/generated/TTimeRange.java
 =======================================================================
/**
 * Autogenerated by Thrift Compiler (0.8.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
package org.apache.hadoop.hbase.thrift2.generated;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TTimeRange implements org.apache.thrift.TBase<TTimeRange, TTimeRange._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTimeRange");

  private static final org.apache.thrift.protocol.TField MIN_STAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("minStamp", org.apache.thrift.protocol.TType.I64, (short)1);
  private static final org.apache.thrift.protocol.TField MAX_STAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("maxStamp", org.apache.thrift.protocol.TType.I64, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new TTimeRangeStandardSchemeFactory());
    schemes.put(TupleScheme.class, new TTimeRangeTupleSchemeFactory());
  }

  public long minStamp; // required
  public long maxStamp; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MIN_STAMP((short)1, "minStamp"),
    MAX_STAMP((short)2, "maxStamp");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableListMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: TableListMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class TableListMessage {
  private TableListMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface TableListOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated string name = 1;
    java.util.List<String> getNameList();
    int getNameCount();
    String getName(int index);
  }
  public static final class TableList extends
      com.google.protobuf.GeneratedMessage
      implements TableListOrBuilder {
    // Use TableList.newBuilder() to construct.
    private TableList(Builder builder) {
      super(builder);
    }
    private TableList(boolean noInit) {}
    
    private static final TableList defaultInstance;
    public static TableList getDefaultInstance() {
      return defaultInstance;
    }
    
    public TableList getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
    }
    
    // repeated string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList name_;
    public java.util.List<String>

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableInfoMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: TableInfoMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class TableInfoMessage {
  private TableInfoMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface TableInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // required string name = 1;
    boolean hasName();
    String getName();
    
    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableInfo.Region regions = 2;
    java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region> 
        getRegionsList();
    org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.Region getRegions(int index);
    int getRegionsCount();
    java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.RegionOrBuilder> 
        getRegionsOrBuilderList();
    org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.TableInfo.RegionOrBuilder getRegionsOrBuilder(
        int index);
  }
  public static final class TableInfo extends
      com.google.protobuf.GeneratedMessage
      implements TableInfoOrBuilder {
    // Use TableInfo.newBuilder() to construct.
    private TableInfo(Builder builder) {
      super(builder);
    }
    private TableInfo(boolean noInit) {}
    
    private static final TableInfo defaultInstance;
    public static TableInfo getDefaultInstance() {
      return defaultInstance;
    }
    
    public TableInfo getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableInfoMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableInfo_descriptor;
    }
    

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: TableSchemaMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class TableSchemaMessage {
  private TableSchemaMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface TableSchemaOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // optional string name = 1;
    boolean hasName();
    String getName();
    
    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.TableSchema.Attribute attrs = 2;
    java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute> 
        getAttrsList();
    org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.Attribute getAttrs(int index);
    int getAttrsCount();
    java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.AttributeOrBuilder> 
        getAttrsOrBuilderList();
    org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema.AttributeOrBuilder getAttrsOrBuilder(
        int index);
    
    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema columns = 3;
    java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema> 
        getColumnsList();
    org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema getColumns(int index);
    int getColumnsCount();
    java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchemaOrBuilder> 
        getColumnsOrBuilderList();
    org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchemaOrBuilder getColumnsOrBuilder(
        int index);
    
    // optional bool inMemory = 4;
    boolean hasInMemory();
    boolean getInMemory();
    
    // optional bool readOnly = 5;
    boolean hasReadOnly();
    boolean getReadOnly();
  }
  public static final class TableSchema extends
      com.google.protobuf.GeneratedMessage
      implements TableSchemaOrBuilder {
    // Use TableSchema.newBuilder() to construct.
    private TableSchema(Builder builder) {
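
A minimal construction sketch for the interface above, assuming the setter/adder methods protobuf 2.x generates alongside it (setName, addColumns, setInMemory, setReadOnly); it reuses the ColumnSchema type whose fields appear in the ColumnSchemaMessage dump further down, and all values are illustrative:

    import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage;
    import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage;

    public class TableSchemaExample {
      public static void main(String[] args) {
        // One column-family entry; every ColumnSchema field is optional.
        ColumnSchemaMessage.ColumnSchema family =
            ColumnSchemaMessage.ColumnSchema.newBuilder()
                .setName("cf")
                .setMaxVersions(3)
                .build();

        TableSchemaMessage.TableSchema schema =
            TableSchemaMessage.TableSchema.newBuilder()
                .setName("usertable")   // optional string name = 1
                .addColumns(family)     // repeated ColumnSchema columns = 3
                .setInMemory(false)     // optional bool inMemory = 4
                .setReadOnly(false)     // optional bool readOnly = 5
                .build();

        System.out.println(schema.getName() + " defines "
            + schema.getColumnsCount() + " column families");
      }
    }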

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/StorageClusterStatusMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: StorageClusterStatusMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class StorageClusterStatusMessage {
  private StorageClusterStatusMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface StorageClusterStatusOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatus.Node liveNodes = 1;
    java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node> 
        getLiveNodesList();
    org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.Node getLiveNodes(int index);
    int getLiveNodesCount();
    java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.NodeOrBuilder> 
        getLiveNodesOrBuilderList();
    org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus.NodeOrBuilder getLiveNodesOrBuilder(
        int index);
    
    // repeated string deadNodes = 2;
    java.util.List<String> getDeadNodesList();
    int getDeadNodesCount();
    String getDeadNodes(int index);
    
    // optional int32 regions = 3;
    boolean hasRegions();
    int getRegions();
    
    // optional int32 requests = 4;
    boolean hasRequests();
    int getRequests();
    
    // optional double averageLoad = 5;
    boolean hasAverageLoad();
    double getAverageLoad();
  }
  public static final class StorageClusterStatus extends
      com.google.protobuf.GeneratedMessage
      implements StorageClusterStatusOrBuilder {
    // Use StorageClusterStatus.newBuilder() to construct.
    private StorageClusterStatus(Builder builder) {
      super(builder);
    }
    private StorageClusterStatus(boolean noInit) {}
    
    private static final StorageClusterStatus defaultInstance;
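
A minimal sketch covering the scalar and repeated-string fields above; the nested Node type's fields are truncated out of this dump, so `liveNodes` is left empty. Assumes the standard protobuf 2.x builder methods (addDeadNodes, setRegions, setRequests, setAverageLoad); all values are illustrative:

    import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage;

    public class ClusterStatusExample {
      public static void main(String[] args) {
        StorageClusterStatusMessage.StorageClusterStatus status =
            StorageClusterStatusMessage.StorageClusterStatus.newBuilder()
                .addDeadNodes("rs1.example.com:60020") // repeated string deadNodes = 2
                .setRegions(42)                        // optional int32 regions = 3
                .setRequests(1000)                     // optional int32 requests = 4
                .setAverageLoad(14.0)                  // optional double averageLoad = 5
                .build();

        // hasXxx() distinguishes an explicitly set field from a proto2 default.
        if (status.hasAverageLoad()) {
          System.out.println("average load: " + status.getAverageLoad());
        }
        System.out.println("dead nodes: " + status.getDeadNodesCount());
      }
    }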

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellSetMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: CellSetMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class CellSetMessage {
  private CellSetMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface CellSetOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.CellSet.Row rows = 1;
    java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row> 
        getRowsList();
    org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.Row getRows(int index);
    int getRowsCount();
    java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.RowOrBuilder> 
        getRowsOrBuilderList();
    org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet.RowOrBuilder getRowsOrBuilder(
        int index);
  }
  public static final class CellSet extends
      com.google.protobuf.GeneratedMessage
      implements CellSetOrBuilder {
    // Use CellSet.newBuilder() to construct.
    private CellSet(Builder builder) {
      super(builder);
    }
    private CellSet(boolean noInit) {}
    
    private static final CellSet defaultInstance;
    public static CellSet getDefaultInstance() {
      return defaultInstance;
    }
    
    public CellSet getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_descriptor;
    }
    
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_CellSet_fieldAccessorTable;
    }
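
CellSet's only field is the repeated nested Row, whose own fields are truncated out of this dump, so a sketch can do no more than round-trip an empty message and iterate the row list. Assumes the standard protobuf 2.x parseFrom/toByteArray pair:

    import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage;

    public class CellSetExample {
      public static void main(String[] args) throws Exception {
        // An empty CellSet is still a valid message; rows is repeated.
        byte[] wire = CellSetMessage.CellSet.newBuilder().build().toByteArray();

        CellSetMessage.CellSet cellSet = CellSetMessage.CellSet.parseFrom(wire);
        System.out.println("rows: " + cellSet.getRowsCount());
        for (CellSetMessage.CellSet.Row row : cellSet.getRowsList()) {
          System.out.println(row); // Row's fields fall outside this excerpt
        }
      }
    }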

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ColumnSchemaMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ColumnSchemaMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class ColumnSchemaMessage {
  private ColumnSchemaMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface ColumnSchemaOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // optional string name = 1;
    boolean hasName();
    String getName();
    
    // repeated .org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchema.Attribute attrs = 2;
    java.util.List<org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute> 
        getAttrsList();
    org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.Attribute getAttrs(int index);
    int getAttrsCount();
    java.util.List<? extends org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.AttributeOrBuilder> 
        getAttrsOrBuilderList();
    org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema.AttributeOrBuilder getAttrsOrBuilder(
        int index);
    
    // optional int32 ttl = 3;
    boolean hasTtl();
    int getTtl();
    
    // optional int32 maxVersions = 4;
    boolean hasMaxVersions();
    int getMaxVersions();
    
    // optional string compression = 5;
    boolean hasCompression();
    String getCompression();
  }
  public static final class ColumnSchema extends
      com.google.protobuf.GeneratedMessage
      implements ColumnSchemaOrBuilder {
    // Use ColumnSchema.newBuilder() to construct.
    private ColumnSchema(Builder builder) {
      super(builder);
    }
    private ColumnSchema(boolean noInit) {}
    
    private static final ColumnSchema defaultInstance;
    public static ColumnSchema getDefaultInstance() {
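
All five fields above are optional, so any subset may be set. A minimal sketch, assuming the standard protobuf 2.x setters (setName, setTtl, setMaxVersions, setCompression); the values are illustrative:

    import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage;

    public class ColumnSchemaExample {
      public static void main(String[] args) {
        ColumnSchemaMessage.ColumnSchema cf =
            ColumnSchemaMessage.ColumnSchema.newBuilder()
                .setName("cf")        // optional string name = 1
                .setTtl(86400)        // optional int32 ttl = 3 (illustrative)
                .setMaxVersions(3)    // optional int32 maxVersions = 4
                .setCompression("GZ") // optional string compression = 5
                .build();

        // An unset optional field reports false from its hasXxx() accessor.
        System.out.println(cf.getName() + " ttl set: " + cf.hasTtl());
      }
    }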

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/VersionMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: VersionMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class VersionMessage {
  private VersionMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface VersionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // optional string restVersion = 1;
    boolean hasRestVersion();
    String getRestVersion();
    
    // optional string jvmVersion = 2;
    boolean hasJvmVersion();
    String getJvmVersion();
    
    // optional string osVersion = 3;
    boolean hasOsVersion();
    String getOsVersion();
    
    // optional string serverVersion = 4;
    boolean hasServerVersion();
    String getServerVersion();
    
    // optional string jerseyVersion = 5;
    boolean hasJerseyVersion();
    String getJerseyVersion();
  }
  public static final class Version extends
      com.google.protobuf.GeneratedMessage
      implements VersionOrBuilder {
    // Use Version.newBuilder() to construct.
    private Version(Builder builder) {
      super(builder);
    }
    private Version(boolean noInit) {}
    
    private static final Version defaultInstance;
    public static Version getDefaultInstance() {
      return defaultInstance;
    }
    
    public Version getDefaultInstanceForType() {
      return defaultInstance;
    }
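
Every Version field is an optional string, so hasXxx() is the only way to tell a set field from a proto2 default. A minimal round-trip sketch, assuming the standard protobuf 2.x setters for the fields above; the restVersion value is illustrative:

    import org.apache.hadoop.hbase.rest.protobuf.generated.VersionMessage;

    public class VersionExample {
      public static void main(String[] args) throws Exception {
        VersionMessage.Version version = VersionMessage.Version.newBuilder()
            .setRestVersion("0.0.2")                          // illustrative
            .setJvmVersion(System.getProperty("java.version"))
            .setOsVersion(System.getProperty("os.name"))
            .build();

        VersionMessage.Version parsed =
            VersionMessage.Version.parseFrom(version.toByteArray());
        System.out.println(parsed.hasServerVersion()); // false: never set
        System.out.println(parsed.getRestVersion());
      }
    }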

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: CellMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class CellMessage {
  private CellMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface CellOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // optional bytes row = 1;
    boolean hasRow();
    com.google.protobuf.ByteString getRow();
    
    // optional bytes column = 2;
    boolean hasColumn();
    com.google.protobuf.ByteString getColumn();
    
    // optional int64 timestamp = 3;
    boolean hasTimestamp();
    long getTimestamp();
    
    // optional bytes data = 4;
    boolean hasData();
    com.google.protobuf.ByteString getData();
  }
  public static final class Cell extends
      com.google.protobuf.GeneratedMessage
      implements CellOrBuilder {
    // Use Cell.newBuilder() to construct.
    private Cell(Builder builder) {
      super(builder);
    }
    private Cell(boolean noInit) {}
    
    private static final Cell defaultInstance;
    public static Cell getDefaultInstance() {
      return defaultInstance;
    }
    
    public Cell getDefaultInstanceForType() {
      return defaultInstance;
    }
    
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
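
A minimal sketch round-tripping the four optional fields above, assuming standard protobuf 2.x codegen (setRow and friends, parseFrom) and com.google.protobuf.ByteString for the bytes fields; all values are illustrative:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage;

    public class CellExample {
      public static void main(String[] args) throws Exception {
        CellMessage.Cell cell = CellMessage.Cell.newBuilder()
            .setRow(ByteString.copyFromUtf8("row1"))       // optional bytes row = 1
            .setColumn(ByteString.copyFromUtf8("cf:qual")) // optional bytes column = 2
            .setTimestamp(System.currentTimeMillis())      // optional int64 timestamp = 3
            .setData(ByteString.copyFromUtf8("value"))     // optional bytes data = 4
            .build();

        CellMessage.Cell parsed = CellMessage.Cell.parseFrom(cell.toByteArray());
        System.out.println(parsed.getData().toStringUtf8());
      }
    }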

 =======================================================================
 ==src/main/java/org/apache/hadoop/hbase/rest/protobuf/generated/ScannerMessage.java
 =======================================================================
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ScannerMessage.proto

package org.apache.hadoop.hbase.rest.protobuf.generated;

public final class ScannerMessage {
  private ScannerMessage() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface ScannerOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
    
    // optional bytes startRow = 1;
    boolean hasStartRow();
    com.google.protobuf.ByteString getStartRow();
    
    // optional bytes endRow = 2;
    boolean hasEndRow();
    com.google.protobuf.ByteString getEndRow();
    
    // repeated bytes columns = 3;
    java.util.List<com.google.protobuf.ByteString> getColumnsList();
    int getColumnsCount();
    com.google.protobuf.ByteString getColumns(int index);
    
    // optional int32 batch = 4;
    boolean hasBatch();
    int getBatch();
    
    // optional int64 startTime = 5;
    boolean hasStartTime();
    long getStartTime();
    
    // optional int64 endTime = 6;
    boolean hasEndTime();
    long getEndTime();
    
    // optional int32 maxVersions = 7;
    boolean hasMaxVersions();
    int getMaxVersions();
    
    // optional string filter = 8;
    boolean hasFilter();
    String getFilter();
  }
  public static final class Scanner extends
      com.google.protobuf.GeneratedMessage
      implements ScannerOrBuilder {
    // Use Scanner.newBuilder() to construct.
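
The dump ends at the Scanner class header, but the interface above is enough to sketch building a scan specification. Assumes the standard protobuf 2.x setters/adders for the listed fields; all values are illustrative:

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.rest.protobuf.generated.ScannerMessage;

    public class ScannerExample {
      public static void main(String[] args) {
        ScannerMessage.Scanner scanner = ScannerMessage.Scanner.newBuilder()
            .setStartRow(ByteString.copyFromUtf8("row000")) // optional bytes startRow = 1
            .setEndRow(ByteString.copyFromUtf8("row999"))   // optional bytes endRow = 2
            .addColumns(ByteString.copyFromUtf8("cf"))      // repeated bytes columns = 3
            .setBatch(100)                                  // optional int32 batch = 4
            .setMaxVersions(1)                              // optional int32 maxVersions = 7
            .build();

        System.out.println("scan columns: " + scanner.getColumnsCount());
      }
    }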