This is an automated email from the ASF dual-hosted git repository.

madhan pushed a commit to branch atlas-2.5
in repository https://gitbox.apache.org/repos/asf/atlas.git

commit b191b45f47893046207ee643099f723eeab56701
Author: pareshd <paresh.deva...@freestoneinfotech.com>
AuthorDate: Sat Feb 15 07:59:56 2025 +0530

    ATLAS-4972 : hbase-bridge, hbase-bridge-shim modules: update for code… 
(#289)
    
    (cherry picked from commit 7968b8f80105ab048f0cc3c04294b081799cc45f)
---
 addons/hbase-bridge-shim/pom.xml                   |   5 +
 .../atlas/hbase/hook/HBaseAtlasCoprocessor.java    | 173 ++++++++-------------
 addons/hbase-bridge/pom.xml                        |   2 +
 .../apache/atlas/hbase/bridge/HBaseAtlasHook.java  | 112 ++++++-------
 .../org/apache/atlas/hbase/bridge/HBaseBridge.java |  97 +++++++-----
 .../atlas/hbase/hook/HBaseAtlasCoprocessor.java    |  59 +++----
 .../atlas/hbase/model/HBaseOperationContext.java   |  30 ++--
 .../org/apache/atlas/hbase/HBaseAtlasHookIT.java   |  93 +++++------
 addons/hbase-testing-util/pom.xml                  |   2 +
 .../atlas/hbase/TestHBaseTestingUtilSpinup.java    |  56 +++----
 10 files changed, 294 insertions(+), 335 deletions(-)

diff --git a/addons/hbase-bridge-shim/pom.xml b/addons/hbase-bridge-shim/pom.xml
index c71261062..771fe7501 100644
--- a/addons/hbase-bridge-shim/pom.xml
+++ b/addons/hbase-bridge-shim/pom.xml
@@ -32,6 +32,11 @@
     <name>Apache Atlas Hbase Bridge Shim</name>
     <description>Apache Atlas Hbase Bridge Shim Module</description>
 
+    <properties>
+        <checkstyle.failOnViolation>true</checkstyle.failOnViolation>
+        <checkstyle.skip>false</checkstyle.skip>
+    </properties>
+
     <dependencies>
         <!-- Logging -->
         <dependency>
diff --git 
a/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
 
b/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
index 0b69104b1..fe0ed0679 100755
--- 
a/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
+++ 
b/addons/hbase-bridge-shim/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
@@ -18,14 +18,13 @@
  */
 package org.apache.atlas.hbase.hook;
 
-
 import org.apache.atlas.plugin.classloader.AtlasPluginClassLoader;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
@@ -38,61 +37,25 @@ import 
org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
 import java.io.IOException;
 import java.util.Optional;
 
-
 public class HBaseAtlasCoprocessor implements MasterCoprocessor, 
MasterObserver, RegionObserver, RegionServerObserver {
-    public static final Log LOG = 
LogFactory.getLog(HBaseAtlasCoprocessor.class);
+    private static final Log LOG = 
LogFactory.getLog(HBaseAtlasCoprocessor.class);
 
     private static final String ATLAS_PLUGIN_TYPE               = "hbase";
     private static final String ATLAS_HBASE_HOOK_IMPL_CLASSNAME = 
"org.apache.atlas.hbase.hook.HBaseAtlasCoprocessor";
 
-    private AtlasPluginClassLoader  atlasPluginClassLoader      = null;
-    private Object                  impl                        = null;
-    private MasterObserver          implMasterObserver          = null;
-    private RegionObserver          implRegionObserver          = null;
-    private RegionServerObserver    implRegionServerObserver    = null;
-    private MasterCoprocessor      implMasterCoprocessor           = null;
+    private AtlasPluginClassLoader  atlasPluginClassLoader;
+    private Object                  impl;
+    private MasterObserver          implMasterObserver;
+    private RegionObserver          implRegionObserver;
+    private RegionServerObserver    implRegionServerObserver;
+    private MasterCoprocessor       implMasterCoprocessor;
 
     public HBaseAtlasCoprocessor() {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.HBaseAtlasCoprocessor()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.HBaseAtlasCoprocessor()");
 
         this.init();
 
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.HBaseAtlasCoprocessor()");
-        }
-    }
-
-    private void init(){
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.init()");
-        }
-
-        try {
-            atlasPluginClassLoader = 
AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass());
-
-            @SuppressWarnings("unchecked")
-            Class<?> cls = Class.forName(ATLAS_HBASE_HOOK_IMPL_CLASSNAME, 
true, atlasPluginClassLoader);
-
-            activatePluginClassLoader();
-
-            impl                     = cls.newInstance();
-            implMasterObserver       = (MasterObserver)impl;
-            implRegionObserver       = (RegionObserver)impl;
-            implRegionServerObserver = (RegionServerObserver)impl;
-            implMasterCoprocessor       = (MasterCoprocessor)impl;
-
-        } catch (Exception e) {
-            // check what need to be done
-            LOG.error("Error Enabling RangerHbasePlugin", e);
-        } finally {
-            deactivatePluginClassLoader();
-        }
-
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.init()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.HBaseAtlasCoprocessor()");
     }
 
     @Override
@@ -102,176 +65,176 @@ public class HBaseAtlasCoprocessor implements 
MasterCoprocessor, MasterObserver,
 
     @Override
     public void start(CoprocessorEnvironment env) throws IOException {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.start()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.start()");
 
         try {
             activatePluginClassLoader();
+
             if (env instanceof MasterCoprocessorEnvironment) {
                 implMasterCoprocessor.start(env);
             }
         } finally {
             deactivatePluginClassLoader();
         }
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.start()");
-        }
+
+        LOG.debug("<== HBaseAtlasCoprocessor.start()");
     }
 
     @Override
     public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> 
ctx, TableDescriptor desc, RegionInfo[] regions) throws IOException {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.postCreateTable()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.postCreateTable()");
 
         try {
             activatePluginClassLoader();
+
             implMasterObserver.postCreateTable(ctx, desc, regions);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()");
     }
 
     @Override
     public void postModifyTable(ObserverContext<MasterCoprocessorEnvironment> 
ctx, TableName tableName, TableDescriptor htd) throws IOException {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()");
 
         try {
             activatePluginClassLoader();
+
             implMasterObserver.postModifyTable(ctx, tableName, htd);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
     }
 
     @Override
     public void postDeleteTable(ObserverContext<MasterCoprocessorEnvironment> 
ctx, TableName tableName) throws IOException {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.postDeleteTable()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.postDeleteTable()");
 
         try {
             activatePluginClassLoader();
+
             implMasterObserver.postDeleteTable(ctx, tableName);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()");
     }
 
     @Override
     public void 
postCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, 
NamespaceDescriptor ns) throws IOException {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.preCreateNamespace()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.preCreateNamespace()");
 
         try {
             activatePluginClassLoader();
+
             implMasterObserver.postCreateNamespace(ctx, ns);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.preCreateNamespace()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.preCreateNamespace()");
     }
 
     @Override
     public void 
postDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, String 
ns) throws IOException {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.preDeleteNamespace()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.preDeleteNamespace()");
 
         try {
             activatePluginClassLoader();
+
             implMasterObserver.postDeleteNamespace(ctx, ns);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.preDeleteNamespace()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.preDeleteNamespace()");
     }
+
     @Override
     public void 
postModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, 
NamespaceDescriptor ns) throws IOException {
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.preModifyNamespace()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.preModifyNamespace()");
 
         try {
             activatePluginClassLoader();
+
             implMasterObserver.preModifyNamespace(ctx, ns);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if(LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.preModifyNamespace()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.preModifyNamespace()");
     }
 
     @Override
     public void 
postCloneSnapshot(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) 
throws IOException {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.postCloneSnapshot()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.postCloneSnapshot()");
 
         try {
             activatePluginClassLoader();
-            
implMasterObserver.postCloneSnapshot(observerContext,snapshot,tableDescriptor);
+
+            implMasterObserver.postCloneSnapshot(observerContext, snapshot, 
tableDescriptor);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()");
     }
 
     @Override
     public void 
postRestoreSnapshot(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) 
throws IOException {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
 
         try {
             activatePluginClassLoader();
-            
implMasterObserver.postRestoreSnapshot(observerContext,snapshot,tableDescriptor);
+
+            implMasterObserver.postRestoreSnapshot(observerContext, snapshot, 
tableDescriptor);
         } finally {
             deactivatePluginClassLoader();
         }
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()");
+        LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()");
+    }
+
+    private void init() {
+        LOG.debug("==> HBaseAtlasCoprocessor.init()");
+
+        try {
+            atlasPluginClassLoader = 
AtlasPluginClassLoader.getInstance(ATLAS_PLUGIN_TYPE, this.getClass());
+
+            @SuppressWarnings("unchecked")
+            Class<?> cls = Class.forName(ATLAS_HBASE_HOOK_IMPL_CLASSNAME, 
true, atlasPluginClassLoader);
+
+            activatePluginClassLoader();
+
+            impl                     = cls.newInstance();
+            implMasterObserver       = (MasterObserver) impl;
+            implRegionObserver       = (RegionObserver) impl;
+            implRegionServerObserver = (RegionServerObserver) impl;
+            implMasterCoprocessor    = (MasterCoprocessor) impl;
+        } catch (Exception e) {
+            // check what need to be done
+            LOG.error("Error Enabling RangerHbasePlugin", e);
+        } finally {
+            deactivatePluginClassLoader();
         }
+
+        LOG.debug("<== HBaseAtlasCoprocessor.init()");
     }
 
     private void activatePluginClassLoader() {
-        if(atlasPluginClassLoader != null) {
+        if (atlasPluginClassLoader != null) {
             atlasPluginClassLoader.activate();
         }
     }
 
     private void deactivatePluginClassLoader() {
-        if(atlasPluginClassLoader != null) {
+        if (atlasPluginClassLoader != null) {
             atlasPluginClassLoader.deactivate();
         }
     }
-
 }
diff --git a/addons/hbase-bridge/pom.xml b/addons/hbase-bridge/pom.xml
index e8f7de66a..067559e43 100644
--- a/addons/hbase-bridge/pom.xml
+++ b/addons/hbase-bridge/pom.xml
@@ -34,6 +34,8 @@
 
     <properties>
         <apache.log4j.version>2.17.1</apache.log4j.version>
+        <checkstyle.failOnViolation>true</checkstyle.failOnViolation>
+        <checkstyle.skip>false</checkstyle.skip>
         <hadoop.version>3.0.3</hadoop.version>
         <javax.ws.rs-api.version>2.1.1</javax.ws.rs-api.version>
         <jetty.version>9.3.14.v20161028</jetty.version>
diff --git 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
index 8e6c57dba..18aceb8d2 100644
--- 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
+++ 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
@@ -19,8 +19,8 @@
 package org.apache.atlas.hbase.bridge;
 
 import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.hbase.model.HBaseOperationContext;
 import org.apache.atlas.hbase.model.HBaseDataTypes;
+import org.apache.atlas.hbase.model.HBaseOperationContext;
 import org.apache.atlas.hook.AtlasHook;
 import org.apache.atlas.model.instance.AtlasEntity;
 import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
@@ -30,8 +30,8 @@ import 
org.apache.atlas.model.notification.HookNotification.EntityDeleteRequestV
 import 
org.apache.atlas.model.notification.HookNotification.EntityUpdateRequestV2;
 import org.apache.atlas.type.AtlasTypeUtil;
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
@@ -55,7 +55,6 @@ import static 
org.apache.atlas.repository.Constants.HBASE_SOURCE;
 public class HBaseAtlasHook extends AtlasHook {
     private static final Logger LOG = 
LoggerFactory.getLogger(HBaseAtlasHook.class);
 
-    
     public static final String ATTR_DESCRIPTION           = "description";
     public static final String ATTR_ATLAS_ENDPOINT        = 
"atlas.rest.address";
     public static final String ATTR_PARAMETERS            = "parameters";
@@ -141,7 +140,8 @@ public class HBaseAtlasHook extends AtlasHook {
                     ret = me;
 
                     if (ret == null) {
-                        me = ret = new HBaseAtlasHook();
+                        ret = new HBaseAtlasHook();
+                        me  = ret;
                     }
                 }
             } catch (Exception e) {
@@ -304,7 +304,6 @@ public class HBaseAtlasHook extends AtlasHook {
         hbaseOperationContext.addMessage(new 
EntityDeleteRequestV2(hbaseOperationContext.getUser(), 
Collections.singletonList(columnFamilyId)));
     }
 
-
     /**
      * Construct the qualified name used to uniquely identify a ColumnFamily 
instance in Atlas.
      *
@@ -430,7 +429,7 @@ public class HBaseAtlasHook extends AtlasHook {
     }
 
     private List<AtlasEntity> buildColumnFamilies(HBaseOperationContext 
hbaseOperationContext, AtlasEntity nameSpace, AtlasEntity table) {
-        List<AtlasEntity>   columnFamilies     = new ArrayList<>();
+        List<AtlasEntity>        columnFamilies          = new ArrayList<>();
         ColumnFamilyDescriptor[] columnFamilyDescriptors = 
hbaseOperationContext.gethColumnDescriptors();
 
         if (columnFamilyDescriptors != null) {
@@ -458,24 +457,24 @@ public class HBaseAtlasHook extends AtlasHook {
         columnFamily.setAttribute(ATTR_OWNER, 
hbaseOperationContext.getOwner());
         columnFamily.setRelationshipAttribute(ATTR_TABLE, 
AtlasTypeUtil.getAtlasRelatedObjectId(table, 
RELATIONSHIP_HBASE_TABLE_COLUMN_FAMILIES));
 
-        if (columnFamilyDescriptor!= null) {
+        if (columnFamilyDescriptor != null) {
             columnFamily.setAttribute(ATTR_CF_BLOCK_CACHE_ENABLED, 
columnFamilyDescriptor.isBlockCacheEnabled());
-            columnFamily.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, 
(columnFamilyDescriptor.getBloomFilterType() != null ? 
columnFamilyDescriptor.getBloomFilterType().name():null));
+            columnFamily.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, 
(columnFamilyDescriptor.getBloomFilterType() != null ? 
columnFamilyDescriptor.getBloomFilterType().name() : null));
             columnFamily.setAttribute(ATTR_CF_CACHED_BLOOM_ON_WRITE, 
columnFamilyDescriptor.isCacheBloomsOnWrite());
             columnFamily.setAttribute(ATTR_CF_CACHED_DATA_ON_WRITE, 
columnFamilyDescriptor.isCacheDataOnWrite());
             columnFamily.setAttribute(ATTR_CF_CACHED_INDEXES_ON_WRITE, 
columnFamilyDescriptor.isCacheIndexesOnWrite());
-            columnFamily.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, 
(columnFamilyDescriptor.getCompactionCompressionType() != null ? 
columnFamilyDescriptor.getCompactionCompressionType().name():null));
-            columnFamily.setAttribute(ATTR_CF_COMPRESSION_TYPE, 
(columnFamilyDescriptor.getCompressionType() != null ? 
columnFamilyDescriptor.getCompressionType().name():null));
-            columnFamily.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, 
(columnFamilyDescriptor.getDataBlockEncoding() != null ? 
columnFamilyDescriptor.getDataBlockEncoding().name():null));
+            columnFamily.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, 
(columnFamilyDescriptor.getCompactionCompressionType() != null ? 
columnFamilyDescriptor.getCompactionCompressionType().name() : null));
+            columnFamily.setAttribute(ATTR_CF_COMPRESSION_TYPE, 
(columnFamilyDescriptor.getCompressionType() != null ? 
columnFamilyDescriptor.getCompressionType().name() : null));
+            columnFamily.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, 
(columnFamilyDescriptor.getDataBlockEncoding() != null ? 
columnFamilyDescriptor.getDataBlockEncoding().name() : null));
             columnFamily.setAttribute(ATTR_CF_ENCRYPTION_TYPE, 
columnFamilyDescriptor.getEncryptionType());
             columnFamily.setAttribute(ATTR_CF_EVICT_BLOCK_ONCLOSE, 
columnFamilyDescriptor.isEvictBlocksOnClose());
-            columnFamily.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, 
(columnFamilyDescriptor.getInMemoryCompaction() != null ? 
columnFamilyDescriptor.getInMemoryCompaction().name():null));
-            columnFamily.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, ( 
columnFamilyDescriptor.getKeepDeletedCells() != null ? 
columnFamilyDescriptor.getKeepDeletedCells().name():null));
+            columnFamily.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, 
(columnFamilyDescriptor.getInMemoryCompaction() != null ? 
columnFamilyDescriptor.getInMemoryCompaction().name() : null));
+            columnFamily.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, 
(columnFamilyDescriptor.getKeepDeletedCells() != null ? 
columnFamilyDescriptor.getKeepDeletedCells().name() : null));
             columnFamily.setAttribute(ATTR_CF_MAX_VERSIONS, 
columnFamilyDescriptor.getMaxVersions());
             columnFamily.setAttribute(ATTR_CF_MIN_VERSIONS, 
columnFamilyDescriptor.getMinVersions());
             columnFamily.setAttribute(ATTR_CF_NEW_VERSION_BEHAVIOR, 
columnFamilyDescriptor.isNewVersionBehavior());
             columnFamily.setAttribute(ATTR_CF_MOB_ENABLED, 
columnFamilyDescriptor.isMobEnabled());
-            columnFamily.setAttribute(ATTR_CF_MOB_COMPATCTPARTITION_POLICY, ( 
columnFamilyDescriptor.getMobCompactPartitionPolicy() != null ? 
columnFamilyDescriptor.getMobCompactPartitionPolicy().name():null));
+            columnFamily.setAttribute(ATTR_CF_MOB_COMPATCTPARTITION_POLICY, 
(columnFamilyDescriptor.getMobCompactPartitionPolicy() != null ? 
columnFamilyDescriptor.getMobCompactPartitionPolicy().name() : null));
             columnFamily.setAttribute(ATTR_CF_PREFETCH_BLOCK_ONOPEN, 
columnFamilyDescriptor.isPrefetchBlocksOnOpen());
             columnFamily.setAttribute(ATTR_CF_STORAGE_POLICY, 
columnFamilyDescriptor.getStoragePolicy());
             columnFamily.setAttribute(ATTR_CF_TTL, 
columnFamilyDescriptor.getTimeToLive());
@@ -520,14 +519,13 @@ public class HBaseAtlasHook extends AtlasHook {
     }
 
     public void sendHBaseNameSpaceOperation(final NamespaceDescriptor 
namespaceDescriptor, final String nameSpace, final OPERATION operation, 
ObserverContext<MasterCoprocessorEnvironment> ctx) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasHook.sendHBaseNameSpaceOperation()");
-        }
+        LOG.debug("==> HBaseAtlasHook.sendHBaseNameSpaceOperation()");
 
         try {
-            final UserGroupInformation ugi  = getUGI(ctx);
-            final User user                 = getActiveUser(ctx);
-            final String userName           = (user != null) ? 
user.getShortName() : null;
+            final UserGroupInformation ugi      = getUGI(ctx);
+            final User                 user     = getActiveUser(ctx);
+            final String               userName = (user != null) ? 
user.getShortName() : null;
+
             HBaseOperationContext hbaseOperationContext = 
handleHBaseNameSpaceOperation(namespaceDescriptor, nameSpace, operation, ugi, 
userName);
 
             sendNotification(hbaseOperationContext);
@@ -535,20 +533,17 @@ public class HBaseAtlasHook extends AtlasHook {
             LOG.error("HBaseAtlasHook.sendHBaseNameSpaceOperation(): failed to 
send notification", t);
         }
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasHook.sendHBaseNameSpaceOperation()");
-        }
+        LOG.debug("<== HBaseAtlasHook.sendHBaseNameSpaceOperation()");
     }
 
     public void sendHBaseTableOperation(TableDescriptor tableDescriptor, final 
TableName tableName, final OPERATION operation, 
ObserverContext<MasterCoprocessorEnvironment> ctx) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasHook.sendHBaseTableOperation()");
-        }
+        LOG.debug("==> HBaseAtlasHook.sendHBaseTableOperation()");
 
         try {
-            final UserGroupInformation ugi  = getUGI(ctx);
-            final User user                 = getActiveUser(ctx);
-            final String userName           = (user != null) ? 
user.getShortName() : null;
+            final UserGroupInformation ugi      = getUGI(ctx);
+            final User                 user     = getActiveUser(ctx);
+            final String               userName = (user != null) ? 
user.getShortName() : null;
+
             HBaseOperationContext hbaseOperationContext = 
handleHBaseTableOperation(tableDescriptor, tableName, operation, ugi, userName);
 
             sendNotification(hbaseOperationContext);
@@ -556,9 +551,7 @@ public class HBaseAtlasHook extends AtlasHook {
             LOG.error("<== HBaseAtlasHook.sendHBaseTableOperation(): failed to 
send notification", t);
         }
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasHook.sendHBaseTableOperation()");
-        }
+        LOG.debug("<== HBaseAtlasHook.sendHBaseTableOperation()");
     }
 
     private void sendNotification(HBaseOperationContext hbaseOperationContext) 
{
@@ -572,37 +565,34 @@ public class HBaseAtlasHook extends AtlasHook {
     }
 
     private HBaseOperationContext 
handleHBaseNameSpaceOperation(NamespaceDescriptor namespaceDescriptor, String 
nameSpace, OPERATION operation, UserGroupInformation ugi, String userName) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasHook.handleHBaseNameSpaceOperation()");
-        }
+        LOG.debug("==> HBaseAtlasHook.handleHBaseNameSpaceOperation()");
 
         HBaseOperationContext hbaseOperationContext = new 
HBaseOperationContext(namespaceDescriptor, nameSpace, operation, ugi, userName, 
userName);
+
         createAtlasInstances(hbaseOperationContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasHook.handleHBaseNameSpaceOperation(): 
{}",  hbaseOperationContext);
-        }
+        LOG.debug("<== HBaseAtlasHook.handleHBaseNameSpaceOperation(): {}",  
hbaseOperationContext);
 
         return hbaseOperationContext;
     }
 
     private HBaseOperationContext handleHBaseTableOperation(TableDescriptor 
tableDescriptor, TableName tableName, OPERATION operation, UserGroupInformation 
ugi, String userName) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasHook.handleHBaseTableOperation()");
-        }
+        LOG.debug("==> HBaseAtlasHook.handleHBaseTableOperation()");
 
-        Map<String, String>  hbaseConf          = null;
-        String               owner              = null;
-        String               tableNameSpace     = null;
-        TableName            hbaseTableName     = null;
-        ColumnFamilyDescriptor[]  columnFamilyDescriptors = null;
+        Map<String, String>      hbaseConf               = null;
+        String                   owner                   = null;
+        String                   tableNameSpace          = null;
+        TableName                hbaseTableName          = null;
+        ColumnFamilyDescriptor[] columnFamilyDescriptors = null;
 
         if (tableDescriptor != null) {
-            owner = tableDescriptor.getOwnerString();
-            hbaseConf = null;
+            owner          = tableDescriptor.getOwnerString();
+            hbaseConf      = null;
             hbaseTableName = tableDescriptor.getTableName();
+
             if (hbaseTableName != null) {
                 tableNameSpace = hbaseTableName.getNamespaceAsString();
+
                 if (tableNameSpace == null) {
                     tableNameSpace = 
hbaseTableName.getNameWithNamespaceInclAsString();
                 }
@@ -618,23 +608,21 @@ public class HBaseAtlasHook extends AtlasHook {
         }
 
         HBaseOperationContext hbaseOperationContext = new 
HBaseOperationContext(tableNameSpace, tableDescriptor, tableName, 
columnFamilyDescriptors, operation, ugi, userName, owner, hbaseConf);
+
         createAtlasInstances(hbaseOperationContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasHook.handleHBaseTableOperation(): {}",  
hbaseOperationContext);
-        }
+        LOG.debug("<== HBaseAtlasHook.handleHBaseTableOperation(): {}",  
hbaseOperationContext);
+
         return hbaseOperationContext;
     }
 
     private HBaseOperationContext 
handleHBaseColumnFamilyOperation(ColumnFamilyDescriptor columnFamilyDescriptor, 
TableName tableName, String columnFamily, OPERATION operation, 
UserGroupInformation ugi, String userName) {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasHook.handleHBaseColumnFamilyOperation()");
-        }
+        LOG.debug("==> HBaseAtlasHook.handleHBaseColumnFamilyOperation()");
 
-        String               owner     = userName;
-        Map<String, String>  hbaseConf = new HashMap<>();
+        String               owner          = userName;
+        Map<String, String>  hbaseConf      = new HashMap<>();
+        String               tableNameSpace = tableName.getNamespaceAsString();
 
-        String tableNameSpace = tableName.getNamespaceAsString();
         if (tableNameSpace == null) {
             tableNameSpace = tableName.getNameWithNamespaceInclAsString();
         }
@@ -644,17 +632,18 @@ public class HBaseAtlasHook extends AtlasHook {
         }
 
         HBaseOperationContext hbaseOperationContext = new 
HBaseOperationContext(tableNameSpace, tableName, columnFamilyDescriptor, 
columnFamily, operation, ugi, userName, owner, hbaseConf);
+
         createAtlasInstances(hbaseOperationContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasHook.handleHBaseColumnFamilyOperation(): 
{}",  hbaseOperationContext);
-        }
+        LOG.debug("<== HBaseAtlasHook.handleHBaseColumnFamilyOperation(): {}", 
 hbaseOperationContext);
+
         return hbaseOperationContext;
     }
 
     private UserGroupInformation getUGI(ObserverContext<?> ctx) {
         UserGroupInformation ugi  = null;
         User                 user = null;
+
         try {
             user = getActiveUser(ctx);
             ugi  = UserGroupInformation.getLoginUser();
@@ -669,10 +658,11 @@ public class HBaseAtlasHook extends AtlasHook {
         }
 
         LOG.info("HBaseAtlasHook: UGI: {}",  ugi);
+
         return ugi;
     }
 
     private User getActiveUser(ObserverContext<?> ctx) throws IOException {
-        return (User)ctx.getCaller().orElse(User.getCurrent());
+        return (User) ctx.getCaller().orElse(User.getCurrent());
     }
 }
diff --git 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
index f5a1d2ad5..3c58f509a 100644
--- 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
+++ 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseBridge.java
@@ -121,14 +121,14 @@ public class HBaseBridge {
     private final AtlasClientV2 atlasClientV2;
     private final Admin         hbaseAdmin;
 
-
     public static void main(String[] args) {
-        int exitCode = EXIT_CODE_FAILED;
-        AtlasClientV2 atlasClientV2  =null;
+        int           exitCode       = EXIT_CODE_FAILED;
+        AtlasClientV2 atlasClientV2  = null;
 
         try {
             Options options = new Options();
-            options.addOption("n","namespace", true, "namespace");
+
+            options.addOption("n", "namespace", true, "namespace");
             options.addOption("t", "table", true, "tablename");
             options.addOption("f", "filename", true, "filename");
 
@@ -141,10 +141,9 @@ public class HBaseBridge {
             String[]          urls              = 
atlasConf.getStringArray(ATLAS_ENDPOINT);
 
             if (urls == null || urls.length == 0) {
-                urls = new String[] { DEFAULT_ATLAS_URL };
+                urls = new String[] {DEFAULT_ATLAS_URL};
             }
 
-
             if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) {
                 String[] basicAuthUsernamePassword = 
AuthenticationUtil.getBasicAuthenticationInput();
 
@@ -164,8 +163,8 @@ public class HBaseBridge {
                     BufferedReader br   = new BufferedReader(new 
FileReader(f));
                     String         line = null;
 
-                    while((line = br.readLine()) != null) {
-                        String val[] = line.split(":");
+                    while ((line = br.readLine()) != null) {
+                        String[] val = line.split(":");
 
                         if (ArrayUtils.isNotEmpty(val)) {
                             namespaceToImport = val[0];
@@ -189,15 +188,15 @@ public class HBaseBridge {
 
                 exitCode = EXIT_CODE_SUCCESS;
             }
-        } catch(ParseException e) {
+        } catch (ParseException e) {
             LOG.error("Failed to parse arguments. Error: ", e.getMessage());
             printUsage();
-        } catch(Exception e) {
+        } catch (Exception e) {
             System.out.println("ImportHBaseEntities failed. Please check the 
log file for the detailed error message");
 
             LOG.error("ImportHBaseEntities failed", e);
-        }finally {
-            if(atlasClientV2!=null) {
+        } finally {
+            if (atlasClientV2 != null) {
                 atlasClientV2.close();
             }
         }
@@ -236,13 +235,16 @@ public class HBaseBridge {
         if (StringUtils.isEmpty(namespaceToImport) && 
StringUtils.isEmpty(tableToImport)) {
             // when both NameSpace and Table options are not present
             importNameSpaceAndTable();
+
             ret = true;
         } else if (StringUtils.isNotEmpty(namespaceToImport)) {
             // When Namespace option is present or both namespace and table 
options are present
             importNameSpaceWithTable(namespaceToImport, tableToImport);
+
             ret = true;
         } else  if (StringUtils.isNotEmpty(tableToImport)) {
             importTable(tableToImport);
+
             ret = true;
         }
 
@@ -330,7 +332,7 @@ public class HBaseBridge {
                 hTableDescriptors = 
getTableDescriptors(matchingNameSpaceDescriptors);
             }
         } else {
-            tableToImport = namespaceToImport +":" + tableToImport;
+            tableToImport = namespaceToImport + ":" + tableToImport;
 
             TableDescriptor[] htds = 
hbaseAdmin.listTables(Pattern.compile(tableToImport));
 
@@ -349,13 +351,13 @@ public class HBaseBridge {
     private List<NamespaceDescriptor> getMatchingNameSpaces(String nameSpace) 
throws Exception {
         List<NamespaceDescriptor> ret                  = new ArrayList<>();
         NamespaceDescriptor[]     namespaceDescriptors = 
hbaseAdmin.listNamespaceDescriptors();
-        Pattern                                pattern = 
Pattern.compile(nameSpace);
+        Pattern                   pattern              = 
Pattern.compile(nameSpace);
 
-        for (NamespaceDescriptor namespaceDescriptor:namespaceDescriptors){
+        for (NamespaceDescriptor namespaceDescriptor : namespaceDescriptors) {
             String  nmSpace = namespaceDescriptor.getName();
             Matcher matcher = pattern.matcher(nmSpace);
 
-            if (matcher.find()){
+            if (matcher.find()) {
                 ret.add(namespaceDescriptor);
             }
         }
@@ -366,7 +368,7 @@ public class HBaseBridge {
     private List<TableDescriptor> 
getTableDescriptors(List<NamespaceDescriptor> namespaceDescriptors) throws 
Exception {
         List<TableDescriptor> ret = new ArrayList<>();
 
-        for(NamespaceDescriptor namespaceDescriptor:namespaceDescriptors) {
+        for (NamespaceDescriptor namespaceDescriptor : namespaceDescriptors) {
             TableDescriptor[] tableDescriptors = 
hbaseAdmin.listTableDescriptorsByNamespace(namespaceDescriptor.getName());
 
             ret.addAll(Arrays.asList(tableDescriptors));
@@ -381,13 +383,13 @@ public class HBaseBridge {
         AtlasEntityWithExtInfo nsEntity        = 
findNameSpaceEntityInAtlas(nsQualifiedName);
 
         if (nsEntity == null) {
-            LOG.info("Importing NameSpace: " + nsQualifiedName);
+            LOG.info("Importing NameSpace: {}", nsQualifiedName);
 
             AtlasEntity entity = getNameSpaceEntity(nsName, null);
 
             nsEntity = createEntityInAtlas(new AtlasEntityWithExtInfo(entity));
         } else {
-            LOG.info("NameSpace already present in Atlas. Updating it..: " + 
nsQualifiedName);
+            LOG.info("NameSpace already present in Atlas. Updating it..: {}", 
nsQualifiedName);
 
             AtlasEntity entity = getNameSpaceEntity(nsName, 
nsEntity.getEntity());
 
@@ -404,13 +406,13 @@ public class HBaseBridge {
         AtlasEntityWithExtInfo ret              = 
findTableEntityInAtlas(tblQualifiedName);
 
         if (ret == null) {
-            LOG.info("Importing Table: " + tblQualifiedName);
+            LOG.info("Importing Table: {}", tblQualifiedName);
 
             AtlasEntity entity = getTableEntity(nameSpace, tableName, owner, 
nameSapceEntity, htd, null);
 
             ret = createEntityInAtlas(new AtlasEntityWithExtInfo(entity));
         } else {
-            LOG.info("Table already present in Atlas. Updating it..: " + 
tblQualifiedName);
+            LOG.info("Table already present in Atlas. Updating it..: {}", 
tblQualifiedName);
 
             AtlasEntity entity = getTableEntity(nameSpace, tableName, owner, 
nameSapceEntity, htd, ret.getEntity());
 
@@ -431,14 +433,15 @@ public class HBaseBridge {
                     
cfIDs.add(AtlasTypeUtil.getAtlasObjectId(cfEntity.getEntity()));
                 }
             }
+
             tableEntity.setRelationshipAttribute(COLUMN_FAMILIES, 
AtlasTypeUtil.getAtlasRelatedObjectIdList(cfIDs, 
HBaseAtlasHook.RELATIONSHIP_HBASE_TABLE_COLUMN_FAMILIES));
         }
 
         return ret;
     }
 
-    protected List<AtlasEntityWithExtInfo> createOrUpdateColumnFamilies(String 
nameSpace, String tableName, String owner, ColumnFamilyDescriptor[] hcdts , 
AtlasEntity tableEntity) throws Exception {
-        List<AtlasEntityWithExtInfo > ret = new ArrayList<>();
+    protected List<AtlasEntityWithExtInfo> createOrUpdateColumnFamilies(String 
nameSpace, String tableName, String owner, ColumnFamilyDescriptor[] hcdts, 
AtlasEntity tableEntity) throws Exception {
+        List<AtlasEntityWithExtInfo> ret = new ArrayList<>();
 
         if (hcdts != null) {
             AtlasObjectId tableId = 
AtlasTypeUtil.getAtlasObjectId(tableEntity);
@@ -449,13 +452,13 @@ public class HBaseBridge {
                 AtlasEntityWithExtInfo cfEntity        = 
findColumnFamiltyEntityInAtlas(cfQualifiedName);
 
                 if (cfEntity == null) {
-                    LOG.info("Importing Column-family: " + cfQualifiedName);
+                    LOG.info("Importing Column-family: {}", cfQualifiedName);
 
                     AtlasEntity entity = getColumnFamilyEntity(nameSpace, 
tableName, owner, columnFamilyDescriptor, tableId, null);
 
                     cfEntity = createEntityInAtlas(new 
AtlasEntityWithExtInfo(entity));
                 } else {
-                    LOG.info("ColumnFamily already present in Atlas. Updating 
it..: " + cfQualifiedName);
+                    LOG.info("ColumnFamily already present in Atlas. Updating 
it..: {}", cfQualifiedName);
 
                     AtlasEntity entity = getColumnFamilyEntity(nameSpace, 
tableName, owner, columnFamilyDescriptor, tableId, cfEntity.getEntity());
 
@@ -476,6 +479,7 @@ public class HBaseBridge {
 
         try {
             ret = findEntityInAtlas(HBaseDataTypes.HBASE_NAMESPACE.getName(), 
nsQualifiedName);
+
             clearRelationshipAttributes(ret);
         } catch (Exception e) {
             ret = null; // entity doesn't exist in Atlas
@@ -489,6 +493,7 @@ public class HBaseBridge {
 
         try {
             ret = findEntityInAtlas(HBaseDataTypes.HBASE_TABLE.getName(), 
tableQualifiedName);
+
             clearRelationshipAttributes(ret);
         } catch (Exception e) {
             ret = null; // entity doesn't exist in Atlas
@@ -502,6 +507,7 @@ public class HBaseBridge {
 
         try {
             ret = 
findEntityInAtlas(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName(), 
columnFamilyQualifiedName);
+
             clearRelationshipAttributes(ret);
         } catch (Exception e) {
             ret = null; // entity doesn't exist in Atlas
@@ -517,7 +523,7 @@ public class HBaseBridge {
     }
 
     private AtlasEntity getNameSpaceEntity(String nameSpace, AtlasEntity 
nsEtity) {
-        AtlasEntity ret  = null ;
+        AtlasEntity ret;
 
         if (nsEtity == null) {
             ret = new AtlasEntity(HBaseDataTypes.HBASE_NAMESPACE.getName());
@@ -536,7 +542,7 @@ public class HBaseBridge {
     }
 
     private AtlasEntity getTableEntity(String nameSpace, String tableName, 
String owner, AtlasEntity nameSpaceEntity, TableDescriptor htd, AtlasEntity 
atlasEntity) {
-        AtlasEntity ret = null;
+        AtlasEntity ret;
 
         if (atlasEntity == null) {
             ret = new AtlasEntity(HBaseDataTypes.HBASE_TABLE.getName());
@@ -563,8 +569,8 @@ public class HBaseBridge {
         return ret;
     }
 
-    private AtlasEntity getColumnFamilyEntity(String nameSpace, String 
tableName, String owner, ColumnFamilyDescriptor hcdt, AtlasObjectId tableId, 
AtlasEntity atlasEntity){
-        AtlasEntity ret = null;
+    private AtlasEntity getColumnFamilyEntity(String nameSpace, String 
tableName, String owner, ColumnFamilyDescriptor hcdt, AtlasObjectId tableId, 
AtlasEntity atlasEntity) {
+        AtlasEntity ret;
 
         if (atlasEntity == null) {
             ret = new 
AtlasEntity(HBaseDataTypes.HBASE_COLUMN_FAMILY.getName());
@@ -582,24 +588,24 @@ public class HBaseBridge {
         ret.setAttribute(DESCRIPTION_ATTR, cfName);
         ret.setAttribute(OWNER, owner);
         ret.setAttribute(ATTR_CF_BLOCK_CACHE_ENABLED, 
hcdt.isBlockCacheEnabled());
-        ret.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, (hcdt.getBloomFilterType() 
!= null ? hcdt.getBloomFilterType().name():null));
+        ret.setAttribute(ATTR_CF_BLOOMFILTER_TYPE, (hcdt.getBloomFilterType() 
!= null ? hcdt.getBloomFilterType().name() : null));
         ret.setAttribute(ATTR_CF_CACHED_BLOOM_ON_WRITE, 
hcdt.isCacheBloomsOnWrite());
         ret.setAttribute(ATTR_CF_CACHED_DATA_ON_WRITE, 
hcdt.isCacheDataOnWrite());
         ret.setAttribute(ATTR_CF_CACHED_INDEXES_ON_WRITE, 
hcdt.isCacheIndexesOnWrite());
-        ret.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, 
(hcdt.getCompactionCompressionType() != null ? 
hcdt.getCompactionCompressionType().name():null));
-        ret.setAttribute(ATTR_CF_COMPRESSION_TYPE, (hcdt.getCompressionType() 
!= null ? hcdt.getCompressionType().name():null));
-        ret.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, 
(hcdt.getDataBlockEncoding() != null ? 
hcdt.getDataBlockEncoding().name():null));
+        ret.setAttribute(ATTR_CF_COMPACTION_COMPRESSION_TYPE, 
(hcdt.getCompactionCompressionType() != null ? 
hcdt.getCompactionCompressionType().name() : null));
+        ret.setAttribute(ATTR_CF_COMPRESSION_TYPE, (hcdt.getCompressionType() 
!= null ? hcdt.getCompressionType().name() : null));
+        ret.setAttribute(ATTR_CF_DATA_BLOCK_ENCODING, 
(hcdt.getDataBlockEncoding() != null ? hcdt.getDataBlockEncoding().name() : 
null));
         ret.setAttribute(ATTR_CF_ENCRYPTION_TYPE, hcdt.getEncryptionType());
         ret.setAttribute(ATTR_CF_EVICT_BLOCK_ONCLOSE, 
hcdt.isEvictBlocksOnClose());
-        ret.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, ( 
hcdt.getKeepDeletedCells() != null ? hcdt.getKeepDeletedCells().name():null));
+        ret.setAttribute(ATTR_CF_KEEP_DELETE_CELLS, 
(hcdt.getKeepDeletedCells() != null ? hcdt.getKeepDeletedCells().name() : 
null));
         ret.setAttribute(ATTR_CF_MAX_VERSIONS, hcdt.getMaxVersions());
         ret.setAttribute(ATTR_CF_MIN_VERSIONS, hcdt.getMinVersions());
         ret.setAttribute(ATTR_CF_PREFETCH_BLOCK_ONOPEN, 
hcdt.isPrefetchBlocksOnOpen());
         ret.setAttribute(ATTR_CF_TTL, hcdt.getTimeToLive());
-        ret.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, 
(hcdt.getInMemoryCompaction() != null ? 
hcdt.getInMemoryCompaction().name():null));
-        ret.setAttribute(ATTR_CF_MOB_COMPATCTPARTITION_POLICY, ( 
hcdt.getMobCompactPartitionPolicy() != null ? 
hcdt.getMobCompactPartitionPolicy().name():null));
-        ret.setAttribute(ATTR_CF_MOB_ENABLED,hcdt.isMobEnabled());
-        
ret.setAttribute(ATTR_CF_NEW_VERSION_BEHAVIOR,hcdt.isNewVersionBehavior());
+        ret.setAttribute(ATTR_CF_INMEMORY_COMPACTION_POLICY, 
(hcdt.getInMemoryCompaction() != null ? hcdt.getInMemoryCompaction().name() : 
null));
+        ret.setAttribute(ATTR_CF_MOB_COMPATCTPARTITION_POLICY, 
(hcdt.getMobCompactPartitionPolicy() != null ? 
hcdt.getMobCompactPartitionPolicy().name() : null));
+        ret.setAttribute(ATTR_CF_MOB_ENABLED, hcdt.isMobEnabled());
+        ret.setAttribute(ATTR_CF_NEW_VERSION_BEHAVIOR, 
hcdt.isNewVersionBehavior());
 
         return ret;
     }
@@ -616,6 +622,7 @@ public class HBaseBridge {
 
             LOG.info("Created {} entity: name={}, guid={}", 
ret.getEntity().getTypeName(), 
ret.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), 
ret.getEntity().getGuid());
         }
+
         return ret;
     }
 
@@ -633,11 +640,13 @@ public class HBaseBridge {
 
                 LOG.info("Updated {} entity: name={}, guid={} ", 
ret.getEntity().getTypeName(), 
ret.getEntity().getAttribute(ATTRIBUTE_QUALIFIED_NAME), 
ret.getEntity().getGuid());
             } else {
-                LOG.info("Entity: name={} ", entity.toString() + " not updated 
as it is unchanged from what is in Atlas" );
+                LOG.info("Entity: name={} not updated as it is unchanged from 
what is in Atlas", entity);
+
                 ret = entity;
             }
         } else {
-            LOG.info("Entity: name={} ", entity.toString() + " not updated as 
it is unchanged from what is in Atlas" );
+            LOG.info("Entity: name={} not updated as it is unchanged from what 
is in Atlas", entity);
+
             ret = entity;
         }
 
@@ -654,6 +663,7 @@ public class HBaseBridge {
      */
     private static String getColumnFamilyQualifiedName(String 
metadataNamespace, String nameSpace, String tableName, String columnFamily) {
         tableName = stripNameSpace(tableName);
+
         return String.format(HBASE_COLUMN_FAMILY_QUALIFIED_NAME_FORMAT, 
nameSpace, tableName, columnFamily, metadataNamespace);
     }
 
@@ -666,6 +676,7 @@ public class HBaseBridge {
      */
     private static String getTableQualifiedName(String metadataNamespace, 
String nameSpace, String tableName) {
         tableName = stripNameSpace(tableName);
+
         return String.format(HBASE_TABLE_QUALIFIED_NAME_FORMAT, nameSpace, 
tableName, metadataNamespace);
     }
 
@@ -679,15 +690,15 @@ public class HBaseBridge {
         return String.format(HBASE_NAMESPACE_QUALIFIED_NAME, nameSpace, 
metadataNamespace);
     }
 
-    private static String stripNameSpace(String tableName){
-        tableName = tableName.substring(tableName.indexOf(":")+1);
+    private static String stripNameSpace(String tableName) {
+        tableName = tableName.substring(tableName.indexOf(":") + 1);
 
         return tableName;
     }
 
     private static void printUsage() {
         System.out.println("Usage 1: import-hbase.sh [-n <namespace regex> OR 
--namespace <namespace regex >] [-t <table regex > OR --table <table regex>]");
-        System.out.println("Usage 2: import-hbase.sh [-f <filename>]" );
+        System.out.println("Usage 2: import-hbase.sh [-f <filename>]");
         System.out.println("   Format:");
         System.out.println("        namespace1:tbl1");
         System.out.println("        namespace1:tbl2");
diff --git 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
index 313132de6..e8037d2de 100644
--- 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
+++ 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/hook/HBaseAtlasCoprocessor.java
@@ -18,15 +18,13 @@
 
 package org.apache.atlas.hbase.hook;
 
-
 import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.SnapshotDescription;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
-import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -48,87 +46,72 @@ public class HBaseAtlasCoprocessor implements 
MasterCoprocessor, MasterObserver,
 
     @Override
     public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, TableDescriptor tableDescriptor, RegionInfo[] hRegionInfos) 
throws IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postCreateTable()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postCreateTable()");
 
         hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, null, 
HBaseAtlasHook.OPERATION.CREATE_TABLE, observerContext);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()");
-        }
+
+        LOG.debug("<== HBaseAtlasCoprocessor.postCreateTable()");
     }
 
     @Override
     public void postDeleteTable(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, TableName tableName) throws IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postDeleteTable()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postDeleteTable()");
+
         hbaseAtlasHook.sendHBaseTableOperation(null, tableName, 
HBaseAtlasHook.OPERATION.DELETE_TABLE, observerContext);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postDeleteTable()");
     }
 
     @Override
     public void postModifyTable(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, TableName tableName, TableDescriptor tableDescriptor) throws 
IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postModifyTable()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postModifyTable()");
+
         hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, tableName, 
HBaseAtlasHook.OPERATION.ALTER_TABLE, observerContext);
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
-        }
+
+        LOG.debug("<== HBaseAtlasCoprocessor.postModifyTable()");
     }
 
     @Override
     public void 
postCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postCreateNamespace()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postCreateNamespace()");
 
         hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, 
HBaseAtlasHook.OPERATION.CREATE_NAMESPACE, observerContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postCreateNamespace()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postCreateNamespace()");
     }
 
     @Override
     public void 
postDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, String s) throws IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
 
         hbaseAtlasHook.sendHBaseNameSpaceOperation(null, s, 
HBaseAtlasHook.OPERATION.DELETE_NAMESPACE, observerContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
-        }
+        LOG.debug("==> HBaseAtlasCoprocessor.postDeleteNamespace()");
     }
 
     @Override
     public void 
postModifyNamespace(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, NamespaceDescriptor namespaceDescriptor) throws IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postModifyNamespace()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postModifyNamespace()");
 
         hbaseAtlasHook.sendHBaseNameSpaceOperation(namespaceDescriptor, null, 
HBaseAtlasHook.OPERATION.ALTER_NAMESPACE, observerContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postModifyNamespace()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postModifyNamespace()");
     }
 
     @Override
     public void 
postCloneSnapshot(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) 
throws IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postCloneSnapshot()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postCloneSnapshot()");
 
         hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, null, 
HBaseAtlasHook.OPERATION.CREATE_TABLE, observerContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postCloneSnapshot()");
     }
 
     @Override
     public void 
postRestoreSnapshot(ObserverContext<MasterCoprocessorEnvironment> 
observerContext, SnapshotDescription snapshot, TableDescriptor tableDescriptor) 
throws IOException {
-        LOG.info("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
+        LOG.debug("==> HBaseAtlasCoprocessor.postRestoreSnapshot()");
 
         hbaseAtlasHook.sendHBaseTableOperation(tableDescriptor, 
snapshot.getTableName(), HBaseAtlasHook.OPERATION.ALTER_TABLE, observerContext);
 
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()");
-        }
+        LOG.debug("<== HBaseAtlasCoprocessor.postRestoreSnapshot()");
     }
-
 }
-
-
diff --git 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
index 1ef7c07de..63e40f7c4 100644
--- 
a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
+++ 
b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
@@ -21,8 +21,6 @@ package org.apache.atlas.hbase.model;
 import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
 import org.apache.atlas.model.notification.HookNotification;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptor;
@@ -30,7 +28,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 
 import java.util.ArrayList;
 import java.util.List;
-
 import java.util.Map;
 
 public class HBaseOperationContext {
@@ -48,8 +45,7 @@ public class HBaseOperationContext {
     private final ColumnFamilyDescriptor   columnFamilyDescriptor;
 
     public HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, 
String nameSpace, TableDescriptor tableDescriptor, TableName tableName, 
ColumnFamilyDescriptor[] columnFamilyDescriptors,
-                                 ColumnFamilyDescriptor 
columnFamilyDescriptor, String columnFamily, HBaseAtlasHook.OPERATION 
operation, UserGroupInformation ugi , String user, String owner,
-                                 Map<String, String> hbaseConf) {
+                                 ColumnFamilyDescriptor 
columnFamilyDescriptor, String columnFamily, HBaseAtlasHook.OPERATION 
operation, UserGroupInformation ugi, String user, String owner, Map<String, 
String> hbaseConf) {
         this.namespaceDescriptor     = namespaceDescriptor;
         this.nameSpace               = nameSpace;
         this.tableDescriptor         = tableDescriptor;
@@ -64,15 +60,15 @@ public class HBaseOperationContext {
         this.hbaseConf               = hbaseConf;
     }
 
-    public  HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, 
String nameSpace, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi 
, String user, String owner) {
+    public  HBaseOperationContext(NamespaceDescriptor namespaceDescriptor, 
String nameSpace, HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi, 
String user, String owner) {
         this(namespaceDescriptor, nameSpace, null, null, null, null, null, 
operation, ugi, user, owner, null);
     }
 
-    public  HBaseOperationContext(String nameSpace, TableDescriptor 
tableDescriptor, TableName tableName,  ColumnFamilyDescriptor[] 
columnFamilyDescriptors, HBaseAtlasHook.OPERATION operation, 
UserGroupInformation ugi, String user, String owner, Map<String,String> 
hbaseConf) {
+    public  HBaseOperationContext(String nameSpace, TableDescriptor 
tableDescriptor, TableName tableName,  ColumnFamilyDescriptor[] 
columnFamilyDescriptors, HBaseAtlasHook.OPERATION operation, 
UserGroupInformation ugi, String user, String owner, Map<String, String> 
hbaseConf) {
         this(null, nameSpace, tableDescriptor, tableName, 
columnFamilyDescriptors, null, null, operation, ugi, user, owner, hbaseConf);
     }
 
-    public  HBaseOperationContext(String nameSpace, TableName tableName, 
ColumnFamilyDescriptor columnFamilyDescriptor, String columnFamily, 
HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi, String user, 
String owner, Map<String,String> hbaseConf) {
+    public  HBaseOperationContext(String nameSpace, TableName tableName, 
ColumnFamilyDescriptor columnFamilyDescriptor, String columnFamily, 
HBaseAtlasHook.OPERATION operation, UserGroupInformation ugi, String user, 
String owner, Map<String, String> hbaseConf) {
         this(null, nameSpace, null, tableName, null, columnFamilyDescriptor, 
columnFamily, operation, ugi, user, owner, hbaseConf);
     }
 
@@ -137,7 +133,9 @@ public class HBaseOperationContext {
     @Override
     public String toString() {
         StringBuilder sb = new StringBuilder();
+
         toString(sb);
+
         return sb.toString();
     }
 
@@ -145,30 +143,34 @@ public class HBaseOperationContext {
         sb.append("HBaseOperationContext={");
         sb.append("Operation={").append(operation).append("} ");
         sb.append("User ={").append(user).append("} ");
-        if (nameSpace != null ) {
+
+        if (nameSpace != null) {
             sb.append("NameSpace={").append(nameSpace).append("}");
         } else {
             if (namespaceDescriptor != null) {
                 
sb.append("NameSpace={").append(namespaceDescriptor.toString()).append("}");
             }
         }
-        if (tableName != null ) {
+
+        if (tableName != null) {
             sb.append("Table={").append(tableName).append("}");
         } else {
-            if ( columnFamilyDescriptor != null) {
+            if (columnFamilyDescriptor != null) {
                 
sb.append("Table={").append(tableDescriptor.toString()).append("}");
             }
         }
-        if (columnFamily != null ) {
+
+        if (columnFamily != null) {
             sb.append("Columm Family={").append(columnFamily).append("}");
         } else {
-            if ( columnFamilyDescriptor != null) {
+            if (columnFamilyDescriptor != null) {
                 sb.append("Columm 
Family={").append(columnFamilyDescriptor.toString()).append("}");
             }
         }
+
         sb.append("Message ={").append(getMessages()).append("} ");
         sb.append(" }");
+
         return sb;
     }
-
 }
diff --git 
a/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
 
b/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
index e34678871..00d140e0e 100644
--- 
a/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
+++ 
b/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
@@ -28,14 +28,17 @@ import 
org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
 import org.apache.atlas.utils.AuthenticationUtil;
 import org.apache.atlas.utils.ParamChecker;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.testng.Assert;
 import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
@@ -46,21 +49,21 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
+import static org.testng.Assert.assertEquals;
 import static org.testng.Assert.assertNotNull;
 import static org.testng.Assert.fail;
 import static org.testng.AssertJUnit.assertFalse;
 
-
 public class HBaseAtlasHookIT {
-    private   static final Logger LOG          = 
LoggerFactory.getLogger(HBaseAtlasHookIT.class);
+    private static final Logger LOG = 
LoggerFactory.getLogger(HBaseAtlasHookIT.class);
+
     protected static final String ATLAS_URL    = "http://localhost:31000/";
     protected static final String CLUSTER_NAME = "primary";
 
-    private HBaseTestingUtility utility;
+    private HBaseTestingUtility hBaseTestingUtility;
     private int                 port;
     private AtlasClientV2       atlasClient;
 
-
     @BeforeClass
     public void setUp() {
         try {
@@ -74,12 +77,14 @@ public class HBaseAtlasHookIT {
     @AfterClass
     public void cleanup() throws Exception {
         LOG.info("Stopping mini cluster.. ");
-        utility.shutdownMiniCluster();
+
+        hBaseTestingUtility.shutdownMiniCluster();
     }
 
     @Test
     public void testGetMetaTableRows() throws Exception {
-        List<byte[]> results = utility.getMetaTableRows();
+        List<byte[]> results = hBaseTestingUtility.getMetaTableRows();
+
         assertFalse("results should have some entries and is empty.", 
results.isEmpty());
     }
 
@@ -105,9 +110,9 @@ public class HBaseAtlasHookIT {
             AtlasEntityWithExtInfo nameSpaceRef           = 
atlasClient.getEntityByGuid(nameSpace);
             String                 nameSpaceQualifiedName = 
HBaseAtlasHook.getNameSpaceQualifiedName(CLUSTER_NAME, ns.getName());
 
-            
Assert.assertEquals(nameSpaceRef.getEntity().getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
 nameSpaceQualifiedName);
+            
assertEquals(nameSpaceRef.getEntity().getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
 nameSpaceQualifiedName);
         } else {
-            Assert.fail("Unable to create AtlasClient for Testing");
+            fail("Unable to create AtlasClient for Testing");
         }
     }
 
@@ -145,14 +150,13 @@ public class HBaseAtlasHookIT {
             AtlasEntityWithExtInfo tableRef   = 
atlasClient.getEntityByGuid(table);
             String                 entityName = 
HBaseAtlasHook.getTableQualifiedName(CLUSTER_NAME, namespace, tablename);
 
-            
Assert.assertEquals(tableRef.getEntity().getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
 entityName);
+            
assertEquals(tableRef.getEntity().getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
 entityName);
         } else {
-            Assert.fail("Unable to create AtlasClient for Testing");
+            fail("Unable to create AtlasClient for Testing");
         }
     }
 
     // Methods for creating HBase
-
     private void createAtlasClient() {
         try {
             org.apache.commons.configuration.Configuration configuration = 
ApplicationProperties.get();
@@ -161,19 +165,21 @@ public class HBaseAtlasHookIT {
             configuration.setProperty("atlas.cluster.name", CLUSTER_NAME);
 
             if (atlasEndPoint == null || atlasEndPoint.length == 0) {
-                atlasEndPoint = new String[]{ATLAS_URL};
+                atlasEndPoint = new String[] {ATLAS_URL};
             }
 
             Iterator<String> keys = configuration.getKeys();
+
             while (keys.hasNext()) {
                 String key = keys.next();
+
                 LOG.info("{} = {} ", key, configuration.getString(key));
             }
 
             if (AuthenticationUtil.isKerberosAuthenticationEnabled()) {
                 atlasClient = new AtlasClientV2(atlasEndPoint);
             } else {
-                atlasClient = new AtlasClientV2(configuration, atlasEndPoint, 
new String[]{"admin", "admin"});
+                atlasClient = new AtlasClientV2(configuration, atlasEndPoint, 
new String[] {"admin", "admin"});
             }
         } catch (Exception e) {
             LOG.error("Unable to create AtlasClient for Testing ", e);
@@ -192,27 +198,28 @@ public class HBaseAtlasHookIT {
     private void createHBaseCluster() throws Exception {
         LOG.info("Creating Hbase Admin...");
 
-        port    = getFreePort();
-        utility = new HBaseTestingUtility();
+        port                = getFreePort();
+        hBaseTestingUtility = new HBaseTestingUtility();
 
-        
utility.getConfiguration().set("test.hbase.zookeeper.property.clientPort", 
String.valueOf(port));
-        utility.getConfiguration().set("hbase.master.port", 
String.valueOf(getFreePort()));
-        utility.getConfiguration().set("hbase.master.info.port", 
String.valueOf(getFreePort()));
-        utility.getConfiguration().set("hbase.regionserver.port", 
String.valueOf(getFreePort()));
-        utility.getConfiguration().set("hbase.regionserver.info.port", 
String.valueOf(getFreePort()));
-        utility.getConfiguration().set("zookeeper.znode.parent", 
"/hbase-unsecure");
-        utility.getConfiguration().set("hbase.table.sanity.checks", "false");
-        utility.getConfiguration().set("hbase.coprocessor.master.classes", 
"org.apache.atlas.hbase.hook.HBaseAtlasCoprocessor");
+        
hBaseTestingUtility.getConfiguration().set("test.hbase.zookeeper.property.clientPort",
 String.valueOf(port));
+        hBaseTestingUtility.getConfiguration().set("hbase.master.port", 
String.valueOf(getFreePort()));
+        hBaseTestingUtility.getConfiguration().set("hbase.master.info.port", 
String.valueOf(getFreePort()));
+        hBaseTestingUtility.getConfiguration().set("hbase.regionserver.port", 
String.valueOf(getFreePort()));
+        
hBaseTestingUtility.getConfiguration().set("hbase.regionserver.info.port", 
String.valueOf(getFreePort()));
+        hBaseTestingUtility.getConfiguration().set("zookeeper.znode.parent", 
"/hbase-unsecure");
+        
hBaseTestingUtility.getConfiguration().set("hbase.table.sanity.checks", 
"false");
+        
hBaseTestingUtility.getConfiguration().set("hbase.coprocessor.master.classes", 
"org.apache.atlas.hbase.hook.HBaseAtlasCoprocessor");
 
-        utility.startMiniCluster();
+        hBaseTestingUtility.startMiniCluster();
     }
 
-
     public AtlasClientV2 getAtlasClient() {
         AtlasClientV2 ret = null;
+
         if (atlasClient != null) {
             ret = atlasClient;
         }
+
         return ret;
     }
 
@@ -221,12 +228,11 @@ public class HBaseAtlasHookIT {
     }
 
     protected String assertNameSpaceIsRegistered(String nameSpace, 
HBaseAtlasHookIT.AssertPredicate assertPredicate) throws Exception {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Searching for nameSpace {}", nameSpace);
-        }
+        LOG.debug("Searching for nameSpace {}", nameSpace);
+
         String nameSpaceQualifiedName = 
HBaseAtlasHook.getNameSpaceQualifiedName(CLUSTER_NAME, nameSpace);
-        return 
assertEntityIsRegistered(HBaseDataTypes.HBASE_NAMESPACE.getName(), 
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
-                                        nameSpaceQualifiedName, 
assertPredicate);
+
+        return 
assertEntityIsRegistered(HBaseDataTypes.HBASE_NAMESPACE.getName(), 
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, nameSpaceQualifiedName, 
assertPredicate);
     }
 
     protected String assertTableIsRegistered(String nameSpace, String 
tableName) throws Exception {
@@ -234,12 +240,11 @@ public class HBaseAtlasHookIT {
     }
 
     protected String assertTableIsRegistered(String nameSpace, String 
tableName, HBaseAtlasHookIT.AssertPredicate assertPredicate) throws Exception {
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Searching for nameSpace:Table {} {}", nameSpace, 
tableName);
-        }
+        LOG.debug("Searching for nameSpace:Table {} {}", nameSpace, tableName);
+
         String tableQualifiedName = 
HBaseAtlasHook.getTableQualifiedName(CLUSTER_NAME, nameSpace, tableName);
-        return assertEntityIsRegistered(HBaseDataTypes.HBASE_TABLE.getName(), 
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName,
-                                        assertPredicate);
+
+        return assertEntityIsRegistered(HBaseDataTypes.HBASE_TABLE.getName(), 
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName, assertPredicate);
     }
 
     public interface AssertPredicate {
@@ -256,9 +261,7 @@ public class HBaseAtlasHookIT {
         void evaluate() throws Exception;
     }
 
-
-    protected String assertEntityIsRegistered(final String typeName, final 
String property, final String value,
-                                              final 
HBaseAtlasHookIT.AssertPredicate assertPredicate) throws Exception {
+    protected String assertEntityIsRegistered(final String typeName, final 
String property, final String value, final HBaseAtlasHookIT.AssertPredicate 
assertPredicate) throws Exception {
         waitFor(30000, new HBaseAtlasHookIT.Predicate() {
             @Override
             public void evaluate() throws Exception {
@@ -285,6 +288,7 @@ public class HBaseAtlasHookIT {
      */
     protected void waitFor(int timeout, HBaseAtlasHookIT.Predicate predicate) 
throws Exception {
         ParamChecker.notNull(predicate, "predicate");
+
         long mustEnd = System.currentTimeMillis() + timeout;
 
         while (true) {
@@ -295,13 +299,10 @@ public class HBaseAtlasHookIT {
                 if (System.currentTimeMillis() >= mustEnd) {
                     fail("Assertions failed. Failing after waiting for timeout 
" + timeout + " msecs", e);
                 }
-                if (LOG.isDebugEnabled()) {
-                    LOG.debug("Waiting up to {} msec as assertion failed", 
mustEnd - System.currentTimeMillis(), e);
-                }
+
+                LOG.debug("Waiting up to {} msec as assertion failed", mustEnd 
- System.currentTimeMillis(), e);
                 Thread.sleep(5000);
             }
         }
     }
-
-
 }
diff --git a/addons/hbase-testing-util/pom.xml 
b/addons/hbase-testing-util/pom.xml
index 37d6fcb08..50e2dd7b4 100644
--- a/addons/hbase-testing-util/pom.xml
+++ b/addons/hbase-testing-util/pom.xml
@@ -34,6 +34,8 @@
 
     <properties>
         <apache.log4j.version>2.17.1</apache.log4j.version>
+        <checkstyle.failOnViolation>true</checkstyle.failOnViolation>
+        <checkstyle.skip>false</checkstyle.skip>
         <hadoop.version>3.0.3</hadoop.version>
         <jetty.version>9.3.14.v20161028</jetty.version>
     </properties>
diff --git 
a/addons/hbase-testing-util/src/test/java/org/apache/atlas/hbase/TestHBaseTestingUtilSpinup.java
 
b/addons/hbase-testing-util/src/test/java/org/apache/atlas/hbase/TestHBaseTestingUtilSpinup.java
index b870d97ea..4c62eba8e 100644
--- 
a/addons/hbase-testing-util/src/test/java/org/apache/atlas/hbase/TestHBaseTestingUtilSpinup.java
+++ 
b/addons/hbase-testing-util/src/test/java/org/apache/atlas/hbase/TestHBaseTestingUtilSpinup.java
@@ -27,43 +27,43 @@ import java.util.List;
 
 import static org.testng.AssertJUnit.assertFalse;
 
-
 /**
  * Make sure we can spin up a HBTU without a hbase-site.xml
  */
 public class TestHBaseTestingUtilSpinup {
-  private final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+    private final HBaseTestingUtility hBaseTestingUtility = new 
HBaseTestingUtility();
+
+    public TestHBaseTestingUtilSpinup() throws Exception {
+        
hBaseTestingUtility.getConfiguration().set("test.hbase.zookeeper.property.clientPort",
 String.valueOf(getFreePort()));
+        hBaseTestingUtility.getConfiguration().set("hbase.master.port", 
String.valueOf(getFreePort()));
+        hBaseTestingUtility.getConfiguration().set("hbase.master.info.port", 
String.valueOf(getFreePort()));
+        hBaseTestingUtility.getConfiguration().set("hbase.regionserver.port", 
String.valueOf(getFreePort()));
+        
hBaseTestingUtility.getConfiguration().set("hbase.regionserver.info.port", 
String.valueOf(getFreePort()));
+        hBaseTestingUtility.getConfiguration().set("zookeeper.znode.parent", 
"/hbase-unsecure");
+        
hBaseTestingUtility.getConfiguration().set("hbase.table.sanity.checks", 
"false");
+    }
 
-  public TestHBaseTestingUtilSpinup() throws Exception {
-    UTIL.getConfiguration().set("test.hbase.zookeeper.property.clientPort", 
String.valueOf(getFreePort()));
-    UTIL.getConfiguration().set("hbase.master.port", 
String.valueOf(getFreePort()));
-    UTIL.getConfiguration().set("hbase.master.info.port", 
String.valueOf(getFreePort()));
-    UTIL.getConfiguration().set("hbase.regionserver.port", 
String.valueOf(getFreePort()));
-    UTIL.getConfiguration().set("hbase.regionserver.info.port", 
String.valueOf(getFreePort()));
-    UTIL.getConfiguration().set("zookeeper.znode.parent", "/hbase-unsecure");
-    UTIL.getConfiguration().set("hbase.table.sanity.checks", "false");
-  }
+    @Test
+    public void testGetMetaTableRows() throws Exception {
+        try (MiniHBaseCluster miniCluster = 
hBaseTestingUtility.startMiniCluster()) {
+            if 
(!hBaseTestingUtility.getHBaseCluster().waitForActiveAndReadyMaster(30000)) {
+                throw new RuntimeException("Active master not ready");
+            }
 
-  @Test
-  public void testGetMetaTableRows() throws Exception {
-    try (MiniHBaseCluster miniCluster = UTIL.startMiniCluster()) {
-      if (!UTIL.getHBaseCluster().waitForActiveAndReadyMaster(30000)) {
-        throw new RuntimeException("Active master not ready");
-      }
+            List<byte[]> results = hBaseTestingUtility.getMetaTableRows();
 
-      List<byte[]> results = UTIL.getMetaTableRows();
-      assertFalse("results should have some entries and is empty.", 
results.isEmpty());
-    } finally {
-      UTIL.shutdownMiniCluster();
+            assertFalse("results should have some entries and is empty.", 
results.isEmpty());
+        } finally {
+            hBaseTestingUtility.shutdownMiniCluster();
+        }
     }
-  }
 
-  private static int getFreePort() throws IOException {
-    ServerSocket serverSocket = new ServerSocket(0);
-    int          port         = serverSocket.getLocalPort();
+    private static int getFreePort() throws IOException {
+        ServerSocket serverSocket = new ServerSocket(0);
+        int          port         = serverSocket.getLocalPort();
 
-    serverSocket.close();
+        serverSocket.close();
 
-    return port;
-  }
+        return port;
+    }
 }

Reply via email to