This is an automated email from the ASF dual-hosted git repository.

shaofengshi pushed a commit to branch 2.5.x
in repository https://gitbox.apache.org/repos/asf/kylin.git


commit f3da67746e730b9f69a8bf128854c5c9d2a207a0
Author: Yichen Zhou <zhouy...@gmail.com>
AuthorDate: Tue Sep 18 10:30:02 2018 +0800

    KYLIN-3543 Export HBase configuration only for Spark engine
---
 .../java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 5e17a4c..37e36ac 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -52,6 +52,7 @@ import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 import org.apache.kylin.engine.mr.common.CubeStatsReader;
 import org.apache.kylin.engine.mr.common.CuboidShardUtil;
 import org.apache.kylin.job.exception.ExecuteException;
+import org.apache.kylin.metadata.model.IEngineAware;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -118,7 +119,11 @@ public class CreateHTableJob extends AbstractHadoopJob {
                 partitionFilePath.getParent());
 
         CubeHTableUtil.createHTable(cubeSegment, splitKeys);
-        exportHBaseConfiguration(cubeSegment.getStorageLocationIdentifier());
+
+        // export configuration in advance to avoid connecting to hbase from spark
+        if (cubeDesc.getEngineType()== IEngineAware.ID_SPARK){
+            exportHBaseConfiguration(cubeSegment.getStorageLocationIdentifier());
+        }
 
         return 0;
     }
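
For context, here is a minimal, standalone sketch of the pattern this commit introduces: the HBase configuration is exported ahead of time only when the cube's engine type is Spark, so the Spark job can read the serialized configuration rather than opening its own HBase connection. The class, method, and constant values below (EngineAwareConfigExport, createHTableStep, the numeric engine ids) are simplified stand-ins, not Kylin's actual API; in the real code the check lives in CreateHTableJob.run() and uses IEngineAware.ID_SPARK.

// Simplified stand-in for the engine-type gate added in CreateHTableJob; all names
// and constant values here are illustrative assumptions, not Kylin's real API.
public class EngineAwareConfigExport {

    // Stand-ins for the engine ids defined on org.apache.kylin.metadata.model.IEngineAware
    // (the concrete values are assumed for this sketch).
    static final int ID_MR_V2 = 2;
    static final int ID_SPARK = 4;

    // Stand-in for CreateHTableJob#exportHBaseConfiguration: in Kylin this writes the
    // HBase configuration out for later steps to load; here we only log.
    static void exportHBaseConfiguration(String hTableName) {
        System.out.println("exporting HBase configuration for HTable " + hTableName);
    }

    // Mirrors the shape of the change: after the HTable is created, export the
    // configuration in advance only for the Spark engine, so the Spark job never
    // has to connect to HBase itself.
    static int createHTableStep(int engineType, String storageLocationIdentifier) {
        // ... HTable creation happens here in the real job ...

        if (engineType == ID_SPARK) {
            exportHBaseConfiguration(storageLocationIdentifier);
        }
        return 0;
    }

    public static void main(String[] args) {
        createHTableStep(ID_SPARK, "KYLIN_EXAMPLE_HTABLE");  // exports the configuration
        createHTableStep(ID_MR_V2, "KYLIN_EXAMPLE_HTABLE");  // skipped for the MR engine
    }
}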