This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5_beta
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 73f2b6533159b6a3ad200e6cd9778ffcbbeb3f13
Author: Pengfei Zhan <dethr...@gmail.com>
AuthorDate: Mon Apr 10 16:50:35 2023 +0800

    KYLIN-5633 Move some layout-choosing code to the query-common module
---
 src/common-booter/src/main/resources/log4j2.xml    |   2 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |  24 +----
 .../kylin/metadata/cube/model/NDataflow.java       |  18 ----
 .../apache/kylin/metadata/model/NDataModel.java    |  14 +++
 .../metadata/realization/CapabilityResult.java     |  20 ++++
 .../metadata/realization/HybridRealization.java    |  64 ------------
 .../kylin/metadata/realization/IRealization.java   |  10 --
 .../src/main/resources/log4j2.xml                  |   2 +-
 ...est.java => DataflowCapabilityCheckerTest.java} |  20 ++--
 .../routing/HeterogeneousSegmentPruningTest.java   |   6 +-
 .../query/routing/QueryLayoutChooserTest.java      |  61 ++++++------
 .../routing/TableIndexAnswerSelectStarTest.java    |   7 +-
 .../query/routing/VacantIndexPruningRuleTest.java  |   3 +-
 src/query-booter/src/main/resources/log4j2.xml     |   2 +-
 .../org/apache/kylin/query/routing/Candidate.java  |  12 +++
 .../query/routing/DataflowCapabilityChecker.java}  | 110 ++++++++++++++-------
 .../kylin/query/routing/QueryLayoutChooser.java}   |  28 +++---
 .../apache/kylin/query/routing/QueryRouter.java    |  23 ++++-
 .../routing/RemoveIncapableRealizationsRule.java   |  30 ++++--
 .../org/apache/kylin/query/schema/OLAPTable.java   |  29 +++---
 .../query/routing/LayoutCandidateSortTest.java     |  37 ++++---
 .../kylin/query/routing/QueryRouterTest.java       |   9 +-
 src/server/src/main/resources/log4j2.xml           |   2 +-
 .../apache/kylin/query/util/RuntimeHelper.scala    |   7 +-
 24 files changed, 269 insertions(+), 271 deletions(-)
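
As the updated tests in this diff illustrate, capability checking and layout selection now live in org.apache.kylin.query.routing: DataflowCapabilityChecker.check() takes a routing Candidate instead of a pruned-segment list, and NQueryLayoutChooser becomes QueryLayoutChooser with an unchanged selectLayoutCandidate() signature. A minimal caller-side sketch adapted from those tests (dataflow, sql and getProject() are placeholders supplied by the caller):

    // Resolve the OLAP context and bind it to the model, as the tests below do.
    OLAPContext olapContext = OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
    Map<String, String> sqlAlias2ModelNameMap = OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
    olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);

    // Old entry point (core-metadata): NDataflowCapabilityChecker.check(dataflow,
    //     dataflow.getQueryableSegments(), olapContext.getSQLDigest(), null);
    // New entry point (query-common): a Candidate wraps the dataflow, context and alias map.
    Candidate candidate = new Candidate(dataflow, olapContext, sqlAlias2ModelNameMap);
    CapabilityResult result = DataflowCapabilityChecker.check(dataflow, candidate, olapContext.getSQLDigest());

    // Layout selection keeps its arguments but moves out of metadata.cube.cuboid.
    NLayoutCandidate layoutCandidate = QueryLayoutChooser.selectLayoutCandidate(dataflow,
            dataflow.getQueryableSegments(), olapContext.getSQLDigest(), null);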

diff --git a/src/common-booter/src/main/resources/log4j2.xml 
b/src/common-booter/src/main/resources/log4j2.xml
index 486437182e..9f13899e78 100644
--- a/src/common-booter/src/main/resources/log4j2.xml
+++ b/src/common-booter/src/main/resources/log4j2.xml
@@ -28,7 +28,7 @@
         <Logger name="io.kyligence" level="DEBUG"/>
         <Logger name="org.springframework" level="WARN"/>
         <Logger name="org.apache.kylin" level="DEBUG"/>
-        <Logger 
name="org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser" level="INFO"/>
+        <Logger name="org.apache.kylin.query.routing.NQueryLayoutChooser" 
level="INFO"/>
         <Logger name="org.apache.spark.ui" level="WARN"/>
         <Logger name="org.apache.spark.sql.execution.ui" level="WARN"/>
         <Logger name="org.springframework.security" level="INFO"/>
diff --git 
a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java 
b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index c0797ebc18..b2f8f3d843 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -1304,7 +1304,7 @@ public abstract class KylinConfigBase implements 
Serializable {
     /**
      * Source Name Case Sensitive
      */
-    public Boolean getSourceNameCaseSensitiveEnabled() {
+    public boolean getSourceNameCaseSensitiveEnabled() {
         return 
Boolean.parseBoolean(getOptional("kylin.source.name-case-sensitive-enabled", 
FALSE));
     }
 
@@ -2308,32 +2308,12 @@ public abstract class KylinConfigBase implements 
Serializable {
                         
"org.apache.kylin.query.util.SparkSQLFunctionConverter" });
     }
 
-    public static final String USE_VACANT_INDEXES = "use-vacant-indexes";
-    public static final String USE_TABLE_INDEX_ANSWER_SELECT_STAR = 
"use-table-index-answer-select-star";
-
     public String getQueryIndexMatchRules() {
         return getOptional("kylin.query.index-match-rules", "");
     }
 
-    private Set<String> getPruningRules() {
-        String queryIndexMatchRules = getQueryIndexMatchRules();
-        String[] splitRules = queryIndexMatchRules.split(",");
-        Set<String> configRules = Sets.newHashSet();
-        for (String splitRule : splitRules) {
-            if (StringUtils.isNotBlank(splitRule)) {
-                configRules.add(StringUtils.lowerCase(splitRule));
-            }
-        }
-        return configRules;
-    }
-
-    public boolean isVacantIndexPruningEnabled() {
-        return getPruningRules().contains(KylinConfigBase.USE_VACANT_INDEXES);
-    }
-
     public boolean useTableIndexAnswerSelectStarEnabled() {
-        return 
getPruningRules().contains(KylinConfigBase.USE_TABLE_INDEX_ANSWER_SELECT_STAR)
-                || 
Boolean.parseBoolean(getOptional("kylin.query.use-tableindex-answer-select-star.enabled",
 FALSE));
+        return 
Boolean.parseBoolean(getOptional("kylin.query.use-tableindex-answer-select-star.enabled",
 FALSE));
     }
 
     @ThirdPartyDependencies({
diff --git 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
index 39c5048f1e..525c45959c 100644
--- 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
+++ 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
@@ -51,11 +51,8 @@ import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.Segments;
 import org.apache.kylin.metadata.model.TableRef;
 import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.metadata.realization.CapabilityResult;
 import org.apache.kylin.metadata.realization.IRealization;
-import org.apache.kylin.metadata.realization.QueryableSeg;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
-import org.apache.kylin.metadata.realization.SQLDigest;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonIgnore;
@@ -206,21 +203,6 @@ public class NDataflow extends RootPersistentEntity 
implements Serializable, IRe
         return NIndexPlanManager.getInstance(config, 
project).getIndexPlan(uuid);
     }
 
-    @Override
-    public CapabilityResult isCapable(SQLDigest digest, List<NDataSegment> 
prunedSegments,
-            Map<String, Set<Long>> chSegToLayoutsMap) {
-        return NDataflowCapabilityChecker.check(this, prunedSegments, digest, 
chSegToLayoutsMap);
-    }
-
-    @Override
-    public CapabilityResult isCapable(SQLDigest digest, QueryableSeg 
queryableSeg) {
-        if (isStreaming()) {
-            return isCapable(digest, queryableSeg.getStreamingSegments(), 
Maps.newHashMap());
-        } else {
-            return isCapable(digest, queryableSeg.getBatchSegments(), 
queryableSeg.getChSegToLayoutsMap());
-        }
-    }
-
     @Override
     public boolean isStreaming() {
         return getModel().isStreaming();
diff --git 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java
 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java
index 18824457b4..8ecc7fc5a3 100644
--- 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java
+++ 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java
@@ -1488,6 +1488,9 @@ public class NDataModel extends RootPersistentEntity {
         return StringUtils.isNotEmpty(fusionId) && ModelType.HYBRID == 
getModelType();
     }
 
+    /**
+     * Checks whether this model belongs to a fusion model.
+     */
     public boolean isFusionModel() {
         return StringUtils.isNotEmpty(fusionId);
     }
@@ -1496,6 +1499,17 @@ public class NDataModel extends RootPersistentEntity {
         return getModelType() == ModelType.STREAMING || getModelType() == 
ModelType.HYBRID;
     }
 
+    public String getQueryCompatibleFactTable(String factTableOfQuery) {
+        String rootFactTable = this.getRootFactTableName();
+        if (!rootFactTable.equals(factTableOfQuery) && this.isFusionModel() && 
!this.isStreaming()) {
+            NDataModel streamingModel = NDataModelManager
+                    .getInstance(KylinConfig.getInstanceFromEnv(), 
this.getProject())
+                    .getDataModelDesc(this.getFusionId());
+            rootFactTable = streamingModel.getRootFactTableName();
+        }
+        return rootFactTable;
+    }
+
     public boolean isAccessible(boolean turnOnStreaming) {
         return turnOnStreaming || !isStreaming();
     }
diff --git 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java
 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java
index ddd2f01d67..e21586ec04 100644
--- 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java
+++ 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java
@@ -77,6 +77,26 @@ public class CapabilityResult {
      */
     public List<CapabilityInfluence> influences = 
Lists.newArrayListWithCapacity(1);
 
+    public double getCost(boolean isStreaming) {
+        return isStreaming ? selectedStreamingCandidate.getCost() : 
selectedCandidate.getCost();
+    }
+
+    public void setCandidate(boolean isStreaming, CapabilityResult result) {
+        if (isStreaming) {
+            
setSelectedStreamingCandidate(result.getSelectedStreamingCandidate());
+        } else {
+            setSelectedCandidate(result.getSelectedCandidate());
+        }
+    }
+
+    public void setCandidate(boolean isStreaming, IRealizationCandidate 
candidate) {
+        if (isStreaming) {
+            setSelectedStreamingCandidate(candidate);
+        } else {
+            setSelectedCandidate(candidate);
+        }
+    }
+
     public interface CapabilityInfluence {
         /**
          * Suggest a multiplier to influence query cost
diff --git 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/HybridRealization.java
 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/HybridRealization.java
index b70691de1a..a74c6acbcd 100644
--- 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/HybridRealization.java
+++ 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/HybridRealization.java
@@ -21,21 +21,16 @@ package org.apache.kylin.metadata.realization;
 import java.util.ArrayList;
 import java.util.LinkedHashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigExt;
 import org.apache.kylin.guava30.shaded.common.collect.Lists;
-import org.apache.kylin.guava30.shaded.common.collect.Maps;
-import org.apache.kylin.metadata.cube.model.NDataSegment;
-import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.IStorageAware;
 import org.apache.kylin.metadata.model.MeasureDesc;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
-import org.apache.kylin.metadata.model.SegmentRange;
 import org.apache.kylin.metadata.model.TblColRef;
 
 import lombok.Getter;
@@ -45,8 +40,6 @@ import lombok.extern.slf4j.Slf4j;
 public class HybridRealization implements IRealization {
 
     public static final String REALIZATION_TYPE = "HYBRID";
-    public static final String HYBRID_CAPABLE_ERROR_MSG = "The fusion model 
can only execute this method separately "
-            + "for the batch model and the stream model it contains.";
 
     @Getter
     private String uuid;
@@ -122,63 +115,6 @@ public class HybridRealization implements IRealization {
         });
     }
 
-    @Override
-    public CapabilityResult isCapable(SQLDigest digest, List<NDataSegment> 
prunedSegments,
-            Map<String, Set<Long>> chSegToLayoutsMap) {
-        throw new IllegalStateException(HYBRID_CAPABLE_ERROR_MSG);
-    }
-
-    public CapabilityResult isCapable(SQLDigest digest, QueryableSeg 
queryableSeg) {
-        CapabilityResult result = new CapabilityResult();
-
-        resolveSegmentsOverlap(queryableSeg.getStreamingSegments());
-        for (IRealization realization : getRealizations()) {
-            CapabilityResult child;
-            if (realization.isStreaming()) {
-                child = realization.isCapable(digest, 
queryableSeg.getStreamingSegments(), Maps.newHashMap());
-                
result.setSelectedStreamingCandidate(child.getSelectedStreamingCandidate());
-                if (child.isCapable()) {
-                    result.setCost(Math.min(result.getCost(), 
child.getSelectedStreamingCandidate().getCost()));
-                }
-            } else {
-                child = realization.isCapable(digest, 
queryableSeg.getBatchSegments(),
-                        queryableSeg.getChSegToLayoutsMap());
-                result.setSelectedCandidate(child.getSelectedCandidate());
-                if (child.isCapable()) {
-                    result.setCost(Math.min(result.getCost(), 
child.getSelectedCandidate().getCost()));
-                }
-            }
-            if (child.isCapable()) {
-                result.setCapable(true);
-                result.influences.addAll(child.influences);
-            } else {
-                result.incapableCause = child.incapableCause;
-            }
-        }
-
-        result.setCost(result.getCost() - 1); // let hybrid win its children
-
-        return result;
-    }
-
-    // Use batch segment when there's overlap of batch and stream segments, 
like follows
-    // batch segments:seg1['2012-01-01', '2012-02-01'], seg2['2012-02-01', 
'2012-03-01'],
-    // stream segments:seg3['2012-02-01', '2012-03-01'], seg4['2012-03-01', 
'2012-04-01']
-    // the chosen segments is: [seg1, seg2, seg4]
-    private void resolveSegmentsOverlap(List<NDataSegment> 
prunedStreamingSegments) {
-        long end = batchRealization.getDateRangeEnd();
-        if (end != Long.MIN_VALUE) {
-            String segments = prunedStreamingSegments.toString();
-            log.info("Before resolve segments overlap between batch and stream 
of fusion model: {}", segments);
-            SegmentRange.BasicSegmentRange range = new 
SegmentRange.KafkaOffsetPartitionedSegmentRange(end,
-                    Long.MAX_VALUE);
-            List<NDataSegment> list = ((NDataflow) 
streamingRealization).getQueryableSegmentsByRange(range);
-            prunedStreamingSegments.removeIf(seg -> !list.contains(seg));
-            segments = prunedStreamingSegments.toString();
-            log.info("After resolve segments overlap between batch and stream 
of fusion model: {}", segments);
-        }
-    }
-
     @Override
     public int getCost() {
         int c = Integer.MAX_VALUE;
diff --git 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
index 772dfc0b68..0cbe1e0aa9 100644
--- 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
+++ 
b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/IRealization.java
@@ -19,11 +19,9 @@
 package org.apache.kylin.metadata.realization;
 
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.metadata.cube.model.NDataSegment;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.IStorageAware;
 import org.apache.kylin.metadata.model.MeasureDesc;
@@ -32,14 +30,6 @@ import org.apache.kylin.metadata.model.TblColRef;
 
 public interface IRealization extends IStorageAware {
 
-    /**
-     * Given the features of a query, check how capable the realization is to 
answer the query.
-     */
-    CapabilityResult isCapable(SQLDigest digest, List<NDataSegment> 
prunedSegments,
-            Map<String, Set<Long>> chSegToLayoutsMap);
-
-    CapabilityResult isCapable(SQLDigest digest, QueryableSeg queryableSeg);
-
     /**
      * Get whether this specific realization is a cube or InvertedIndex
      */
diff --git a/src/data-loading-booter/src/main/resources/log4j2.xml 
b/src/data-loading-booter/src/main/resources/log4j2.xml
index 486437182e..3db88b28be 100644
--- a/src/data-loading-booter/src/main/resources/log4j2.xml
+++ b/src/data-loading-booter/src/main/resources/log4j2.xml
@@ -28,7 +28,7 @@
         <Logger name="io.kyligence" level="DEBUG"/>
         <Logger name="org.springframework" level="WARN"/>
         <Logger name="org.apache.kylin" level="DEBUG"/>
-        <Logger 
name="org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser" level="INFO"/>
+        <Logger name="org.apache.kylin.query.routing.QueryLayoutChooser" 
level="INFO"/>
         <Logger name="org.apache.spark.ui" level="WARN"/>
         <Logger name="org.apache.spark.sql.execution.ui" level="WARN"/>
         <Logger name="org.springframework.security" level="INFO"/>
diff --git 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/NDataflowCapabilityCheckerTest.java
 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/DataflowCapabilityCheckerTest.java
similarity index 84%
rename from 
src/kylin-it/src/test/java/org/apache/kylin/query/routing/NDataflowCapabilityCheckerTest.java
rename to 
src/kylin-it/src/test/java/org/apache/kylin/query/routing/DataflowCapabilityCheckerTest.java
index 5199633912..0b33b0c382 100644
--- 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/NDataflowCapabilityCheckerTest.java
+++ 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/DataflowCapabilityCheckerTest.java
@@ -23,10 +23,10 @@ import java.util.Map;
 import org.apache.calcite.sql.parser.SqlParseException;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.guava30.shaded.common.collect.Maps;
 import org.apache.kylin.metadata.cube.cuboid.NLookupCandidate;
 import org.apache.kylin.metadata.cube.model.NDataSegment;
 import org.apache.kylin.metadata.cube.model.NDataflow;
-import org.apache.kylin.metadata.cube.model.NDataflowCapabilityChecker;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.cube.model.NDataflowUpdate;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
@@ -36,7 +36,7 @@ import org.apache.kylin.util.OlapContextUtil;
 import org.junit.Assert;
 import org.junit.Test;
 
-public class NDataflowCapabilityCheckerTest extends NLocalWithSparkSessionTest 
{
+public class DataflowCapabilityCheckerTest extends NLocalWithSparkSessionTest {
 
     @Test
     public void testCapabilityResult() throws SqlParseException {
@@ -46,8 +46,8 @@ public class NDataflowCapabilityCheckerTest extends 
NLocalWithSparkSessionTest {
         OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
         Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
         olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-        CapabilityResult result = NDataflowCapabilityChecker.check(dataflow, 
dataflow.getQueryableSegments(),
-                olapContext.getSQLDigest(), null);
+        Candidate candidate = new Candidate(dataflow, olapContext, 
sqlAlias2ModelNameMap);
+        CapabilityResult result = DataflowCapabilityChecker.check(dataflow, 
candidate, olapContext.getSQLDigest());
         Assert.assertNotNull(result);
         Assert.assertEquals(result.getSelectedCandidate().getCost(), 
result.getCost(), 0.001);
     }
@@ -67,8 +67,8 @@ public class NDataflowCapabilityCheckerTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            CapabilityResult result = 
NDataflowCapabilityChecker.check(dataflow, dataflow.getQueryableSegments(),
-                    olapContext.getSQLDigest(), null);
+            Candidate candidate = new Candidate(dataflow, olapContext, 
sqlAlias2ModelNameMap);
+            CapabilityResult result = 
DataflowCapabilityChecker.check(dataflow, candidate, 
olapContext.getSQLDigest());
             Assert.assertNotNull(result);
             Assert.assertTrue(result.getSelectedCandidate() instanceof 
NLookupCandidate);
             
Assert.assertFalse(olapContext.getSQLDigest().allColumns.isEmpty());
@@ -81,8 +81,8 @@ public class NDataflowCapabilityCheckerTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            CapabilityResult result = 
NDataflowCapabilityChecker.check(dataflow, dataflow.getQueryableSegments(),
-                    olapContext.getSQLDigest(), null);
+            Candidate candidate = new Candidate(dataflow, olapContext, 
sqlAlias2ModelNameMap);
+            CapabilityResult result = 
DataflowCapabilityChecker.check(dataflow, candidate, 
olapContext.getSQLDigest());
             Assert.assertNotNull(result);
             Assert.assertTrue(result.getSelectedCandidate() instanceof 
NLookupCandidate);
             
Assert.assertFalse(olapContext.getSQLDigest().allColumns.isEmpty());
@@ -96,8 +96,8 @@ public class NDataflowCapabilityCheckerTest extends 
NLocalWithSparkSessionTest {
             removeAllSegments(dataflow);
             dataflow = 
NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), getProject())
                     .getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
-            CapabilityResult result = 
NDataflowCapabilityChecker.check(dataflow, dataflow.getQueryableSegments(),
-                    olapContext.getSQLDigest(), null);
+            Candidate candidate = new Candidate(dataflow, olapContext, 
Maps.newHashMap());
+            CapabilityResult result = 
DataflowCapabilityChecker.check(dataflow, candidate, 
olapContext.getSQLDigest());
             Assert.assertFalse(result.isCapable());
         }
     }
diff --git 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/HeterogeneousSegmentPruningTest.java
 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/HeterogeneousSegmentPruningTest.java
index 4d46270c35..715591aca2 100644
--- 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/HeterogeneousSegmentPruningTest.java
+++ 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/HeterogeneousSegmentPruningTest.java
@@ -26,7 +26,6 @@ import java.util.stream.Collectors;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.sql.parser.SqlParseException;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.KylinConfigBase;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.Pair;
@@ -614,7 +613,7 @@ public class HeterogeneousSegmentPruningTest extends 
NLocalWithSparkSessionTest
 
         {
             MetadataTestUtils.updateProjectConfig(project, 
"kylin.query.index-match-rules",
-                    KylinConfigBase.USE_VACANT_INDEXES);
+                    QueryRouter.USE_VACANT_INDEXES);
             try (QueryContext queryContext = QueryContext.current()) {
                 OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(project, sql).get(0);
                 StorageContext storageContext = olapContext.storageContext;
@@ -661,8 +660,7 @@ public class HeterogeneousSegmentPruningTest extends 
NLocalWithSparkSessionTest
                 + "where cal_dt between date'2012-01-01' and 
date'2012-01-03'\n" //
                 + "group by cal_dt\n";
 
-        MetadataTestUtils.updateProjectConfig(project, 
"kylin.query.index-match-rules",
-                KylinConfigBase.USE_VACANT_INDEXES);
+        MetadataTestUtils.updateProjectConfig(project, 
"kylin.query.index-match-rules", QueryRouter.USE_VACANT_INDEXES);
         try (QueryContext queryContext = QueryContext.current()) {
             OLAPContext olapContext = OlapContextUtil.getOlapContexts(project, 
sql).get(0);
             StorageContext storageContext = olapContext.storageContext;
diff --git 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/QueryLayoutChooserTest.java
 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/QueryLayoutChooserTest.java
index 42c464a8e4..f767554461 100644
--- 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/QueryLayoutChooserTest.java
+++ 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/QueryLayoutChooserTest.java
@@ -28,7 +28,6 @@ import 
org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
 import org.apache.kylin.guava30.shaded.common.collect.ImmutableList;
 import org.apache.kylin.guava30.shaded.common.collect.Lists;
 import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate;
-import org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
 import org.apache.kylin.metadata.cube.model.LayoutEntity;
 import org.apache.kylin.metadata.cube.model.NDataLayout;
@@ -81,7 +80,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             // model without computedColumns
             String modelWithNoCCId = "abe3bf1a-c4bc-458d-8278-7ea8b00f5e96";
             NDataflow dataflowNoCC = 
dataflowManager.getDataflow(modelWithNoCCId);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflowNoCC,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflowNoCC,
                     dataflowNoCC.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNull(layoutCandidate);
         }
@@ -100,7 +99,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             // model without computedColumns
             String modelWithNoCCId = "abe3bf1a-c4bc-458d-8278-7ea8b00f5e96";
             NDataflow dataflowNoCC = 
dataflowManager.getDataflow(modelWithNoCCId);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflowNoCC,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflowNoCC,
                     dataflowNoCC.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNull(layoutCandidate);
         }
@@ -119,7 +118,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             // model without computedColumns
             String modelWithNoCCId = "abe3bf1a-c4bc-458d-8278-7ea8b00f5e96";
             NDataflow dataflowNoCC = 
dataflowManager.getDataflow(modelWithNoCCId);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflowNoCC,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflowNoCC,
                     dataflowNoCC.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNull(layoutCandidate);
         }
@@ -150,7 +149,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             Assert.assertEquals(10001L, 
layoutCandidate.getLayoutEntity().getId());
@@ -164,7 +163,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertTrue(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -188,7 +187,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             Assert.assertEquals(1010001, 
layoutCandidate.getLayoutEntity().getId());
@@ -200,7 +199,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             Assert.assertEquals(1010002, 
layoutCandidate.getLayoutEntity().getId());
@@ -214,7 +213,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             Assert.assertEquals(1010002, 
layoutCandidate.getLayoutEntity().getId());
@@ -291,7 +290,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
         Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
         olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), olapContext.getSQLDigest(), 
null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(1010001L, 
layoutCandidate.getLayoutEntity().getId());
@@ -359,7 +358,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
         Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
         olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), olapContext.getSQLDigest(), 
null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(20000000001L, 
layoutCandidate.getLayoutEntity().getId());
@@ -387,7 +386,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext oneOlapContext = olapContexts.get(0);
         Map<String, String> oneMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), oneOlapContext);
         oneOlapContext.fixModel(dataflow.getModel(), oneMap);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), 
oneOlapContext.getSQLDigest(), null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(1L, layoutCandidate.getLayoutEntity().getId());
@@ -396,7 +395,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext anotherOlapContext = olapContexts.get(1);
         Map<String, String> anotherMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), anotherOlapContext);
         anotherOlapContext.fixModel(dataflow.getModel(), anotherMap);
-        NLayoutCandidate anotherCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate anotherCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), 
anotherOlapContext.getSQLDigest(), null);
         Assert.assertNotNull(anotherCandidate);
         Assert.assertEquals(1L, anotherCandidate.getLayoutEntity().getId());
@@ -425,7 +424,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext oneOlapContext = olapContexts.get(0);
         Map<String, String> oneMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), oneOlapContext);
         oneOlapContext.fixModel(dataflow.getModel(), oneMap);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), 
oneOlapContext.getSQLDigest(), null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(1L, layoutCandidate.getLayoutEntity().getId());
@@ -434,7 +433,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext anotherOlapContext = olapContexts.get(1);
         Map<String, String> anotherMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), anotherOlapContext);
         anotherOlapContext.fixModel(dataflow.getModel(), anotherMap);
-        NLayoutCandidate anotherCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate anotherCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), 
anotherOlapContext.getSQLDigest(), null);
         Assert.assertNotNull(anotherCandidate);
         Assert.assertEquals(20000000001L, 
anotherCandidate.getLayoutEntity().getId());
@@ -466,7 +465,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             // so 1010001 is skipped, although it has a better dim order
             // 2. trans_id has a higher cardinality, so 1010002 with shard on 
trans_id 
             // is preferred over 1010003 with shard on cal_dt
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             Assert.assertEquals(1010002, 
layoutCandidate.getLayoutEntity().getId());
@@ -478,7 +477,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             Assert.assertEquals(1010003, 
layoutCandidate.getLayoutEntity().getId());
@@ -537,7 +536,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
         Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
         olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), olapContext.getSQLDigest(), 
null);
 
         Assert.assertNotNull(layoutCandidate);
@@ -563,7 +562,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(emptyProject, sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertFalse(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -574,7 +573,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(emptyProject, sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertFalse(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -585,7 +584,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(emptyProject, sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertFalse(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -596,7 +595,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(emptyProject, sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertFalse(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -607,7 +606,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(emptyProject, sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertFalse(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -618,7 +617,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(emptyProject, sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNull(layoutCandidate);
         }
@@ -629,7 +628,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(emptyProject, sql).get(0);
             Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertTrue(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -665,7 +664,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> tableAlias2ModelAliasMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), 
tableAlias2ModelAliasMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertFalse(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -676,7 +675,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> tableAlias2ModelAliasMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), 
tableAlias2ModelAliasMap);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             
Assert.assertTrue(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -694,7 +693,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         NDataflow dataflow = NDataflowManager.getInstance(getTestConfig(), 
project).getDataflow(modelId);
         Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
         olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), olapContext.getSQLDigest(), 
null);
         Assert.assertNotNull(layoutCandidate);
         
Assert.assertTrue(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -717,7 +716,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
 
         Map<String, String> sqlAlias2ModelNameMap = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
         olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelNameMap);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), olapContext.getSQLDigest(), 
null);
         Assert.assertNotNull(layoutCandidate);
         
Assert.assertTrue(layoutCandidate.getLayoutEntity().getIndex().isTableIndex());
@@ -743,7 +742,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
             OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
             Map<String, String> sqlAlias2ModelName = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
             olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelName);
-            NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+            NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                     dataflow.getQueryableSegments(), 
olapContext.getSQLDigest(), null);
             Assert.assertNotNull(layoutCandidate);
             Assert.assertEquals(20000010001L, 
layoutCandidate.getLayoutEntity().getId());
@@ -780,7 +779,7 @@ public class QueryLayoutChooserTest extends 
NLocalWithSparkSessionTest {
         OLAPContext olapContext = 
OlapContextUtil.getOlapContexts(getProject(), sql).get(0);
         Map<String, String> sqlAlias2ModelName = 
OlapContextUtil.matchJoins(dataflow.getModel(), olapContext);
         olapContext.fixModel(dataflow.getModel(), sqlAlias2ModelName);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), olapContext.getSQLDigest(), 
null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(1L, layoutCandidate.getLayoutEntity().getId());
diff --git 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/TableIndexAnswerSelectStarTest.java
 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/TableIndexAnswerSelectStarTest.java
index 9b54de169c..cf46cf7eb0 100644
--- 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/TableIndexAnswerSelectStarTest.java
+++ 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/TableIndexAnswerSelectStarTest.java
@@ -32,7 +32,6 @@ import 
org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
 import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate;
-import org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser;
 import org.apache.kylin.metadata.cube.model.NDataLayout;
 import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
@@ -137,7 +136,7 @@ public class TableIndexAnswerSelectStarTest extends 
NLocalWithSparkSessionTest {
         Assert.assertEquals(dataflow.getAllColumns().size(), 
context.allColumns.size());
         Map<String, String> sqlAlias2ModelName = 
OlapContextUtil.matchJoins(dataflow.getModel(), context);
         context.fixModel(dataflow.getModel(), sqlAlias2ModelName);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), context.getSQLDigest(), null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(20000000001L, 
layoutCandidate.getLayoutEntity().getId());
@@ -153,7 +152,7 @@ public class TableIndexAnswerSelectStarTest extends 
NLocalWithSparkSessionTest {
         Assert.assertEquals(dataflow.getAllColumns().size(), 
context.allColumns.size());
         Map<String, String> sqlAlias2ModelName = 
OlapContextUtil.matchJoins(dataflow.getModel(), context);
         context.fixModel(dataflow.getModel(), sqlAlias2ModelName);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), context.getSQLDigest(), null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(20000010001L, 
layoutCandidate.getLayoutEntity().getId());
@@ -214,7 +213,7 @@ public class TableIndexAnswerSelectStarTest extends 
NLocalWithSparkSessionTest {
 
         Map<String, String> sqlAlias2ModelName = 
OlapContextUtil.matchJoins(dataflow.getModel(), context);
         context.fixModel(dataflow.getModel(), sqlAlias2ModelName);
-        NLayoutCandidate layoutCandidate = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow,
+        NLayoutCandidate layoutCandidate = 
QueryLayoutChooser.selectLayoutCandidate(dataflow,
                 dataflow.getQueryableSegments(), context.getSQLDigest(), null);
         Assert.assertNotNull(layoutCandidate);
         Assert.assertEquals(20000010001L, 
layoutCandidate.getLayoutEntity().getId());
diff --git 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/VacantIndexPruningRuleTest.java
 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/VacantIndexPruningRuleTest.java
index 75ab026e9b..dad29ba967 100644
--- 
a/src/kylin-it/src/test/java/org/apache/kylin/query/routing/VacantIndexPruningRuleTest.java
+++ 
b/src/kylin-it/src/test/java/org/apache/kylin/query/routing/VacantIndexPruningRuleTest.java
@@ -22,7 +22,6 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.calcite.sql.parser.SqlParseException;
-import org.apache.kylin.common.KylinConfigBase;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
 import org.apache.kylin.junit.annotation.MetadataInfo;
@@ -71,7 +70,7 @@ class VacantIndexPruningRuleTest extends 
NLocalWithSparkSessionTest {
                 .forEach(model -> 
cleanAlreadyExistingLayoutsInSegments(model.getId()));
 
         MetadataTestUtils.updateProjectConfig(getProject(), 
"kylin.query.index-match-rules",
-                KylinConfigBase.USE_VACANT_INDEXES);
+                QueryRouter.USE_VACANT_INDEXES);
         try (QueryContext queryContext = QueryContext.current()) {
             String sql = "select max(LO_ORDERDATE) from ssb.lineorder";
             List<OLAPContext> olapContexts = 
OlapContextUtil.getOlapContexts(getProject(), sql);
diff --git a/src/query-booter/src/main/resources/log4j2.xml 
b/src/query-booter/src/main/resources/log4j2.xml
index cd292d4519..f32941c7d9 100644
--- a/src/query-booter/src/main/resources/log4j2.xml
+++ b/src/query-booter/src/main/resources/log4j2.xml
@@ -28,7 +28,7 @@
         <Logger name="io.kyligence" level="DEBUG"/>
         <Logger name="org.springframework" level="WARN"/>
         <Logger name="org.apache.kylin" level="DEBUG"/>
-        <Logger 
name="org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser" level="INFO"/>
+        <Logger name="org.apache.kylin.query.routing.QueryLayoutChooser" 
level="INFO"/>
         <Logger name="org.apache.spark.ui" level="WARN"/>
         <Logger name="org.apache.spark.sql.execution.ui" level="WARN"/>
         <Logger name="org.springframework.security" level="INFO"/>
diff --git 
a/src/query-common/src/main/java/org/apache/kylin/query/routing/Candidate.java 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/Candidate.java
index 3db016ffbf..462dbe9071 100644
--- 
a/src/query-common/src/main/java/org/apache/kylin/query/routing/Candidate.java
+++ 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/Candidate.java
@@ -61,6 +61,18 @@ public class Candidate {
 
     private final QueryableSeg queryableSeg = new QueryableSeg();
 
+    public List<NDataSegment> getPrunedSegments(NDataflow df) {
+        if (df.isStreaming()) {
+            return queryableSeg.getStreamingSegments();
+        } else {
+            return queryableSeg.getBatchSegments();
+        }
+    }
+
+    public Map<String, Set<Long>> getChSegToLayoutsMap(NDataflow df) {
+        return df.isStreaming() ? Maps.newHashMap() : 
queryableSeg.getChSegToLayoutsMap();
+    }
+
     public void setPrunedSegments(Segments<NDataSegment> prunedSegments, 
NDataflow df) {
         if (df.isStreaming()) {
             queryableSeg.setStreamingSegments(prunedSegments);
diff --git 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflowCapabilityChecker.java
 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/DataflowCapabilityChecker.java
similarity index 51%
rename from 
src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflowCapabilityChecker.java
rename to 
src/query-common/src/main/java/org/apache/kylin/query/routing/DataflowCapabilityChecker.java
index e066bbed45..28a9f94010 100644
--- 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflowCapabilityChecker.java
+++ 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/DataflowCapabilityChecker.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.kylin.metadata.cube.model;
+package org.apache.kylin.query.routing;
 
 import java.util.Collection;
 import java.util.List;
@@ -29,75 +29,73 @@ import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.guava30.shaded.common.collect.Sets;
 import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate;
 import org.apache.kylin.metadata.cube.cuboid.NLookupCandidate;
-import org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.SegmentRange;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.realization.CapabilityResult;
+import org.apache.kylin.metadata.realization.HybridRealization;
+import org.apache.kylin.metadata.realization.IRealization;
 import org.apache.kylin.metadata.realization.IRealizationCandidate;
 import org.apache.kylin.metadata.realization.SQLDigest;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
-public class NDataflowCapabilityChecker {
-    private static final Logger logger = 
LoggerFactory.getLogger(NDataflowCapabilityChecker.class);
+import lombok.extern.slf4j.Slf4j;
 
-    public static CapabilityResult check(NDataflow dataflow, 
List<NDataSegment> prunedSegments, SQLDigest digest,
-            Map<String, Set<Long>> secondStorageSegmentLayoutMap) {
-        logger.info("Matching Layout in dataflow {}, SQL digest {}", dataflow, 
digest);
+@Slf4j
+public class DataflowCapabilityChecker {
+
+    private DataflowCapabilityChecker() {
+    }
+
+    public static CapabilityResult check(NDataflow dataflow, Candidate 
candidate, SQLDigest digest) {
+        log.info("Matching Layout in dataflow {}, SQL digest {}", dataflow, 
digest);
         CapabilityResult result = new CapabilityResult();
         if (digest.limitPrecedesAggr) {
-            logger.info("Exclude NDataflow {} because there's limit preceding 
aggregation", dataflow);
+            log.info("Exclude NDataflow {} because there's limit preceding 
aggregation", dataflow);
             result.incapableCause = CapabilityResult.IncapableCause
                     .create(CapabilityResult.IncapableType.LIMIT_PRECEDE_AGGR);
             return result;
         }
 
         // 1. match joins is ensured at model select
-        String rootFactTable = dataflow.getModel().getRootFactTableName();
-        NDataModel model = dataflow.getModel();
-        if (!rootFactTable.equals(digest.factTable) && model.isFusionModel() 
&& !dataflow.isStreaming()) {
-            NDataModel streamingModel = NDataModelManager
-                    .getInstance(KylinConfig.getInstanceFromEnv(), 
dataflow.getProject())
-                    .getDataModelDesc(model.getFusionId());
-            rootFactTable = streamingModel.getRootFactTableName();
-        }
+        String factTableOfQuery = digest.factTable;
+        String modelFactTable = 
dataflow.getModel().getQueryCompatibleFactTable(factTableOfQuery);
         IRealizationCandidate chosenCandidate = null;
-        if (digest.joinDescs.isEmpty() && 
!rootFactTable.equals(digest.factTable)) {
-            logger.trace("Snapshot dataflow matching");
+        if (digest.joinDescs.isEmpty() && 
!modelFactTable.equals(factTableOfQuery)) {
+            log.trace("Snapshot dataflow matching");
             chosenCandidate = tryMatchLookup(dataflow, digest, result);
             if (chosenCandidate != null) {
-                logger.info("Matched table {} snapshot in dataflow {} ", 
digest.factTable, dataflow);
+                log.info("Matched table {} snapshot in dataflow {} ", 
factTableOfQuery, dataflow);
             }
         } else {
             // for query-on-fact-table
-            logger.trace("Normal dataflow matching");
-            NLayoutCandidate candidateAndInfluence = 
NQueryLayoutChooser.selectLayoutCandidate(dataflow, prunedSegments,
+            log.trace("Normal dataflow matching");
+            List<NDataSegment> prunedSegments = 
candidate.getPrunedSegments(dataflow);
+            Map<String, Set<Long>> secondStorageSegmentLayoutMap = 
candidate.getChSegToLayoutsMap(dataflow);
+            NLayoutCandidate candidateAndInfluence = 
QueryLayoutChooser.selectLayoutCandidate(dataflow, prunedSegments,
                     digest, secondStorageSegmentLayoutMap);
             if (candidateAndInfluence == null && 
QueryContext.current().isPartialMatchIndex()) {
                 // This branch is customized requirements
-                logger.trace("Partial dataflow matching");
-                candidateAndInfluence = 
NQueryLayoutChooser.selectPartialLayoutCandidate(dataflow, prunedSegments,
+                log.trace("Partial dataflow matching");
+                candidateAndInfluence = 
QueryLayoutChooser.selectPartialLayoutCandidate(dataflow, prunedSegments,
                         digest, secondStorageSegmentLayoutMap);
             } else if (candidateAndInfluence == null) {
-                logger.debug("select the layout candidate with high data 
integrity.");
-                candidateAndInfluence = 
NQueryLayoutChooser.selectHighIntegrityCandidate(dataflow, prunedSegments,
+                log.debug("select the layout candidate with high data 
integrity.");
+                candidateAndInfluence = 
QueryLayoutChooser.selectHighIntegrityCandidate(dataflow, prunedSegments,
                         digest);
             }
             if (candidateAndInfluence != null) {
                 chosenCandidate = candidateAndInfluence;
                 
result.influences.addAll(candidateAndInfluence.getCapabilityResult().influences);
-                logger.info("Matched layout {} snapshot in dataflow {} ", 
chosenCandidate, dataflow);
+                log.info("Matched layout {} snapshot in dataflow {} ", 
chosenCandidate, dataflow);
             }
         }
         if (chosenCandidate != null) {
             result.setCapable(true);
-            if (dataflow.isStreaming()) {
-                result.setSelectedStreamingCandidate(chosenCandidate);
-            } else {
-                result.setSelectedCandidate(chosenCandidate);
-            }
+            result.setCandidate(dataflow.isStreaming(), chosenCandidate);
             result.setCost(chosenCandidate.getCost());
         } else {
             result.setCapable(false);
@@ -113,7 +111,7 @@ public class NDataflowCapabilityChecker {
             return null;
 
         if 
(StringUtils.isEmpty(nTableMetadataManager.getTableDesc(digest.factTable).getLastSnapshotPath()))
 {
-            logger.info("Exclude NDataflow {} because snapshot of table {} 
does not exist", dataflow, digest.factTable);
+            log.info("Exclude NDataflow {} because snapshot of table {} does 
not exist", dataflow, digest.factTable);
             result.incapableCause = CapabilityResult.IncapableCause
                     .create(CapabilityResult.IncapableType.NOT_EXIST_SNAPSHOT);
             result.setCapable(false);
@@ -133,11 +131,53 @@ public class NDataflowCapabilityChecker {
         }
 
         if (!unmatchedCols.isEmpty()) {
-            logger.info("Exclude NDataflow {} because unmatched dimensions 
[{}] in Snapshot", dataflow, unmatchedCols);
+            log.info("Exclude NDataflow {} because unmatched dimensions [{}] 
in Snapshot", dataflow, unmatchedCols);
             result.incapableCause = 
CapabilityResult.IncapableCause.unmatchedDimensions(unmatchedCols);
             return null;
         } else {
             return new NLookupCandidate(digest.factTable, true);
         }
     }
+
+    public static CapabilityResult hybridRealizationCheck(HybridRealization r, 
Candidate candidate, SQLDigest digest) {
+        CapabilityResult result = new CapabilityResult();
+
+        resolveSegmentsOverlap(r, 
candidate.getQueryableSeg().getStreamingSegments());
+        for (IRealization realization : r.getRealizations()) {
+            NDataflow df = (NDataflow) realization;
+            CapabilityResult child = DataflowCapabilityChecker.check(df, 
candidate, digest);
+            result.setCandidate(df.isStreaming(), child);
+            if (child.isCapable()) {
+                result.setCost(Math.min(result.getCost(), 
child.getCost(df.isStreaming())));
+                result.setCapable(true);
+                result.influences.addAll(child.influences);
+            } else {
+                result.incapableCause = child.incapableCause;
+            }
+        }
+
+        result.setCost(result.getCost() - 1); // let the hybrid win over its children
+
+        return result;
+    }
+
+    // Use batch segments when batch and stream segments overlap, for example:
+    // batch segments: seg1['2012-01-01', '2012-02-01'], seg2['2012-02-01', '2012-03-01']
+    // stream segments: seg3['2012-02-01', '2012-03-01'], seg4['2012-03-01', '2012-04-01']
+    // the chosen segments are: [seg1, seg2, seg4]
+    private static void resolveSegmentsOverlap(HybridRealization realization,
+            List<NDataSegment> prunedStreamingSegments) {
+        long end = realization.getBatchRealization().getDateRangeEnd();
+        if (end != Long.MIN_VALUE) {
+            String segments = prunedStreamingSegments.toString();
+            log.info("Before resolve segments overlap between batch and stream 
of fusion model: {}", segments);
+            SegmentRange.BasicSegmentRange range = new 
SegmentRange.KafkaOffsetPartitionedSegmentRange(end,
+                    Long.MAX_VALUE);
+            List<NDataSegment> list = ((NDataflow) 
realization.getStreamingRealization())
+                    .getQueryableSegmentsByRange(range);
+            prunedStreamingSegments.removeIf(seg -> !list.contains(seg));
+            segments = prunedStreamingSegments.toString();
+            log.info("After resolve segments overlap between batch and stream 
of fusion model: {}", segments);
+        }
+    }
 }
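
For reference, the rule documented in resolveSegmentsOverlap amounts to keeping only the streaming segments that extend past the batch date-range end. Below is a self-contained sketch of that rule; class and field names are invented for illustration (not Kylin API), and the patch's range query is approximated by a simple end-time check.

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for a segment's time range; illustrative only.
class Seg {
    final String name;
    final long start;
    final long end;

    Seg(String name, long start, long end) {
        this.name = name;
        this.start = start;
        this.end = end;
    }

    @Override
    public String toString() {
        return name;
    }
}

public class OverlapResolutionSketch {
    // Keep only streaming segments that reach beyond the batch date-range end,
    // i.e. batch data wins wherever the two kinds of segments overlap.
    static List<Seg> resolve(long batchRangeEnd, List<Seg> streamingSegments) {
        List<Seg> kept = new ArrayList<>();
        for (Seg seg : streamingSegments) {
            if (seg.end > batchRangeEnd) {
                kept.add(seg);
            }
        }
        return kept;
    }

    public static void main(String[] args) {
        // streaming seg3 ends exactly at the batch end (3), so only seg4 survives
        List<Seg> streaming = List.of(new Seg("seg3", 2, 3), new Seg("seg4", 3, 4));
        System.out.println(resolve(3, streaming)); // [seg4]
    }
}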
diff --git 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/NQueryLayoutChooser.java
 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/QueryLayoutChooser.java
similarity index 95%
rename from 
src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/NQueryLayoutChooser.java
rename to 
src/query-common/src/main/java/org/apache/kylin/query/routing/QueryLayoutChooser.java
index 4b28ad7c7a..77fea3c143 100644
--- 
a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/NQueryLayoutChooser.java
+++ 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/QueryLayoutChooser.java
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.kylin.metadata.cube.cuboid;
+package org.apache.kylin.query.routing;
 
 import java.util.Collection;
 import java.util.Comparator;
@@ -35,6 +35,10 @@ import org.apache.kylin.guava30.shaded.common.collect.Lists;
 import org.apache.kylin.guava30.shaded.common.collect.Maps;
 import org.apache.kylin.guava30.shaded.common.collect.Ordering;
 import org.apache.kylin.guava30.shaded.common.collect.Sets;
+import org.apache.kylin.metadata.cube.cuboid.ChooserContext;
+import org.apache.kylin.metadata.cube.cuboid.ComparatorUtils;
+import org.apache.kylin.metadata.cube.cuboid.IndexMatcher;
+import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
 import org.apache.kylin.metadata.cube.model.LayoutEntity;
 import org.apache.kylin.metadata.cube.model.NDataLayout;
@@ -52,9 +56,9 @@ import lombok.var;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
-public class NQueryLayoutChooser {
+public class QueryLayoutChooser {
 
-    private NQueryLayoutChooser() {
+    private QueryLayoutChooser() {
     }
 
     public static NLayoutCandidate selectPartialLayoutCandidate(NDataflow 
dataflow, List<NDataSegment> prunedSegments,
@@ -66,10 +70,10 @@ public class NQueryLayoutChooser {
             if (candidate == null) {
                 candidate = selectLayoutCandidate(dataflow, 
Lists.newArrayList(segment), sqlDigest,
                         chSegmentToLayoutsMap);
-                if (candidate == null) {
-                    toRemovedSegments.add(segment);
-                }
-            } else if 
(segment.getSegDetails().getLayoutById(candidate.getLayoutEntity().getId()) == 
null) {
+            }
+
+            long layoutId = candidate == null ? -1L : 
candidate.getLayoutEntity().getId();
+            if (segment.getSegDetails().getLayoutById(layoutId) == null) {
                 toRemovedSegments.add(segment);
             }
         }
@@ -110,7 +114,7 @@ public class NQueryLayoutChooser {
             }
 
             if (!matchResult.isMatched()) {
-                log.trace("The [{}] cannot match with the {}", 
chooserContext.sqlDigest.toString(), layout);
+                log.trace("The [{}] cannot match with the {}", 
chooserContext.getSqlDigest().toString(), layout);
                 continue;
             }
 
@@ -137,7 +141,7 @@ public class NQueryLayoutChooser {
     private static long[] calcSegRangeAndMaxEnd(ChooserContext chooserContext, 
NDataflow df,
             List<NDataLayout> dataLayouts) {
         long[] rangeAndLatest = new long[2];
-        if (!chooserContext.getKylinConfig().isVacantIndexPruningEnabled()) {
+        if 
(!QueryRouter.isVacantIndexPruningEnabled(chooserContext.getKylinConfig())) {
             return rangeAndLatest;
         }
         List<String> segmentNameList = Lists.newArrayList();
@@ -155,7 +159,7 @@ public class NQueryLayoutChooser {
 
     public static NLayoutCandidate selectHighIntegrityCandidate(NDataflow 
dataflow, List<NDataSegment> prunedSegments,
             SQLDigest digest) {
-        if 
(!NProjectManager.getProjectConfig(dataflow.getProject()).isVacantIndexPruningEnabled())
 {
+        if 
(!QueryRouter.isVacantIndexPruningEnabled(NProjectManager.getProjectConfig(dataflow.getProject())))
 {
             return null;
         }
         if (CollectionUtils.isEmpty(prunedSegments)) {
@@ -176,7 +180,7 @@ public class NQueryLayoutChooser {
             });
         }
 
-        List<NLayoutCandidate> allLayoutCandidates = 
NQueryLayoutChooser.collectAllLayoutCandidates(dataflow,
+        List<NLayoutCandidate> allLayoutCandidates = 
QueryLayoutChooser.collectAllLayoutCandidates(dataflow,
                 chooserContext, idToDataLayoutsMap);
         return chooseBestLayoutCandidate(dataflow, digest, chooserContext, 
allLayoutCandidates,
                 "selectHighIntegrityCandidate");
@@ -238,7 +242,7 @@ public class NQueryLayoutChooser {
             SQLDigest sqlDigest) {
         List<Integer> filterColIds = getFilterColIds(chooserContext, 
sqlDigest);
         List<Integer> nonFilterColIds = getNonFilterColIds(chooserContext, 
sqlDigest);
-        Ordering<NLayoutCandidate> ordering = 
chooserContext.getKylinConfig().isVacantIndexPruningEnabled()
+        Ordering<NLayoutCandidate> ordering = 
QueryRouter.isVacantIndexPruningEnabled(chooserContext.getKylinConfig())
                 ? getEnhancedSorter(filterColIds, nonFilterColIds)
                 : getDefaultSorter(filterColIds, nonFilterColIds);
         candidates.sort(ordering);
diff --git 
a/src/query-common/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
index 3222c8dec6..153da8540b 100644
--- 
a/src/query-common/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
+++ 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/QueryRouter.java
@@ -20,19 +20,40 @@ package org.apache.kylin.query.routing;
 
 import java.util.Comparator;
 import java.util.List;
+import java.util.Set;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.guava30.shaded.common.collect.Lists;
 import org.apache.kylin.guava30.shaded.common.collect.Ordering;
+import org.apache.kylin.guava30.shaded.common.collect.Sets;
 import org.apache.kylin.metadata.project.NProjectManager;
 
 import lombok.Getter;
 
 public class QueryRouter {
 
+    public static final String USE_VACANT_INDEXES = "use-vacant-indexes";
+
     private QueryRouter() {
     }
 
+    private static Set<String> getPruningRules(KylinConfig config) {
+        String queryIndexMatchRules = config.getQueryIndexMatchRules();
+        String[] splitRules = queryIndexMatchRules.split(",");
+        Set<String> configRules = Sets.newHashSet();
+        for (String splitRule : splitRules) {
+            if (StringUtils.isNotBlank(splitRule)) {
+                configRules.add(StringUtils.lowerCase(splitRule.trim()));
+            }
+        }
+        return configRules;
+    }
+
+    public static boolean isVacantIndexPruningEnabled(KylinConfig config) {
+        return getPruningRules(config).contains(USE_VACANT_INDEXES);
+    }
+
     public static void applyRules(Candidate candidate) {
         Strategy pruningStrategy = 
getStrategy(candidate.getCtx().olapSchema.getProjectName());
         for (PruningRule r : pruningStrategy.getRules()) {
@@ -70,7 +91,7 @@ public class QueryRouter {
             rules.add(SEGMENT_PRUNING);
             rules.add(PARTITION_PRUNING);
             rules.add(REMOVE_INCAPABLE_REALIZATIONS);
-            if (config.isVacantIndexPruningEnabled()) {
+            if (QueryRouter.isVacantIndexPruningEnabled(config)) {
                 rules.add(VACANT_INDEX_PRUNING);
             }
 
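
To make the new config wiring concrete: the comma-separated value of "kylin.query.index-match-rules" is split, trimmed, lower-cased, and then checked for the "use-vacant-indexes" token. A standalone sketch of that parsing follows; the class and method names are invented for the example.

import java.util.Arrays;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;

public class PruningRulesSketch {
    static final String USE_VACANT_INDEXES = "use-vacant-indexes";

    // Normalize the raw config string into a set of rule tokens.
    static Set<String> parseRules(String rawValue) {
        return Arrays.stream(rawValue.split(","))
                .map(String::trim)
                .filter(s -> !s.isEmpty())
                .map(s -> s.toLowerCase(Locale.ROOT))
                .collect(Collectors.toSet());
    }

    public static void main(String[] args) {
        Set<String> rules = parseRules(" Use-Vacant-Indexes , ");
        System.out.println(rules.contains(USE_VACANT_INDEXES)); // true
    }
}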
diff --git 
a/src/query-common/src/main/java/org/apache/kylin/query/routing/RemoveIncapableRealizationsRule.java
 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/RemoveIncapableRealizationsRule.java
index 29886fb4aa..188d53e567 100644
--- 
a/src/query-common/src/main/java/org/apache/kylin/query/routing/RemoveIncapableRealizationsRule.java
+++ 
b/src/query-common/src/main/java/org/apache/kylin/query/routing/RemoveIncapableRealizationsRule.java
@@ -20,7 +20,11 @@ package org.apache.kylin.query.routing;
 
 import org.apache.kylin.guava30.shaded.common.collect.BiMap;
 import org.apache.kylin.guava30.shaded.common.collect.HashBiMap;
+import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.realization.CapabilityResult;
+import org.apache.kylin.metadata.realization.HybridRealization;
+import org.apache.kylin.metadata.realization.IRealization;
+import org.apache.kylin.metadata.realization.SQLDigest;
 import org.apache.kylin.query.util.ComputedColumnRewriter;
 import org.apache.kylin.query.util.QueryAliasMatchInfo;
 
@@ -36,23 +40,33 @@ public class RemoveIncapableRealizationsRule extends 
PruningRule {
             return;
         }
         candidate.getCtx().resetSQLDigest();
-        CapabilityResult capability = 
candidate.getRealization().isCapable(candidate.getCtx().getSQLDigest(),
-                candidate.getQueryableSeg());
+        CapabilityResult capability = getCapabilityResult(candidate);
 
-        if (!capability.isCapable() && 
!candidate.getRealization().getModel().getComputedColumnDescs().isEmpty()) {
+        IRealization realization = candidate.getRealization();
+        if (!capability.isCapable() && 
!realization.getModel().getComputedColumnDescs().isEmpty()) {
             log.info("{}({}/{}): try rewrite computed column and then check 
whether the realization is capable.",
-                    this.getClass().getName(), 
candidate.getRealization().getProject(),
-                    candidate.getRealization().getCanonicalName());
+                    this.getClass().getName(), realization.getProject(), 
realization.getCanonicalName());
             BiMap<String, String> aliasMapping = HashBiMap.create();
             aliasMapping.putAll(candidate.getMatchedJoinsGraphAliasMap());
-            ComputedColumnRewriter.rewriteCcInnerCol(candidate.getCtx(), 
candidate.getRealization().getModel(),
+            ComputedColumnRewriter.rewriteCcInnerCol(candidate.getCtx(), 
realization.getModel(),
                     new QueryAliasMatchInfo(aliasMapping, null));
             candidate.getCtx().resetSQLDigest();
-            capability = 
candidate.getRealization().isCapable(candidate.getCtx().getSQLDigest(),
-                    candidate.getQueryableSeg());
+            capability = getCapabilityResult(candidate);
         }
 
         candidate.setCapability(capability);
     }
 
+    private CapabilityResult getCapabilityResult(Candidate candidate) {
+        IRealization realization = candidate.getRealization();
+        SQLDigest sqlDigest = candidate.getCtx().getSQLDigest();
+        CapabilityResult capability;
+        if (realization instanceof HybridRealization) {
+            capability = 
DataflowCapabilityChecker.hybridRealizationCheck((HybridRealization) 
realization, candidate,
+                    sqlDigest);
+        } else {
+            capability = DataflowCapabilityChecker.check((NDataflow) 
realization, candidate, sqlDigest);
+        }
+        return capability;
+    }
 }
diff --git 
a/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java 
b/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
index 2641f9eab9..01860665d0 100644
--- 
a/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
+++ 
b/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
@@ -55,6 +55,8 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.util.CollectionUtil;
+import org.apache.kylin.guava30.shaded.common.collect.Iterables;
+import org.apache.kylin.guava30.shaded.common.collect.Lists;
 import org.apache.kylin.measure.topn.TopNMeasureType;
 import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
@@ -67,7 +69,6 @@ import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.util.ComputedColumnUtil;
 import org.apache.kylin.metadata.project.NProjectManager;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.query.QueryExtension;
 import org.apache.kylin.query.enumerator.OLAPQuery;
@@ -76,9 +77,6 @@ import org.apache.kylin.rest.constant.Constant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.kylin.guava30.shaded.common.collect.Iterables;
-import org.apache.kylin.guava30.shaded.common.collect.Lists;
-
 import lombok.val;
 
 /**
@@ -194,25 +192,23 @@ public class OLAPTable extends AbstractQueryableTable 
implements TranslatableTab
         return this.rowType;
     }
 
-    @SuppressWarnings("deprecation")
     private RelDataType deriveRowType(RelDataTypeFactory typeFactory) {
-        //TODO Add Fluid API to build a list of fields, TypeFactory.Builder
         KylinRelDataTypeFactoryImpl kylinRelDataTypeFactory = new 
KylinRelDataTypeFactoryImpl(typeFactory);
         List<String> fieldNameList = Lists.newArrayList();
         List<RelDataType> typeList = Lists.newArrayList();
         List<KylinRelDataTypeFieldImpl.ColumnType> colTypes = 
Lists.newArrayList();
+        KylinConfig config = this.sourceTable != null //
+                ? 
NProjectManager.getProjectConfig(this.sourceTable.getProject())
+                : KylinConfig.getInstanceFromEnv();
         for (ColumnDesc column : sourceColumns) {
             RelDataType sqlType = createSqlType(kylinRelDataTypeFactory, 
column.getUpgradedType(), column.isNullable());
             sqlType = SqlTypeUtil.addCharsetAndCollation(sqlType, 
kylinRelDataTypeFactory);
             typeList.add(sqlType);
-            String project = this.sourceTable != null ? 
this.sourceTable.getProject() : null;
-            KylinConfig projectKylinConfig = StringUtils.isNotEmpty(project)
-                    ? 
NProjectManager.getInstance(KylinConfig.getInstanceFromEnv()).getProject(project).getConfig()
-                    : KylinConfig.getInstanceFromEnv();
-            String columnName = 
projectKylinConfig.getSourceNameCaseSensitiveEnabled()
-                    ? StringUtils.isNotEmpty(column.getCaseSensitiveName()) ? 
column.getCaseSensitiveName()
-                            : column.getName()
-                    : column.getName();
+
+            String columnName = config.getSourceNameCaseSensitiveEnabled()
+                    && StringUtils.isNotBlank(column.getCaseSensitiveName()) //
+                            ? column.getCaseSensitiveName()
+                            : column.getName();
             if (column.isComputedColumn()) {
                 fieldNameList.add(columnName);
                 colTypes.add(KylinRelDataTypeFieldImpl.ColumnType.CC_FIELD);
@@ -260,11 +256,10 @@ public class OLAPTable extends AbstractQueryableTable 
implements TranslatableTab
             return allColumns;
         }
 
-        ProjectInstance projectInstance = 
NProjectManager.getInstance(olapSchema.getConfig())
-                .getProject(sourceTable.getProject());
+        KylinConfig projectConfig = 
NProjectManager.getProjectConfig(sourceTable.getProject());
         NDataflowManager dataflowManager = 
NDataflowManager.getInstance(olapSchema.getConfig(),
                 sourceTable.getProject());
-        if 
(projectInstance.getConfig().useTableIndexAnswerSelectStarEnabled()) {
+        if (projectConfig.useTableIndexAnswerSelectStarEnabled()) {
             Set<ColumnDesc> exposeColumnDescSet = new HashSet<>();
             String tableName = sourceTable.getIdentity();
             List<NDataModel> modelList = modelsMap.get(tableName);
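
The simplified column-name selection in deriveRowType above follows one small rule: use the case-sensitive name only when the project enables source name case sensitivity and the column actually carries one. A minimal illustration, with invented names ("caseSensitiveEnabled" stands in for kylin.source.name-case-sensitive-enabled):

public class ColumnNameSketch {

    static String chooseColumnName(boolean caseSensitiveEnabled, String caseSensitiveName, String name) {
        // Fall back to the plain name when the feature is off or no case-sensitive name is set.
        return caseSensitiveEnabled && caseSensitiveName != null && !caseSensitiveName.isBlank()
                ? caseSensitiveName
                : name;
    }

    public static void main(String[] args) {
        System.out.println(chooseColumnName(true, "OrderDate", "ORDERDATE"));  // OrderDate
        System.out.println(chooseColumnName(false, "OrderDate", "ORDERDATE")); // ORDERDATE
        System.out.println(chooseColumnName(true, " ", "ORDERDATE"));          // ORDERDATE
    }
}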
diff --git 
a/src/query/src/test/java/org/apache/kylin/query/routing/LayoutCandidateSortTest.java
 
b/src/query/src/test/java/org/apache/kylin/query/routing/LayoutCandidateSortTest.java
index 0666ef3a72..3f7e1c7e76 100644
--- 
a/src/query/src/test/java/org/apache/kylin/query/routing/LayoutCandidateSortTest.java
+++ 
b/src/query/src/test/java/org/apache/kylin/query/routing/LayoutCandidateSortTest.java
@@ -30,7 +30,6 @@ import org.apache.kylin.guava30.shaded.common.collect.Lists;
 import org.apache.kylin.guava30.shaded.common.collect.Maps;
 import org.apache.kylin.junit.annotation.MetadataInfo;
 import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate;
-import org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
 import org.apache.kylin.metadata.cube.model.LayoutEntity;
 import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
@@ -51,7 +50,7 @@ class LayoutCandidateSortTest {
     void testPreferAggComparator() {
         MockEntity mock1 = new MockEntity(IndexEntity.TABLE_INDEX_START_ID + 
1, ImmutableList.of(1), 2);
         MockEntity mock2 = new MockEntity(1L, ImmutableList.of(1), 3);
-        assertSortedResult(1L, NQueryLayoutChooser.preferAggComparator(), 
mock1, mock2);
+        assertSortedResult(1L, QueryLayoutChooser.preferAggComparator(), 
mock1, mock2);
     }
 
     @Test
@@ -59,7 +58,7 @@ class LayoutCandidateSortTest {
         MockEntity mock1 = new MockEntity(IndexEntity.TABLE_INDEX_START_ID + 
1, ImmutableList.of(1, 2), 5000, 2000);
         MockEntity mock2 = new MockEntity(IndexEntity.TABLE_INDEX_START_ID + 
IndexEntity.INDEX_ID_STEP + 1,
                 ImmutableList.of(1, 3), 2000, 2000);
-        assertSortedResult(IndexEntity.TABLE_INDEX_START_ID + 1, 
NQueryLayoutChooser.segmentRangeComparator(), mock1,
+        assertSortedResult(IndexEntity.TABLE_INDEX_START_ID + 1, 
QueryLayoutChooser.segmentRangeComparator(), mock1,
                 mock2);
     }
 
@@ -68,7 +67,7 @@ class LayoutCandidateSortTest {
         MockEntity mock1 = new MockEntity(IndexEntity.TABLE_INDEX_START_ID + 
1, ImmutableList.of(1, 2), 1000, 3000);
         MockEntity mock2 = new MockEntity(IndexEntity.TABLE_INDEX_START_ID + 
IndexEntity.INDEX_ID_STEP + 1,
                 ImmutableList.of(1, 3), 1000, 2000);
-        assertSortedResult(IndexEntity.TABLE_INDEX_START_ID + 1, 
NQueryLayoutChooser.segmentEffectivenessComparator(),
+        assertSortedResult(IndexEntity.TABLE_INDEX_START_ID + 1, 
QueryLayoutChooser.segmentEffectivenessComparator(),
                 mock1, mock2);
     }
 
@@ -77,7 +76,7 @@ class LayoutCandidateSortTest {
         MockEntity mock1 = new MockEntity(1L, ImmutableList.of(1, 2), 90);
         MockEntity mock2 = new MockEntity(IndexEntity.INDEX_ID_STEP + 1L, 
ImmutableList.of(1, 4), 30);
         MockEntity mock3 = new MockEntity(2 * IndexEntity.INDEX_ID_STEP + 1L, 
ImmutableList.of(1, 5), 10);
-        assertSortedResult(2 * IndexEntity.INDEX_ID_STEP + 1L, 
NQueryLayoutChooser.rowSizeComparator(), mock1, mock2,
+        assertSortedResult(2 * IndexEntity.INDEX_ID_STEP + 1L, 
QueryLayoutChooser.rowSizeComparator(), mock1, mock2,
                 mock3);
     }
 
@@ -89,7 +88,7 @@ class LayoutCandidateSortTest {
                 ImmutableMap.of(3, mockDeriveInfo));
         MockEntity mock3 = new MockEntity(IndexEntity.INDEX_ID_STEP + 1L, 
ImmutableList.of(1, 3), ImmutableMap.of());
 
-        Comparator<NLayoutCandidate> comparator = 
NQueryLayoutChooser.derivedLayoutComparator();
+        Comparator<NLayoutCandidate> comparator = 
QueryLayoutChooser.derivedLayoutComparator();
 
         // both not empty, choose the first one
         assertSortedResult(1L, comparator, mock1, mock2);
@@ -117,10 +116,10 @@ class LayoutCandidateSortTest {
 
             // all layout candidates have shardBy column
             List<Integer> sortedFilters = Lists.newArrayList(2, 1, 0);
-            assertSortedResult(2L, 
NQueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(2L, 
QueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
 
             sortedFilters = Lists.newArrayList(2, 0, 1);
-            assertSortedResult(1L, 
NQueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(1L, 
QueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
         }
 
         {
@@ -129,7 +128,7 @@ class LayoutCandidateSortTest {
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(1, 0, 2), 
ImmutableList.of(), ImmutableList.of(1));
 
             List<Integer> sortedFilters = Lists.newArrayList(2, 1, 0);
-            assertSortedResult(2L, 
NQueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(2L, 
QueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
         }
 
         {
@@ -138,7 +137,7 @@ class LayoutCandidateSortTest {
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(1, 0, 2), 
ImmutableList.of(), ImmutableList.of());
 
             List<Integer> sortedFilters = Lists.newArrayList(2, 1, 0);
-            assertSortedResult(1L, 
NQueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(1L, 
QueryLayoutChooser.shardByComparator(sortedFilters), mock1, mock2);
         }
     }
 
@@ -149,10 +148,10 @@ class LayoutCandidateSortTest {
             MockEntity mock1 = new MockEntity(1L, ImmutableList.of(1, 2, 3), 
ImmutableList.of(), ImmutableList.of(1));
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(2, 1, 3), 
ImmutableList.of(), ImmutableList.of(2));
             List<Integer> sortedFilters = Lists.newArrayList(1, 2, 3);
-            assertSortedResult(1L, 
NQueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(1L, 
QueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
 
             sortedFilters = Lists.newArrayList(3, 2, 1);
-            assertSortedResult(2L, 
NQueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(2L, 
QueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
         }
 
         {
@@ -160,7 +159,7 @@ class LayoutCandidateSortTest {
             MockEntity mock1 = new MockEntity(1L, ImmutableList.of(2, 1, 3), 
ImmutableList.of(), ImmutableList.of());
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(1, 2, 3), 
ImmutableList.of(), ImmutableList.of(1));
             List<Integer> sortedFilters = Lists.newArrayList(1, 2, 3);
-            assertSortedResult(2L, 
NQueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(2L, 
QueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
         }
 
         {
@@ -168,7 +167,7 @@ class LayoutCandidateSortTest {
             MockEntity mock1 = new MockEntity(1L, ImmutableList.of(2, 1, 3), 
ImmutableList.of(), ImmutableList.of(2));
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(1, 2, 3), 
ImmutableList.of(), ImmutableList.of());
             List<Integer> sortedFilters = Lists.newArrayList(1, 2, 3);
-            assertSortedResult(1L, 
NQueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(1L, 
QueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
         }
 
         {
@@ -176,7 +175,7 @@ class LayoutCandidateSortTest {
             MockEntity mock1 = new MockEntity(1L, ImmutableList.of(2, 1, 3), 
ImmutableMap.of());
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(1, 2, 3), 
ImmutableMap.of());
             List<Integer> sortedFilters = Lists.newArrayList(1, 2, 3);
-            assertSortedResult(2L, 
NQueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(2L, 
QueryLayoutChooser.filterColumnComparator(sortedFilters), mock1, mock2);
         }
     }
 
@@ -186,14 +185,14 @@ class LayoutCandidateSortTest {
             MockEntity mock1 = new MockEntity(1L, ImmutableList.of(2, 1, 3), 
ImmutableMap.of());
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(1, 2, 3), 
ImmutableMap.of());
             List<Integer> sortedFilters = Lists.newArrayList(1, 2, 3);
-            assertSortedResult(2L, 
NQueryLayoutChooser.nonFilterColumnComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(2L, 
QueryLayoutChooser.nonFilterColumnComparator(sortedFilters), mock1, mock2);
         }
 
         {
             MockEntity mock1 = new MockEntity(1L, ImmutableList.of(2, 1, 3), 
ImmutableMap.of());
             MockEntity mock2 = new MockEntity(2L, ImmutableList.of(1, 2, 3), 
ImmutableMap.of());
             List<Integer> sortedFilters = Lists.newArrayList(2, 1, 3);
-            assertSortedResult(1L, 
NQueryLayoutChooser.nonFilterColumnComparator(sortedFilters), mock1, mock2);
+            assertSortedResult(1L, 
QueryLayoutChooser.nonFilterColumnComparator(sortedFilters), mock1, mock2);
         }
     }
 
@@ -202,7 +201,7 @@ class LayoutCandidateSortTest {
         MockEntity mock1 = new MockEntity(1L, ImmutableList.of(0), 
ImmutableList.of(100_000, 100_001));
         MockEntity mock2 = new MockEntity(10_001, ImmutableList.of(0), 
ImmutableList.of(100_000));
         List<NLayoutCandidate> layoutCandidates = mockLayouts(mock1, mock2);
-        layoutCandidates.sort(NQueryLayoutChooser.measureSizeComparator());
+        layoutCandidates.sort(QueryLayoutChooser.measureSizeComparator());
         Assertions.assertEquals(10_001L, 
layoutCandidates.get(0).getLayoutEntity().getId());
     }
 
@@ -211,7 +210,7 @@ class LayoutCandidateSortTest {
         MockEntity mock1 = new MockEntity(1L, ImmutableList.of(0, 1, 2), 
ImmutableList.of());
         MockEntity mock2 = new MockEntity(10_001L, ImmutableList.of(0, 1, 2, 
3), ImmutableList.of());
         List<NLayoutCandidate> layoutCandidates = mockLayouts(mock1, mock2);
-        layoutCandidates.sort(NQueryLayoutChooser.dimensionSizeComparator());
+        layoutCandidates.sort(QueryLayoutChooser.dimensionSizeComparator());
         Assertions.assertEquals(1L, 
layoutCandidates.get(0).getLayoutEntity().getId());
     }
 
diff --git 
a/src/query/src/test/java/org/apache/kylin/query/routing/QueryRouterTest.java 
b/src/query/src/test/java/org/apache/kylin/query/routing/QueryRouterTest.java
index 6f5fc71a7d..6fe463e0bb 100644
--- 
a/src/query/src/test/java/org/apache/kylin/query/routing/QueryRouterTest.java
+++ 
b/src/query/src/test/java/org/apache/kylin/query/routing/QueryRouterTest.java
@@ -20,7 +20,6 @@ package org.apache.kylin.query.routing;
 
 import java.util.List;
 
-import org.apache.kylin.common.KylinConfigBase;
 import org.apache.kylin.guava30.shaded.common.collect.Lists;
 import org.apache.kylin.guava30.shaded.common.collect.Maps;
 import org.apache.kylin.junit.annotation.MetadataInfo;
@@ -106,19 +105,19 @@ class QueryRouterTest {
     void testSortWithVacantPruningRule() {
         // This property does affect the sorting of candidates in different 
models.
         MetadataTestUtils.updateProjectConfig("default", 
"kylin.query.index-match-rules",
-                KylinConfigBase.USE_VACANT_INDEXES);
+                QueryRouter.USE_VACANT_INDEXES);
         testSort();
     }
 
     @Test
     void testTableIndexAnswerSelectStar() {
-        MetadataTestUtils.updateProjectConfig("default", 
"kylin.query.index-match-rules",
-                KylinConfigBase.USE_TABLE_INDEX_ANSWER_SELECT_STAR);
+        String useTableIndexAnswerSelectStar = 
"kylin.query.use-tableindex-answer-select-star.enabled";
+        MetadataTestUtils.updateProjectConfig("default", 
useTableIndexAnswerSelectStar, "true");
         Candidate c1 = CandidateTestUtils.mockCandidate("model0001", "modelA", 
2, 1, 1);
         Candidate c2 = CandidateTestUtils.mockCandidate("model0002", "modelB", 
1, 1, 2);
         assertSortedResults(c1, Lists.newArrayList(c1, c2));
 
-        MetadataTestUtils.updateProjectConfig("default", 
"kylin.query.index-match-rules", "");
+        MetadataTestUtils.updateProjectConfig("default", 
useTableIndexAnswerSelectStar, "false");
         assertSortedResults(c2, Lists.newArrayList(c1, c2));
     }
 
diff --git a/src/server/src/main/resources/log4j2.xml 
b/src/server/src/main/resources/log4j2.xml
index 4f3e36988e..d3b9e32a6e 100644
--- a/src/server/src/main/resources/log4j2.xml
+++ b/src/server/src/main/resources/log4j2.xml
@@ -28,7 +28,7 @@
         <Logger name="io.kyligence" level="INFO"/>
         <Logger name="org.springframework" level="INFO"/>
         <Logger name="org.apache.kylin" level="DEBUG"/>
-        <Logger 
name="org.apache.kylin.metadata.cube.cuboid.NQueryLayoutChooser" level="INFO"/>
+        <Logger name="org.apache.kylin.query.routing.QueryLayoutChooser" 
level="INFO"/>
         <Logger name="org.apache.spark.ui" level="WARN"/>
         <Logger name="org.apache.spark.sql.execution.ui" level="WARN"/>
         <Logger name="org.springframework.security" level="WARN"/>
diff --git 
a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala
 
b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala
index 3e6d0b1933..61ca3307b2 100644
--- 
a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala
+++ 
b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala
@@ -18,7 +18,6 @@
 
 package org.apache.kylin.query.util
 
-import org.apache.kylin.common.KylinConfig
 import org.apache.kylin.common.util.ImmutableBitSet
 import org.apache.kylin.metadata.datatype.DataType
 import org.apache.kylin.metadata.model.DeriveInfo.DeriveType
@@ -97,8 +96,7 @@ object RuntimeHelper extends Logging {
       }.toMap
     }
 
-    val projectInstance = 
NProjectManager.getInstance(KylinConfig.getInstanceFromEnv)
-      .getProject(derivedUtil.model.getProject)
+    val projectConfig = 
NProjectManager.getProjectConfig(derivedUtil.model.getProject)
     // may have multi TopN measures.
     val topNIndexs = 
sourceSchema.fields.map(_.dataType).zipWithIndex.filter(_._1.isInstanceOf[ArrayType])
     allColumns.indices
@@ -120,8 +118,7 @@ object RuntimeHelper extends Logging {
               if (hasTopN && topNIndexs.map(_._2).contains(gTInfoIndex)) {
                 // topn measure will be erase when calling inline
                 literalOne.as(s"${factTableName}_${columnName}")
-              } else if 
(projectInstance.getConfig.useTableIndexAnswerSelectStarEnabled()
-                && gTInfoIndex < 0) {
+              } else if (projectConfig.useTableIndexAnswerSelectStarEnabled() 
&& gTInfoIndex < 0) {
                 if (column.getColumnDesc.getType.isNumberFamily) {
                   literalZero.as(s"${factTableName}_${columnName}")
                 } else {