This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit dc5f2951f1c0791d62afcd0c741e2e08d8cc958c
Author: Zhichao Zhang <441586...@qq.com>
AuthorDate: Fri Jan 15 00:33:22 2021 +0800

    Fix test cases and the system-cube.sh script
    
    Refine the system cube descs to reduce the cuboid count
    
    Invalidate queryExecutionMetricsMap on MetricsSystem shutdown
    
    Change the SUM measure return type from Decimal to Double
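    
    Why the refined descs are smaller: in an aggregation group, every
    dimension left "normal" doubles the cuboid count, a mandatory
    dimension adds nothing, a joint group collapses to a single
    dimension, and a hierarchy of h levels contributes roughly h + 1
    choices. A minimal sketch of that arithmetic (a hypothetical helper
    for illustration only, not Kylin code; the rule paraphrases Kylin's
    aggregation-group documentation):
    
        // Rough per-aggregation-group cuboid estimate (illustrative).
        // Assumptions: mandatory dims do not multiply the count, each
        // joint group behaves as one dimension, and a hierarchy of h
        // levels contributes h + 1 choices.
        class CuboidCountSketch {
            static long estimate(int normalDims, int jointGroups, int[] hierarchySizes) {
                long count = 1L << (normalDims + jointGroups);
                for (int h : hierarchySizes) {
                    count *= h + 1;
                }
                return count;
            }
    
            public static void main(String[] args) {
                // e.g. no normal dims, 5 joint groups, one 4-level time
                // hierarchy: 2^(0 + 5) * (4 + 1) = 160 cuboids, far below
                // the flat 2^n of leaving all n raw dimensions normal.
                System.out.println(estimate(0, 5, new int[] { 4 }));
            }
        }
    
    Under those assumptions, the refined QUERY_EXECUTION desc (one
    mandatory dim, five joint groups, one time hierarchy) stays in the
    low hundreds of cuboids.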
---
 .github/workflows/maven.yml                        |   2 +-
 build/bin/system-cube.sh                           |  20 +-
 .../apache/kylin/metrics/QuerySparkMetrics.java    |   6 +
 .../kylin/metrics/lib/impl/MetricsSystem.java      |   2 +
 .../kylin/rest/controller/CubeController.java      |   9 +-
 .../kylin/rest/response/SQLResponseTest.java       |   9 +-
 .../kylin/rest/metrics/QueryMetricsTest.java       |  23 ---
 .../tool/metrics/systemcube/CubeDescCreator.java   | 206 +++++++++++++++------
 webapp/app/js/services/cubes.js                    |   4 +-
 9 files changed, 180 insertions(+), 101 deletions(-)

diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml
index 77ff0a9..3bec681 100644
--- a/.github/workflows/maven.yml
+++ b/.github/workflows/maven.yml
@@ -38,4 +38,4 @@ jobs:
       with:
         java-version: 1.8
     - name: Build with Maven
-      run: mvn -B package --file pom.xml
+      run: mvn clean -Dpre-commit apache-rat:check test -Dlicense.skip=false
diff --git a/build/bin/system-cube.sh b/build/bin/system-cube.sh
index ca35970..3ca9b40 100644
--- a/build/bin/system-cube.sh
+++ b/build/bin/system-cube.sh
@@ -38,9 +38,9 @@ OUTPUT_FORDER=$KYLIN_HOME/system_cube
 KYLIN_ENV=`grep "^kylin.env=" $KYLIN_HOME/conf/kylin.properties | cut -d "=" -f 2`
 KYLIN_ENV=${KYLIN_ENV:-"QA"}
 
-SC_NAME_1="KYLIN_HIVE_METRICS_QUERY_${KYLIN_ENV}"
-SC_NAME_2="KYLIN_HIVE_METRICS_QUERY_CUBE_${KYLIN_ENV}"
-SC_NAME_3="KYLIN_HIVE_METRICS_QUERY_RPC_${KYLIN_ENV}"
+SC_NAME_1="KYLIN_HIVE_METRICS_QUERY_EXECUTION_${KYLIN_ENV}"
+SC_NAME_2="KYLIN_HIVE_METRICS_QUERY_SPARK_JOB_${KYLIN_ENV}"
+SC_NAME_3="KYLIN_HIVE_METRICS_QUERY_SPARK_STAGE_${KYLIN_ENV}"
 SC_NAME_4="KYLIN_HIVE_METRICS_JOB_${KYLIN_ENV}"
 SC_NAME_5="KYLIN_HIVE_METRICS_JOB_EXCEPTION_${KYLIN_ENV}"
 
@@ -73,7 +73,6 @@ then
 
        cat <<-EOF > ${SINK_TOOLS_FILE}
        [
-         [
     {
        "sink": "hive",
        "storage_type": 4,
@@ -81,7 +80,6 @@ then
          "kylin.cube.max-building-segments": "1"
        }
     }
-    ]
        ]
        EOF
   $KYLIN_HOME/bin/kylin.sh org.apache.kylin.tool.metrics.systemcube.SCCreator \
@@ -91,10 +89,12 @@ then
   hive_client_mode=`bash ${KYLIN_HOME}/bin/get-properties.sh kylin.source.hive.client`
 
   # Get Database
-  system_database=`bash ${KYLIN_HOME}/bin/get-properties.sh kylin.source.hive.database-for-flat-table | tr [a-z] [A-Z]`
+  system_database_tmp=`bash ${KYLIN_HOME}/bin/get-properties.sh kylin.metrics.prefix`
+  system_database=${system_database_tmp:-"KYLIN"}
+  system_database=`echo ${system_database} | tr [a-z] [A-Z]`
 
   # comment out 'CREATE DATABASE' so the script does not exit when we lack permission to create the database;
-  sed -i -e 's/CREATE DATABASE /#CREATE DATABASE /g' ${OUTPUT_FORDER}/create_hive_tables_for_system_cubes.sql
+  sed -i -e 's/CREATE DATABASE /-- CREATE DATABASE /g' ${OUTPUT_FORDER}/create_hive_tables_for_system_cubes.sql
 
   if [ "${hive_client_mode}" == "beeline" ]
   then
@@ -104,15 +104,15 @@ then
       hive2_url=`expr match "${beeline_params}" '.*\(hive2:.*:[0-9]\{4,6\}\/\)'`
       if [ -z ${hive2_url} ]; then
           hive2_url=`expr match "${beeline_params}" '.*\(hive2:.*:[0-9]\{4,6\}\)'`
-          beeline_params=${beeline_params/${hive2_url}/${hive2_url}/${sample_database}}
+          beeline_params=${beeline_params/${hive2_url}/${hive2_url}/${system_database}}
       else
-          beeline_params=${beeline_params/${hive2_url}/${hive2_url}${sample_database}}
+          beeline_params=${beeline_params/${hive2_url}/${hive2_url}${system_database}}
       fi
 
       beeline ${beeline_params} -f ${OUTPUT_FORDER}/create_hive_tables_for_system_cubes.sql  || { exit 1; }
   else
       hive -e "CREATE DATABASE IF NOT EXISTS "$system_database
-      hive --database $sample_database -f ${OUTPUT_FORDER}/create_hive_tables_for_system_cubes.sql  || { exit 1; }
+      hive --database $system_database -f ${OUTPUT_FORDER}/create_hive_tables_for_system_cubes.sql  || { exit 1; }
   fi
 
   $KYLIN_HOME/bin/metastore.sh restore ${OUTPUT_FORDER}
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/QuerySparkMetrics.java b/core-metrics/src/main/java/org/apache/kylin/metrics/QuerySparkMetrics.java
index a0efe64..0a62533 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/QuerySparkMetrics.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/QuerySparkMetrics.java
@@ -24,6 +24,7 @@ import org.apache.kylin.metrics.lib.impl.TimedRecordEvent;
 import org.apache.kylin.metrics.property.QuerySparkExecutionEnum;
 import org.apache.kylin.metrics.property.QuerySparkJobEnum;
 import org.apache.kylin.metrics.property.QuerySparkStageEnum;
+import org.apache.kylin.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.kylin.shaded.com.google.common.cache.Cache;
 import org.apache.kylin.shaded.com.google.common.cache.CacheBuilder;
 import org.apache.kylin.shaded.com.google.common.cache.RemovalListener;
@@ -89,7 +90,12 @@ public class QuerySparkMetrics {
                 KylinConfig.getInstanceFromEnv().getKylinMetricsCacheExpireSeconds(), TimeUnit.SECONDS);
     }
 
+    private void shutdown() {
+        queryExecutionMetricsMap.invalidateAll();
+    }
+
     // only for test cases
+    @VisibleForTesting
     public static void init(RemovalListener removalListener) {
         instance = new QuerySparkMetrics(removalListener);
     }
diff --git a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/MetricsSystem.java b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/MetricsSystem.java
index 1e46bce..4051419 100644
--- a/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/MetricsSystem.java
+++ b/core-metrics/src/main/java/org/apache/kylin/metrics/lib/impl/MetricsSystem.java
@@ -26,6 +26,7 @@ import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metrics.QuerySparkMetrics;
 import org.apache.kylin.metrics.lib.ActiveReservoir;
 import org.apache.kylin.metrics.lib.ActiveReservoirRecordFilter;
 import org.slf4j.Logger;
@@ -55,6 +56,7 @@ public class MetricsSystem extends MetricRegistry {
     }
 
     public void shutdown() throws IOException {
+        QuerySparkMetrics.getInstance().getQueryExecutionMetricsMap().invalidateAll();
         for (ActiveReservoir entry : activeReservoirs.values()) {
             entry.close();
         }
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index 059915c..60bbf6c 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -895,12 +895,13 @@ public class CubeController extends BasicController {
 
         Map<Long, Long> hitFrequencyMap = null;
         Map<Long, Long> queryMatchMap = null;
-        try {
+        // collecting these metrics is currently not supported
+        /*try {
             hitFrequencyMap = getTargetCuboidHitFrequency(cubeName);
             queryMatchMap = cubeService.getCuboidQueryMatchCount(cubeName);
         } catch (Exception e) {
             logger.warn("Fail to query on system cube due to " + e);
-        }
+        }*/
 
         Set<Long> currentCuboidSet = cube.getCuboidScheduler().getAllCuboidIds();
         return cubeService.getCuboidTreeResponse(cuboidScheduler, cuboidStatsMap, hitFrequencyMap, queryMatchMap,
@@ -912,7 +913,9 @@ public class CubeController extends BasicController {
     public CuboidTreeResponse getRecommendCuboids(@PathVariable String cubeName) throws IOException {
         checkCubeExists(cubeName);
         CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);
-        Map<Long, Long> recommendCuboidStatsMap = getRecommendCuboidList(cube);
+        // collecting these metrics is currently not supported
+        // Map<Long, Long> recommendCuboidStatsMap = getRecommendCuboidList(cube);
+        Map<Long, Long> recommendCuboidStatsMap = null;
         if (recommendCuboidStatsMap == null || recommendCuboidStatsMap.isEmpty()) {
             return new CuboidTreeResponse();
         }
diff --git a/server-base/src/test/java/org/apache/kylin/rest/response/SQLResponseTest.java b/server-base/src/test/java/org/apache/kylin/rest/response/SQLResponseTest.java
index f1c704e..370b1ea 100644
--- a/server-base/src/test/java/org/apache/kylin/rest/response/SQLResponseTest.java
+++ b/server-base/src/test/java/org/apache/kylin/rest/response/SQLResponseTest.java
@@ -32,10 +32,11 @@ public class SQLResponseTest {
 
     @Test
     public void testInterfaceConsistency() throws IOException {
-        String[] attrArray = new String[] { "columnMetas", "results", "cube", "cuboidIds", "affectedRowCount", "isException",
-                "exceptionMessage", "duration", "partial", "totalScanCount", "hitExceptionCache", "storageCacheUsed",
-                "sparkPool", "pushDown", "traceUrl", "totalScanBytes", "totalScanFiles",
-                "metadataTime", "totalSparkScanTime" };
+        String[] attrArray = new String[] { "columnMetas", "results", "cube", "cuboidIds",
+                "realizationTypes", "affectedRowCount", "isException",
+                "exceptionMessage", "duration", "partial", "totalScanCount", "hitExceptionCache",
+                "storageCacheUsed", "sparkPool", "pushDown", "traceUrl", "totalScanBytes",
+                "totalScanFiles", "metadataTime", "totalSparkScanTime" };
 
         SQLResponse sqlResponse = new SQLResponse(null, null, "learn_cube", 100, false, null, false, false);
         String jsonStr = JsonUtil.writeValueAsString(sqlResponse);
diff --git a/server/src/test/java/org/apache/kylin/rest/metrics/QueryMetricsTest.java b/server/src/test/java/org/apache/kylin/rest/metrics/QueryMetricsTest.java
index 3ad7eea..8c69198 100644
--- a/server/src/test/java/org/apache/kylin/rest/metrics/QueryMetricsTest.java
+++ b/server/src/test/java/org/apache/kylin/rest/metrics/QueryMetricsTest.java
@@ -202,25 +202,6 @@ public class QueryMetricsTest extends ServiceTestBase {
         updateSparkMetrics(queryId1);
 
         Assert.assertTrue(QuerySparkMetrics.getInstance().getQueryExecutionMetrics(queryId1) != null);
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "QueryCount"));
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "QuerySuccessCount"));
-        Assert.assertEquals(0L, mBeanServer.getAttribute(objectName, "QueryFailCount"));
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "CacheHitCount"));
-
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "ScanRowCountNumOps"));
-        Assert.assertEquals(100.0, mBeanServer.getAttribute(objectName, "ScanRowCountAvgTime"));
-        Assert.assertEquals(100.0, mBeanServer.getAttribute(objectName, "ScanRowCountMaxTime"));
-        Assert.assertEquals(100.0, mBeanServer.getAttribute(objectName, "ScanRowCountMinTime"));
-
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "ResultRowCountNumOps"));
-        Assert.assertEquals(2.0, mBeanServer.getAttribute(objectName, "ResultRowCountMaxTime"));
-        Assert.assertEquals(2.0, mBeanServer.getAttribute(objectName, "ResultRowCountAvgTime"));
-        Assert.assertEquals(2.0, mBeanServer.getAttribute(objectName, "ResultRowCountMinTime"));
-
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "QueryLatencyNumOps"));
-        Assert.assertEquals(10.0, mBeanServer.getAttribute(objectName, "QueryLatencyMaxTime"));
-        Assert.assertEquals(10.0, mBeanServer.getAttribute(objectName, "QueryLatencyAvgTime"));
-        Assert.assertEquals(10.0, mBeanServer.getAttribute(objectName, "QueryLatencyMinTime"));
 
         String queryId2 = "2";
         generateSparkMetrics(queryId2);
@@ -241,10 +222,6 @@ public class QueryMetricsTest extends ServiceTestBase {
 
         Thread.sleep(5000);
 
-        Assert.assertEquals(2L, mBeanServer.getAttribute(objectName, "QueryCount"));
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "QuerySuccessCount"));
-        Assert.assertEquals(1L, mBeanServer.getAttribute(objectName, "QueryFailCount"));
-
         Assert.assertTrue(QuerySparkMetrics.getInstance().getQueryExecutionMetrics(queryId2) == null);
         Assert.assertEquals(2, sparkMetricsReportCnt.get());
         System.clearProperty("kylin.server.query-metrics-enabled");
diff --git a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeDescCreator.java b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeDescCreator.java
index f3e7168..66f1877 100644
--- a/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeDescCreator.java
+++ b/tool/src/main/java/org/apache/kylin/tool/metrics/systemcube/CubeDescCreator.java
@@ -118,26 +118,44 @@ public class CubeDescCreator {
         rowKeyDesc.setRowkeyColumns(rowKeyColDescs);
 
         //Set for aggregation group
-        String[][] hierarchy_dims = new String[4][];
-        hierarchy_dims[0] = getTimeHierarchy();
-        hierarchy_dims[1] = new String[3];
-        hierarchy_dims[1][0] = QuerySparkExecutionEnum.REALIZATION_TYPE.toString();
-        hierarchy_dims[1][1] = QuerySparkExecutionEnum.REALIZATION.toString();
-        hierarchy_dims[1][2] = QuerySparkExecutionEnum.CUBOID_IDS.toString();
-        hierarchy_dims[2] = new String[2];
-        hierarchy_dims[2][0] = QuerySparkExecutionEnum.START_TIME.toString();
-        hierarchy_dims[2][1] = QuerySparkExecutionEnum.END_TIME.toString();
-        hierarchy_dims[3] = new String[2];
-        hierarchy_dims[3][0] = QuerySparkExecutionEnum.SPARDER_NAME.toString();
-        hierarchy_dims[3][1] = RecordEvent.RecordReserveKeyEnum.HOST.toString();
-        for (int i = 0; i < hierarchy_dims.length; i++) {
-            hierarchy_dims[i] = refineColumnWithTable(tableName, hierarchy_dims[i]);
+        String[][] hierarchyDims = new String[1][];
+        hierarchyDims[0] = getTimeHierarchy();
+        for (int i = 0; i < hierarchyDims.length; i++) {
+            hierarchyDims[i] = refineColumnWithTable(tableName, hierarchyDims[i]);
         }
 
+        String[] mandatoryDims = new String[] {refineColumnWithTable(tableName,
+                QuerySparkExecutionEnum.PROJECT.toString())};
+
+        String[][] jointDims = new String[5][];
+        jointDims[0] = new String[]{
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_HOUR.toString()),
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_MINUTE.toString())
+        };
+        jointDims[1] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.START_TIME.toString()),
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.END_TIME.toString())
+        };
+        jointDims[2] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.REALIZATION_TYPE.toString()),
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.REALIZATION.toString()),
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.CUBOID_IDS.toString())
+        };
+        jointDims[3] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.SPARDER_NAME.toString()),
+                refineColumnWithTable(tableName, RecordEvent.RecordReserveKeyEnum.HOST.toString()),
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.EXCEPTION.toString()),
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.TYPE.toString())
+        };
+        jointDims[4] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.USER.toString()),
+                refineColumnWithTable(tableName, QuerySparkExecutionEnum.QUERY_ID.toString())
+        };
+
         SelectRule selectRule = new SelectRule();
-        selectRule.mandatoryDims = new String[0];
-        selectRule.hierarchyDims = hierarchy_dims;
-        selectRule.jointDims = new String[0][0];
+        selectRule.mandatoryDims = mandatoryDims;
+        selectRule.hierarchyDims = hierarchyDims;
+        selectRule.jointDims = jointDims;
 
         AggregationGroup aggGroup = new AggregationGroup();
         aggGroup.setIncludes(refineColumnWithTable(tableName, dimensions));
@@ -159,7 +177,6 @@ public class CubeDescCreator {
         dimensions.remove(TimePropertyEnum.DAY_TIME.toString());
         dimensions.remove(RecordEvent.RecordReserveKeyEnum.TIME.toString());
         dimensions.remove(RecordEvent.RecordReserveKeyEnum.HOST.toString());
-        dimensions.remove(QuerySparkJobEnum.PROJECT.toString());
 
         List<DimensionDesc> dimensionDescList = Lists.newArrayListWithExpectedSize(dimensions.size());
         for (String dimensionName : dimensions) {
@@ -194,27 +211,44 @@ public class CubeDescCreator {
         idx++;
         rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QuerySparkJobEnum.END_TIME.toString(), idx + 1);
         idx++;
+        rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QuerySparkJobEnum.PROJECT.toString(), idx + 1);
+        idx++;
         rowKeyColDescs[idx] = getRowKeyColDesc(tableName, QuerySparkJobEnum.IF_SUCCESS.toString(), idx + 1);
         idx++;
 
         RowKeyDesc rowKeyDesc = new RowKeyDesc();
         rowKeyDesc.setRowkeyColumns(rowKeyColDescs);
 
-        String[][] hierarchy_dims = new String[2][];
-        hierarchy_dims[0] = getTimeHierarchy();
-        hierarchy_dims[1] = new String[3];
-        hierarchy_dims[1][0] = QuerySparkJobEnum.QUERY_ID.toString();
-        hierarchy_dims[1][1] = QuerySparkJobEnum.EXECUTION_ID.toString();
-        hierarchy_dims[1][2] = QuerySparkJobEnum.JOB_ID.toString();
-
-        for (int i = 0; i < hierarchy_dims.length; i++) {
-            hierarchy_dims[i] = refineColumnWithTable(tableName, hierarchy_dims[i]);
+        //Set for aggregation group
+        String[][] hierarchyDims = new String[1][];
+        hierarchyDims[0] = getTimeHierarchy();
+        for (int i = 0; i < hierarchyDims.length; i++) {
+            hierarchyDims[i] = refineColumnWithTable(tableName, hierarchyDims[i]);
         }
 
+        String[] mandatoryDims = new String[] {refineColumnWithTable(tableName,
+                QuerySparkJobEnum.PROJECT.toString())};
+
+        String[][] jointDims = new String[3][];
+        jointDims[0] = new String[]{
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_HOUR.toString()),
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_MINUTE.toString())
+        };
+        jointDims[1] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkJobEnum.START_TIME.toString()),
+                refineColumnWithTable(tableName, QuerySparkJobEnum.END_TIME.toString()),
+                refineColumnWithTable(tableName, QuerySparkJobEnum.IF_SUCCESS.toString())
+        };
+        jointDims[2] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkJobEnum.QUERY_ID.toString()),
+                refineColumnWithTable(tableName, QuerySparkJobEnum.EXECUTION_ID.toString()),
+                refineColumnWithTable(tableName, QuerySparkJobEnum.JOB_ID.toString())
+        };
+
         SelectRule selectRule = new SelectRule();
-        selectRule.mandatoryDims = new String[0];
-        selectRule.hierarchyDims = hierarchy_dims;
-        selectRule.jointDims = new String[0][0];
+        selectRule.mandatoryDims = mandatoryDims;
+        selectRule.hierarchyDims = hierarchyDims;
+        selectRule.jointDims = jointDims;
 
         AggregationGroup aggGroup = new AggregationGroup();
         aggGroup.setIncludes(refineColumnWithTable(tableName, dimensions));
@@ -284,21 +318,39 @@ public class CubeDescCreator {
         rowKeyDesc.setRowkeyColumns(rowKeyColDescs);
 
         //Set for aggregation group
-        String[][] hierarchy_dims = new String[2][];
-        hierarchy_dims[0] = getTimeHierarchy();
-        hierarchy_dims[1] = new String[4];
-        hierarchy_dims[1][0] = QuerySparkStageEnum.QUERY_ID.toString();
-        hierarchy_dims[1][1] = QuerySparkStageEnum.EXECUTION_ID.toString();
-        hierarchy_dims[1][2] = QuerySparkStageEnum.JOB_ID.toString();
-        hierarchy_dims[1][3] = QuerySparkStageEnum.STAGE_ID.toString();
-        for (int i = 0; i < hierarchy_dims.length; i++) {
-            hierarchy_dims[i] = refineColumnWithTable(tableName, hierarchy_dims[i]);
+        String[][] hierarchyDims = new String[1][];
+        hierarchyDims[0] = getTimeHierarchy();
+        for (int i = 0; i < hierarchyDims.length; i++) {
+            hierarchyDims[i] = refineColumnWithTable(tableName, hierarchyDims[i]);
         }
 
+        String[] mandatoryDims = new String[] {refineColumnWithTable(tableName,
+                QuerySparkStageEnum.PROJECT.toString())};
+
+        String[][] jointDims = new String[4][];
+        jointDims[0] = new String[]{
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_HOUR.toString()),
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_MINUTE.toString())
+        };
+        jointDims[1] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkStageEnum.REALIZATION.toString()),
+                refineColumnWithTable(tableName, QuerySparkStageEnum.CUBOID_ID.toString())
+        };
+        jointDims[2] = new String[]{
+                refineColumnWithTable(tableName, RecordEvent.RecordReserveKeyEnum.HOST.toString()),
+                refineColumnWithTable(tableName, QuerySparkStageEnum.IF_SUCCESS.toString())
+        };
+        jointDims[3] = new String[]{
+                refineColumnWithTable(tableName, QuerySparkStageEnum.QUERY_ID.toString()),
+                refineColumnWithTable(tableName, QuerySparkStageEnum.EXECUTION_ID.toString()),
+                refineColumnWithTable(tableName, QuerySparkStageEnum.JOB_ID.toString()),
+                refineColumnWithTable(tableName, QuerySparkStageEnum.STAGE_ID.toString())
+        };
+
         SelectRule selectRule = new SelectRule();
-        selectRule.mandatoryDims = new String[0];
-        selectRule.hierarchyDims = hierarchy_dims;
-        selectRule.jointDims = new String[0][0];
+        selectRule.mandatoryDims = mandatoryDims;
+        selectRule.hierarchyDims = hierarchyDims;
+        selectRule.jointDims = jointDims;
 
         AggregationGroup aggGroup = new AggregationGroup();
         aggGroup.setIncludes(refineColumnWithTable(tableName, dimensions));
@@ -369,16 +421,33 @@ public class CubeDescCreator {
         rowKeyDesc.setRowkeyColumns(rowKeyColDescs);
 
         //Set for aggregation group
-        String[][] hierarchy_dims = new String[1][];
-        hierarchy_dims[0] = getTimeHierarchy();
-        for (int i = 0; i < hierarchy_dims.length; i++) {
-            hierarchy_dims[i] = refineColumnWithTable(tableName, hierarchy_dims[i]);
+        String[][] hierarchyDims = new String[1][];
+        hierarchyDims[0] = getTimeHierarchy();
+        for (int i = 0; i < hierarchyDims.length; i++) {
+            hierarchyDims[i] = refineColumnWithTable(tableName, hierarchyDims[i]);
         }
 
+        String[] mandatoryDims = new String[] {refineColumnWithTable(tableName,
+                JobPropertyEnum.PROJECT.toString())};
+
+        String[][] jointDims = new String[3][];
+        jointDims[0] = new String[] {
+                refineColumnWithTable(tableName, JobPropertyEnum.CUBE.toString()),
+                refineColumnWithTable(tableName, JobPropertyEnum.ALGORITHM.toString())
+        };
+        jointDims[1] = new String[] {
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_HOUR.toString()),
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_MINUTE.toString())
+        };
+        jointDims[2] = new String[] {
+                refineColumnWithTable(tableName, JobPropertyEnum.USER.toString()),
+                refineColumnWithTable(tableName, JobPropertyEnum.TYPE.toString())
+        };
+
         SelectRule selectRule = new SelectRule();
-        selectRule.mandatoryDims = new String[0];
-        selectRule.hierarchyDims = hierarchy_dims;
-        selectRule.jointDims = new String[0][0];
+        selectRule.mandatoryDims = mandatoryDims;
+        selectRule.hierarchyDims = hierarchyDims;
+        selectRule.jointDims = jointDims;
 
         AggregationGroup aggGroup = new AggregationGroup();
         aggGroup.setIncludes(refineColumnWithTable(tableName, dimensions));
@@ -433,16 +502,33 @@ public class CubeDescCreator {
         rowKeyDesc.setRowkeyColumns(rowKeyColDescs);
 
         //Set for aggregation group
-        String[][] hierarchy_dims = new String[1][];
-        hierarchy_dims[0] = getTimeHierarchy();
-        for (int i = 0; i < hierarchy_dims.length; i++) {
-            hierarchy_dims[i] = refineColumnWithTable(tableName, hierarchy_dims[i]);
+        String[][] hierarchyDims = new String[1][];
+        hierarchyDims[0] = getTimeHierarchy();
+        for (int i = 0; i < hierarchyDims.length; i++) {
+            hierarchyDims[i] = refineColumnWithTable(tableName, hierarchyDims[i]);
         }
 
+        String[] mandatoryDims = new String[] {refineColumnWithTable(tableName,
+                JobPropertyEnum.PROJECT.toString())};
+
+        String[][] jointDims = new String[3][];
+        jointDims[0] = new String[] {
+                refineColumnWithTable(tableName, JobPropertyEnum.CUBE.toString()),
+                refineColumnWithTable(tableName, JobPropertyEnum.ALGORITHM.toString())
+        };
+        jointDims[1] = new String[] {
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_HOUR.toString()),
+                refineColumnWithTable(tableName, TimePropertyEnum.TIME_MINUTE.toString())
+        };
+        jointDims[2] = new String[] {
+                refineColumnWithTable(tableName, JobPropertyEnum.USER.toString()),
+                refineColumnWithTable(tableName, JobPropertyEnum.TYPE.toString())
+        };
+
         SelectRule selectRule = new SelectRule();
-        selectRule.mandatoryDims = new String[0];
-        selectRule.hierarchyDims = hierarchy_dims;
-        selectRule.jointDims = new String[0][0];
+        selectRule.mandatoryDims = mandatoryDims;
+        selectRule.hierarchyDims = hierarchyDims;
+        selectRule.jointDims = jointDims;
 
         AggregationGroup aggGroup = new AggregationGroup();
         aggGroup.setIncludes(refineColumnWithTable(tableName, dimensions));
@@ -525,6 +611,10 @@ public class CubeDescCreator {
         return result;
     }
 
+    public static String refineColumnWithTable(String tableName, String column) {
+        return tableName.substring(tableName.lastIndexOf(".") + 1) + "." + column;
+    }
+
     public static String[] refineColumnWithTable(String tableName, List<String> columns) {
         String[] dimensions = new String[columns.size()];
         for (int i = 0; i < dimensions.length; i++) {
@@ -599,9 +689,7 @@ public class CubeDescCreator {
         FunctionDesc function = new FunctionDesc();
         function.setExpression(FunctionDesc.FUNC_SUM);
         function.setParameter(parameterDesc);
-        function.setReturnType(dataType.equals(HiveTableCreator.HiveTypeEnum.HDOUBLE.toString())
-                ? HiveTableCreator.HiveTypeEnum.HDECIMAL.toString()
-                : dataType);
+        function.setReturnType(dataType);
 
         MeasureDesc result = new MeasureDesc();
         result.setName(column + "_SUM");
diff --git a/webapp/app/js/services/cubes.js b/webapp/app/js/services/cubes.js
index 53f490d..fea92ce 100644
--- a/webapp/app/js/services/cubes.js
+++ b/webapp/app/js/services/cubes.js
@@ -33,7 +33,9 @@ KylinApp.factory('CubeService', ['$resource', function ($resource, config) {
         });
       }
     };
-    iterator(data.root, data.root.row_count);
+    if (data.root) {
+      iterator(data.root, data.root.row_count);
+    }
     return cuboids;
   };
   return $resource(Config.service.url + 'cubes/:cubeId/:propName/:propValue/:action', {}, {
