Repository: kylin
Updated Branches:
  refs/heads/1.5.x-CDH5.7 159d9fd8b -> b46029fba (forced update)


http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/protobuf/II.proto
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/protobuf/II.proto b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/protobuf/II.proto
deleted file mode 100644
index d2c34d4..0000000
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/protobuf/II.proto
+++ /dev/null
@@ -1,65 +0,0 @@
-//
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// usage:
-// protoc  --java_out=./storage-hbase/src/main/java  ./storage-hbase/src/main/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/protobuf/II.proto
-
-option java_package = "org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.generated";
-
-option java_outer_classname = "IIProtos";
-
-option java_generic_services = true;
-
-option java_generate_equals_and_hash = true;
-
-option optimize_for = SPEED;
-
-message IIRequest {
-    required bytes type = 1;
-    required bytes filter = 2;
-    required bytes projector = 3;
-    required bytes aggregator = 4;
-    optional bytes tsRange = 5;
-}
-
-message IIResponseInternal {
-    message IIRow {
-        required bytes columns = 1;
-        optional bytes measures = 2;
-    }
-    //all entries in this struct be optional to conveniently add more entries in the future
-    message Stats {
-        optional int32 myShard = 1;
-        optional int64 latestDataTime = 2;
-        optional int64 serviceStartTime = 3;
-        optional int64 serviceEndTime = 4;
-        optional int32 scannedSlices = 5;
-    }
-    repeated IIRow rows = 1;
-    required Stats stats = 2;
-}
-
-message IIResponse {
-    required bytes blob = 1;
-}
-
-
-
-service RowsService {
-    rpc getRows (IIRequest) returns (IIResponse);
-}
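
The deleted II.proto above defined the protobuf contract (IIProtos / RowsService) for the
inverted-index coprocessor endpoint that this change removes. For reference, a
protobuf-defined HBase endpoint of this shape is normally invoked from the client through
Table.coprocessorService; the sketch below is illustrative only, assumes the HBase 1.x
client API and the IIProtos classes that protoc used to generate from this file, and is
not part of the diff.

    import java.io.IOException;
    import java.util.Map;

    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.client.coprocessor.Batch;
    import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
    import org.apache.hadoop.hbase.ipc.ServerRpcController;
    import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.generated.IIProtos;

    import com.google.protobuf.ByteString;

    public class RowsServiceClientSketch {

        // Builds an IIRequest; all four required fields are opaque serialized blobs
        // (type info, filter, projector, aggregator) supplied by the caller.
        static IIProtos.IIRequest buildRequest(byte[] type, byte[] filter, byte[] projector, byte[] aggregator) {
            return IIProtos.IIRequest.newBuilder()
                    .setType(ByteString.copyFrom(type))
                    .setFilter(ByteString.copyFrom(filter))
                    .setProjector(ByteString.copyFrom(projector))
                    .setAggregator(ByteString.copyFrom(aggregator))
                    .build();
        }

        // Calls getRows on every region of the HTable and returns one IIResponse per region.
        static Map<byte[], IIProtos.IIResponse> getRows(Table table, final IIProtos.IIRequest request) throws Throwable {
            return table.coprocessorService(IIProtos.RowsService.class, null, null,
                    new Batch.Call<IIProtos.RowsService, IIProtos.IIResponse>() {
                        @Override
                        public IIProtos.IIResponse call(IIProtos.RowsService service) throws IOException {
                            ServerRpcController controller = new ServerRpcController();
                            BlockingRpcCallback<IIProtos.IIResponse> callback = new BlockingRpcCallback<IIProtos.IIResponse>();
                            service.getRows(controller, request, callback);
                            IIProtos.IIResponse response = callback.get();
                            if (controller.failedOnException()) {
                                throw controller.getFailedOn();
                            }
                            return response;
                        }
                    });
        }
    }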

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput.java
index 11c1711..c634a1d 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMROutput.java
@@ -20,7 +20,6 @@ package org.apache.kylin.storage.hbase.steps;
 
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.mr.IMROutput;
-import org.apache.kylin.invertedindex.IISegment;
 import org.apache.kylin.job.execution.DefaultChainedExecutable;
 
 public class HBaseMROutput implements IMROutput {
@@ -43,23 +42,6 @@ public class HBaseMROutput implements IMROutput {
     }
 
     @Override
-    public IMRBatchInvertedIndexingOutputSide getBatchInvertedIndexingOutputSide(final IISegment seg) {
-        return new IMRBatchInvertedIndexingOutputSide() {
-            HBaseMRSteps steps = new HBaseMRSteps(seg);
-
-            @Override
-            public void addStepPhase3_BuildII(DefaultChainedExecutable jobFlow, String rootPath) {
-                steps.addSaveIIToHTableSteps(jobFlow, rootPath);
-            }
-
-            @Override
-            public void addStepPhase4_Cleanup(DefaultChainedExecutable jobFlow) {
-                steps.addInvertedIndexGarbageCollectionSteps(jobFlow);
-            }
-        };
-    }
-
-    @Override
     public IMRBatchMergeOutputSide getBatchMergeOutputSide(final CubeSegment seg) {
         return new IMRBatchMergeOutputSide() {
             HBaseMRSteps steps = new HBaseMRSteps(seg);

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
index dcff5e9..9102fbc 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/HBaseMRSteps.java
@@ -32,9 +32,6 @@ import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.execution.DefaultChainedExecutable;
 import org.apache.kylin.metadata.realization.IRealizationSegment;
 import org.apache.kylin.storage.hbase.HBaseConnection;
-import org.apache.kylin.storage.hbase.ii.IIBulkLoadJob;
-import org.apache.kylin.storage.hbase.ii.IICreateHFileJob;
-import org.apache.kylin.storage.hbase.ii.IICreateHTableJob;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -201,79 +198,4 @@ public class HBaseMRSteps extends JobBuilderSupport {
         jobFlow.addTask(step);
     }
 
-    public void addSaveIIToHTableSteps(DefaultChainedExecutable jobFlow, String rootPath) {
-        // create htable if it doesn't exist
-        jobFlow.addTask(createCreateIIHTableStep(seg));
-
-        final String iiPath = rootPath + "*";
-
-        // generate hfiles step
-        jobFlow.addTask(createConvertIIToHfileStep(seg, iiPath, jobFlow.getId()));
-
-        // bulk load step
-        jobFlow.addTask(createIIBulkLoadStep(seg, jobFlow.getId()));
-
-    }
-
-    public void addInvertedIndexGarbageCollectionSteps(DefaultChainedExecutable jobFlow) {
-        String jobId = jobFlow.getId();
-
-        List<String> toDeletePaths = new ArrayList<>();
-        toDeletePaths.add(getJobWorkingDir(jobId));
-
-        HDFSPathGarbageCollectionStep step = new HDFSPathGarbageCollectionStep();
-        step.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION);
-        step.setDeletePaths(toDeletePaths);
-        step.setJobId(jobId);
-
-        jobFlow.addTask(step);
-    }
-
-    private HadoopShellExecutable createCreateIIHTableStep(IRealizationSegment seg) {
-        HadoopShellExecutable createHtableStep = new HadoopShellExecutable();
-        createHtableStep.setName(ExecutableConstants.STEP_NAME_CREATE_HBASE_TABLE);
-        StringBuilder cmd = new StringBuilder();
-        appendExecCmdParameters(cmd, BatchConstants.ARG_II_NAME, seg.getRealization().getName());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_HTABLE_NAME, seg.getStorageLocationIdentifier());
-
-        createHtableStep.setJobParams(cmd.toString());
-        createHtableStep.setJobClass(IICreateHTableJob.class);
-
-        return createHtableStep;
-    }
-
-    private MapReduceExecutable createConvertIIToHfileStep(IRealizationSegment seg, String inputPath, String jobId) {
-        MapReduceExecutable createHFilesStep = new MapReduceExecutable();
-        createHFilesStep.setName(ExecutableConstants.STEP_NAME_CONVERT_II_TO_HFILE);
-        StringBuilder cmd = new StringBuilder();
-
-        appendMapReduceParameters(cmd);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_II_NAME, seg.getRealization().getName());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, inputPath);
-        appendExecCmdParameters(cmd, BatchConstants.ARG_OUTPUT, getHFilePath(jobId));
-        appendExecCmdParameters(cmd, BatchConstants.ARG_HTABLE_NAME, seg.getStorageLocationIdentifier());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_JOB_NAME, "Kylin_HFile_Generator_" + seg.getRealization().getName() + "_Step");
-
-        createHFilesStep.setMapReduceParams(cmd.toString());
-        createHFilesStep.setMapReduceJobClass(IICreateHFileJob.class);
-
-        return createHFilesStep;
-    }
-
-    private HadoopShellExecutable createIIBulkLoadStep(IRealizationSegment seg, String jobId) {
-        HadoopShellExecutable bulkLoadStep = new HadoopShellExecutable();
-        bulkLoadStep.setName(ExecutableConstants.STEP_NAME_BULK_LOAD_HFILE);
-
-        StringBuilder cmd = new StringBuilder();
-        appendExecCmdParameters(cmd, BatchConstants.ARG_INPUT, getHFilePath(jobId));
-        appendExecCmdParameters(cmd, BatchConstants.ARG_HTABLE_NAME, seg.getStorageLocationIdentifier());
-        appendExecCmdParameters(cmd, BatchConstants.ARG_II_NAME, seg.getRealization().getName());
-
-        bulkLoadStep.setJobParams(cmd.toString());
-        bulkLoadStep.setJobClass(IIBulkLoadJob.class);
-
-        return bulkLoadStep;
-
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
index 270da13..63e8ac1 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/DeployCoprocessorCLI.java
@@ -18,17 +18,7 @@
 
 package org.apache.kylin.storage.hbase.util;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Matcher;
-
+import com.google.common.collect.Lists;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -48,17 +38,17 @@ import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
-import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.*;
+import java.util.regex.Matcher;
 
 /**
  */
@@ -184,7 +174,6 @@ public class DeployCoprocessorCLI {
 
     public static void addCoprocessorOnHTable(HTableDescriptor desc, Path hdfsCoprocessorJar) throws IOException {
         logger.info("Add coprocessor on " + desc.getNameAsString());
-        desc.addCoprocessor(IIEndpointClass, hdfsCoprocessorJar, 1000, null);
         desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
         desc.addCoprocessor(CubeObserverClass, hdfsCoprocessorJar, 1002, null);
     }
@@ -384,18 +373,6 @@ public class DeployCoprocessorCLI {
             }
         }
 
-        for (IIInstance ii : IIManager.getInstance(config).listAllIIs()) {
-            if (ii.getStatus() == RealizationStatusEnum.READY) {
-                for (IISegment seg : ii.getSegments()) {//streaming segment is never "READY"
-                    String tableName = seg.getStorageLocationIdentifier();
-                    if (StringUtils.isBlank(tableName) == false) {
-                        result.add(tableName);
-                        System.out.println("added new table: " + tableName);
-                    }
-                }
-            }
-        }
-
         return result;
     }
 }
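
With the hunk above, addCoprocessorOnHTable now registers only the cube endpoint and the
cube observer; the IIEndpoint entry at priority 1000 is gone. HTables created before this
change may still carry that entry in their descriptors; re-running DeployCoprocessorCLI
normally refreshes them, and the sketch below is a minimal manual equivalent for a single
table. It assumes the HBase 1.x admin API, and the table name is a placeholder rather than
anything taken from this commit.

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class DropIIEndpointSketch {

        // Coprocessor class whose registration is removed by this commit.
        private static final String II_ENDPOINT_CLASS = "org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.IIEndpoint";

        public static void main(String[] args) throws IOException {
            Configuration conf = HBaseConfiguration.create();
            TableName table = TableName.valueOf("KYLIN_EXAMPLE_HTABLE"); // placeholder table name
            try (Connection conn = ConnectionFactory.createConnection(conf); Admin admin = conn.getAdmin()) {
                HTableDescriptor desc = admin.getTableDescriptor(table);
                if (desc.hasCoprocessor(II_ENDPOINT_CLASS)) {
                    desc.removeCoprocessor(II_ENDPOINT_CLASS); // drop the stale II endpoint entry
                    admin.disableTable(table);                 // apply the descriptor change with the table offline
                    admin.modifyTable(table, desc);
                    admin.enableTable(table);
                }
            }
        }
    }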

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/IIDeployCoprocessorCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/IIDeployCoprocessorCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/IIDeployCoprocessorCLI.java
deleted file mode 100644
index 93a9e67..0000000
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/IIDeployCoprocessorCLI.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.storage.hbase.util;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.engine.mr.HadoopUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * THIS IS A TAILORED DUPLICATE OF org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI TO AVOID CYCLIC
- * DEPENDENCY. INVERTED-INDEX CODE NOW SPLITTED ACROSS kylin-invertedindex AND kylin-storage-hbase.
- * DEFENITELY NEED FURTHER REFACTOR.
- */
-public class IIDeployCoprocessorCLI {
-
-    private static final Logger logger = LoggerFactory.getLogger(IIDeployCoprocessorCLI.class);
-
-    public static final String CubeObserverClass = "org.apache.kylin.storage.hbase.cube.v1.coprocessor.observer.AggregateRegionObserver";
-    public static final String CubeEndpointClass = "org.apache.kylin.storage.hbase.cube.v2.coprocessor.endpoint.CubeVisitService";
-    public static final String IIEndpointClass = "org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.IIEndpoint";
-
-    public static void deployCoprocessor(HTableDescriptor tableDesc) {
-        try {
-            initHTableCoprocessor(tableDesc);
-            logger.info("hbase table " + tableDesc.getName() + " deployed with coprocessor.");
-
-        } catch (Exception ex) {
-            logger.error("Error deploying coprocessor on " + tableDesc.getName(), ex);
-            logger.error("Will try creating the table without coprocessor.");
-        }
-    }
-
-    private static void initHTableCoprocessor(HTableDescriptor desc) throws IOException {
-        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        Configuration hconf = HadoopUtil.getCurrentConfiguration();
-        FileSystem fileSystem = FileSystem.get(hconf);
-
-        String localCoprocessorJar = kylinConfig.getCoprocessorLocalJar();
-        Path hdfsCoprocessorJar = uploadCoprocessorJar(localCoprocessorJar, fileSystem, null);
-
-        addCoprocessorOnHTable(desc, hdfsCoprocessorJar);
-    }
-
-    private static void addCoprocessorOnHTable(HTableDescriptor desc, Path hdfsCoprocessorJar) throws IOException {
-        logger.info("Add coprocessor on " + desc.getNameAsString());
-        desc.addCoprocessor(IIEndpointClass, hdfsCoprocessorJar, 1000, null);
-        desc.addCoprocessor(CubeEndpointClass, hdfsCoprocessorJar, 1001, null);
-        desc.addCoprocessor(CubeObserverClass, hdfsCoprocessorJar, 1002, null);
-    }
-
-    private static Path uploadCoprocessorJar(String localCoprocessorJar, FileSystem fileSystem, Set<String> oldJarPaths) throws IOException {
-        Path uploadPath = null;
-        File localCoprocessorFile = new File(localCoprocessorJar);
-
-        // check existing jars
-        if (oldJarPaths == null) {
-            oldJarPaths = new HashSet<String>();
-        }
-        Path coprocessorDir = getCoprocessorHDFSDir(fileSystem, KylinConfig.getInstanceFromEnv());
-        for (FileStatus fileStatus : fileSystem.listStatus(coprocessorDir)) {
-            if (isSame(localCoprocessorFile, fileStatus)) {
-                uploadPath = fileStatus.getPath();
-                break;
-            }
-            String filename = fileStatus.getPath().toString();
-            if (filename.endsWith(".jar")) {
-                oldJarPaths.add(filename);
-            }
-        }
-
-        // upload if not existing
-        if (uploadPath == null) {
-            // figure out a unique new jar file name
-            Set<String> oldJarNames = new HashSet<String>();
-            for (String path : oldJarPaths) {
-                oldJarNames.add(new Path(path).getName());
-            }
-            String baseName = getBaseFileName(localCoprocessorJar);
-            String newName = null;
-            int i = 0;
-            while (newName == null) {
-                newName = baseName + "-" + (i++) + ".jar";
-                if (oldJarNames.contains(newName))
-                    newName = null;
-            }
-
-            // upload
-            uploadPath = new Path(coprocessorDir, newName);
-            FileInputStream in = null;
-            FSDataOutputStream out = null;
-            try {
-                in = new FileInputStream(localCoprocessorFile);
-                out = fileSystem.create(uploadPath);
-                IOUtils.copy(in, out);
-            } finally {
-                IOUtils.closeQuietly(in);
-                IOUtils.closeQuietly(out);
-            }
-
-            fileSystem.setTimes(uploadPath, localCoprocessorFile.lastModified(), -1);
-
-        }
-
-        uploadPath = uploadPath.makeQualified(fileSystem.getUri(), null);
-        return uploadPath;
-    }
-
-    private static boolean isSame(File localCoprocessorFile, FileStatus fileStatus) {
-        return fileStatus.getLen() == localCoprocessorFile.length() && fileStatus.getModificationTime() == localCoprocessorFile.lastModified();
-    }
-
-    private static String getBaseFileName(String localCoprocessorJar) {
-        File localJar = new File(localCoprocessorJar);
-        String baseName = localJar.getName();
-        if (baseName.endsWith(".jar"))
-            baseName = baseName.substring(0, baseName.length() - ".jar".length());
-        return baseName;
-    }
-
-    private static Path getCoprocessorHDFSDir(FileSystem fileSystem, KylinConfig config) throws IOException {
-        String hdfsWorkingDirectory = config.getHdfsWorkingDirectory();
-        Path coprocessorDir = new Path(hdfsWorkingDirectory, "coprocessor");
-        fileSystem.mkdirs(coprocessorDir);
-        return coprocessorDir;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
index af64df7..3f4a6d5 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/StorageCleanupJob.java
@@ -50,9 +50,6 @@ import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
 import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.execution.ExecutableState;
@@ -107,7 +104,6 @@ public class StorageCleanupJob extends AbstractHadoopJob {
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
-        IIManager iiManager = IIManager.getInstance(KylinConfig.getInstanceFromEnv());
         long TIME_THREADSHOLD = KylinConfig.getInstanceFromEnv().getStorageCleanupTimeThreshold();
         // get all kylin hbase tables
         HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
@@ -138,18 +134,6 @@ public class StorageCleanupJob extends AbstractHadoopJob {
             }
         }
 
-        // remove every ii segment htable from drop list
-        for (IIInstance ii : iiManager.listAllIIs()) {
-            for (IISegment seg : ii.getSegments()) {
-                String tablename = seg.getStorageLocationIdentifier();
-
-                if (allTablesNeedToBeDropped.contains(tablename)) {
-                    allTablesNeedToBeDropped.remove(tablename);
-                    logger.info("Exclude table " + tablename + " from drop list, as the table belongs to ii " + ii.getName() + " with status " + ii.getStatus());
-                }
-            }
-        }
-
         if (delete == true) {
             // drop tables
             ExecutorService executorService = Executors.newSingleThreadExecutor();

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
index 26ee055..b1f275f 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/UpdateHTableHostCLI.java
@@ -33,9 +33,6 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.cube.CubeManager;
 import org.apache.kylin.cube.CubeSegment;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.realization.IRealizationConstants;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
@@ -129,18 +126,6 @@ public class UpdateHTableHostCLI {
             }
         }
 
-        for (IIInstance ii : IIManager.getInstance(config).listAllIIs()) {
-            if (ii.getStatus() == RealizationStatusEnum.READY) {
-                for (IISegment seg : ii.getSegments()) {//streaming segment is never "READY"
-                    String tableName = seg.getStorageLocationIdentifier();
-                    if (!StringUtils.isBlank(tableName)) {
-                        result.add(tableName);
-                        System.out.println("added new table: " + tableName);
-                    }
-                }
-            }
-        }
-
         return result;
     }
 

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TsConditionEraserTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TsConditionEraserTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TsConditionEraserTest.java
deleted file mode 100644
index 90f3370..0000000
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/TsConditionEraserTest.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.storage.hbase.common;
-
-import java.io.IOException;
-import java.util.Arrays;
-
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.filter.ColumnTupleFilter;
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
-import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.LogicalTupleFilter;
-import org.apache.kylin.metadata.filter.StringCodeSystem;
-import org.apache.kylin.metadata.filter.TsConditionEraser;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilterSerializer;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorFilter;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.common.collect.Lists;
-
-/**
- */
-public class TsConditionEraserTest extends LocalFileMetadataTestCase {
-    IIInstance ii;
-    TableRecordInfo tableRecordInfo;
-    CoprocessorFilter filter;
-    TableDesc factTableDesc;
-
-    TblColRef caldt;
-    TblColRef siteId;
-
-    @Before
-    public void setup() throws IOException {
-        this.createTestMetadata();
-        IIManager iiManager = IIManager.getInstance(getTestConfig());
-        this.ii = iiManager.getII("test_kylin_ii_left_join");
-        IISegment segment = iiManager.buildSegment(ii, 0, System.currentTimeMillis());
-        ii.getSegments().add(segment);
-        this.tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
-        this.factTableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc("DEFAULT.TEST_KYLIN_FACT");
-        this.caldt = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "CAL_DT");
-        this.siteId = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_SITE_ID");
-    }
-
-    @After
-    public void cleanUp() {
-        cleanupTestMetadata();
-    }
-
-    private TupleFilter mockFilter1(int year) {
-        CompareTupleFilter aFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
-        aFilter.addChild(new ColumnTupleFilter(caldt));
-        aFilter.addChild(new ConstantTupleFilter(year + "-01-01"));
-
-        CompareTupleFilter bFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        bFilter.addChild(new ColumnTupleFilter(caldt));
-        bFilter.addChild(new ConstantTupleFilter(year + "-01-04"));
-
-        CompareTupleFilter cFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        cFilter.addChild(new ColumnTupleFilter(caldt));
-        cFilter.addChild(new ConstantTupleFilter(year + "-01-03"));
-
-        CompareTupleFilter dFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
-        dFilter.addChild(new ColumnTupleFilter(siteId));
-        dFilter.addChild(new ConstantTupleFilter("0"));
-
-        LogicalTupleFilter subRoot = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        subRoot.addChildren(Lists.newArrayList(aFilter, bFilter, cFilter, dFilter));
-
-        CompareTupleFilter outFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        outFilter.addChild(new ColumnTupleFilter(caldt));
-        outFilter.addChild(new ConstantTupleFilter(year + "-01-02"));
-
-        LogicalTupleFilter root = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        root.addChildren(Lists.newArrayList(subRoot, outFilter));
-        return root;
-    }
-
-    private TupleFilter mockFilter2(int year) {
-        CompareTupleFilter aFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
-        aFilter.addChild(new ColumnTupleFilter(caldt));
-        aFilter.addChild(new ConstantTupleFilter(year + "-01-01"));
-
-        CompareTupleFilter bFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        bFilter.addChild(new ColumnTupleFilter(caldt));
-        bFilter.addChild(new ConstantTupleFilter(year + "-01-04"));
-
-        CompareTupleFilter cFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        cFilter.addChild(new ColumnTupleFilter(caldt));
-        cFilter.addChild(new ConstantTupleFilter(year + "-01-03"));
-
-        CompareTupleFilter dFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
-        dFilter.addChild(new ColumnTupleFilter(siteId));
-        dFilter.addChild(new ConstantTupleFilter("0"));
-
-        LogicalTupleFilter subRoot = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.OR);
-        subRoot.addChildren(Lists.newArrayList(aFilter, bFilter, cFilter, dFilter));
-
-        CompareTupleFilter outFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        outFilter.addChild(new ColumnTupleFilter(caldt));
-        outFilter.addChild(new ConstantTupleFilter(year + "-01-02"));
-
-        LogicalTupleFilter root = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        root.addChildren(Lists.newArrayList(subRoot, outFilter));
-        return root;
-    }
-
-    @Test
-    public void positiveTest() {
-
-        TupleFilter a = mockFilter1(2000);
-        TupleFilter b = mockFilter1(2001);
-
-        TsConditionEraser decoratorA = new TsConditionEraser(caldt, a);
-        byte[] aBytes = TupleFilterSerializer.serialize(a, decoratorA, StringCodeSystem.INSTANCE);
-        TsConditionEraser decoratorB = new TsConditionEraser(caldt, b);
-        byte[] bBytes = TupleFilterSerializer.serialize(b, decoratorB, StringCodeSystem.INSTANCE);
-        Assert.assertArrayEquals(aBytes, bBytes);
-
-    }
-
-    @Test
-    public void negativeTest() {
-        TupleFilter a = mockFilter2(2000);
-        TupleFilter b = mockFilter2(2001);
-
-        TsConditionEraser decoratorA = new TsConditionEraser(caldt, a);
-        byte[] aBytes = TupleFilterSerializer.serialize(a, decoratorA, StringCodeSystem.INSTANCE);
-        TsConditionEraser decoratorB = new TsConditionEraser(caldt, b);
-        byte[] bBytes = TupleFilterSerializer.serialize(b, decoratorB, StringCodeSystem.INSTANCE);
-        Assert.assertFalse(Arrays.equals(aBytes, bBytes));
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/BitMapFilterEvaluatorTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/BitMapFilterEvaluatorTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/BitMapFilterEvaluatorTest.java
deleted file mode 100644
index e2eeddb..0000000
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/BitMapFilterEvaluatorTest.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.storage.hbase.ii.coprocessor.endpoint;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.ArrayList;
-
-import org.apache.kylin.common.util.Dictionary;
-import org.apache.kylin.metadata.filter.ColumnTupleFilter;
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
-import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.LogicalTupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter.FilterOperatorEnum;
-import org.apache.kylin.metadata.model.ColumnDesc;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.BitMapFilterEvaluator.BitMapProvider;
-import org.junit.Test;
-
-import com.google.common.collect.Lists;
-
-import it.uniroma3.mat.extendedset.intset.ConciseSet;
-
-public class BitMapFilterEvaluatorTest {
-
-    static TblColRef colA;
-    static TblColRef colB;
-
-    static {
-        TableDesc table = TableDesc.mockup("DEFAULT.TABLE");
-        colA = ColumnDesc.mockup(table, 1, "colA", "string").getRef();
-        colB = ColumnDesc.mockup(table, 1, "colB", "string").getRef();
-    }
-
-    static class MockBitMapProivder implements BitMapProvider {
-
-        private static final int MAX_ID = 8;
-        private static final int REC_COUNT = 10;
-
-        @Override
-        public ConciseSet getBitMap(TblColRef col, Integer startId, Integer endId) {
-            if (!col.equals(colA))
-                return null;
-
-            // i-th record has value ID i, and last record has value null
-            if (startId == null && endId == null) {
-                //entry for getting null value
-                ConciseSet s = new ConciseSet();
-                s.add(getRecordCount() - 1);
-                return s;
-            }
-
-            int start = 0;
-            int end = MAX_ID;
-            if (startId != null) {
-                start = startId;
-            }
-            if (endId != null) {
-                end = endId;
-            }
-
-            ConciseSet ret = new ConciseSet();
-            for (int i = start; i <= end; ++i) {
-                ConciseSet temp = getBitMap(col, i);
-                ret.addAll(temp);
-            }
-            return ret;
-        }
-
-        public ConciseSet getBitMap(TblColRef col, int valueId) {
-            if (!col.equals(colA))
-                return null;
-
-            // i-th record has value ID i, and last record has value null
-            ConciseSet bitMap = new ConciseSet();
-            if (valueId < 0 || valueId > getMaxValueId(col)) // null
-                bitMap.add(getRecordCount() - 1);
-            else
-                bitMap.add(valueId);
-
-            return bitMap;
-        }
-
-        @Override
-        public int getRecordCount() {
-            return REC_COUNT;
-        }
-
-        @Override
-        public int getMaxValueId(TblColRef col) {
-            return MAX_ID;
-        }
-    }
-
-    BitMapFilterEvaluator eval = new BitMapFilterEvaluator(new MockBitMapProivder());
-    ArrayList<CompareTupleFilter> basicFilters = Lists.newArrayList();
-    ArrayList<ConciseSet> basicResults = Lists.newArrayList();
-
-    public BitMapFilterEvaluatorTest() {
-        basicFilters.add(compare(colA, FilterOperatorEnum.ISNULL));
-        basicResults.add(set(9));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.ISNOTNULL));
-        basicResults.add(set(0, 1, 2, 3, 4, 5, 6, 7, 8));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.EQ, 0));
-        basicResults.add(set(0));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.NEQ, 0));
-        basicResults.add(set(1, 2, 3, 4, 5, 6, 7, 8));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.IN, 0, 5));
-        basicResults.add(set(0, 5));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.NOTIN, 0, 5));
-        basicResults.add(set(1, 2, 3, 4, 6, 7, 8));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.LT, 3));
-        basicResults.add(set(0, 1, 2));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.LTE, 3));
-        basicResults.add(set(0, 1, 2, 3));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.GT, 3));
-        basicResults.add(set(4, 5, 6, 7, 8));
-
-        basicFilters.add(compare(colA, FilterOperatorEnum.GTE, 3));
-        basicResults.add(set(3, 4, 5, 6, 7, 8));
-    }
-
-    @Test
-    public void testBasics() {
-        for (int i = 0; i < basicFilters.size(); i++) {
-            assertEquals(basicResults.get(i), eval.evaluate(basicFilters.get(i)));
-        }
-    }
-
-    @Test
-    public void testLogicalAnd() {
-        for (int i = 0; i < basicFilters.size(); i++) {
-            for (int j = 0; j < basicFilters.size(); j++) {
-                LogicalTupleFilter f = logical(FilterOperatorEnum.AND, basicFilters.get(i), basicFilters.get(j));
-                ConciseSet r = basicResults.get(i).clone();
-                r.retainAll(basicResults.get(j));
-                assertEquals(r, eval.evaluate(f));
-            }
-        }
-    }
-
-    @Test
-    public void testLogicalOr() {
-        for (int i = 0; i < basicFilters.size(); i++) {
-            for (int j = 0; j < basicFilters.size(); j++) {
-                LogicalTupleFilter f = logical(FilterOperatorEnum.OR, basicFilters.get(i), basicFilters.get(j));
-                ConciseSet r = basicResults.get(i).clone();
-                r.addAll(basicResults.get(j));
-                assertEquals(r, eval.evaluate(f));
-            }
-        }
-    }
-
-    @Test
-    public void testNotEvaluable() {
-        CompareTupleFilter notEvaluable = compare(colB, FilterOperatorEnum.EQ, 0);
-        assertEquals(null, eval.evaluate(notEvaluable));
-
-        LogicalTupleFilter or = logical(FilterOperatorEnum.OR, basicFilters.get(1), notEvaluable);
-        assertEquals(null, eval.evaluate(or));
-
-        LogicalTupleFilter and = logical(FilterOperatorEnum.AND, basicFilters.get(1), notEvaluable);
-        assertEquals(basicResults.get(1), eval.evaluate(and));
-    }
-
-    public static CompareTupleFilter compare(TblColRef col, TupleFilter.FilterOperatorEnum op, int... ids) {
-        CompareTupleFilter filter = new CompareTupleFilter(op);
-        filter.addChild(columnFilter(col));
-        for (int i : ids) {
-            filter.addChild(constFilter(i));
-        }
-        return filter;
-    }
-
-    public static LogicalTupleFilter logical(TupleFilter.FilterOperatorEnum op, TupleFilter... filters) {
-        LogicalTupleFilter filter = new LogicalTupleFilter(op);
-        for (TupleFilter f : filters)
-            filter.addChild(f);
-        return filter;
-    }
-
-    public static ColumnTupleFilter columnFilter(TblColRef col) {
-        return new ColumnTupleFilter(col);
-    }
-
-    public static ConstantTupleFilter constFilter(int id) {
-        return new ConstantTupleFilter(idToStr(id));
-    }
-
-    public static ConciseSet set(int... ints) {
-        ConciseSet set = new ConciseSet();
-        for (int i : ints)
-            set.add(i);
-        return set;
-    }
-
-    public static String idToStr(int id) {
-        byte[] bytes = new byte[] { (byte) id };
-        return Dictionary.dictIdToString(bytes, 0, bytes.length);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
deleted file mode 100644
index ac9e995..0000000
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/EndpointAggregationTest.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.storage.hbase.ii.coprocessor.endpoint;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.invertedindex.measure.FixedLenMeasureCodec;
-import org.apache.kylin.measure.MeasureAggregator;
-import org.apache.kylin.metadata.datatype.LongMutable;
-import org.apache.kylin.metadata.model.FunctionDesc;
-import org.apache.kylin.metadata.model.ParameterDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.hbase.common.coprocessor.AggrKey;
-import org.apache.kylin.storage.hbase.common.coprocessor.CoprocessorProjector;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.common.collect.Lists;
-
-/**
- *
- * ii test
- */
-public class EndpointAggregationTest extends LocalFileMetadataTestCase {
-
-    @Before
-    public void setup() throws IOException {
-        this.createTestMetadata();
-    }
-
-    @After
-    public void cleanUp() {
-        cleanupTestMetadata();
-    }
-
-    private List<FunctionDesc> buildAggregations() {
-        List<FunctionDesc> functions = new ArrayList<FunctionDesc>();
-
-        FunctionDesc f1 = new FunctionDesc();
-        f1.setExpression("SUM");
-        ParameterDesc p1 = new ParameterDesc();
-        p1.setType("column");
-        p1.setValue("PRICE");
-        f1.setParameter(p1);
-        f1.setReturnType("decimal");
-        functions.add(f1);
-
-        FunctionDesc f2 = new FunctionDesc();
-        f2.setExpression("MIN");
-        ParameterDesc p2 = new ParameterDesc();
-        p2.setType("column");
-        p2.setValue("PRICE");
-        f2.setParameter(p2);
-        f2.setReturnType("decimal");
-        functions.add(f2);
-
-        return functions;
-    }
-
-    @Test
-    public void testSerializeAggregator() {
-        final IIInstance ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii_left_join");
-        if (ii.getFirstSegment() == null) {
-            IISegment segment = IIManager.getInstance(getTestConfig()).buildSegment(ii, 0, System.currentTimeMillis());
-            ii.getSegments().add(segment);
-        }
-        final TableRecordInfo tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
-        final EndpointAggregators endpointAggregators = EndpointAggregators.fromFunctions(tableRecordInfo, buildAggregations());
-        byte[] x = EndpointAggregators.serialize(endpointAggregators);
-        final EndpointAggregators result = EndpointAggregators.deserialize(x);
-        assertArrayEquals(endpointAggregators.dataTypes, result.dataTypes);
-        assertArrayEquals(endpointAggregators.funcNames, result.funcNames);
-        assertArrayEquals(endpointAggregators.metricValues, result.metricValues);
-        assertEquals(endpointAggregators.rawTableRecord.getBytes().length, result.rawTableRecord.getBytes().length);
-    }
-
-    private byte[] randomBytes(final int length) {
-        byte[] result = new byte[length];
-        Random random = new Random();
-        for (int i = 0; i < length; i++) {
-            random.nextBytes(result);
-        }
-        return result;
-    }
-
-    private List<byte[]> mockData(TableRecordInfo tableRecordInfo) {
-        ArrayList<byte[]> result = Lists.newArrayList();
-        final int priceColumnIndex = 23;
-        final int groupByColumnIndex = 0;
-        TblColRef column = tableRecordInfo.getDescriptor().listAllColumns().get(priceColumnIndex);
-        FixedLenMeasureCodec codec = FixedLenMeasureCodec.get(column.getType());
-
-        byte[] data = randomBytes(tableRecordInfo.getDigest().getByteFormLen());
-        byte[] groupOne = randomBytes(tableRecordInfo.getDigest().length(groupByColumnIndex));
-        codec.write(codec.valueOf("199.99"), data, tableRecordInfo.getDigest().offset(priceColumnIndex));
-        System.arraycopy(groupOne, 0, data, tableRecordInfo.getDigest().offset(groupByColumnIndex), groupOne.length);
-        result.add(data);
-
-        data = randomBytes(tableRecordInfo.getDigest().getByteFormLen());
-        codec.write(codec.valueOf("2.09"), data, tableRecordInfo.getDigest().offset(priceColumnIndex));
-        System.arraycopy(groupOne, 0, data, tableRecordInfo.getDigest().offset(groupByColumnIndex), groupOne.length);
-        result.add(data);
-
-        byte[] groupTwo = randomBytes(tableRecordInfo.getDigest().length(groupByColumnIndex));
-        data = randomBytes(tableRecordInfo.getDigest().getByteFormLen());
-        System.arraycopy(groupTwo, 0, data, tableRecordInfo.getDigest().offset(groupByColumnIndex), groupTwo.length);
-        codec.write(codec.valueOf("100"), data, tableRecordInfo.getDigest().offset(priceColumnIndex));
-        result.add(data);
-
-        return result;
-    }
-
-    @Test
-    public void basicTest() {
-        final IIInstance ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii_left_join");
-        if (ii.getFirstSegment() == null) {
-            IISegment segment = IIManager.getInstance(getTestConfig()).buildSegment(ii, 0, System.currentTimeMillis());
-            ii.getSegments().add(segment);
-        }
-        final TableRecordInfo tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
-        final EndpointAggregators aggregators = EndpointAggregators.fromFunctions(tableRecordInfo, buildAggregations());
-        final EndpointAggregationCache aggCache = new EndpointAggregationCache(aggregators);
-        final Collection<TblColRef> dims = new HashSet<>();
-        final TblColRef groupByColumn = ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_FORMAT_NAME");
-        dims.add(groupByColumn);
-        CoprocessorProjector projector = CoprocessorProjector.makeForEndpoint(tableRecordInfo, dims);
-        List<byte[]> rawData = mockData(tableRecordInfo);
-        for (int i = 0; i < rawData.size(); ++i) {
-            byte[] data = rawData.get(i);
-            AggrKey aggKey = projector.getAggrKey(data);
-            MeasureAggregator[] bufs = aggCache.getBuffer(aggKey);
-            aggregators.aggregate(bufs, data);
-            aggCache.checkMemoryUsage();
-        }
-        long sumTotal = 0;
-        long minTotal = 0;
-        for (Map.Entry<AggrKey, MeasureAggregator[]> entry : aggCache.getAllEntries()) {
-            sumTotal += ((LongMutable) entry.getValue()[0].getState()).get();
-            minTotal += ((LongMutable) entry.getValue()[1].getState()).get();
-
-        }
-        assertEquals(3020800, sumTotal);
-        assertEquals(1020900, minTotal);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TableRecordInfoTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TableRecordInfoTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TableRecordInfoTest.java
deleted file mode 100644
index 3e34495..0000000
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TableRecordInfoTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.storage.hbase.ii.coprocessor.endpoint;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.invertedindex.index.TableRecordInfoDigest;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- */
-public class TableRecordInfoTest extends LocalFileMetadataTestCase {
-    IIInstance ii;
-    TableRecordInfo tableRecordInfo;
-
-    @Before
-    public void setup() throws IOException {
-        this.createTestMetadata();
-        this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii_left_join");
-        if (ii.getFirstSegment() == null) {
-            IISegment segment = IIManager.getInstance(getTestConfig()).buildSegment(ii, 0, System.currentTimeMillis());
-            ii.getSegments().add(segment);
-        }
-        this.tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
-    }
-
-    @Test
-    public void testSerialize() {
-        byte[] x = TableRecordInfoDigest.serialize(this.tableRecordInfo.getDigest());
-        TableRecordInfoDigest d = TableRecordInfoDigest.deserialize(x);
-        assertEquals(d.getColumnCount(), 25);
-    }
-
-    @After
-    public void cleanUp() {
-        cleanupTestMetadata();
-    }
-}

http://git-wip-us.apache.org/repos/asf/kylin/blob/2cc0b9c4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TsConditionExtractorTest.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TsConditionExtractorTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TsConditionExtractorTest.java
deleted file mode 100644
index 4e5a9d9..0000000
--- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/coprocessor/endpoint/TsConditionExtractorTest.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-package org.apache.kylin.storage.hbase.ii.coprocessor.endpoint;
-
-import java.io.IOException;
-
-import org.apache.kylin.common.util.LocalFileMetadataTestCase;
-import org.apache.kylin.invertedindex.IIInstance;
-import org.apache.kylin.invertedindex.IIManager;
-import org.apache.kylin.invertedindex.IISegment;
-import org.apache.kylin.invertedindex.index.TableRecordInfo;
-import org.apache.kylin.metadata.MetadataManager;
-import org.apache.kylin.metadata.filter.ColumnTupleFilter;
-import org.apache.kylin.metadata.filter.CompareTupleFilter;
-import org.apache.kylin.metadata.filter.ConstantTupleFilter;
-import org.apache.kylin.metadata.filter.LogicalTupleFilter;
-import org.apache.kylin.metadata.filter.TupleFilter;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.storage.cache.TsConditionExtractor;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.google.common.collect.BoundType;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Range;
-
-/**
- *
- * ii test
- */
-public class TsConditionExtractorTest extends LocalFileMetadataTestCase {
-    IIInstance ii;
-    TableRecordInfo tableRecordInfo;
-    TableDesc factTableDesc;
-
-    TblColRef calDt;
-    TblColRef siteId;
-
-    @Before
-    public void setup() throws IOException {
-        this.createTestMetadata();
-        this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii_left_join");
-        if (ii.getFirstSegment() == null) {
-            IISegment segment = IIManager.getInstance(getTestConfig()).buildSegment(ii, 0, System.currentTimeMillis());
-            ii.getSegments().add(segment);
-        }
-        this.tableRecordInfo = new TableRecordInfo(ii.getFirstSegment());
-        this.factTableDesc = MetadataManager.getInstance(getTestConfig()).getTableDesc("DEFAULT.TEST_KYLIN_FACT");
-        this.calDt = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "CAL_DT");
-        this.siteId = this.ii.getDescriptor().findColumnRef("DEFAULT.TEST_KYLIN_FACT", "LSTG_SITE_ID");
-    }
-
-    @After
-    public void cleanUp() {
-        cleanupTestMetadata();
-    }
-
-    @Test
-    public void testSimpleFilter() {
-        CompareTupleFilter aFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
-        aFilter.addChild(new ColumnTupleFilter(calDt));
-        aFilter.addChild(new ConstantTupleFilter("2000-01-01"));
-
-        Range<Long> range = TsConditionExtractor.extractTsCondition(ii.getAllColumns().get(tableRecordInfo.getTimestampColumn()), aFilter);
-        Assert.assertEquals(946684800000L, range.lowerEndpoint().longValue());
-        Assert.assertEquals(BoundType.OPEN, range.lowerBoundType());
-        Assert.assertTrue(!range.hasUpperBound());
-    }
-
-    @Test
-    public void testComplexFilter() {
-        CompareTupleFilter aFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
-        aFilter.addChild(new ColumnTupleFilter(calDt));
-        aFilter.addChild(new ConstantTupleFilter("2000-01-01"));
-
-        CompareTupleFilter bFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        bFilter.addChild(new ColumnTupleFilter(calDt));
-        bFilter.addChild(new ConstantTupleFilter("2000-01-03"));
-
-        CompareTupleFilter cFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        cFilter.addChild(new ColumnTupleFilter(calDt));
-        cFilter.addChild(new ConstantTupleFilter("2000-01-02"));
-
-        CompareTupleFilter dFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
-        dFilter.addChild(new ColumnTupleFilter(siteId));
-        dFilter.addChild(new ConstantTupleFilter("0"));
-
-        LogicalTupleFilter rootFilter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        rootFilter.addChildren(Lists.newArrayList(aFilter, bFilter, cFilter, dFilter));
-
-        Range<Long> range = TsConditionExtractor.extractTsCondition(ii.getAllColumns().get(tableRecordInfo.getTimestampColumn()), rootFilter);
-
-        Assert.assertEquals(946684800000L, range.lowerEndpoint().longValue());
-        Assert.assertEquals(946771200000L, range.upperEndpoint().longValue());
-        Assert.assertEquals(BoundType.OPEN, range.lowerBoundType());
-        Assert.assertEquals(BoundType.CLOSED, range.upperBoundType());
-    }
-
-    @Test
-    public void testMoreComplexFilter() {
-        CompareTupleFilter aFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
-        aFilter.addChild(new ColumnTupleFilter(calDt));
-        aFilter.addChild(new ConstantTupleFilter("2000-01-01"));
-
-        CompareTupleFilter bFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        bFilter.addChild(new ColumnTupleFilter(calDt));
-        bFilter.addChild(new ConstantTupleFilter("2000-01-04"));
-
-        CompareTupleFilter cFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        cFilter.addChild(new ColumnTupleFilter(calDt));
-        cFilter.addChild(new ConstantTupleFilter("2000-01-03"));
-
-        CompareTupleFilter dFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
-        dFilter.addChild(new ColumnTupleFilter(siteId));
-        dFilter.addChild(new ConstantTupleFilter("0"));
-
-        LogicalTupleFilter subRoot = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        subRoot.addChildren(Lists.newArrayList(aFilter, bFilter, cFilter, dFilter));
-
-        CompareTupleFilter outFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        outFilter.addChild(new ColumnTupleFilter(calDt));
-        outFilter.addChild(new ConstantTupleFilter("2000-01-02"));
-
-        LogicalTupleFilter root = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        root.addChildren(Lists.newArrayList(subRoot, outFilter));
-
-        Range<Long> range = TsConditionExtractor.extractTsCondition(ii.getAllColumns().get(tableRecordInfo.getTimestampColumn()), root);
-
-        Assert.assertEquals(946684800000L, range.lowerEndpoint().longValue());
-        Assert.assertEquals(946771200000L, range.upperEndpoint().longValue());
-        Assert.assertEquals(BoundType.OPEN, range.lowerBoundType());
-        Assert.assertEquals(BoundType.CLOSED, range.upperBoundType());
-    }
-
-    @Test
-    public void testComplexConflictFilter() {
-        CompareTupleFilter aFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
-        aFilter.addChild(new ColumnTupleFilter(calDt));
-        aFilter.addChild(new ConstantTupleFilter("2000-01-01"));
-
-        CompareTupleFilter bFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        bFilter.addChild(new ColumnTupleFilter(calDt));
-        bFilter.addChild(new ConstantTupleFilter("1999-01-03"));
-
-        CompareTupleFilter cFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        cFilter.addChild(new ColumnTupleFilter(calDt));
-        cFilter.addChild(new ConstantTupleFilter("2000-01-02"));
-
-        CompareTupleFilter dFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
-        dFilter.addChild(new ColumnTupleFilter(siteId));
-        dFilter.addChild(new ConstantTupleFilter("0"));
-
-        LogicalTupleFilter rootFilter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        rootFilter.addChildren(Lists.newArrayList(aFilter, bFilter, cFilter, dFilter));
-
-        Range<Long> range = TsConditionExtractor.extractTsCondition(ii.getAllColumns().get(tableRecordInfo.getTimestampColumn()), rootFilter);
-
-        Assert.assertTrue(range == null);
-
-    }
-
-    @Test
-    public void testMoreComplexConflictFilter() {
-        CompareTupleFilter aFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
-        aFilter.addChild(new ColumnTupleFilter(calDt));
-        aFilter.addChild(new ConstantTupleFilter("2000-01-01"));
-
-        CompareTupleFilter bFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        bFilter.addChild(new ColumnTupleFilter(calDt));
-        bFilter.addChild(new ConstantTupleFilter("2000-01-04"));
-
-        CompareTupleFilter cFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        cFilter.addChild(new ColumnTupleFilter(calDt));
-        cFilter.addChild(new ConstantTupleFilter("2000-01-03"));
-
-        CompareTupleFilter dFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
-        dFilter.addChild(new ColumnTupleFilter(siteId));
-        dFilter.addChild(new ConstantTupleFilter("0"));
-
-        LogicalTupleFilter subRoot = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        subRoot.addChildren(Lists.newArrayList(aFilter, bFilter, cFilter, dFilter));
-
-        CompareTupleFilter outFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE);
-        outFilter.addChild(new ColumnTupleFilter(calDt));
-        outFilter.addChild(new ConstantTupleFilter("1999-01-02"));
-
-        LogicalTupleFilter root = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
-        root.addChildren(Lists.newArrayList(subRoot, outFilter));
-
-        Range<Long> range = TsConditionExtractor.extractTsCondition(ii.getAllColumns().get(tableRecordInfo.getTimestampColumn()), root);
-
-        Assert.assertTrue(range == null);
-
-    }
-}
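
For context, a minimal sketch (not part of this commit) of the range semantics the deleted test asserted: extractTsCondition collapses AND-ed CAL_DT comparisons into a single Guava Range over epoch milliseconds, with GT producing an open lower bound and LTE a closed upper bound, and returns null when the bounds conflict. The class name TsRangeSketch, the main method, and the UTC reading of the millisecond constants are illustrative assumptions; only the Range/BoundType API and the asserted values come from the test above.

import com.google.common.collect.BoundType;
import com.google.common.collect.Range;

// Illustrative only; mirrors the bounds asserted by the deleted testComplexFilter:
// CAL_DT > '2000-01-01' AND CAL_DT <= '2000-01-02'  ->  (946684800000 .. 946771200000]
public class TsRangeSketch {
    public static void main(String[] args) {
        long lower = 946684800000L; // 2000-01-01T00:00:00Z, value asserted in the test
        long upper = 946771200000L; // 2000-01-02T00:00:00Z, value asserted in the test
        Range<Long> range = Range.range(lower, BoundType.OPEN, upper, BoundType.CLOSED);
        System.out.println(range);                 // (946684800000..946771200000]
        System.out.println(range.contains(lower)); // false - lower bound is exclusive (GT)
        System.out.println(range.contains(upper)); // true  - upper bound is inclusive (LTE)
    }
}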
