catch up with cleanup jobs

Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/03f38579
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/03f38579
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/03f38579

Branch: refs/heads/KYLIN-1971
Commit: 03f385795fd11daaef637cd9be837158cebc2c9c
Parents: b80762c
Author: Hongbin Ma <mahong...@apache.org>
Authored: Thu Nov 3 14:11:34 2016 +0800
Committer: Hongbin Ma <mahong...@apache.org>
Committed: Thu Nov 3 14:11:44 2016 +0800

----------------------------------------------------------------------
 .../apache/kylin/tool/ExtendCubeToHybridCLI.java   |  2 +-
 .../org/apache/kylin/tool/MetadataCleanupJob.java  |  2 +-
 .../org/apache/kylin/tool/StorageCleanupJob.java   | 17 +++++------------
 3 files changed, 7 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kylin/blob/03f38579/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
index 27fa973..dbf367f 100644
--- a/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
+++ b/tool/src/main/java/org/apache/kylin/tool/ExtendCubeToHybridCLI.java
@@ -170,7 +170,7 @@ public class ExtendCubeToHybridCLI {
         CubeDesc newCubeDesc = CubeDesc.getCopyOf(cubeDesc);
         newCubeDesc.setName(newCubeDescName);
         newCubeDesc.updateRandomUuid();
-        newCubeDesc.init(kylinConfig, metadataManager.getAllTablesMap());
+        newCubeDesc.init(kylinConfig);
         newCubeDesc.setPartitionDateEnd(partitionDate);
         newCubeDesc.calculateSignature();
         cubeDescManager.createCubeDesc(newCubeDesc);

http://git-wip-us.apache.org/repos/asf/kylin/blob/03f38579/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java b/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
index 94962ff..7040dbb 100644
--- a/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
+++ b/tool/src/main/java/org/apache/kylin/tool/MetadataCleanupJob.java
@@ -54,7 +54,7 @@ public class MetadataCleanupJob extends AbstractHadoopJob {
 
     private KylinConfig config = null;
 
-    public static final long TIME_THREADSHOLD = 2 * 24 * 3600 * 1000L; // 2 days
+    public static final long TIME_THREADSHOLD = 1 * 3600 * 1000L; // 1 hour
     public static final long TIME_THREADSHOLD_FOR_JOB = 30 * 24 * 3600 * 1000L; // 30 days
 
     /*

http://git-wip-us.apache.org/repos/asf/kylin/blob/03f38579/tool/src/main/java/org/apache/kylin/tool/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/tool/src/main/java/org/apache/kylin/tool/StorageCleanupJob.java b/tool/src/main/java/org/apache/kylin/tool/StorageCleanupJob.java
index 2a2d1f3..3f82e94 100644
--- a/tool/src/main/java/org/apache/kylin/tool/StorageCleanupJob.java
+++ b/tool/src/main/java/org/apache/kylin/tool/StorageCleanupJob.java
@@ -35,7 +35,6 @@ import javax.annotation.Nullable;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -71,10 +70,10 @@ public class StorageCleanupJob extends AbstractApplication {
 
     @SuppressWarnings("static-access")
     protected static final Option OPTION_DELETE = OptionBuilder.withArgName("delete").hasArg().isRequired(false).withDescription("Delete the unused storage").create("delete");
-    protected static final Option OPTION_FORCE = OptionBuilder.withArgName("force").hasArg().isRequired(false).withDescription("Warning: will delete any intermediate hive tables").create("force");
+    protected static final Option OPTION_FORCE = OptionBuilder.withArgName("force").hasArg().isRequired(false).withDescription("Warning: will delete all kylin intermediate hive tables").create("force");
 
     protected static final Logger logger = LoggerFactory.getLogger(StorageCleanupJob.class);
-    public static final int TIME_THRESHOLD_DELETE_HTABLE = 10; // Unit minute
+    public static final int deleteTimeout = 10; // Unit minute
 
     protected boolean delete = false;
     protected boolean force = false;
@@ -82,7 +81,6 @@ public class StorageCleanupJob extends AbstractApplication {
 
     private void cleanUnusedHBaseTables(Configuration conf) throws IOException {
         CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
-        long TIME_THREADSHOLD = KylinConfig.getInstanceFromEnv().getStorageCleanupTimeThreshold();
         // get all kylin hbase tables
         HBaseAdmin hbaseAdmin = new HBaseAdmin(conf);
         String tableNamePrefix = IRealizationConstants.SharedHbaseStorageLocationPrefix;
@@ -90,14 +88,9 @@ public class StorageCleanupJob extends AbstractApplication {
         List<String> allTablesNeedToBeDropped = new ArrayList<String>();
         for (HTableDescriptor desc : tableDescriptors) {
             String host = desc.getValue(IRealizationConstants.HTableTag);
-            String creationTime = desc.getValue(IRealizationConstants.HTableCreationTime);
             if (KylinConfig.getInstanceFromEnv().getMetadataUrlPrefix().equalsIgnoreCase(host)) {
                 //only take care htables that belongs to self, and created more than 2 days
-                if (StringUtils.isEmpty(creationTime) || (System.currentTimeMillis() - Long.valueOf(creationTime) > TIME_THREADSHOLD)) {
-                    allTablesNeedToBeDropped.add(desc.getTableName().getNameAsString());
-                } else {
-                    logger.info("Exclude table " + desc.getTableName().getNameAsString() + " from drop list, as it is newly created");
-                }
+                allTablesNeedToBeDropped.add(desc.getTableName().getNameAsString());
             }
         }
 
@@ -119,9 +112,9 @@ public class StorageCleanupJob extends AbstractApplication {
                 FutureTask futureTask = new FutureTask(new DeleteHTableRunnable(hbaseAdmin, htableName));
                 executorService.execute(futureTask);
                 try {
-                    futureTask.get(TIME_THRESHOLD_DELETE_HTABLE, TimeUnit.MINUTES);
+                    futureTask.get(deleteTimeout, TimeUnit.MINUTES);
                 } catch (TimeoutException e) {
-                    logger.warn("It fails to delete htable " + htableName + ", for it cost more than " + TIME_THRESHOLD_DELETE_HTABLE + " minutes!");
+                    logger.warn("It fails to delete htable " + htableName + ", for it cost more than " + deleteTimeout + " minutes!");
                     futureTask.cancel(true);
                 } catch (Exception e) {
                     e.printStackTrace();

Reply via email to