KYLIN-1054 Support Hive client Beeline
Project: http://git-wip-us.apache.org/repos/asf/kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/kylin/commit/f4bd22a7
Tree: http://git-wip-us.apache.org/repos/asf/kylin/tree/f4bd22a7
Diff: http://git-wip-us.apache.org/repos/asf/kylin/diff/f4bd22a7

Branch: refs/heads/1.x-HBase1.1.3
Commit: f4bd22a7acde3f2bffc8ca651a986d9226268938
Parents: cb9782d
Author: lidongsjtu <[email protected]>
Authored: Wed Feb 24 15:02:48 2016 +0800
Committer: lidongsjtu <[email protected]>
Committed: Thu Feb 25 13:53:21 2016 +0800

----------------------------------------------------------------------
 bin/find-hive-dependency.sh                     |  13 ++-
 .../org/apache/kylin/common/KylinConfig.java    |   8 ++
 conf/kylin.properties                           |   4 +
 .../test_case_data/sandbox/kylin.properties     |   4 +
 .../apache/kylin/job/AbstractJobBuilder.java    |  22 ++--
 .../apache/kylin/job/common/HiveCmdBuilder.java | 100 +++++++++++++++++++
 .../kylin/job/cube/GarbageCollectionStep.java   |  11 +-
 .../job/hadoop/cube/StorageCleanupJob.java      |  22 ++--
 .../hadoop/invertedindex/IIFlattenHiveJob.java  |  19 ++--
 .../kylin/job/BuildCubeWithEngineTest.java      |  16 ++-
 .../apache/kylin/job/HiveCmdBuilderTest.java    |  80 +++++++++++++++
 11 files changed, 258 insertions(+), 41 deletions(-)
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/bin/find-hive-dependency.sh
----------------------------------------------------------------------
diff --git a/bin/find-hive-dependency.sh b/bin/find-hive-dependency.sh
index 5994dda..7e434ba 100755
--- a/bin/find-hive-dependency.sh
+++ b/bin/find-hive-dependency.sh
@@ -17,7 +17,17 @@
 # limitations under the License.
 #
 
-hive_env=`hive -e set | grep 'env:CLASSPATH'`
+client_mode=`sh ${KYLIN_HOME}/bin/get-properties.sh kylin.hive.client`
+hive_env=
+
+if [ "${client_mode}" == "cli" ]
+then
+    hive_env=`hive -e set | grep 'env:CLASSPATH'`
+elif [ "${client_mode}" == "beeline" ]
+then
+    beeline_params=`sh ${KYLIN_HOME}/bin/get-properties.sh kylin.hive.beeline.params`
+    hive_env=`beeline ${beeline_params} --outputformat=dsv -e set | grep 'env:CLASSPATH'`
+fi
 
 hive_classpath=`echo $hive_env | grep 'env:CLASSPATH' | awk -F '=' '{print $2}'`
 arr=(`echo $hive_classpath | cut -d ":" --output-delimiter=" " -f 1-`)
@@ -63,7 +73,6 @@ then
     exit 1
 fi
 
-
 hive_lib=`find -L "$(dirname $hive_exec_path)" -name '*.jar' ! -name '*calcite*' -printf '%p:' | sed 's/:$//'`
 hive_dependency=${hive_conf_path}:${hive_lib}:${hcatalog}
 echo "hive dependency: $hive_dependency"
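With the change above, find-hive-dependency.sh resolves Hive's CLASSPATH through Beeline whenever kylin.hive.client is set to beeline. The snippet below is a hypothetical manual check of that Beeline branch, not part of this commit; the user name and JDBC URL are placeholders for whatever kylin.hive.beeline.params holds on your cluster:

    #!/bin/bash
    # Hypothetical smoke test for the beeline branch of find-hive-dependency.sh.
    beeline_params="-n hive -u jdbc:hive2://localhost:10000"

    # Same probe the patched script runs: dump Hive settings in dsv format, keep the CLASSPATH entry.
    hive_env=`beeline ${beeline_params} --outputformat=dsv -e set | grep 'env:CLASSPATH'`

    # Split the value into the individual entries the script later scans for jars.
    echo ${hive_env} | awk -F '=' '{print $2}' | tr ':' '\n'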
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/common/src/main/java/org/apache/kylin/common/KylinConfig.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/kylin/common/KylinConfig.java b/common/src/main/java/org/apache/kylin/common/KylinConfig.java
index 59cb86b..7203065 100644
--- a/common/src/main/java/org/apache/kylin/common/KylinConfig.java
+++ b/common/src/main/java/org/apache/kylin/common/KylinConfig.java
@@ -289,6 +289,14 @@ public class KylinConfig {
         return getOptional("kylin.route.hive.password", "");
     }
 
+    public String getHiveClientMode() {
+        return getOptional("kylin.hive.client", "cli");
+    }
+
+    public String getHiveBeelineParams() {
+        return getOptional("kylin.hive.beeline.params", "");
+    }
+
     public String getStorageUrl() {
         return storageUrl;
     }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/conf/kylin.properties
----------------------------------------------------------------------
diff --git a/conf/kylin.properties b/conf/kylin.properties
index e0727ed..07af97b 100644
--- a/conf/kylin.properties
+++ b/conf/kylin.properties
@@ -70,6 +70,10 @@ kylin.job.status.with.kerberos=false
 # Hive database name for putting the intermediate flat tables
 kylin.job.hive.database.for.intermediatetable=default
 
+# Hive client modes: cli or beeline, cli by default
+kylin.hive.client=
+kylin.hive.beeline.params=
+
 #default compression codec for htable,snappy,lzo,gzip,lz4
 kylin.hbase.default.compression.codec=snappy

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/examples/test_case_data/sandbox/kylin.properties
----------------------------------------------------------------------
diff --git a/examples/test_case_data/sandbox/kylin.properties b/examples/test_case_data/sandbox/kylin.properties
index 3a7db5e..1901a64 100644
--- a/examples/test_case_data/sandbox/kylin.properties
+++ b/examples/test_case_data/sandbox/kylin.properties
@@ -59,6 +59,10 @@ kylin.job.yarn.app.rest.check.interval.seconds=10
 # Hive database name for putting the intermediate flat tables
 kylin.job.hive.database.for.intermediatetable=default
 
+# Hive client modes: cli or beeline, cli by default
+kylin.hive.client=
+kylin.hive.beeline.params=-u 'jdbc:hive2://localhost:10000'
+
 #default compression codec for htable,snappy,lzo,gzip,lz4
 kylin.hbase.default.compression.codec=gzip
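Both new properties ship empty in conf/kylin.properties, so existing deployments keep using the Hive CLI; the sandbox config above shows the Beeline variant used by the integration tests. A possible Beeline configuration for a real cluster is sketched below; the host, port and user are illustrative placeholders, not values taken from this commit:

    # Route all Hive statements through Beeline instead of the Hive CLI
    kylin.hive.client=beeline
    # Passed verbatim to the beeline command line (connection URL, user, etc.)
    kylin.hive.beeline.params=-n hive -u 'jdbc:hive2://your-hiveserver2-host:10000'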
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java b/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java
index 87c4705..1c9d019 100644
--- a/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java
+++ b/job/src/main/java/org/apache/kylin/job/AbstractJobBuilder.java
@@ -20,9 +20,7 @@ package org.apache.kylin.job;
 
 import java.io.IOException;
 
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.job.common.HiveCmdBuilder;
 import org.apache.kylin.job.common.ShellExecutable;
 import org.apache.kylin.job.constant.ExecutableConstants;
 import org.apache.kylin.job.engine.JobEngineConfig;
@@ -63,7 +61,6 @@ public abstract class AbstractJobBuilder {
     }
 
     protected AbstractExecutable createIntermediateHiveTableStep(IJoinedFlatTableDesc intermediateTableDesc, String jobId) {
-
         final String useDatabaseHql = "USE " + engineConfig.getConfig().getHiveDatabaseForIntermediateTable() + ";";
         final String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobId);
         final String createTableHql = JoinedFlatTable.generateCreateTableStatement(intermediateTableDesc, getJobWorkingDir(jobId), jobId);
@@ -75,17 +72,14 @@ public abstract class AbstractJobBuilder {
             throw new RuntimeException("Failed to generate insert data SQL for intermediate table.");
         }
 
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement(useDatabaseHql);
+        hiveCmdBuilder.addStatement(dropTableHql);
+        hiveCmdBuilder.addStatement(createTableHql);
+        hiveCmdBuilder.addStatement(insertDataHqls);
+
         ShellExecutable step = new ShellExecutable();
-        StringBuffer buf = new StringBuffer();
-        buf.append("hive ");
-        buf.append(" -e \"");
-        buf.append(useDatabaseHql + "\n");
-        buf.append(dropTableHql + "\n");
-        buf.append(createTableHql + "\n");
-        buf.append(insertDataHqls + "\n");
-        buf.append("\"");
-
-        step.setCmd(buf.toString());
+        step.setCmd(hiveCmdBuilder.build());
 
         step.setName(ExecutableConstants.STEP_NAME_CREATE_FLAT_HIVE_TABLE);
         return step;

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/job/src/main/java/org/apache/kylin/job/common/HiveCmdBuilder.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/common/HiveCmdBuilder.java b/job/src/main/java/org/apache/kylin/job/common/HiveCmdBuilder.java
new file mode 100644
index 0000000..abfd5e3
--- /dev/null
+++ b/job/src/main/java/org/apache/kylin/job/common/HiveCmdBuilder.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.job.common;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.KylinConfig;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Created by dongli on 2/21/16.
+ */
+public class HiveCmdBuilder {
+    public enum HiveClientMode {
+        CLI, BEELINE
+    }
+
+    private HiveClientMode clientMode;
+    private KylinConfig kylinConfig;
+    final private ArrayList<String> statements = Lists.newArrayList();
+
+    public HiveCmdBuilder() {
+        kylinConfig = KylinConfig.getInstanceFromEnv();
+        clientMode = HiveClientMode.valueOf(kylinConfig.getHiveClientMode().toUpperCase());
+    }
+
+    public String build() {
+        StringBuffer buf = new StringBuffer();
+
+        switch (clientMode) {
+        case CLI:
+            buf.append("hive -e \"");
+            for (String statement : statements) {
+                buf.append(statement).append("\n");
+            }
+            buf.append("\"");
+            break;
+        case BEELINE:
+            BufferedWriter bw = null;
+            try {
+                File tmpHql = File.createTempFile("beeline", ".hql");
+                bw = new BufferedWriter(new FileWriter(tmpHql));
+                for (String statement : statements) {
+                    bw.write(statement);
+                    bw.newLine();
+                }
+                buf.append("beeline ");
+                buf.append(kylinConfig.getHiveBeelineParams());
+                buf.append(" -f ");
+                buf.append(tmpHql.getAbsolutePath());
+                buf.append(";rm -f ");
+                buf.append(tmpHql.getAbsolutePath());
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            } finally {
+                IOUtils.closeQuietly(bw);
+            }
+            break;
+        default:
+            throw new RuntimeException("Hive client cannot be recognized: " + clientMode);
+        }
+
+        return buf.toString();
+    }
+
+    public void reset() {
+        statements.clear();
+    }
+
+    public void addStatement(String statement) {
+        statements.add(statement);
+    }
+
+    @Override
+    public String toString() {
+        return build();
+    }
+}
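HiveCmdBuilder is now the single place where a list of HQL statements becomes a shell command, keyed off kylin.hive.client. Below is a minimal usage sketch, not part of the commit; it assumes a resolvable KylinConfig (for example KYLIN_CONF pointing at a conf directory) and uses the same system-property override that HiveCmdBuilderTest relies on:

    import org.apache.kylin.job.common.HiveCmdBuilder;

    public class HiveCmdBuilderExample {
        public static void main(String[] args) {
            // Force CLI mode for this run; HiveCmdBuilderTest toggles the same property.
            System.setProperty("kylin.hive.client", "cli");

            HiveCmdBuilder builder = new HiveCmdBuilder();
            builder.addStatement("USE default;");
            builder.addStatement("DROP TABLE IF EXISTS kylin_intermediate_example;");

            // CLI mode wraps the statements as: hive -e "USE default; ..."
            // Beeline mode would instead write them to a temporary .hql file and emit:
            //   beeline <kylin.hive.beeline.params> -f <tmpfile>;rm -f <tmpfile>
            System.out.println(builder.build());

            // reset() clears the accumulated statements so the builder can be reused.
            builder.reset();
        }
    }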
http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java b/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
index f2f1fc0..b076aa7 100644
--- a/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
+++ b/job/src/main/java/org/apache/kylin/job/cube/GarbageCollectionStep.java
@@ -34,6 +34,8 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Bytes;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.job.cmd.ShellCmdOutput;
+import org.apache.kylin.job.common.HiveCmdBuilder;
+import org.apache.kylin.job.common.ShellExecutable;
 import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
@@ -83,8 +85,13 @@ public class GarbageCollectionStep extends AbstractExecutable {
     private void dropHiveTable(ExecutableContext context) throws IOException {
         final String hiveTable = this.getOldHiveTable();
         if (StringUtils.isNotEmpty(hiveTable)) {
-            final String dropSQL = "USE " + KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable() + ";" + " DROP TABLE IF EXISTS " + hiveTable + ";";
-            final String dropHiveCMD = "hive -e \"" + dropSQL + "\"";
+            final String useDatabaseSQL = "USE " + KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable() + ";";
+            final String dropSQL = "DROP TABLE IF EXISTS " + hiveTable + ";";
+
+            final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+            hiveCmdBuilder.addStatement(useDatabaseSQL);
+            hiveCmdBuilder.addStatement(dropSQL);
+            final String dropHiveCMD = hiveCmdBuilder.build();
             logger.info("executing: " + dropHiveCMD);
             ShellCmdOutput shellCmdOutput = new ShellCmdOutput();
             context.getConfig().getCliCommandExecutor().execute(dropHiveCMD, shellCmdOutput);

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
index 3b25ee1..0c87fc6 100644
--- a/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
+++ b/job/src/main/java/org/apache/kylin/job/hadoop/cube/StorageCleanupJob.java
@@ -41,6 +41,8 @@ import org.apache.kylin.invertedindex.IISegment;
 import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.cmd.ICommandOutput;
 import org.apache.kylin.job.cmd.ShellCmd;
+import org.apache.kylin.job.common.HiveCmdBuilder;
+import org.apache.kylin.job.common.ShellExecutable;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.exception.JobException;
 import org.apache.kylin.job.execution.ExecutableState;
@@ -227,13 +229,11 @@ public class StorageCleanupJob extends AbstractHadoopJob {
     private void cleanUnusedIntermediateHiveTable(Configuration conf) throws IOException {
         int uuidLength = 36;
         final String useDatabaseHql = "USE " + KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable() + ";";
-        StringBuilder buf = new StringBuilder();
-        buf.append("hive -e \"");
-        buf.append(useDatabaseHql);
-        buf.append("show tables " + "\'kylin_intermediate_*\'" + "; ");
-        buf.append("\"");
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement(useDatabaseHql);
+        hiveCmdBuilder.addStatement("show tables " + "\'kylin_intermediate_*\'" + "; ");
 
-        ShellCmd cmd = new ShellCmd(buf.toString(), null, null, null, false);
+        ShellCmd cmd = new ShellCmd(hiveCmdBuilder.build(), null, null, null, false);
 
         ICommandOutput output = null;
         try {
@@ -278,15 +278,13 @@ public class StorageCleanupJob extends AbstractHadoopJob {
         }
 
         if (delete == true) {
-            buf.delete(0, buf.length());
-            buf.append("hive -e \"");
-            buf.append(useDatabaseHql);
+            hiveCmdBuilder.reset();
+            hiveCmdBuilder.addStatement(useDatabaseHql);
             for (String delHive : allHiveTablesNeedToBeDeleted) {
-                buf.append("drop table if exists " + delHive + "; ");
+                hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
                 log.info("Remove " + delHive + " from hive tables.");
             }
-            buf.append("\"");
-            cmd = new ShellCmd(buf.toString(), null, null, null, false);
+            cmd = new ShellCmd(hiveCmdBuilder.build(), null, null, null, false);
 
             try {
                 cmd.execute();

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
----------------------------------------------------------------------
diff --git a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
index e9d8a4a..427be79 100644
--- a/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
+++ b/job/src/main/java/org/apache/kylin/job/hadoop/invertedindex/IIFlattenHiveJob.java
@@ -29,6 +29,7 @@ import org.apache.kylin.job.JobInstance;
 import org.apache.kylin.job.JoinedFlatTable;
 import org.apache.kylin.job.cmd.ICommandOutput;
 import org.apache.kylin.job.cmd.ShellCmd;
+import org.apache.kylin.job.common.HiveCmdBuilder;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.hadoop.AbstractHadoopJob;
 import org.apache.kylin.job.hadoop.hive.IIJoinedFlatTableDesc;
@@ -65,18 +66,18 @@ public class IIFlattenHiveJob extends AbstractHadoopJob {
                 JobInstance.getJobWorkingDir(jobUUID, engineConfig.getHdfsWorkingDirectory()), jobUUID);
         String insertDataHqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, jobUUID, engineConfig);
 
-        StringBuffer buf = new StringBuffer();
-        buf.append("hive -e \"");
-        buf.append(useDatabaseHql + "\n");
-        buf.append(dropTableHql + "\n");
-        buf.append(createTableHql + "\n");
-        buf.append(insertDataHqls + "\n");
-        buf.append("\"");
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement(useDatabaseHql);
+        hiveCmdBuilder.addStatement(dropTableHql);
+        hiveCmdBuilder.addStatement(createTableHql);
+        hiveCmdBuilder.addStatement(insertDataHqls);
 
-        System.out.println(buf.toString());
+        final String hiveCmd = hiveCmdBuilder.build();
+
+        System.out.println(hiveCmd);
         System.out.println("========================");
 
-        ShellCmd cmd = new ShellCmd(buf.toString(), null, null, null, false);
+        ShellCmd cmd = new ShellCmd(hiveCmd, null, null, null, false);
         ICommandOutput output = cmd.execute();
         System.out.println(output.getOutput());
         System.out.println(output.getExitCode());

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java b/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
index e8162fe..f02aa7a 100644
--- a/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
+++ b/job/src/test/java/org/apache/kylin/job/BuildCubeWithEngineTest.java
@@ -200,7 +200,7 @@ public class BuildCubeWithEngineTest {
         long date3 = f.parse("2022-01-01").getTime();
         List<String> result = Lists.newArrayList();
         result.add(buildSegment("test_kylin_cube_with_slr_empty", date1, date2));
-        result.add(buildSegment("test_kylin_cube_with_slr_empty", date2, date3));
+        result.add(buildSegment("test_kylin_cube_with_slr_empty", date2, date3, true));
 
         // empty segment
         long date4 = f.parse("2050-01-01").getTime();
@@ -245,7 +245,7 @@ public class BuildCubeWithEngineTest {
         // date is 20220101000000
         dateStart = f.parse("2012-06-01").getTime();
         dateEnd = f.parse("2022-01-01").getTime();
-        result.add(buildSegment(cubeName, dateStart, dateEnd));
+        result.add(buildSegment(cubeName, dateStart, dateEnd, true));
 
         return result;
     }
@@ -283,4 +283,16 @@ public class BuildCubeWithEngineTest {
         return job.getId();
     }
 
+    private String buildSegment(String cubeName, long startDate, long endDate, boolean useBeeline) throws Exception {
+        String jobId = null;
+        if (useBeeline) {
+            System.setProperty("kylin.hive.client", "beeline");
+            jobId = buildSegment(cubeName, startDate, endDate);
+            System.clearProperty("kylin.hive.client");
+        } else {
+            jobId = buildSegment(cubeName, startDate, endDate);
+        }
+
+        return jobId;
+    }
 }

http://git-wip-us.apache.org/repos/asf/kylin/blob/f4bd22a7/job/src/test/java/org/apache/kylin/job/HiveCmdBuilderTest.java
----------------------------------------------------------------------
diff --git a/job/src/test/java/org/apache/kylin/job/HiveCmdBuilderTest.java b/job/src/test/java/org/apache/kylin/job/HiveCmdBuilderTest.java
new file mode 100644
index 0000000..6f89c67
--- /dev/null
+++ b/job/src/test/java/org/apache/kylin/job/HiveCmdBuilderTest.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.job;
+
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.job.common.HiveCmdBuilder;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Created by dongli on 2/22/16.
+ */
+public class HiveCmdBuilderTest extends LocalFileMetadataTestCase {
+
+    @Before
+    public void setup() throws Exception {
+        createTestMetadata();
+    }
+
+    @After
+    public void after() throws Exception {
+        cleanupTestMetadata();
+
+        System.clearProperty("kylin.hive.client");
+        System.clearProperty("kylin.hive.beeline.params");
+    }
+
+    @Test
+    public void testHiveCLI() {
+        System.setProperty("kylin.hive.client", "cli");
+
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement("USE default;");
+        hiveCmdBuilder.addStatement("DROP TABLE test;");
+        hiveCmdBuilder.addStatement("SHOW\n TABLES;");
+
+        assertEquals("hive -e \"USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n\"", hiveCmdBuilder.build());
+    }
+
+    @Test
+    public void testBeeline() throws IOException {
+        System.setProperty("kylin.hive.client", "beeline");
+        System.setProperty("kylin.hive.beeline.params", "-u jdbc_url");
+
+        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
+        hiveCmdBuilder.addStatement("USE default;");
+        hiveCmdBuilder.addStatement("DROP TABLE test;");
+        hiveCmdBuilder.addStatement("SHOW\n TABLES;");
+
+        String cmd = hiveCmdBuilder.build();
+        assertTrue(cmd.startsWith("beeline -u jdbc_url -f") && cmd.contains(";rm -f"));
+
+        String hqlFile = cmd.substring(cmd.lastIndexOf("-f ") + 3).trim();
+        String hqlStatement = FileUtils.readFileToString(new File(hqlFile));
+        assertEquals("USE default;\nDROP TABLE test;\nSHOW\n TABLES;\n", hqlStatement);
+    }
+}
