This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git

commit 18fb8407ae1fadead8ab925734375678b4300c66
Author: slothever <18522955+w...@users.noreply.github.com>
AuthorDate: Wed Apr 10 21:24:09 2024 +0800

    [feature](insert) use optional location and add hive regression test (#33153)
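
    The table/database property for a storage path is renamed from
    'location_uri' to 'location', and the location is now carried as an
    optional value, so table creation without it lets HMS pick its default
    warehouse path. The 'engine' type is inferred from the current catalog
    when not specified, creating hive tables through the internal catalog is
    rejected, SHOW CREATE DATABASE accepts a catalog-qualified database name,
    and the hive DDL regression tests are split into ctas/ddl/write_type
    suites.

    A minimal usage sketch (the catalog, database, table, and hdfs path
    names below are illustrative only, not taken from this patch):

        SWITCH hive;
        CREATE DATABASE IF NOT EXISTS demo_db;
        USE demo_db;
        -- 'location' is optional; when the property is omitted,
        -- HMS chooses its default path for the table
        CREATE TABLE demo_tbl (
          `col1` INT COMMENT 'col1',
          `col2` STRING COMMENT 'col2'
        ) ENGINE=hive
        PROPERTIES (
          'location'='hdfs://loc/db/tbl',
          'file_format'='orc'
        );
        SHOW CREATE DATABASE hive.demo_db;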
---
 fe/fe-core/src/main/cup/sql_parser.cup             |   4 +-
 .../java/org/apache/doris/analysis/DbName.java     |   3 -
 .../apache/doris/analysis/ShowCreateDbStmt.java    |  13 +-
 .../apache/doris/datasource/InternalCatalog.java   |   4 +-
 .../datasource/hive/HiveDatabaseMetadata.java      |   9 +
 .../doris/datasource/hive/HiveMetadataOps.java     |  26 +-
 .../doris/datasource/hive/HiveTableMetadata.java   |  37 +-
 .../org/apache/doris/datasource/hive/HiveUtil.java |  52 +-
 .../doris/nereids/parser/LogicalPlanBuilder.java   |   3 +-
 .../trees/plans/commands/info/CreateTableInfo.java |  45 +-
 .../java/org/apache/doris/qe/ShowExecutor.java     |   6 +-
 .../doris/analysis/ShowCreateDbStmtTest.java       |   5 +-
 .../datasource/hive/HiveDDLAndDMLPlanTest.java     |  22 +-
 .../doris/datasource/hive/HmsCommitTest.java       |  18 +-
 .../java/org/apache/doris/qe/ShowExecutorTest.java |   5 +-
 .../external_table_p0/hive/ddl/test_hive_ctas.out  |  92 +++
 .../external_table_p0/hive/ddl/test_hive_ddl.out   |  60 ++
 .../hive/ddl/test_hive_ddl_and_ctas.out            | 148 -----
 .../hive/ddl/test_hive_write_type.out              |  18 +
 .../hive/ddl/test_hive_ctas.groovy                 | 515 ++++++++++++++++
 .../hive/ddl/test_hive_ddl.groovy                  | 676 +++++++++++++++++++++
 .../hive/ddl/test_hive_ddl_and_ctas.groovy         | 423 -------------
 .../hive/ddl/test_hive_write_type.groovy           | 285 +++++++++
 23 files changed, 1832 insertions(+), 637 deletions(-)

diff --git a/fe/fe-core/src/main/cup/sql_parser.cup b/fe/fe-core/src/main/cup/sql_parser.cup
index 3a4f77fcc7a..26129f2d2c0 100644
--- a/fe/fe-core/src/main/cup/sql_parser.cup
+++ b/fe/fe-core/src/main/cup/sql_parser.cup
@@ -4039,11 +4039,11 @@ show_param ::=
         RESULT = new ShowCreateTableStmt(table, true, false);
     :}
     /* Create database */
-    | KW_CREATE KW_DATABASE ident:db
+    | KW_CREATE KW_DATABASE db_name:db
     {:
         RESULT = new ShowCreateDbStmt(db);
     :}
-    | KW_CREATE KW_SCHEMA ident:db
+    | KW_CREATE KW_SCHEMA db_name:db
     {:
         RESULT = new ShowCreateDbStmt(db);
     :}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DbName.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DbName.java
index 2b8e92ab982..934f2d73435 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DbName.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DbName.java
@@ -14,9 +14,6 @@
 // KIND, either express or implied.  See the License for the
 // specific language governing permissions and limitations
 // under the License.
-// This file is copied from
-// https://github.com/apache/impala/blob/branch-2.9.0/fe/src/main/java/org/apache/impala/TableName.java
-// and modified by Doris
 
 package org.apache.doris.analysis;
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowCreateDbStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowCreateDbStmt.java
index 0cadde094b8..1a51546e186 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowCreateDbStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowCreateDbStmt.java
@@ -40,19 +40,28 @@ public class ShowCreateDbStmt extends ShowStmt {
                    .addColumn(new Column("Create Database", ScalarType.createVarchar(30)))
                     .build();
 
+    private String ctl;
     private String db;
 
-    public ShowCreateDbStmt(String db) {
-        this.db = db;
+    public ShowCreateDbStmt(DbName db) {
+        this.ctl = db.getCtl();
+        this.db = db.getDb();
     }
 
     public String getDb() {
         return db;
     }
 
+    public String getCtl() {
+        return ctl;
+    }
+
     @Override
    public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
         super.analyze(analyzer);
+        if (Strings.isNullOrEmpty(ctl)) {
+            ctl = Env.getCurrentEnv().getCurrentCatalog().getName();
+        }
         if (Strings.isNullOrEmpty(db)) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_DB_NAME, db);
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
index 2bd2f21478b..9295ab56a18 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/InternalCatalog.java
@@ -1129,8 +1129,8 @@ public class InternalCatalog implements CatalogIf<Database> {
             createEsTable(db, stmt);
             return;
         } else if (engineName.equalsIgnoreCase("hive")) {
-            createHiveTable(db, stmt);
-            return;
+            // should use hive catalog to create external hive table
+            throw new UserException("Cannot create hive table in internal 
catalog, should switch to hive catalog.");
         } else if (engineName.equalsIgnoreCase("jdbc")) {
             createJdbcTable(db, stmt);
             return;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveDatabaseMetadata.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveDatabaseMetadata.java
index 50a80db3962..a1a383e0d07 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveDatabaseMetadata.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveDatabaseMetadata.java
@@ -21,6 +21,7 @@ import org.apache.doris.datasource.DatabaseMetadata;
 
 import lombok.Data;
 
+import java.util.HashMap;
 import java.util.Map;
 
 @Data
@@ -29,4 +30,12 @@ public class HiveDatabaseMetadata implements DatabaseMetadata {
     private String locationUri;
     private Map<String, String> properties;
     private String comment;
+
+    public Map<String, String> getProperties() {
+        return properties == null ? new HashMap<>() : properties;
+    }
+
+    public String getComment() {
+        return comment == null ? "" : comment;
+    }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
index 5cf362508e3..c5e74defc1a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
@@ -23,8 +23,10 @@ import org.apache.doris.analysis.DistributionDesc;
 import org.apache.doris.analysis.DropDbStmt;
 import org.apache.doris.analysis.DropTableStmt;
 import org.apache.doris.analysis.HashDistributionDesc;
+import org.apache.doris.analysis.PartitionDesc;
 import org.apache.doris.catalog.Env;
 import org.apache.doris.catalog.JdbcResource;
+import org.apache.doris.catalog.PartitionType;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.ErrorCode;
@@ -48,11 +50,12 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Set;
 import java.util.function.Function;
 
 public class HiveMetadataOps implements ExternalMetadataOps {
-    public static final String LOCATION_URI_KEY = "location_uri";
+    public static final String LOCATION_URI_KEY = "location";
     public static final String FILE_FORMAT_KEY = "file_format";
     public static final Set<String> DORIS_HIVE_KEYS = 
ImmutableSet.of(FILE_FORMAT_KEY, LOCATION_URI_KEY);
     private static final Logger LOG = 
LogManager.getLogger(HiveMetadataOps.class);
@@ -125,6 +128,7 @@ public class HiveMetadataOps implements ExternalMetadataOps {
             if (properties.containsKey(LOCATION_URI_KEY)) {
                 catalogDatabase.setLocationUri(properties.get(LOCATION_URI_KEY));
             }
+            // remove the location property from the map once it has been applied
             properties.remove(LOCATION_URI_KEY);
             catalogDatabase.setProperties(properties);
             catalogDatabase.setComment(properties.getOrDefault("comment", ""));
@@ -185,8 +189,18 @@ public class HiveMetadataOps implements ExternalMetadataOps {
             }
             List<String> partitionColNames = new ArrayList<>();
             if (stmt.getPartitionDesc() != null) {
-                partitionColNames.addAll(stmt.getPartitionDesc().getPartitionColNames());
+                PartitionDesc partitionDesc = stmt.getPartitionDesc();
+                if (partitionDesc.getType() == PartitionType.RANGE) {
+                    throw new UserException("Only support 'LIST' partition 
type in hive catalog.");
+                }
+                partitionColNames.addAll(partitionDesc.getPartitionColNames());
+                if (!partitionDesc.getSinglePartitionDescs().isEmpty()) {
+                    throw new UserException("Partition values expressions is 
not supported in hive catalog.");
+                }
+
             }
+            String comment = stmt.getComment();
+            Optional<String> location = 
Optional.ofNullable(props.getOrDefault(LOCATION_URI_KEY, null));
             HiveTableMetadata hiveTableMeta;
             DistributionDesc bucketInfo = stmt.getDistributionDesc();
             if (bucketInfo != null) {
@@ -194,12 +208,14 @@ public class HiveMetadataOps implements ExternalMetadataOps {
                     if (bucketInfo instanceof HashDistributionDesc) {
                         hiveTableMeta = HiveTableMetadata.of(dbName,
                                 tblName,
+                                location,
                                 stmt.getColumns(),
                                 partitionColNames,
                                ((HashDistributionDesc) bucketInfo).getDistributionColumnNames(),
                                 bucketInfo.getBuckets(),
                                 ddlProps,
-                                fileFormat);
+                                fileFormat,
+                                comment);
                     } else {
                         throw new UserException("External hive table only 
supports hash bucketing");
                     }
@@ -210,10 +226,12 @@ public class HiveMetadataOps implements ExternalMetadataOps {
             } else {
                 hiveTableMeta = HiveTableMetadata.of(dbName,
                         tblName,
+                        location,
                         stmt.getColumns(),
                         partitionColNames,
                         ddlProps,
-                        fileFormat);
+                        fileFormat,
+                        comment);
             }
             client.createTable(hiveTableMeta, stmt.isSetIfNotExists());
             db.setUnInitialized(true);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java
index d8de9d60734..7f7a1ef7273 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveTableMetadata.java
@@ -23,13 +23,16 @@ import org.apache.doris.datasource.TableMetadata;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 
 public class HiveTableMetadata implements TableMetadata {
     private final String dbName;
     private final String tableName;
+    private final Optional<String> location;
     private final List<Column> columns;
     private final List<String> partitionKeys;
     private final String fileFormat;
+    private final String comment;
     private final Map<String, String> properties;
     private List<String> bucketCols;
     private int numBuckets;
@@ -37,20 +40,24 @@ public class HiveTableMetadata implements TableMetadata {
 
     public HiveTableMetadata(String dbName,
                              String tblName,
+                             Optional<String> location,
                              List<Column> columns,
                              List<String> partitionKeys,
                              Map<String, String> props,
-                             String fileFormat) {
-        this(dbName, tblName, columns, partitionKeys, new ArrayList<>(), 0, props, fileFormat);
+                             String fileFormat,
+                             String comment) {
+        this(dbName, tblName, location, columns, partitionKeys, new ArrayList<>(), 0, props, fileFormat, comment);
     }
 
     public HiveTableMetadata(String dbName, String tableName,
+                             Optional<String> location,
                              List<Column> columns,
                              List<String> partitionKeys,
                              List<String> bucketCols,
                              int numBuckets,
                              Map<String, String> props,
-                             String fileFormat) {
+                             String fileFormat,
+                             String comment) {
         this.dbName = dbName;
         this.tableName = tableName;
         this.columns = columns;
@@ -59,6 +66,8 @@ public class HiveTableMetadata implements TableMetadata {
         this.numBuckets = numBuckets;
         this.properties = props;
         this.fileFormat = fileFormat;
+        this.location = location;
+        this.comment = comment;
     }
 
     @Override
@@ -71,6 +80,14 @@ public class HiveTableMetadata implements TableMetadata {
         return tableName;
     }
 
+    public Optional<String> getLocation() {
+        return location;
+    }
+
+    public String getComment() {
+        return comment == null ? "" : comment;
+    }
+
     @Override
     public Map<String, String> getProperties() {
         return properties;
@@ -98,22 +115,26 @@ public class HiveTableMetadata implements TableMetadata {
 
     public static HiveTableMetadata of(String dbName,
                                        String tblName,
+                                       Optional<String> location,
                                        List<Column> columns,
                                        List<String> partitionKeys,
                                        Map<String, String> props,
-                                       String fileFormat) {
-        return new HiveTableMetadata(dbName, tblName, columns, partitionKeys, props, fileFormat);
+                                       String fileFormat,
+                                       String comment) {
+        return new HiveTableMetadata(dbName, tblName, location, columns, partitionKeys, props, fileFormat, comment);
     }
 
     public static HiveTableMetadata of(String dbName,
                                        String tblName,
+                                       Optional<String> location,
                                        List<Column> columns,
                                        List<String> partitionKeys,
                                        List<String> bucketCols,
                                        int numBuckets,
                                        Map<String, String> props,
-                                       String fileFormat) {
-        return new HiveTableMetadata(dbName, tblName, columns, partitionKeys,
-                bucketCols, numBuckets, props, fileFormat);
+                                       String fileFormat,
+                                       String comment) {
+        return new HiveTableMetadata(dbName, tblName, location, columns, partitionKeys,
+                bucketCols, numBuckets, props, fileFormat, comment);
     }
 }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java
index dfbfe786985..bca04215fc4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveUtil.java
@@ -23,17 +23,20 @@ import org.apache.doris.common.UserException;
 import org.apache.doris.datasource.ExternalCatalog;
 import org.apache.doris.fs.remote.BrokerFileSystem;
 import org.apache.doris.fs.remote.RemoteFileSystem;
+import org.apache.doris.nereids.exceptions.AnalysisException;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -53,6 +56,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -61,6 +65,10 @@ import java.util.stream.Collectors;
  */
 public final class HiveUtil {
 
+    public static final String COMPRESSION_KEY = "compression";
+    public static final Set<String> SUPPORTED_ORC_COMPRESSIONS = 
ImmutableSet.of("plain", "zlib", "snappy", "zstd");
+    public static final Set<String> SUPPORTED_PARQUET_COMPRESSIONS = 
ImmutableSet.of("plain", "snappy", "zstd");
+
     private HiveUtil() {
     }
 
@@ -187,12 +195,11 @@ public final class HiveUtil {
         table.setCreateTime(createTime);
         table.setLastAccessTime(createTime);
         // table.setRetention(0);
-        String location = hiveTable.getProperties().get(HiveMetadataOps.LOCATION_URI_KEY);
         Set<String> partitionSet = new HashSet<>(hiveTable.getPartitionKeys());
         Pair<List<FieldSchema>, List<FieldSchema>> hiveSchema = 
toHiveSchema(hiveTable.getColumns(), partitionSet);
 
        table.setSd(toHiveStorageDesc(hiveSchema.first, hiveTable.getBucketCols(), hiveTable.getNumBuckets(),
-                hiveTable.getFileFormat(), location));
+                hiveTable.getFileFormat(), hiveTable.getLocation()));
         table.setPartitionKeys(hiveSchema.second);
 
         // table.setViewOriginalText(hiveTable.getViewSql());
@@ -200,18 +207,44 @@ public final class HiveUtil {
         table.setTableType("MANAGED_TABLE");
         Map<String, String> props = new HashMap<>(hiveTable.getProperties());
        props.put(ExternalCatalog.DORIS_VERSION, ExternalCatalog.DORIS_VERSION_VALUE);
+        setCompressType(hiveTable, props);
+        // set the hive table comment via table properties
+        props.put("comment", hiveTable.getComment());
         table.setParameters(props);
+        if (props.containsKey("owner")) {
+            table.setOwner(props.get("owner"));
+        }
         return table;
     }
 
+    private static void setCompressType(HiveTableMetadata hiveTable, Map<String, String> props) {
+        String fileFormat = hiveTable.getFileFormat();
+        String compression = props.get(COMPRESSION_KEY);
+        // on HMS, the default orc compression type is zlib and the default parquet compression type is snappy.
+        if (fileFormat.equalsIgnoreCase("parquet")) {
+            if (StringUtils.isNotEmpty(compression) && !SUPPORTED_PARQUET_COMPRESSIONS.contains(compression)) {
+                throw new AnalysisException("Unsupported parquet compression type " + compression);
+            }
+            props.putIfAbsent("parquet.compression", StringUtils.isEmpty(compression) ? "snappy" : compression);
+        } else if (fileFormat.equalsIgnoreCase("orc")) {
+            if (StringUtils.isNotEmpty(compression) && !SUPPORTED_ORC_COMPRESSIONS.contains(compression)) {
+                throw new AnalysisException("Unsupported orc compression type " + compression);
+            }
+            props.putIfAbsent("orc.compress", StringUtils.isEmpty(compression) ? "zlib" : compression);
+        } else {
+            throw new IllegalArgumentException("Compression is not supported on " + fileFormat);
+        }
+        // remove the generic compression key if present
+        props.remove(COMPRESSION_KEY);
+    }
+
     private static StorageDescriptor toHiveStorageDesc(List<FieldSchema> columns,
-            List<String> bucketCols, int numBuckets, String fileFormat, String location) {
+            List<String> bucketCols, int numBuckets, String fileFormat, Optional<String> location) {
         StorageDescriptor sd = new StorageDescriptor();
         sd.setCols(columns);
         setFileFormat(fileFormat, sd);
-        if (StringUtils.isNotEmpty(location)) {
-            sd.setLocation(location);
-        }
+        location.ifPresent(sd::setLocation);
+
         sd.setBucketCols(bucketCols);
         sd.setNumBuckets(numBuckets);
         Map<String, String> parameters = new HashMap<>();
@@ -267,8 +300,13 @@ public final class HiveUtil {
         if (StringUtils.isNotEmpty(hiveDb.getLocationUri())) {
             database.setLocationUri(hiveDb.getLocationUri());
         }
-        database.setParameters(hiveDb.getProperties());
+        Map<String, String> props = hiveDb.getProperties();
+        database.setParameters(props);
         database.setDescription(hiveDb.getComment());
+        if (props.containsKey("owner")) {
+            database.setOwnerName(props.get("owner"));
+            database.setOwnerType(PrincipalType.USER);
+        }
         return database;
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
index 874b6a0d432..64c3572606f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java
@@ -2440,7 +2440,8 @@ public class LogicalPlanBuilder extends DorisParserBaseVisitor<Object> {
         } else if (ctx.UNIQUE() != null) {
             keysType = KeysType.UNIQUE_KEYS;
         }
-        String engineName = ctx.engine != null ? ctx.engine.getText().toLowerCase() : "olap";
+        // when engineName is null, it is derived from the current catalog later
+        String engineName = ctx.engine != null ? ctx.engine.getText().toLowerCase() : null;
         int bucketNum = FeConstants.default_bucket_num;
         if (ctx.INTEGER_VALUE() != null) {
             bucketNum = Integer.parseInt(ctx.INTEGER_VALUE().getText());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java
index d2b6a1fd31d..0fc746da4e7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateTableInfo.java
@@ -42,6 +42,8 @@ import org.apache.doris.common.util.ParseUtil;
 import org.apache.doris.common.util.PropertyAnalyzer;
 import org.apache.doris.datasource.InternalCatalog;
 import org.apache.doris.datasource.es.EsUtil;
+import org.apache.doris.datasource.hive.HMSExternalCatalog;
+import org.apache.doris.datasource.iceberg.IcebergExternalCatalog;
 import org.apache.doris.mysql.privilege.PrivPredicate;
 import org.apache.doris.nereids.exceptions.AnalysisException;
 import org.apache.doris.nereids.parser.PartitionTableInfo;
@@ -76,7 +78,7 @@ public class CreateTableInfo {
     private List<ColumnDefinition> columns;
     private final List<IndexDefinition> indexes;
     private final List<String> ctasColumns;
-    private final String engineName;
+    private String engineName;
     private KeysType keysType;
     private List<String> keys;
     private final String comment;
@@ -190,13 +192,22 @@ public class CreateTableInfo {
             throw new AnalysisException("table should contain at least one 
column");
         }
 
+        // analyze catalog name
+        if (Strings.isNullOrEmpty(ctlName)) {
+            if (ctx.getCurrentCatalog() != null) {
+                ctlName = ctx.getCurrentCatalog().getName();
+            } else {
+                ctlName = InternalCatalog.INTERNAL_CATALOG_NAME;
+            }
+        }
+        paddingEngineName(ctlName, ctx);
         checkEngineName();
 
         if (properties == null) {
             properties = Maps.newHashMap();
         }
 
-        if (Strings.isNullOrEmpty(engineName) || engineName.equalsIgnoreCase("olap")) {
+        if (engineName.equalsIgnoreCase("olap")) {
             if (distribution == null) {
                 throw new AnalysisException("Create olap table should contain 
distribution desc");
             }
@@ -209,12 +220,10 @@ public class CreateTableInfo {
             throw new AnalysisException(e.getMessage(), e);
         }
 
-        // analyze catalog name
-        if (Strings.isNullOrEmpty(ctlName)) {
-            if (ctx.getCurrentCatalog() != null) {
-                ctlName = ctx.getCurrentCatalog().getName();
-            } else {
-                ctlName = InternalCatalog.INTERNAL_CATALOG_NAME;
+        if (engineName.equals("olap")) {
+            if (!ctlName.equals(InternalCatalog.INTERNAL_CATALOG_NAME)) {
+                throw new AnalysisException("Cannot create olap table out of 
internal catalog."
+                    + " Make sure 'engine' type is specified when use the 
catalog: " + ctlName);
             }
         }
 
@@ -534,18 +543,34 @@ public class CreateTableInfo {
         }
     }
 
+    private void paddingEngineName(String ctlName, ConnectContext ctx) {
+        if (Strings.isNullOrEmpty(engineName)) {
+            if (InternalCatalog.INTERNAL_CATALOG_NAME.equals(ctlName)) {
+                engineName = "olap";
+            } else if (ctx.getCurrentCatalog() instanceof HMSExternalCatalog) {
+                engineName = "hive";
+            } else if (ctx.getCurrentCatalog() instanceof IcebergExternalCatalog) {
+                engineName = "iceberg";
+            } else {
+                // set to olap by default
+                engineName = "olap";
+            }
+        }
+    }
+
     /**
      * validate ctas definition
      */
    public void validateCreateTableAsSelect(List<String> qualifierTableName, List<ColumnDefinition> columns,
                                             ConnectContext ctx) {
+        String catalogName = qualifierTableName.get(0);
+        paddingEngineName(catalogName, ctx);
         this.columns = Utils.copyRequiredMutableList(columns);
         // bucket num is hard coded 10 to be consistent with legacy planner
         if (engineName.equals("olap") && this.distribution == null) {
-            String catalogName = qualifierTableName.get(0);
             if (!catalogName.equals(InternalCatalog.INTERNAL_CATALOG_NAME)) {
                 throw new AnalysisException("Cannot create olap table out of 
internal catalog."
-                        + "Make sure 'engine' type is specified when use the 
catalog: " + catalogName);
+                        + " Make sure 'engine' type is specified when use the 
catalog: " + catalogName);
             }
             this.distribution = new DistributionDescriptor(true, false, 10,
                     Lists.newArrayList(columns.get(0).getName()));
diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
index fea976e11f8..1a407d2115b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/qe/ShowExecutor.java
@@ -986,7 +986,11 @@ public class ShowExecutor {
         List<List<String>> rows = Lists.newArrayList();
 
         StringBuilder sb = new StringBuilder();
-        CatalogIf catalog = ctx.getCurrentCatalog();
+        String catalogName = showStmt.getCtl();
+        if (Strings.isNullOrEmpty(catalogName)) {
+            catalogName = Env.getCurrentEnv().getCurrentCatalog().getName();
+        }
+        CatalogIf<?> catalog = 
Env.getCurrentEnv().getCatalogMgr().getCatalogOrAnalysisException(catalogName);
         if (catalog instanceof HMSExternalCatalog) {
            String simpleDBName = ClusterNamespace.getNameFromFullName(showStmt.getDb());
            org.apache.hadoop.hive.metastore.api.Database db = ((HMSExternalCatalog) catalog).getClient()
diff --git a/fe/fe-core/src/test/java/org/apache/doris/analysis/ShowCreateDbStmtTest.java b/fe/fe-core/src/test/java/org/apache/doris/analysis/ShowCreateDbStmtTest.java
index 96638801781..dedea00d697 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/analysis/ShowCreateDbStmtTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/analysis/ShowCreateDbStmtTest.java
@@ -19,6 +19,7 @@ package org.apache.doris.analysis;
 
 import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.UserException;
+import org.apache.doris.datasource.InternalCatalog;
 import org.apache.doris.mysql.privilege.AccessControllerManager;
 import org.apache.doris.mysql.privilege.MockedAuth;
 import org.apache.doris.qe.ConnectContext;
@@ -43,7 +44,7 @@ public class ShowCreateDbStmtTest {
 
     @Test
     public void testNormal() throws AnalysisException, UserException {
-        ShowCreateDbStmt stmt = new ShowCreateDbStmt("testDb");
+        ShowCreateDbStmt stmt = new ShowCreateDbStmt(new DbName(InternalCatalog.INTERNAL_CATALOG_NAME, "testDb"));
         stmt.analyze(AccessTestUtil.fetchAdminAnalyzer(true));
         Assert.assertEquals("testDb", stmt.getDb());
         Assert.assertEquals(2, stmt.getMetaData().getColumnCount());
@@ -52,7 +53,7 @@ public class ShowCreateDbStmtTest {
 
     @Test(expected = AnalysisException.class)
     public void testEmptyDb() throws AnalysisException, UserException {
-        ShowCreateDbStmt stmt = new ShowCreateDbStmt("");
+        ShowCreateDbStmt stmt = new ShowCreateDbStmt(new DbName(InternalCatalog.INTERNAL_CATALOG_NAME, ""));
         stmt.analyze(AccessTestUtil.fetchAdminAnalyzer(false));
         Assert.fail("No exception throws.");
     }
diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
index a34e6cc7034..dc35d38af68 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HiveDDLAndDMLPlanTest.java
@@ -182,7 +182,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + "  `col2` STRING COMMENT 'col2'\n"
                 + ")  ENGINE=hive\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='parquet')";
         createTable(createSourceExtUTable, true);
         // partitioned table
@@ -193,7 +193,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + ")  ENGINE=hive\n"
                 + "PARTITION BY LIST (pt1, pt2) ()\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='orc')";
         createTable(createSourceExtTable, true);
 
@@ -258,7 +258,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + "  `col2` INT COMMENT 'col2'"
                 + ")  ENGINE=hive\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='orc')";
         createTable(createTableIfNotExists, true);
         createTable(createTableIfNotExists, true);
@@ -288,7 +288,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + "  `pt2` STRING COMMENT 'pt2'\n"
                 + ")  ENGINE=hive\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='orc')";
         createTable(createUnPartTable, true);
         dropTable("unpart_tbl", true);
@@ -305,7 +305,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + ")  ENGINE=hive\n"
                 + "PARTITION BY LIST (pt1, pt2) ()\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='parquet')";
         createTable(createPartTable, true);
         // check IF NOT EXISTS
@@ -320,7 +320,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + ")  ENGINE=hive\n"
                 + "DISTRIBUTED BY HASH (col2) BUCKETS 16\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='orc')";
         
ExceptionChecker.expectThrowsWithMsg(org.apache.doris.nereids.exceptions.AnalysisException.class,
                 "errCode = 2, detailMessage = errCode = 2,"
@@ -336,7 +336,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + ")  ENGINE=hive\n"
                 + "DISTRIBUTED BY HASH (col2) BUCKETS 16\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='orc')";
         createTable(createBucketedTableOk1, true);
         dropTable("buck_tbl", true);
@@ -352,7 +352,7 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
                 + "PARTITION BY LIST (pt2) ()\n"
                 + "DISTRIBUTED BY HASH (col2) BUCKETS 16\n"
                 + "PROPERTIES (\n"
-                + "  'location_uri'='hdfs://loc/db/tbl',\n"
+                + "  'location'='hdfs://loc/db/tbl',\n"
                 + "  'file_format'='orc')";
         createTable(createBucketedTableOk2, true);
         dropTable("part_buck_tbl", true);
@@ -430,15 +430,15 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {
         createTable(createOlapSrc, true);
         switchHive();
         useDatabase(mockedDbName);
-        String olapCtasErr = "CREATE TABLE no_buck_olap AS SELECT * FROM internal.mockedDb.olap_src";
+        String olapCtasErr = "CREATE TABLE no_buck_olap ENGINE=olap AS SELECT * FROM internal.mockedDb.olap_src";
         LogicalPlan olapCtasErrPlan = nereidsParser.parseSingle(olapCtasErr);
        Assertions.assertTrue(olapCtasErrPlan instanceof CreateTableCommand);
        ExceptionChecker.expectThrowsWithMsg(org.apache.doris.nereids.exceptions.AnalysisException.class,
                "Cannot create olap table out of internal catalog."
-                        + "Make sure 'engine' type is specified when use the catalog: hive",
+                        + " Make sure 'engine' type is specified when using the catalog: hive",
                () -> ((CreateTableCommand) olapCtasErrPlan).run(connectContext, null));
 
-        String olapCtasOk = "CREATE TABLE internal.mockedDb.no_buck_olap"
+        String olapCtasOk = "CREATE TABLE internal.mockedDb.no_buck_olap ENGINE=olap"
                 + " PROPERTIES('replication_num' = '1')"
                 + " AS SELECT * FROM internal.mockedDb.olap_src";
        LogicalPlan olapCtasOkPlan = createTablesAndReturnPlans(true, olapCtasOk).get(0);
diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HmsCommitTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HmsCommitTest.java
index 4ec6ca84c52..fba91cb0b55 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HmsCommitTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/hive/HmsCommitTest.java
@@ -45,6 +45,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Optional;
 import java.util.UUID;
 
 public class HmsCommitTest {
@@ -107,23 +108,18 @@ public class HmsCommitTest {
         List<String> partitionKeys = new ArrayList<>();
         partitionKeys.add("c3");
         String fileFormat = "orc";
-        HashMap<String, String> params = new HashMap<String, String>() {{
-                put("location_uri", dbLocation + tbWithPartition);
-            }};
         HiveTableMetadata tableMetadata = new HiveTableMetadata(
-                dbName, tbWithPartition, columns, partitionKeys,
-                params, fileFormat);
+                dbName, tbWithPartition, Optional.of(dbLocation + tbWithPartition),
+                columns, partitionKeys,
+                new HashMap<>(), fileFormat, "");
         hmsClient.createTable(tableMetadata, true);
 
         // create table for tbWithoutPartition
-        HashMap<String, String> params2 = new HashMap<String, String>() {{
-                put("location_uri", dbLocation + tbWithPartition);
-            }};
         HiveTableMetadata tableMetadata2 = new HiveTableMetadata(
-                    dbName, tbWithoutPartition, columns, new ArrayList<>(),
-                    params2, fileFormat);
+                    dbName, tbWithoutPartition, Optional.of(dbLocation + tbWithPartition),
+                    columns, new ArrayList<>(),
+                    new HashMap<>(), fileFormat, "");
         hmsClient.createTable(tableMetadata2, true);
-
     }
 
     @After
diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/ShowExecutorTest.java b/fe/fe-core/src/test/java/org/apache/doris/qe/ShowExecutorTest.java
index 5b8b374ad5a..c68715c17b7 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/qe/ShowExecutorTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/qe/ShowExecutorTest.java
@@ -19,6 +19,7 @@ package org.apache.doris.qe;
 
 import org.apache.doris.analysis.AccessTestUtil;
 import org.apache.doris.analysis.Analyzer;
+import org.apache.doris.analysis.DbName;
 import org.apache.doris.analysis.DescribeStmt;
 import org.apache.doris.analysis.HelpStmt;
 import org.apache.doris.analysis.SetType;
@@ -458,7 +459,7 @@ public class ShowExecutorTest {
         ctx.setEnv(env);
         ctx.setQualifiedUser("testUser");
 
-        ShowCreateDbStmt stmt = new ShowCreateDbStmt("testDb");
+        ShowCreateDbStmt stmt = new ShowCreateDbStmt(new DbName(InternalCatalog.INTERNAL_CATALOG_NAME, "testDb"));
         ShowExecutor executor = new ShowExecutor(ctx, stmt);
         ShowResultSet resultSet = executor.execute();
 
@@ -473,7 +474,7 @@ public class ShowExecutorTest {
         ctx.setEnv(env);
         ctx.setQualifiedUser("testUser");
 
-        ShowCreateDbStmt stmt = new ShowCreateDbStmt("emptyDb");
+        ShowCreateDbStmt stmt = new ShowCreateDbStmt(new DbName(InternalCatalog.INTERNAL_CATALOG_NAME, "emptyDb"));
         ShowExecutor executor = new ShowExecutor(ctx, stmt);
         executor.execute();
 
diff --git a/regression-test/data/external_table_p0/hive/ddl/test_hive_ctas.out b/regression-test/data/external_table_p0/hive/ddl/test_hive_ctas.out
new file mode 100644
index 00000000000..ccf3441c5c3
--- /dev/null
+++ b/regression-test/data/external_table_p0/hive/ddl/test_hive_ctas.out
@@ -0,0 +1,92 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !ctas_01 --
+2
+3
+
+-- !ctas_02 --
+2
+3
+
+-- !ctas_03 --
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_04 --
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_05 --
+11     value_for_pt1   value_for_pt2
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_06 --
+11     value_for_pt1   value_for_pt2
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_ex01 --
+2
+3
+
+-- !ctas_ex02 --
+11     value_for_pt1   value_for_pt2
+22     value_for_pt11  \N
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_03 --
+\N     another string value for col2
+\N     string value for col2
+\N     yet another string value for col2
+
+-- !ctas_04 --
+\N     11      value_for_pt1
+\N     22      value_for_pt11
+
+-- !ctas_types_01 --
+true   127     32767   2147483647      9223372036854775807     default 22.12345        3.141592653     99999.9999      default default 2023-05-29      2023-05-29T23:19:34
+
+-- !ctas_types_02 --
+true   127     32767   2147483647      default 22.12345        3.141592653     99999.9999      default
+
+-- !ctas_01 --
+2
+3
+
+-- !ctas_02 --
+2
+3
+
+-- !ctas_03 --
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_04 --
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_05 --
+11     value_for_pt1   value_for_pt2
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_06 --
+11     value_for_pt1   value_for_pt2
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_ex01 --
+2
+3
+
+-- !ctas_ex02 --
+11     value_for_pt1   value_for_pt2
+22     value_for_pt11  \N
+22     value_for_pt11  value_for_pt22
+
+-- !ctas_03 --
+\N     another string value for col2
+\N     string value for col2
+\N     yet another string value for col2
+
+-- !ctas_04 --
+\N     11      value_for_pt1
+\N     22      value_for_pt11
+
+-- !ctas_types_01 --
+true   127     32767   2147483647      9223372036854775807     default 22.12345        3.141592653     99999.9999      default default 2023-05-29      2023-05-29T23:19:34
+
+-- !ctas_types_02 --
+true   127     32767   2147483647      default 22.12345        3.141592653     99999.9999      default
diff --git a/regression-test/data/external_table_p0/hive/ddl/test_hive_ddl.out b/regression-test/data/external_table_p0/hive/ddl/test_hive_ddl.out
new file mode 100644
index 00000000000..eab813cf5f3
--- /dev/null
+++ b/regression-test/data/external_table_p0/hive/ddl/test_hive_ddl.out
@@ -0,0 +1,60 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !insert01 --
+true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
+
+-- !insert02 --
+\N     123     \N      \N      8.98
+true   123     9876543210      \N      stringval
+true   123     9876543210      123.4567        stringval
+
+-- !insert03 --
+\N     123     \N      \N      \N      \N      \N      varcharval      8.98
+false  1       9876543210      abcdefghij      2.3     6.28    0.0000  2223    stringval
+true   \N      9876543210      abcdefghij      2.3     6.28    \N      varcharval      stringval
+true   123     9876543210      \N      \N      \N      \N      varcharval      stringval
+true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
+
+-- !insert04 --
+true   1       1000    2.3     value_for_pt1   value_for_pt2
+
+-- !insert05 --
+true   1       1000    2.3
+true   1       1000    2.3
+true   1       1000    2.3
+
+-- !insert06 --
+\N     1       1000    1.3
+false  1       1000    \N
+true   1       1000    2.3
+true   1       1000    2.3
+true   1       1000    2.3
+
+-- !insert01 --
+true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
+
+-- !insert02 --
+\N     123     \N      \N      8.98
+true   123     9876543210      \N      stringval
+true   123     9876543210      123.4567        stringval
+
+-- !insert03 --
+\N     123     \N      \N      \N      \N      \N      varcharval      8.98
+false  1       9876543210      abcdefghij      2.3     6.28    0.0000  2223    stringval
+true   \N      9876543210      abcdefghij      2.3     6.28    \N      varcharval      stringval
+true   123     9876543210      \N      \N      \N      \N      varcharval      stringval
+true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
+
+-- !insert04 --
+true   1       1000    2.3     value_for_pt1   value_for_pt2
+
+-- !insert05 --
+true   1       1000    2.3
+true   1       1000    2.3
+true   1       1000    2.3
+
+-- !insert06 --
+\N     1       1000    1.3
+false  1       1000    \N
+true   1       1000    2.3
+true   1       1000    2.3
+true   1       1000    2.3
diff --git a/regression-test/data/external_table_p0/hive/ddl/test_hive_ddl_and_ctas.out b/regression-test/data/external_table_p0/hive/ddl/test_hive_ddl_and_ctas.out
deleted file mode 100644
index f30081e70b6..00000000000
--- a/regression-test/data/external_table_p0/hive/ddl/test_hive_ddl_and_ctas.out
+++ /dev/null
@@ -1,148 +0,0 @@
--- This file is automatically generated. You should know what you did if you want to edit this
--- !insert01 --
-true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
-
--- !insert02 --
-\N     123     \N      \N      8.98
-true   123     9876543210      \N      stringval
-true   123     9876543210      123.4567        stringval
-
--- !insert03 --
-\N     123     \N      \N      \N      \N      \N      varcharval      8.98
-false  1       9876543210      abcdefghij      2.3     6.28    0.0000  2223    stringval
-true   \N      9876543210      abcdefghij      2.3     6.28    \N      varcharval      stringval
-true   123     9876543210      \N      \N      \N      \N      varcharval      stringval
-true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
-
--- !insert04 --
-true   1       1000    2.3     value_for_pt1   value_for_pt2
-
--- !insert05 --
-true   1       1000    2.3
-true   1       1000    2.3
-true   1       1000    2.3
-
--- !insert06 --
-\N     1       1000    1.3
-false  1       1000    \N
-true   1       1000    2.3
-true   1       1000    2.3
-true   1       1000    2.3
-
--- !ctas_01 --
-1
-2
-2
-3
-3
-
--- !ctas_02 --
-11     value_for_pt1   value_for_pt2
-22     value_for_pt11  value_for_pt22
-22     value_for_pt11  value_for_pt22
-
--- !ctas_03 --
-1      string value for col2
-1      string value for col2
-2      another string value for col2
-2      another string value for col2
-3      yet another string value for col2
-3      yet another string value for col2
-
--- !ctas_04 --
-11     value_for_pt1   value_for_pt2
-11     value_for_pt1   value_for_pt2
-22     value_for_pt11  value_for_pt22
-22     value_for_pt11  value_for_pt22
-
--- !ctas_05 --
-1      string value for col2
-2      another string value for col2
-3      yet another string value for col2
-
--- !ctas_06 --
-11     value_for_pt1   value_for_pt2
-22     value_for_pt11  value_for_pt22
-
--- !complex_type01 --
-a      \N      \N      \N      \N      \N      \N      \N      \N      \N      ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   \N
-a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
-a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
-
--- !complex_type02 --
-a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
-a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
-
--- !insert01 --
-true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
-
--- !insert02 --
-\N     123     \N      \N      8.98
-true   123     9876543210      \N      stringval
-true   123     9876543210      123.4567        stringval
-
--- !insert03 --
-\N     123     \N      \N      \N      \N      \N      varcharval      8.98
-false  1       9876543210      abcdefghij      2.3     6.28    0.0000  2223    stringval
-true   \N      9876543210      abcdefghij      2.3     6.28    \N      varcharval      stringval
-true   123     9876543210      \N      \N      \N      \N      varcharval      stringval
-true   123     9876543210      abcdefghij      3.14    6.28    123.4567        varcharval      stringval
-
--- !insert04 --
-true   1       1000    2.3     value_for_pt1   value_for_pt2
-
--- !insert05 --
-true   1       1000    2.3
-true   1       1000    2.3
-true   1       1000    2.3
-
--- !insert06 --
-\N     1       1000    1.3
-false  1       1000    \N
-true   1       1000    2.3
-true   1       1000    2.3
-true   1       1000    2.3
-
--- !ctas_01 --
-1
-2
-2
-3
-3
-
--- !ctas_02 --
-11     value_for_pt1   value_for_pt2
-22     value_for_pt11  value_for_pt22
-22     value_for_pt11  value_for_pt22
-
--- !ctas_03 --
-1      string value for col2
-1      string value for col2
-2      another string value for col2
-2      another string value for col2
-3      yet another string value for col2
-3      yet another string value for col2
-
--- !ctas_04 --
-11     value_for_pt1   value_for_pt2
-11     value_for_pt1   value_for_pt2
-22     value_for_pt11  value_for_pt22
-22     value_for_pt11  value_for_pt22
-
--- !ctas_05 --
-1      string value for col2
-2      another string value for col2
-3      yet another string value for col2
-
--- !ctas_06 --
-11     value_for_pt1   value_for_pt2
-22     value_for_pt11  value_for_pt22
-
--- !complex_type01 --
-a      \N      \N      \N      \N      \N      \N      \N      \N      \N      ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   \N
-a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
-a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
-
--- !complex_type02 --
-a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
-a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
diff --git a/regression-test/data/external_table_p0/hive/ddl/test_hive_write_type.out b/regression-test/data/external_table_p0/hive/ddl/test_hive_write_type.out
new file mode 100644
index 00000000000..a95bbd0b8d4
--- /dev/null
+++ b/regression-test/data/external_table_p0/hive/ddl/test_hive_write_type.out
@@ -0,0 +1,18 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !complex_type01 --
+a      \N      \N      \N      \N      \N      \N      \N      \N      \N      ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   \N
+a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
+a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
+
+-- !complex_type02 --
+a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
+a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
+
+-- !complex_type01 --
+a      \N      \N      \N      \N      \N      \N      \N      \N      \N      ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   \N
+a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
+a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
+
+-- !complex_type02 --
+a      b       c       d       e       1.1     12345   0.12345678      string  \N      \N      \N      \N      \N      \N      \N      \N      \N      \N      \N
+a      b       c       d       e       1.1     12345   0.12345678      string  [0.001, 0.002]  ["char1", "char2"]      ["c", "d"]      ["string1", "string2"]  [{1:"a"}, {2:"b"}]      {1234567890123456789:"a"}       {1234567890123456789:0.12345678}        {"key":["char1", "char2"]}      {"id": 1, "gender": 1, "name": "John Doe"}      {"scale": 123.4567, "metric": ["metric1", "metric2"]}   {"codes": [123, 456], "props": {"key1":["char1", "char2"]}}
diff --git a/regression-test/suites/external_table_p0/hive/ddl/test_hive_ctas.groovy b/regression-test/suites/external_table_p0/hive/ddl/test_hive_ctas.groovy
new file mode 100644
index 00000000000..1c69004867f
--- /dev/null
+++ b/regression-test/suites/external_table_p0/hive/ddl/test_hive_ctas.groovy
@@ -0,0 +1,515 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hive_ctas", 
"p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def file_formats = ["parquet", "orc"]
+        def generateSrcDDLForCTAS = { String file_format, String catalog_name ->
+            sql """ switch `${catalog_name}` """
+            sql """ create database if not exists `test_ctas` """;
+            sql """ switch internal """
+            sql """ create database if not exists test_ctas_olap """;
+            sql """ use internal.test_ctas_olap """
+            sql """
+                CREATE TABLE `unpart_ctas_olap_src` (
+                    `col1` INT COMMENT 'col1',
+                    `col2` STRING COMMENT 'col2'
+                )
+                ENGINE=olap
+                DISTRIBUTED BY HASH(col1) BUCKETS 16
+                PROPERTIES (
+                    'replication_num' = '1'
+                );
+            """
+
+            sql """ INSERT INTO `unpart_ctas_olap_src` (col1, col2) VALUES
+                (1, 'string value for col2'),
+                (2, 'another string value for col2'),
+                (3, 'yet another string value for col2'); 
+            """
+
+            sql """
+                CREATE TABLE `part_ctas_olap_src`(
+                    `col1` INT COMMENT 'col1',
+                    `pt1` VARCHAR(16) COMMENT 'pt1',
+                    `pt2` VARCHAR(16) COMMENT 'pt2'
+                )
+                ENGINE=olap
+                PARTITION BY LIST (pt1, pt2) (
+                    PARTITION pp1 VALUES IN(
+                        ('value_for_pt1', 'value_for_pt2'),
+                        ('value_for_pt11', 'value_for_pt22')
+                    )
+                )
+                DISTRIBUTED BY HASH(col1) BUCKETS 16
+                PROPERTIES (
+                    'replication_num' = '1'
+                );
+            """
+
+            sql """
+            INSERT INTO `part_ctas_olap_src` (col1, pt1, pt2) VALUES
+             (11, 'value_for_pt1', 'value_for_pt2'),
+             (22, 'value_for_pt11', 'value_for_pt22');
+            """
+
+            sql """ use `${catalog_name}`.`test_ctas` """
+            sql """
+                CREATE TABLE `unpart_ctas_src`(
+                  `col1` INT COMMENT 'col1',
+                  `col2` STRING COMMENT 'col2'
+                ) ENGINE=hive
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                );
+            """
+
+            sql """ INSERT INTO `unpart_ctas_src` (col1, col2) VALUES
+                (1, 'string value for col2'),
+                (2, 'another string value for col2'),
+                (3, 'yet another string value for col2'); 
+            """
+
+            sql """
+                CREATE TABLE `part_ctas_src`(
+                  `col1` INT COMMENT 'col1',
+                  `pt1` VARCHAR COMMENT 'pt1',
+                  `pt2` VARCHAR COMMENT 'pt2'
+                ) ENGINE=hive
+                PARTITION BY LIST (pt1, pt2) (
+                    
+                )
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                );
+            """
+
+            sql """
+            INSERT INTO `part_ctas_src` (col1, pt1, pt2) VALUES
+             (11, 'value_for_pt1', 'value_for_pt2'),
+             (22, 'value_for_pt11', 'value_for_pt22');
+            """
+        }
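
A note on the `PARTITION BY LIST (pt1, pt2) ()` form used above: for Hive tables the
empty list only declares which columns are partition columns; concrete partitions are
materialized when rows are written. A minimal sketch, with illustrative names that are
not part of this patch:

    CREATE TABLE demo_part_tbl (
        `id`  INT COMMENT 'id',
        `pt1` VARCHAR COMMENT 'pt1'
    ) ENGINE=hive
    PARTITION BY LIST (pt1) ()                    -- declares partition columns only
    PROPERTIES ('file_format'='parquet');

    INSERT INTO demo_part_tbl VALUES (1, 'v1');   -- creates partition pt1='v1' on write
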
+
+        def destroySrcDDLForCTAS = { String catalog_name ->
+            sql """ switch `${catalog_name}` """
+            sql """ DROP TABLE IF EXISTS `test_ctas`.part_ctas_src """
+            sql """ DROP TABLE IF EXISTS `test_ctas`.unpart_ctas_src """
+            sql """ drop database if exists `test_ctas` """;
+            sql """ DROP TABLE IF EXISTS 
internal.test_ctas_olap.part_ctas_olap_src """
+            sql """ DROP TABLE IF EXISTS 
internal.test_ctas_olap.unpart_ctas_olap_src """
+            sql """ switch internal """;
+            sql """ drop database if exists test_ctas_olap """;
+        }
+
+        def test_ctas_tbl = { String file_format, String catalog_name ->
+            generateSrcDDLForCTAS(file_format, catalog_name)
+            try {
+                sql """ switch `${catalog_name}` """
+                sql """ use test_ctas """
+                // 1. external to external un-partitioned table
+                sql """ CREATE TABLE hive_ctas1 ENGINE=hive AS SELECT col1 
FROM unpart_ctas_src; 
+                """
+
+                sql """ INSERT INTO hive_ctas1 SELECT col1 FROM 
unpart_ctas_src WHERE col1 > 1;
+                """
+                sql """ INSERT OVERWRITE TABLE hive_ctas1 SELECT col1 FROM 
unpart_ctas_src WHERE col1 > 1;
+                """
+
+                order_qt_ctas_01 """ SELECT * FROM hive_ctas1 """
+                sql """ DROP TABLE hive_ctas1 """
+
+                // 2. external to external un-partitioned table with columns
+                sql """ CREATE TABLE hive_ctas2 (col1) ENGINE=hive AS SELECT 
col1 FROM unpart_ctas_src; 
+                """
+
+                sql """ INSERT INTO hive_ctas2 SELECT col1 FROM 
unpart_ctas_src WHERE col1 > 1;
+                """
+                sql """ INSERT OVERWRITE TABLE hive_ctas2 SELECT col1 FROM 
unpart_ctas_src WHERE col1 > 1;
+                """
+                order_qt_ctas_02 """ SELECT * FROM hive_ctas2  """
+                sql """ DROP TABLE hive_ctas2 """
+
+                // 3. external to external partitioned table
+                sql """ CREATE TABLE hive_ctas3 ENGINE=hive AS SELECT 
col1,pt1,pt2 FROM part_ctas_src WHERE col1>0;
+                """
+
+                sql """ INSERT INTO hive_ctas3 SELECT col1,pt1,pt2 FROM 
part_ctas_src WHERE col1>=22;
+                """
+                sql """ INSERT OVERWRITE TABLE hive_ctas3 SELECT col1,pt1,pt2 
FROM part_ctas_src WHERE col1>=22;
+                """
+                order_qt_ctas_03 """ SELECT * FROM hive_ctas3  """
+                sql """ DROP TABLE hive_ctas3 """
+
+                sql """ CREATE TABLE hive_ctas4 AS SELECT * FROM part_ctas_src 
WHERE col1>0;
+                """
+
+                sql """ INSERT INTO hive_ctas4 SELECT * FROM part_ctas_src 
WHERE col1>=22;
+                """
+                sql """ INSERT OVERWRITE TABLE hive_ctas4 SELECT * FROM 
part_ctas_src WHERE col1>=22;
+                """
+                order_qt_ctas_04 """ SELECT * FROM 
${catalog_name}.test_ctas.hive_ctas4  """
+                sql """ DROP TABLE hive_ctas4 """
+
+                // 4. external to external partitioned table with partitions and cols
+                sql """ CREATE TABLE hive_ctas5 ENGINE=hive PARTITION BY LIST (pt1, pt2) ()
+                AS SELECT col1,pt1,pt2 FROM part_ctas_src WHERE col1>0;
+                """
+
+                sql """ INSERT INTO hive_ctas5 SELECT col1,pt1,pt2 FROM part_ctas_src WHERE col1>=22;
+                """
+                sql """ INSERT OVERWRITE TABLE hive_ctas5 SELECT col1,pt1,pt2 FROM part_ctas_src WHERE col1>=22;
+                """
+                order_qt_ctas_05 """ SELECT * FROM hive_ctas5  """
+                sql """ DROP TABLE hive_ctas5 """
+
+                sql """ CREATE TABLE hive_ctas6 PARTITION BY LIST (pt1, pt2) ()
+                AS SELECT * FROM part_ctas_src WHERE col1>0;
+                """
+
+                sql """ INSERT INTO hive_ctas6 SELECT * FROM part_ctas_src 
WHERE col1>=22;
+                """
+                sql """ INSERT OVERWRITE TABLE hive_ctas6 SELECT * FROM 
part_ctas_src WHERE col1>=22;
+                """
+                order_qt_ctas_06 """ SELECT * FROM 
${catalog_name}.test_ctas.hive_ctas6  """
+                sql """ DROP TABLE hive_ctas6 """
+
+            } finally {
+                destroySrcDDLForCTAS(catalog_name)
+            }
+        }
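
For orientation, the cases above boil down to three CTAS shapes against a Hive
catalog; a rough sketch, with `demo_src` standing in for the source tables used above:

    CREATE TABLE t1 ENGINE=hive AS SELECT col1 FROM demo_src;         -- schema derived from the query
    CREATE TABLE t2 (c1) ENGINE=hive AS SELECT col1 FROM demo_src;    -- target column names supplied
    CREATE TABLE t3 AS SELECT * FROM demo_src;                        -- ENGINE omitted; defaults to hive in a hive catalog
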
+
+        def test_ctas_extend = { String file_format, String catalog_name ->
+            generateSrcDDLForCTAS(file_format, catalog_name)
+            sql """ switch ${catalog_name} """
+
+            try {
+                sql """ DROP DATABASE IF EXISTS ${catalog_name}.test_ctas_ex 
""";
+                sql """ DROP DATABASE IF EXISTS `test_ctas_ex` """;
+                sql """ CREATE DATABASE IF NOT EXISTS 
${catalog_name}.test_ctas_ex
+                    PROPERTIES (
+                        "location" = "/user/hive/warehouse/test_ctas_ex"
+                    )
+                    """;
+                sql """ CREATE DATABASE IF NOT EXISTS `test_ctas_ex`
+                    PROPERTIES (
+                        "location" = "/user/hive/warehouse/test_ctas_ex"
+                    )
+                    """;
+                sql """ use `${catalog_name}`.`test_ctas_ex` """
+
+                // 1. external to external un-partitioned table
+                sql """ DROP TABLE IF EXISTS 
${catalog_name}.test_ctas_ex.hive_ctas1 """
+                sql """ CREATE TABLE ${catalog_name}.test_ctas_ex.hive_ctas1 
(col1) ENGINE=hive 
+                        PROPERTIES (
+                            "location" = 
"/user/hive/warehouse/test_ctas_ex/loc_hive_ctas1",
+                            "file_format"="orc",
+                            "orc.compress"="zlib"
+                        ) AS SELECT col1 FROM test_ctas.unpart_ctas_src; 
+                    """
+                sql """ INSERT INTO ${catalog_name}.test_ctas_ex.hive_ctas1
+                        SELECT col1 FROM test_ctas.unpart_ctas_src WHERE col1 
> 1;
+                    """
+                sql """ INSERT OVERWRITE TABLE 
${catalog_name}.test_ctas_ex.hive_ctas1
+                        SELECT col1 FROM test_ctas.unpart_ctas_src WHERE col1 
> 1;
+                    """
+                order_qt_ctas_ex01 """ SELECT * FROM hive_ctas1 """
+                sql """ DROP TABLE hive_ctas1 """
+
+                // 2. external to external partitioned table
+                sql """ DROP TABLE IF EXISTS 
${catalog_name}.test_ctas_ex.hive_ctas2 """
+                sql """ CREATE TABLE ${catalog_name}.test_ctas_ex.hive_ctas2 
(col1,pt1,pt2) ENGINE=hive 
+                        PARTITION BY LIST (pt1, pt2) ()
+                        PROPERTIES (
+                            "location" = 
"/user/hive/warehouse/test_ctas_ex/loc_hive_ctas2",
+                            "file_format"="parquet",
+                            "parquet.compression"="snappy"
+                        )
+                        AS SELECT col1,pt1,pt2 FROM test_ctas.part_ctas_src 
WHERE col1>0; 
+                    """
+                sql """ INSERT INTO ${catalog_name}.test_ctas_ex.hive_ctas2 
(col1,pt1,pt2)
+                        SELECT col1,pt1,pt2 FROM test_ctas.part_ctas_src WHERE 
col1>=22;
+                    """
+                sql """ INSERT OVERWRITE TABLE hive_ctas2 (col1,pt1,pt2)
+                        SELECT col1,pt1,pt2 FROM test_ctas.part_ctas_src WHERE 
col1>=22;
+                    """
+                sql """ INSERT INTO ${catalog_name}.test_ctas_ex.hive_ctas2 
(pt1,col1)
+                        SELECT pt1,col1 FROM test_ctas.part_ctas_src WHERE 
col1>=22;
+                    """
+                order_qt_ctas_ex02 """ SELECT * FROM hive_ctas2  """
+                sql """ DROP TABLE hive_ctas2 """
+
+                // 3. internal to external un-partitioned table
+                sql """ DROP TABLE IF EXISTS 
${catalog_name}.test_ctas_ex.ctas_o1 """
+                sql """ CREATE TABLE ${catalog_name}.test_ctas_ex.ctas_o1 
(col1,col2) ENGINE=hive
+                        PROPERTIES (
+                            "location" = 
"/user/hive/warehouse/test_ctas_ex/loc_ctas_o1",
+                            "file_format"="parquet",
+                            "parquet.compression"="snappy"
+                        )
+                        AS SELECT col1,col2 FROM 
internal.test_ctas_olap.unpart_ctas_olap_src;
+                """
+                sql """ INSERT INTO ${catalog_name}.test_ctas_ex.ctas_o1 
(col2,col1)
+                        SELECT col2,col1 FROM 
internal.test_ctas_olap.unpart_ctas_olap_src;
+                """
+                sql """ INSERT OVERWRITE TABLE 
${catalog_name}.test_ctas_ex.ctas_o1 (col2)
+                        SELECT col2 FROM 
internal.test_ctas_olap.unpart_ctas_olap_src;
+                """
+                order_qt_ctas_03 """ SELECT * FROM ctas_o1  """
+                sql """ DROP TABLE ctas_o1 """
+
+                // 4. internal to external partitioned table
+                sql """ DROP TABLE IF EXISTS 
${catalog_name}.test_ctas_ex.ctas_o2 """
+                sql """ CREATE TABLE ${catalog_name}.test_ctas_ex.ctas_o2 
(col1,col2,pt1) ENGINE=hive
+                        PARTITION BY LIST (pt1) ()
+                        PROPERTIES (
+                            "location" = 
"/user/hive/warehouse/test_ctas_ex/loc_ctas_o2",
+                            "file_format"="orc",
+                            "orc.compress"="zlib"
+                        ) 
+                        AS SELECT null as col1, pt2 as col2, pt1 FROM 
internal.test_ctas_olap.part_ctas_olap_src WHERE col1>0;
+                    """
+                sql """ INSERT INTO ${catalog_name}.test_ctas_ex.ctas_o2 
(col1,pt1,col2)
+                        SELECT col1,pt1,pt2 FROM 
internal.test_ctas_olap.part_ctas_olap_src;
+                """
+                sql """ INSERT INTO ${catalog_name}.test_ctas_ex.ctas_o2 
(col2,pt1,col1)
+                        SELECT pt2,pt1,col1 FROM 
internal.test_ctas_olap.part_ctas_olap_src;
+                """
+                sql """ INSERT OVERWRITE TABLE 
${catalog_name}.test_ctas_ex.ctas_o2 (pt1,col2)
+                        SELECT pt1,col1 FROM 
internal.test_ctas_olap.part_ctas_olap_src;
+                """
+                order_qt_ctas_04 """ SELECT * FROM ctas_o2  """
+                sql """ DROP TABLE ctas_o2 """
+                sql """ DROP DATABASE IF EXISTS test_ctas_ex """
+            } finally {
+                destroySrcDDLForCTAS(catalog_name)
+            }
+        }
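
The extended cases above exercise the optional `location` property this patch adds:
when present, the database or table is created under the given path rather than the
metastore default. A minimal sketch (paths illustrative):

    CREATE DATABASE demo_db PROPERTIES (
        "location" = "/user/hive/warehouse/demo_db"
    );
    CREATE TABLE demo_db.demo_tbl (`col1` INT COMMENT 'col1') ENGINE=hive
    PROPERTIES (
        "location"     = "/user/hive/warehouse/demo_db/demo_tbl",
        "file_format"  = "orc",
        "orc.compress" = "zlib"
    );
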
+
+        def test_ctas_exception = { String file_format, String catalog_name ->
+            sql """ switch ${catalog_name} """
+
+            sql """ create database if not exists `test_hive_ex_ctas` """;
+            test {
+                sql """ create database `test_hive_ex_ctas` """
+                exception "errCode = 2, detailMessage = Can't create database 
'test_hive_ex_ctas'; database exists"
+            }
+            sql """use `${catalog_name}`.`test_hive_ex_ctas`"""
+            sql """ DROP DATABASE IF EXISTS ${catalog_name}.test_hive_ex_ctas 
"""
+            // check ctas error
+            generateSrcDDLForCTAS(file_format, catalog_name)
+            try {
+                test {
+                    sql """ DROP DATABASE ${catalog_name}.test_no_exist """
+                    exception "errCode = 2, detailMessage = Can't drop 
database 'test_no_exist'; database doesn't exist"
+                }
+                sql """ DROP DATABASE IF EXISTS ${catalog_name}.test_err """
+                sql """ CREATE DATABASE ${catalog_name}.test_err """
+                test {
+                    sql """ CREATE DATABASE ${catalog_name}.test_err
+                    PROPERTIES (
+                        "location" = "/user/hive/warehouse/test_err",
+                        "owner" = "err"
+                    )
+                    """;
+                    exception "errCode = 2, detailMessage = Can't create 
database 'test_err'; database exists"
+                }
+                sql """ DROP DATABASE IF EXISTS ${catalog_name}.test_err """
+
+                sql """ CREATE DATABASE IF NOT EXISTS `test_no_err`""";
+                sql """ use `${catalog_name}`.`test_no_err` """
+
+                // 1. external to external un-partitioned table
+                test {
+                    sql """ DROP TABLE ${catalog_name}.test_no_err.hive_ctas1 
"""
+                    exception "errCode = 2, detailMessage = Unknown table 
'hive_ctas1' in test_no_err"
+                }
+                test {
+                    sql """ CREATE TABLE 
${catalog_name}.test_no_err.hive_ctas1 (col1) ENGINE=hive 
+                            PROPERTIES (
+                                "file_format"="orc",
+                                "orc.compress"="zstd"
+                            ) AS SELECT col1,col2 FROM 
test_ctas.unpart_ctas_src; 
+                        """
+                    exception "errCode = 2, detailMessage = ctas column size 
is not equal to the query's"
+                }
+
+                test {
+                    sql """ CREATE TABLE ${catalog_name}.test_no_err.ctas_o2 
(col1,pt1,pt2) ENGINE=hive 
+                        PARTITION BY LIST (pt1,pt2,pt3) ()
+                        PROPERTIES (
+                            "file_format"="parquet",
+                            "orc.compress"="zstd"
+                        )
+                        AS SELECT * FROM test_ctas.part_ctas_src WHERE col1>0; 
+                    """
+                    exception "errCode = 2, detailMessage = partition key pt3 
is not exists"
+                }
+
+                sql """ DROP TABLE IF EXISTS 
${catalog_name}.test_no_err.ctas_o2 """
+                sql """ CREATE TABLE ${catalog_name}.test_no_err.ctas_o2 
(col1,col2,pt1) ENGINE=hive 
+                        PARTITION BY LIST (pt1) ()
+                        PROPERTIES (
+                            "file_format"="parquet",
+                            "parquet.compression"="zstd"
+                        )
+                        AS SELECT col1,pt1 as col2,pt2 as pt1 FROM 
test_ctas.part_ctas_src WHERE col1>0; 
+                    """
+
+                test {
+                    sql """ INSERT INTO ${catalog_name}.test_no_err.ctas_o2 
(col1,col2)
+                            SELECT col1 FROM test_ctas.part_ctas_src;
+                        """
+                    exception "errCode = 2, detailMessage = insert into cols 
should be corresponding to the query output"
+                }
+
+                test {
+                    sql """ INSERT INTO ${catalog_name}.test_no_err.ctas_o2 
(col1)
+                            SELECT col1,pt1 as col2 FROM 
test_ctas.part_ctas_src WHERE col1>0;
+                        """
+                    exception "errCode = 2, detailMessage = insert into cols 
should be corresponding to the query output"
+                }
+                sql """ DROP TABLE IF EXISTS 
${catalog_name}.test_no_err.ctas_o2  """
+                sql """ DROP DATABASE IF EXISTS test_no_err """
+
+            } finally {
+                destroySrcDDLForCTAS(catalog_name)
+            }
+        }
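
The error paths above reduce to column-count mismatches between the target list and
the query output; for example (sketch, `demo_src` again standing in for the
two-column source):

    -- rejected with "ctas column size is not equal to the query's"
    CREATE TABLE demo_fail (col1) ENGINE=hive AS SELECT col1, col2 FROM demo_src;
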
+
+        def test_ctas_all_types = { String file_format, String catalog_name ->
+            sql """ switch `${catalog_name}` """
+            sql """ CREATE DATABASE IF NOT EXISTS `test_ctas_all_type` """;
+            sql """ use test_ctas_all_type """;
+
+            sql """
+                CREATE TABLE IF NOT EXISTS all_types_ctas_${file_format}(
+                  `col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',
+                  `col2` TINYINT DEFAULT '127' COMMENT 'col2',
+                  `col3` SMALLINT DEFAULT '32767' COMMENT 'col3',
+                  `col4` INT DEFAULT '2147483647' COMMENT 'col4',
+                  `col5` BIGINT DEFAULT '9223372036854775807' COMMENT 'col5',
+                  `col6` CHAR(10) DEFAULT 'default' COMMENT 'col6',
+                  `col7` FLOAT DEFAULT '1' COMMENT 'col7',
+                  `col8` DOUBLE DEFAULT '3.141592653' COMMENT 'col8',
+                  `col9` DECIMAL(9,4) DEFAULT '99999.9999' COMMENT 'col9',
+                  `col10` VARCHAR(11) DEFAULT 'default' COMMENT 'col10',
+                  `col11` STRING DEFAULT 'default' COMMENT 'col11',
+                  `col12` DATE DEFAULT '2023-05-29' COMMENT 'col12',
+                  `col13` DATETIME DEFAULT current_timestamp COMMENT 'col13'
+                )  ENGINE=hive
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+
+            sql """
+                    INSERT INTO all_types_ctas_${file_format} (
+                        col1,
+                        col2,
+                        col3,
+                        col4,
+                        col5,
+                        col6,
+                        col7,
+                        col8,
+                        col9,
+                        col10,
+                        col11,
+                        col12,
+                        col13
+                    ) VALUES (
+                        true,        -- col1 (BOOLEAN)
+                        127,         -- col2 (TINYINT)
+                        32767,       -- col3 (SMALLINT)
+                        2147483647,  -- col4 (INT)
+                        9223372036854775807, -- col5 (BIGINT)
+                        'default',   -- col6 (CHAR)
+                        22.12345,         -- col7 (FLOAT)
+                        3.141592653, -- col8 (DOUBLE)
+                        99999.9999,  -- col9 (DECIMAL)
+                        'default',   -- col10 (VARCHAR)
+                        'default',   -- col11 (STRING)
+                        '2023-05-29',-- col12 (DATE)
+                        '2023-05-29 23:19:34' -- col13 (DATETIME)
+                    );
+                """
+
+            sql """
+                    CREATE TABLE IF NOT EXISTS all_types_ctas1 AS SELECT * FROM all_types_ctas_${file_format}
+                """
+            sql """
+                    INSERT INTO all_types_ctas1 SELECT * FROM all_types_ctas_${file_format}
+                """
+            sql """
+                    INSERT OVERWRITE TABLE all_types_ctas1 SELECT * FROM all_types_ctas_${file_format}
+                """
+            order_qt_ctas_types_01 """ SELECT * FROM all_types_ctas1 """
+            sql """
+                    DROP TABLE all_types_ctas1
+                """
+
+            sql """
+                    CREATE TABLE IF NOT EXISTS all_types_ctas2 (col1, col2, col3, col4, col6, col7, col8, col9, col11)
+                    AS SELECT col1, col2, col3, col4, col6, col7, col8, col9, col11 FROM all_types_ctas_${file_format}
+                """
+            sql """
+                    INSERT INTO all_types_ctas2 (col1, col3, col7, col9)
+                    SELECT col1, col3, col7, col9 FROM all_types_ctas_${file_format}
+                """
+            sql """
+                    INSERT OVERWRITE TABLE all_types_ctas2 (col1, col2, col3, col4, col6, col7, col8, col9, col11)
+                    SELECT col1, col2, col3, col4, col6, col7, col8, col9, col11 FROM all_types_ctas_${file_format}
+                """
+            order_qt_ctas_types_02 """ SELECT * FROM all_types_ctas2 """
+            sql """
+                DROP TABLE all_types_ctas2
+                """
+            sql """
+                DROP TABLE all_types_ctas_${file_format}
+                """
+            sql """ drop database if exists `test_ctas_all_type` """;
+        }
+
+        try {
+            String hms_port = context.config.otherConfigs.get("hms_port")
+            String hdfs_port = context.config.otherConfigs.get("hdfs_port")
+            String catalog_name = "test_hive_ctas"
+            String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+            sql """drop catalog if exists ${catalog_name}"""
+            sql """create catalog if not exists ${catalog_name} properties (
+                'type'='hms',
+                'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}',
+                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+            );"""
+            sql """switch ${catalog_name}"""
+
+            sql """set enable_fallback_to_original_planner=false;"""
+            for (String file_format in file_formats) {
+                logger.info("Process file format" + file_format)
+                test_ctas_tbl(file_format, catalog_name)
+                test_ctas_extend(file_format, catalog_name)
+                test_ctas_exception(file_format, catalog_name)
+                test_ctas_all_types(file_format, catalog_name)
+            }
+            sql """drop catalog if exists ${catalog_name}"""
+        } finally {
+        }
+    }
+}
diff --git a/regression-test/suites/external_table_p0/hive/ddl/test_hive_ddl.groovy b/regression-test/suites/external_table_p0/hive/ddl/test_hive_ddl.groovy
new file mode 100644
index 00000000000..b0543931371
--- /dev/null
+++ b/regression-test/suites/external_table_p0/hive/ddl/test_hive_ddl.groovy
@@ -0,0 +1,676 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hive_ddl", 
"p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def file_formats = ["parquet", "orc"]
+
+        def test_db = { String catalog_name ->
+            logger.info("Test create/drop database...")
+            sql """switch ${catalog_name}"""
+            sql """ drop database if exists `test_hive_db` """;
+            sql """ create database if not exists 
${catalog_name}.`test_hive_db` """;
+            def create_db_res = sql """ show create database test_hive_db """
+            logger.info("${create_db_res}")
+            assertTrue(create_db_res.toString().containsIgnoreCase("/user/hive/warehouse/test_hive_db.db"))
+            sql """ use `test_hive_db` """
+            sql """
+                    CREATE TABLE test_hive_db_has_tbl (
+                      `col` STRING COMMENT 'col'
+                    )  ENGINE=hive
+                 """
+            test {
+                sql """ drop database `test_hive_db` """;
+                exception "java.sql.SQLException: Unexpected exception: failed 
to drop database from hms client. reason: 
org.apache.hadoop.hive.metastore.api.InvalidOperationException: Database 
test_hive_db is not empty. One or more tables exist."
+            }
+
+            sql """ DROP TABLE `test_hive_db_has_tbl` """
+            sql """ drop database `test_hive_db` """;
+            sql """ drop database if exists `test_hive_db` """;
+        }
+
+        def test_loc_db = { String externalEnvIp, String hdfs_port, String catalog_name ->
+            logger.info("Test create/drop database with location...")
+            sql """switch ${catalog_name}"""
+            def loc = "${externalEnvIp}:${hdfs_port}/tmp/hive/test_hive_loc_db"
+            sql """ create database if not exists `test_hive_loc_db`
+                    properties('location'='hdfs://${loc}')
+                """
+
+            def create_db_res = sql """show create database test_hive_loc_db"""
+            logger.info("${create_db_res}")
+            assertTrue(create_db_res.toString().containsIgnoreCase("${loc}"))
+
+            sql """use `test_hive_loc_db`"""
+            sql """ drop database if exists `test_hive_loc_db` """;
+
+            sql """ create database if not exists `test_hive_loc_no_exist`
+                
properties('location'='hdfs://${externalEnvIp}:${hdfs_port}/exist_check')
+            """
+            sql """ create database if not exists `test_hive_loc_exist`
+                
properties('location'='hdfs://${externalEnvIp}:${hdfs_port}/exist_check')
+            """
+            sql """ drop database if exists `test_hive_loc_no_exist` """;
+            sql """ drop database if exists `test_hive_loc_exist` """;
+
+            try {
+                sql """ create database if not exists `test_hive_loc_no_exist`
+                        properties('location'='tt://${externalEnvIp}:${hdfs_port}/exist_check')
+                    """
+            } catch (Exception e) {
+                assertTrue(e.getMessage().contains("No FileSystem for scheme: tt"))
+            }
+            try {
+                sql """ create database if not exists `test_hive_loc_no_exist`
+                        properties('location'='hdfs://err_${externalEnvIp}:${hdfs_port}/exist_check')
+                    """
+            } catch (Exception e) {
+                assertTrue(e.getMessage().contains("Incomplete HDFS URI, no host"))
+            }
+            try {
+                sql """ create database if not exists `test_hive_loc_no_exist`
+                        properties('location'='hdfs:err//${externalEnvIp}:${hdfs_port}/exist_check')
+                    """
+            } catch (Exception e) {
+                assertTrue(e.getMessage().contains("Relative path in absolute URI"))
+            }
+        }
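
As the catch blocks above assert, a database location is validated eagerly at CREATE
DATABASE time: an unknown scheme, a URI without a host, or a malformed URI each fail
before anything is written. The happy path, with an illustrative endpoint:

    CREATE DATABASE IF NOT EXISTS demo_loc_db
    PROPERTIES ('location' = 'hdfs://127.0.0.1:8020/tmp/hive/demo_loc_db');
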
+
+        def test_loc_tbl = { String file_format, String externalEnvIp, String hdfs_port, String catalog_name ->
+            logger.info("Test create/drop table with location...")
+            sql """switch ${catalog_name}"""
+            def loc = "${externalEnvIp}:${hdfs_port}/tmp/hive/test_hive_loc_db"
+            sql  """ create database if not exists `test_hive_loc`
+                    properties('location'='hdfs://${loc}')
+                 """
+            sql """use `test_hive_loc`"""
+
+            // case1. the table default location is inherited from db
+            sql """DROP TABLE IF EXISTS `loc_tbl_${file_format}_default`"""
+            sql """
+                    CREATE TABLE loc_tbl_${file_format}_default (
+                      `col` STRING COMMENT 'col'
+                    )  ENGINE=hive 
+                    PROPERTIES (
+                      'file_format'='${file_format}'
+                    )
+                 """
+            def create_tbl_res = sql """ show create table loc_tbl_${file_format}_default """
+            logger.info("${create_tbl_res}")
+            assertTrue(create_tbl_res.toString().containsIgnoreCase("${loc}/loc_tbl_${file_format}_default"))
+
+            sql """ INSERT INTO loc_tbl_${file_format}_default values(1)  """
+
+            def tvfRes = sql """ SELECT * FROM hdfs(
+                                  'uri'='hdfs://${loc}/loc_tbl_${file_format}_default/*',
+                                  'format' = '${file_format}',
+                                  'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                                 )
+                             """
+            logger.info("${tvfRes}")
+            assertTrue(!tvfRes.isEmpty())
+            sql """DROP TABLE `loc_tbl_${file_format}_default`"""
+            def tvfDropRes = sql """ SELECT * FROM hdfs(
+                                  'uri'='hdfs://${loc}/loc_tbl_${file_format}_default/*',
+                                  'format' = '${file_format}',
+                                  'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                                 )
+                             """
+            logger.info("${tvfDropRes}")
+            assertTrue(tvfDropRes.isEmpty())
+
+            // case2. use a custom location to create table
+            def tbl_loc = "hdfs://${loc}/custom_loc"
+            sql """ DROP TABLE IF EXISTS loc_tbl_${file_format}_customm"""
+            sql """
+                    CREATE TABLE loc_tbl_${file_format}_custom (
+                      `col` STRING COMMENT 'col'
+                    )  ENGINE=hive 
+                    PROPERTIES (
+                      'file_format'='${file_format}',
+                      'location'='${tbl_loc}'
+                    )
+                 """
+            def create_tbl_res2 = sql """ show create table loc_tbl_${file_format}_custom """
+            logger.info("${create_tbl_res2}")
+            assertTrue(create_tbl_res2.toString().containsIgnoreCase("${tbl_loc}"))
+            sql """ INSERT INTO loc_tbl_${file_format}_custom values(1)  """
+            def tvfRes2 = sql """ SELECT * FROM hdfs(
+                                    'uri'='${tbl_loc}/*',
+                                    'format' = '${file_format}',
+                                    'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                                  )
+                              """
+            logger.info("${tvfRes2}")
+            assertTrue(!tvfRes2.isEmpty())
+            sql """DROP TABLE `loc_tbl_${file_format}_custom`"""
+            def tvfDropRes2 = sql """ SELECT * FROM hdfs(
+                                        'uri'='${tbl_loc}/*',
+                                        'format' = '${file_format}',
+                                        'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                                      )
+                                  """
+            logger.info("${tvfDropRes2}")
+            assertTrue(tvfDropRes2.isEmpty())
+
+            // case3. check default
+            sql """
+                CREATE TABLE all_default_values_${file_format}(
+                  `col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',
+                  `col2` TINYINT DEFAULT '127' COMMENT 'col2',
+                  `col3` SMALLINT DEFAULT '32767' COMMENT 'col3',
+                  `col4` INT DEFAULT '2147483647' COMMENT 'col4',
+                  `col5` BIGINT DEFAULT '9223372036854775807' COMMENT 'col5',
+                  `col6` CHAR(10) DEFAULT 'default' COMMENT 'col6',
+                  `col7` FLOAT DEFAULT '1' COMMENT 'col7',
+                  `col8` DOUBLE DEFAULT '3.141592653' COMMENT 'col8',
+                  `col9` DECIMAL(9,4) DEFAULT '99999.9999' COMMENT 'col9',
+                  `col10` VARCHAR(11) DEFAULT 'default' COMMENT 'col10',
+                  `col11` STRING DEFAULT 'default' COMMENT 'col11',
+                  `col12` DATE DEFAULT '2023-05-29' COMMENT 'col12',
+                  `col13` DATETIME DEFAULT current_timestamp COMMENT 'col13'
+                )  ENGINE=hive
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+            // need support default insert:
+            //            sql """ INSERT INTO all_default_values_${file_format}
+            //                    VALUES(null, null, null, null, null, null, null, null, null, null, null, null, null)
+            //                """
+            //            sql """ INSERT INTO all_default_values_${file_format} (col1, col3, col5, col7, col12)
+            //                    VALUES(null, null, null, null)
+            //                """
+            //            order_qt_default_val01 """ SELECT * FROM all_default_values_${file_format} """
+            sql """DROP TABLE `all_default_values_${file_format}`"""
+
+            test {
+                sql """
+                CREATE TABLE all_default_values_${file_format}_err_bool(
+                  `col1` BOOLEAN DEFAULT '-1' COMMENT 'col1'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = Invalid BOOLEAN literal: -1"
+            }
+
+            test {
+                sql """
+                CREATE TABLE all_default_values_${file_format}_err_float(
+                  `col1` FLOAT DEFAULT '1.1234' COMMENT 'col1'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = Default value will loose precision: 1.1234"
+            }
+
+            test {
+                sql """
+                CREATE TABLE all_default_values_${file_format}_err_double(
+                  `col1` DOUBLE DEFAULT 'abc' COMMENT 'col1'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = Invalid floating-point literal: abc"
+            }
+
+            test {
+                sql """
+                CREATE TABLE all_default_values_${file_format}_err_int(
+                  `col1` INT DEFAULT 'abcd' COMMENT 'col1'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = Invalid number format: abcd"
+            }
+
+            test {
+                sql """
+                CREATE TABLE all_default_values_${file_format}_err_date(
+                  `col1` DATE DEFAULT '123' COMMENT 'col1'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = date literal [123] is invalid: null"
+            }
+
+            test {
+                sql """
+                CREATE TABLE all_default_values_${file_format}_err_datetime(
+                   `col1` DATETIME DEFAULT '1512561000000' COMMENT 'col1'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = date literal [1512561000000] is invalid: errCode = 2, 
detailMessage = Invalid date value: 1512561000000"
+            }
+
+            test {
+                sql """
+                CREATE TABLE all_default_values_${file_format}_err_datetime(
+                   `col1` DATETIME DEFAULT '2020-09-20 02:60' COMMENT 'col1'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = date literal [2020-09-20 02:60] is invalid: Text '2020-09-20 
02:60' could not be parsed: Invalid value for MinuteOfHour (valid values 0 - 
59): 60"
+            }
+
+            // case4. check some exceptions
+            def comment_check = sql """ CREATE TABLE ex_tbl_${file_format}(
+                                          `col1` INT COMMENT 'col1',
+                                          `col2` STRING COMMENT 'col2',
+                                          `pt1` VARCHAR COMMENT 'pt1'
+                                        )  ENGINE=hive 
+                                        COMMENT 'test'
+                                        PARTITION BY LIST (pt1) ()
+                                        PROPERTIES (
+                                          'file_format'='${file_format}'
+                                        )
+                                    """
+            def comment_check_res = sql """ show create table ex_tbl_${file_format} """
+            logger.info("${comment_check_res}")
+            assertTrue(comment_check_res.toString().containsIgnoreCase("COMMENT 'col1'"))
+            assertTrue(comment_check_res.toString().containsIgnoreCase("COMMENT 'col2'"))
+            sql """DROP TABLE `ex_tbl_${file_format}`"""
+
+            test {
+                sql """
+                        CREATE TABLE nullable_check (
+                            `col` STRING NOT NULL COMMENT 'col'
+                        )  ENGINE=hive 
+                        PROPERTIES (
+                            'file_format'='${file_format}'
+                        )
+                    """
+                exception "errCode = 2, detailMessage = hive catalog doesn't 
support column with 'NOT NULL'."
+            }
+
+            test {
+                sql """
+                        CREATE TABLE schema_check ENGINE=hive ;
+                    """
+                exception "AnalysisException, msg: Should contain at least one 
column in a table"
+            }
+            sql """ DROP DATABASE IF EXISTS `test_hive_loc` """
+        }
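
A pattern worth noting from this closure: after writing to a table whose location is
inherited from the database or set explicitly, the hdfs() table-valued function reads
the files back directly, and an empty result after DROP TABLE confirms the managed
data was deleted. Sketch (URIs illustrative):

    SELECT * FROM hdfs(
        'uri'          = 'hdfs://127.0.0.1:8020/tmp/hive/demo_db/demo_tbl/*',
        'format'       = 'parquet',
        'fs.defaultFS' = 'hdfs://127.0.0.1:8020'
    );
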
+
+        def test_tbl_compress = { String compression, String file_format, String catalog_name ->
+            logger.info("Test create table with compression...")
+            sql """ switch ${catalog_name} """
+            sql  """ create database if not exists `test_hive_compress`
+                 """
+            sql """use `test_hive_compress`"""
+
+            // check table compression here; write/test_hive_write_insert.groovy covers compression on the insert path
+            sql """ DROP TABLE IF EXISTS tbl_${file_format}_${compression} """
+            sql """
+                    CREATE TABLE tbl_${file_format}_${compression} (
+                      `col` STRING COMMENT 'col'
+                    )  ENGINE=hive 
+                    PROPERTIES (
+                      'file_format'='${file_format}',
+                      'compression'='${compression}'
+                    )
+                 """
+            def create_tbl_res = sql """ show create table tbl_${file_format}_${compression} """
+            logger.info("${create_tbl_res}")
+            if (file_format.equals("parquet")) {
+                assertTrue(create_tbl_res.toString().containsIgnoreCase("'parquet.compression'='${compression}'"))
+            } else if (file_format.equals("orc")) {
+                assertTrue(create_tbl_res.toString().containsIgnoreCase("'orc.compress'='${compression}'"))
+            } else {
+                throw new Exception("Unsupported file format: ${file_format} for tbl_${file_format}_${compression}")
+            }
+
+            sql """DROP TABLE `tbl_${file_format}_${compression}`"""
+            sql """ drop database if exists `test_hive_compress` """;
+        }
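
The generic 'compression' property is translated into the format-specific Hive table
property, which is what the SHOW CREATE TABLE assertions above check: parquet tables
carry 'parquet.compression' and orc tables carry 'orc.compress'. Sketch:

    CREATE TABLE demo_compress (`col` STRING COMMENT 'col') ENGINE=hive
    PROPERTIES ('file_format'='orc', 'compression'='zlib');
    -- SHOW CREATE TABLE demo_compress should then report 'orc.compress'='zlib'
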
+
+        def test_create_tbl_cross_catalog = { String file_format, String catalog_name ->
+            sql """switch ${catalog_name}"""
+            sql """ CREATE DATABASE IF NOT EXISTS `test_olap_cross_catalog` """;
+            sql """ USE test_olap_cross_catalog """;
+            test {
+                sql """
+                        CREATE TABLE `test_olap_cross_catalog_tbl` (
+                            `col1` INT COMMENT 'col1',
+                            `col2` STRING COMMENT 'col2'
+                        )
+                        ENGINE=olap
+                        DISTRIBUTED BY HASH(col1) BUCKETS 16
+                        PROPERTIES (
+                            'replication_num' = '1'
+                        );
+                    """
+                exception "Cannot create olap table out of internal catalog. 
Make sure 'engine' type is specified when use the catalog: test_hive_ddl"
+            }
+
+            // test default engine is hive in hive catalog
+            sql """
+                    CREATE TABLE `test_olap_cross_catalog_tbl` (
+                        `col1` INT COMMENT 'col1',
+                        `col2` STRING COMMENT 'col2'
+                    )
+                """
+            sql """ DROP TABLE `test_olap_cross_catalog_tbl`
+                """
+
+            test {
+                sql """
+                        CREATE TABLE `test_olap_cross_catalog_tbl` (
+                            `col1` INT COMMENT 'col1',
+                            `col2` STRING COMMENT 'col2'
+                        ) DISTRIBUTED BY HASH(col1) BUCKETS 16
+                        PROPERTIES (
+                            'replication_num' = '1'
+                        );
+                    """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = errCode = 2, detailMessage = Create hive bucket table need set 
enable_create_hive_bucket_table to true"
+            }
+
+            sql """ SWITCH internal """
+            sql """ CREATE DATABASE IF NOT EXISTS test_hive_cross_catalog """;
+            sql """ USE internal.test_hive_cross_catalog """
+
+            test {
+                sql """
+                        CREATE TABLE test_hive_cross_catalog_tbl (
+                          `col` STRING COMMENT 'col'
+                        )  ENGINE=hive 
+                    """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = Cannot create hive table in internal catalog, should switch to 
hive catalog."
+            }
+
+            sql """ DROP DATABASE IF EXISTS test_olap_cross_catalog """
+            sql """ DROP DATABASE IF EXISTS test_hive_cross_catalog """
+        }
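
In short, the engine must agree with the catalog: an omitted ENGINE defaults to hive
inside a hive catalog, while olap tables can only be created from the internal
catalog. Sketch (table names illustrative):

    CREATE TABLE demo_hive_tbl (`col1` INT COMMENT 'col1');   -- in a hive catalog; engine defaults to hive

    SWITCH internal;
    CREATE TABLE demo_olap_tbl (`col1` INT COMMENT 'col1')
    ENGINE=olap DISTRIBUTED BY HASH(col1) BUCKETS 16
    PROPERTIES ('replication_num' = '1');
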
+
+        def test_db_tbl = { String file_format, String externalEnvIp, String hdfs_port, String catalog_name ->
+            logger.info("Test create/drop table...")
+            sql """switch ${catalog_name}"""
+            sql """ create database if not exists `test_hive_db_tbl` """;
+            sql """use `${catalog_name}`.`test_hive_db_tbl`"""
+
+            sql """
+                CREATE TABLE unpart_tbl_${file_format}(
+                  `col1` BOOLEAN COMMENT 'col1',
+                  `col2` INT COMMENT 'col2',
+                  `col3` BIGINT COMMENT 'col3',
+                  `col4` CHAR(10) COMMENT 'col4',
+                  `col5` FLOAT COMMENT 'col5',
+                  `col6` DOUBLE COMMENT 'col6',
+                  `col7` DECIMAL(9,4) COMMENT 'col7',
+                  `col8` VARCHAR(11) COMMENT 'col8',
+                  `col9` STRING COMMENT 'col9',
+                  `col10` DATE COMMENT 'col10',
+                  `col11` DATETIME COMMENT 'col11'
+                )  ENGINE=hive 
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+            """;
+
+            // test all columns
+            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
+                    VALUES 
+                    (true, 123, 9876543210, 'abcdefghij', 3.14, 6.28, 
123.4567, 'varcharval', 'stringval');
+                """
+            order_qt_insert01 """ SELECT `col1`, `col2`, `col3`, `col4`, 
`col5`, `col6`, `col7`, `col8`, `col9` FROM unpart_tbl_${file_format};  """
+
+            // test part of columns
+            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col8`, `col9`) 
+                    VALUES 
+                    (true, 123, 9876543210, 'varcharval', 'stringval');
+                """
+            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col8`, `col9`) 
+                    VALUES 
+                    (null, 123, 'varcharval', 8.98);
+                """
+            order_qt_insert02 """ SELECT `col1`, `col2`, `col3`, `col7`, 
`col9` FROM unpart_tbl_${file_format};  """
+
+            // test data diff
+            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
+                    VALUES 
+                    (true, null, 9876543210, 'abcdefghij', '2.3', 6.28, null, 
'varcharval', 'stringval');
+                """
+            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
+                    VALUES 
+                    (false, '1', 9876543210, 'abcdefghij', '2.3', 6.28, 0, 
2223, 'stringval');
+                """
+            order_qt_insert03 """ SELECT `col1`, `col2`, `col3`, `col4`, 
`col5`, `col6`, `col7`, `col8`, `col9` FROM unpart_tbl_${file_format} """
+
+            sql """ drop table if exists unpart_tbl_${file_format}"""
+
+            // partitioned table test
+            sql """
+                CREATE TABLE part_tbl_${file_format}(
+                  `col1` BOOLEAN COMMENT 'col1',
+                  `col2` INT COMMENT 'col2',
+                  `col3` BIGINT COMMENT 'col3',
+                  `col4` DECIMAL(2,1) COMMENT 'col4',
+                  `pt1` VARCHAR COMMENT 'pt1',
+                  `pt2` VARCHAR COMMENT 'pt2'
+                )  ENGINE=hive 
+                PARTITION BY LIST (pt1, pt2) ()
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+
+            // test all columns
+            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
+                VALUES (true, 1, 1000, 2.3, 'value_for_pt1', 'value_for_pt2')
+                """
+            order_qt_insert04 """ SELECT col1, col2, col3, col4, pt1, pt2 FROM 
part_tbl_${file_format};  """
+
+            // test part of columns
+            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
+                VALUES (true, 1, 1000, 2.3, 'value_for_pt1', 'value_for_pt2')
+                """
+            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
+                VALUES (true, 1, 1000, 2.3, 'value_for_pt1', 'value_for_pt2')
+                """
+            order_qt_insert05 """ SELECT col1, col2, col3, col4 FROM 
part_tbl_${file_format} """
+
+            // test data diff
+            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
+                VALUES (0, '1', 1000, null, 2.56, 'value_for_pt2')
+                """
+            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
+                VALUES (null, 1, '1000', '1.3', 'value_for_pt1', 2345)
+                """
+            order_qt_insert06 """ SELECT col1, col2, col3, col4 FROM 
part_tbl_${file_format}  """
+
+            sql """ drop table if exists part_tbl_${file_format} """
+
+            // test partitions
+            sql """
+                CREATE TABLE all_part_types_tbl_${file_format}(
+                  `col` INT COMMENT 'col',
+                  `pt1` BOOLEAN COMMENT 'pt1',
+                  `pt2` TINYINT COMMENT 'pt2',
+                  `pt3` SMALLINT COMMENT 'pt3',
+                  `pt4` INT COMMENT 'pt4',
+                  `pt5` BIGINT COMMENT 'pt5',
+                  `pt6` DATE COMMENT 'pt6',
+                  `pt7` DATETIME COMMENT 'pt7',
+                  `pt8` CHAR COMMENT 'pt8',
+                  `pt9` VARCHAR COMMENT 'pt9',
+                  `pt10` STRING COMMENT 'pt10'
+                )  ENGINE=hive 
+                PARTITION BY LIST (pt1, pt2, pt3, pt4, pt5, pt6, pt7, pt8, pt9, pt10) ()
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+                """
+            sql """ 
+                    INSERT INTO all_part_types_tbl_${file_format} (`col`, 
`pt1`, `pt2`, `pt3`, `pt4`, `pt5`, `pt6`, `pt7`, `pt8`, `pt9`, `pt10`)
+                    VALUES
+                    (1, true, 1, 123, 456789, 922232355, '2024-04-09', 
'2024-04-09 12:34:56', 'A', 'example', 'string_value');
+                """
+            def loc = 
"hdfs://${externalEnvIp}:${hdfs_port}/user/hive/warehouse/test_hive_db_tbl.db/all_part_types_tbl_${file_format}/pt1=1/pt2=1/pt3=123/pt4=456789/pt5=922232355/pt6=2024-04-09/*/pt8=A/pt9=example/pt10=string_value"
+            def pt_check = sql """ SELECT * FROM hdfs(
+                                    'uri'='${loc}/*',
+                                    'format' = '${file_format}',
+                                    'fs.defaultFS' = 
'hdfs://${externalEnvIp}:${hdfs_port}'
+                                  )
+                               """
+            logger.info("${pt_check}")
+            assertEquals(1, pt_check[0].size())
+            sql """ drop table if exists all_part_types_tbl_${file_format} """
+
+            test {
+                sql """
+                    CREATE TABLE all_part_types_tbl_${file_format}_err3(
+                      `col` INT COMMENT 'col',
+                      `pt1` STRING COMMENT 'pt1'
+                    )  ENGINE=hive 
+                    PARTITION BY LIST (pt000) ()
+                    PROPERTIES (
+                      'file_format'='${file_format}'
+                    )
+                    """
+                exception "errCode = 2, detailMessage = partition key pt000 is 
not exists"
+            }
+
+            test {
+                sql """ 
+                    CREATE TABLE all_part_types_tbl_${file_format}_err3(
+                      `col` INT COMMENT 'col',
+                      `pt1` STRING COMMENT 'pt1'
+                    )  ENGINE=hive 
+                    PARTITION BY LIST (pt1)
+                    (PARTITION pp VALUES IN ('2014-01-01'))
+                    PROPERTIES (
+                      'file_format'='${file_format}'
+                    ) 
+                    """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = errCode = 2, detailMessage = Partition values expressions is 
not supported in hive catalog."
+            }
+
+            test {
+                sql """ 
+                    CREATE TABLE all_part_types_tbl_${file_format}_err3(
+                      `col` INT COMMENT 'col',
+                      `pt1` STRING COMMENT 'pt1'
+                    )  ENGINE=hive 
+                    PARTITION BY LIST (pt000)
+                    (PARTITION pp VALUES IN ('2014-01-01'))
+                    PROPERTIES (
+                      'file_format'='${file_format}'
+                    ) 
+                    """
+                exception "errCode = 2, detailMessage = partition key pt000 is 
not exists"
+            }
+
+            test {
+                sql """
+                    CREATE TABLE all_part_types_tbl_${file_format}_err1(
+                      `col` INT COMMENT 'col',
+                      `pt1` LARGEINT COMMENT 'pt1'
+                    )  ENGINE=hive 
+                    PARTITION BY LIST (pt1) ()
+                    PROPERTIES (
+                      'file_format'='${file_format}'
+                    )
+                    """
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = failed to create database from hms client. reason: 
org.apache.doris.datasource.hive.HMSClientException: Unsupported primitive type 
conversion of LARGEINT"
+            }
+
+            test {
+                sql """
+                    CREATE TABLE all_part_types_tbl_${file_format}_err2(
+                      `col` INT COMMENT 'col',
+                      `pt1` FLOAT COMMENT 'pt1'
+                    )  ENGINE=hive 
+                    PARTITION BY LIST (pt1) ()
+                    PROPERTIES (
+                      'file_format'='${file_format}'
+                    )
+                    """
+                exception "errCode = 2, detailMessage = Floating point type 
column can not be partition column"
+            }
+
+            test {
+                sql """
+                    CREATE TABLE all_part_types_tbl_${file_format}_err3(
+                      `col` INT COMMENT 'col',
+                      `pt1` DOUBLE COMMENT 'pt1'
+                    )  ENGINE=hive 
+                    PARTITION BY LIST (pt1) ()
+                    PROPERTIES (
+                      'file_format'='${file_format}'
+                    )
+                    """
+                exception "errCode = 2, detailMessage = Floating point type 
column can not be partition column"
+            }
+
+            sql """ drop database if exists `test_hive_db_tbl` """;
+        }
+
+
+        try {
+            String hms_port = context.config.otherConfigs.get("hms_port")
+            String hdfs_port = context.config.otherConfigs.get("hdfs_port")
+            String catalog_name = "test_hive_ddl"
+            String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+            sql """drop catalog if exists ${catalog_name}"""
+            sql """create catalog if not exists ${catalog_name} properties (
+                'type'='hms',
+                'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}',
+                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+            );"""
+            sql """switch ${catalog_name}"""
+
+            sql """set enable_fallback_to_original_planner=false;"""
+            test_db(catalog_name)
+            test_loc_db(externalEnvIp, hdfs_port, catalog_name)
+            def compressions = ["snappy", "zlib", "zstd"]
+            for (String file_format in file_formats) {
+                logger.info("Process file format " + file_format)
+                test_loc_tbl(file_format, externalEnvIp, hdfs_port, catalog_name)
+                test_db_tbl(file_format, externalEnvIp, hdfs_port, catalog_name)
+
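+                // zlib compression is only valid for ORC files, so skip the parquet + zlib combination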
+                for (String compression in compressions) {
+                    if (file_format.equals("parquet") && 
compression.equals("zlib")) {
+                        continue
+                    }
+                    logger.info("Process file compression " + compression)
+                    test_tbl_compress(compression, file_format, catalog_name)
+                }
+                test_create_tbl_cross_catalog(file_format, catalog_name)
+            }
+            sql """drop catalog if exists ${catalog_name}"""
+        } finally {
+        }
+    }
+}
diff --git a/regression-test/suites/external_table_p0/hive/ddl/test_hive_ddl_and_ctas.groovy b/regression-test/suites/external_table_p0/hive/ddl/test_hive_ddl_and_ctas.groovy
deleted file mode 100644
index 093ba674fc6..00000000000
--- a/regression-test/suites/external_table_p0/hive/ddl/test_hive_ddl_and_ctas.groovy
+++ /dev/null
@@ -1,423 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-suite("test_hive_ddl_and_ctas", 
"p0,external,hive,external_docker,external_docker_hive") {
-    String enabled = context.config.otherConfigs.get("enableHiveTest")
-    if (enabled != null && enabled.equalsIgnoreCase("true")) {
-        def file_formats = ["parquet", "orc"]
-
-        def test_db = { String catalog_name ->
-            sql """switch ${catalog_name}"""
-            sql """ create database if not exists `test_hive_db` """;
-            sql """use `test_hive_db`"""
-            sql """ drop database if exists `test_hive_db` """;
-        }
-
-        def test_loc_db = { String externalEnvIp, String hdfs_port, String catalog_name ->
-            sql """switch ${catalog_name}"""
-            sql """ create database if not exists `test_hive_loc_db`
-                    properties('location_uri'='hdfs://${externalEnvIp}:${hdfs_port}/tmp/hive/test_hive_loc_db')
-                """;
-            sql """use `test_hive_loc_db`"""
-            sql """ drop database if exists `test_hive_loc_db` """;
-        }
-
-        def test_db_tbl = { String file_format, String catalog_name ->
-            sql """switch ${catalog_name}"""
-            sql """ create database if not exists `test_hive_db` """;
-            sql """use `${catalog_name}`.`test_hive_db`"""
-
-            sql """
-                CREATE TABLE unpart_tbl_${file_format}(
-                  `col1` BOOLEAN COMMENT 'col1',
-                  `col2` INT COMMENT 'col2',
-                  `col3` BIGINT COMMENT 'col3',
-                  `col4` CHAR(10) COMMENT 'col4',
-                  `col5` FLOAT COMMENT 'col5',
-                  `col6` DOUBLE COMMENT 'col6',
-                  `col7` DECIMAL(9,4) COMMENT 'col7',
-                  `col8` VARCHAR(11) COMMENT 'col8',
-                  `col9` STRING COMMENT 'col9'
-                )  ENGINE=hive 
-                PROPERTIES (
-                  'file_format'='${file_format}'
-                )
-            """;
-
-            // test all columns
-            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
-                    VALUES 
-                    (true, 123, 9876543210, 'abcdefghij', 3.14, 6.28, 
123.4567, 'varcharval', 'stringval');
-                """
-            order_qt_insert01 """ SELECT `col1`, `col2`, `col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9` FROM unpart_tbl_${file_format};  """
-
-            // test part of columns
-            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col8`, `col9`) 
-                    VALUES 
-                    (true, 123, 9876543210, 'varcharval', 'stringval');
-                """
-            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col8`, `col9`) 
-                    VALUES 
-                    (null, 123, 'varcharval', 8.98);
-                """
-            order_qt_insert02 """ SELECT `col1`, `col2`, `col3`, `col7`, 
`col9` FROM unpart_tbl_${file_format};  """
-
-            // test data diff
-            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
-                    VALUES 
-                    (true, null, 9876543210, 'abcdefghij', '2.3', 6.28, null, 
'varcharval', 'stringval');
-                """
-            sql """ INSERT INTO unpart_tbl_${file_format} (`col1`, `col2`, 
`col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
-                    VALUES 
-                    (false, '1', 9876543210, 'abcdefghij', '2.3', 6.28, 0, 
2223, 'stringval');
-                """
-            order_qt_insert03 """ SELECT `col1`, `col2`, `col3`, `col4`, 
`col5`, `col6`, `col7`, `col8`, `col9` FROM unpart_tbl_${file_format} """
-
-            sql """ drop table if exists unpart_tbl_${file_format}"""
-
-            // partitioned table test
-            sql """
-                CREATE TABLE part_tbl_${file_format}(
-                  `col1` BOOLEAN COMMENT 'col1',
-                  `col2` INT COMMENT 'col2',
-                  `col3` BIGINT COMMENT 'col3',
-                  `col4` DECIMAL(2,1) COMMENT 'col4',
-                  `pt1` VARCHAR COMMENT 'pt1',
-                  `pt2` VARCHAR COMMENT 'pt2'
-                )  ENGINE=hive 
-                PARTITION BY LIST (pt1, pt2) ()
-                PROPERTIES (
-                  'file_format'='${file_format}'
-                )
-            """;
-
-            // test all columns
-            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
-                VALUES (true, 1, 1000, 2.3, 'value_for_pt1', 'value_for_pt2')
-                """
-            order_qt_insert04 """ SELECT col1, col2, col3, col4, pt1, pt2 FROM 
part_tbl_${file_format};  """
-
-            // test part of columns
-            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
-                VALUES (true, 1, 1000, 2.3, 'value_for_pt1', 'value_for_pt2')
-                """
-            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
-                VALUES (true, 1, 1000, 2.3, 'value_for_pt1', 'value_for_pt2')
-                """
-            order_qt_insert05 """ SELECT col1, col2, col3, col4 FROM 
part_tbl_${file_format} """
-
-            // test data diff
-            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
-                VALUES (0, '1', 1000, null, 2.56, 'value_for_pt2')
-                """
-            sql """ INSERT INTO part_tbl_${file_format} (col1, col2, col3, 
col4, pt1, pt2)
-                VALUES (null, 1, '1000', '1.3', 'value_for_pt1', 2345)
-                """
-            order_qt_insert06 """ SELECT col1, col2, col3, col4 FROM 
part_tbl_${file_format}  """
-
-            sql """ drop table if exists part_tbl_${file_format}"""
-            sql """ drop database if exists `test_hive_db` """;
-        }
-
-        def test_ctas_tbl = { String file_format, String catalog_name ->
-            sql """ switch `${catalog_name}` """
-            sql """ create database if not exists `test_ctas` """;
-            sql """ switch internal """
-            sql """ create database if not exists test_ctas_olap """;
-            sql """ use internal.test_ctas_olap """
-
-            sql """
-                CREATE TABLE `unpart_ctas_olap_src` (
-                    `col1` INT COMMENT 'col1',
-                    `col2` STRING COMMENT 'col2'
-                )
-                ENGINE=olap
-                DISTRIBUTED BY HASH(col1) BUCKETS 16
-                PROPERTIES (
-                    'replication_num' = '1'
-                );
-            """
-
-            sql """ INSERT INTO `unpart_ctas_olap_src` (col1, col2) VALUES
-                (1, 'string value for col2'),
-                (2, 'another string value for col2'),
-                (3, 'yet another string value for col2'); 
-            """
-
-            sql """
-                CREATE TABLE `part_ctas_olap_src`(
-                    `col1` INT COMMENT 'col1',
-                    `pt1` VARCHAR(16) COMMENT 'pt1',
-                    `pt2` VARCHAR(16) COMMENT 'pt2'
-                )
-                ENGINE=olap
-                PARTITION BY LIST (pt1, pt2) (
-                    PARTITION pp1 VALUES IN(
-                        ('value_for_pt1', 'value_for_pt2'),
-                        ('value_for_pt11', 'value_for_pt22')
-                    )
-                )
-                DISTRIBUTED BY HASH(col1) BUCKETS 16
-                PROPERTIES (
-                    'replication_num' = '1'
-                );
-            """
-
-            sql """
-            INSERT INTO `part_ctas_olap_src` (col1, pt1, pt2) VALUES
-             (11, 'value_for_pt1', 'value_for_pt2'),
-             (22, 'value_for_pt11', 'value_for_pt22');
-            """
-
-            sql """ use `${catalog_name}`.`test_ctas` """
-            sql """
-                CREATE TABLE `unpart_ctas_src`(
-                  `col1` INT COMMENT 'col1',
-                  `col2` STRING COMMENT 'col2'
-                ) ENGINE=hive
-                PROPERTIES (
-                  'file_format'='parquet'
-                );
-            """
-
-            sql """ INSERT INTO `unpart_ctas_src` (col1, col2) VALUES
-                (1, 'string value for col2'),
-                (2, 'another string value for col2'),
-                (3, 'yet another string value for col2'); 
-            """
-
-            sql """
-                CREATE TABLE `part_ctas_src`(
-                  `col1` INT COMMENT 'col1',
-                  `pt1` VARCHAR COMMENT 'pt1',
-                  `pt2` VARCHAR COMMENT 'pt2'
-                ) ENGINE=hive
-                PARTITION BY LIST (pt1, pt2) ()
-                PROPERTIES (
-                  'file_format'='orc'
-                );
-            """
-
-            sql """
-            INSERT INTO `part_ctas_src` (col1, pt1, pt2) VALUES
-             (11, 'value_for_pt1', 'value_for_pt2'),
-             (22, 'value_for_pt11', 'value_for_pt22');
-            """
-
-            sql """ switch `${catalog_name}` """
-            // 1. external to external un-partitioned table
-            sql """ CREATE TABLE hive_ctas1 ENGINE=hive AS SELECT col1 FROM 
unpart_ctas_src; 
-            """
-
-            sql """ INSERT INTO hive_ctas1 SELECT col1 FROM unpart_ctas_src 
WHERE col1 > 1;
-            """
-
-            order_qt_ctas_01 """ SELECT * FROM hive_ctas1 """
-            sql """ DROP TABLE hive_ctas1 """
-
-            // 2. external to external partitioned table
-            sql """ CREATE TABLE hive_ctas2 ENGINE=hive AS SELECT col1,pt1,pt2 
FROM part_ctas_src WHERE col1>0;
-                """
-
-            sql """ INSERT INTO hive_ctas2 SELECT col1,pt1,pt2 FROM 
part_ctas_src WHERE col1>=22;
-                """
-
-            order_qt_ctas_02 """ SELECT * FROM hive_ctas2  """
-            sql """ DROP TABLE hive_ctas2 """
-
-            // 3. internal to external un-partitioned table
-            sql """ CREATE TABLE ctas_o1 ENGINE=hive AS SELECT col1,col2 FROM 
internal.test_ctas_olap.unpart_ctas_olap_src;
-            """
-
-            sql """ INSERT INTO ctas_o1 SELECT col1,col2 FROM 
internal.test_ctas_olap.unpart_ctas_olap_src;
-            """
-
-            order_qt_ctas_03 """ SELECT * FROM ctas_o1  """
-            sql """ DROP TABLE ctas_o1 """
-
-            // 4. internal to external partitioned table
-            sql """ CREATE TABLE ctas_o2 ENGINE=hive AS SELECT col1,pt1,pt2 
FROM internal.test_ctas_olap.part_ctas_olap_src WHERE col1>0;
-            """
-            sql """ INSERT INTO ctas_o2 SELECT col1,pt1,pt2 FROM 
internal.test_ctas_olap.part_ctas_olap_src WHERE col1>2;
-            """
-            order_qt_ctas_04 """ SELECT * FROM ctas_o2  """
-            sql """ DROP TABLE ctas_o2 """
-
-            // 5. check external to internal un-partitioned table
-            sql """ use internal.test_ctas_olap """
-            sql """  CREATE TABLE olap_ctas1
-                 PROPERTIES (
-                     "replication_allocation" = "tag.location.default: 1"
-                 ) AS SELECT col1,col2 
-                 FROM `${catalog_name}`.`test_ctas`.unpart_ctas_src;
-                """
-            order_qt_ctas_05 """ SELECT * FROM olap_ctas1  """
-            sql """ DROP TABLE olap_ctas1 """
-
-            // 6. check external to internal partitioned table
-            sql """ CREATE TABLE olap_ctas2 
-                PROPERTIES (
-                    "replication_allocation" = "tag.location.default: 1"
-                ) AS SELECT col1,pt1,pt2 
-                FROM `${catalog_name}`.`test_ctas`.part_ctas_src WHERE col1>0;
-                """
-            order_qt_ctas_06 """ SELECT * FROM olap_ctas2  """
-            sql """ DROP TABLE olap_ctas2 """
-
-            sql """ switch `${catalog_name}` """
-            sql """ DROP TABLE `test_ctas`.part_ctas_src """
-            sql """ DROP TABLE `test_ctas`.unpart_ctas_src """
-            sql """ drop database if exists `test_ctas` """;
-            sql """ DROP TABLE internal.test_ctas_olap.part_ctas_olap_src """
-            sql """ DROP TABLE internal.test_ctas_olap.unpart_ctas_olap_src """
-            sql """ switch internal """;
-            sql """ drop database if exists test_ctas_olap """;
-        }
-
-        def test_complex_type_tbl = { String file_format, String catalog_name ->
-            sql """ switch ${catalog_name} """
-            sql """ create database if not exists `test_complex_type` """;
-            sql """ use `${catalog_name}`.`test_complex_type` """
-
-            sql """
-                CREATE TABLE unpart_tbl_${file_format} (
-                  `col1` CHAR,
-                  `col2` CHAR(1),
-                  `col3` CHAR(16),
-                  `col4` VARCHAR,
-                  `col5` VARCHAR(255),
-                  `col6` DECIMAL(2,1),
-                  `col7` DECIMAL(5,0),
-                  `col8` DECIMAL(8,8),
-                  `col9` STRING,
-                  `col10` ARRAY<DECIMAL(4,3)>,
-                  `col11` ARRAY<CHAR(16)>,
-                  `col12` ARRAY<CHAR>,
-                  `col13` ARRAY<STRING>,
-                  `col14` ARRAY<MAP<INT, CHAR>>,
-                  `col15` MAP<BIGINT, CHAR>,
-                  `col16` MAP<BIGINT, DECIMAL(8,8)>,
-                  `col17` MAP<STRING, ARRAY<CHAR(16)>>,
-                  `col18` STRUCT<id:INT,gender:BOOLEAN,name:CHAR(16)>,
-                  `col19` STRUCT<scale:DECIMAL(7,4),metric:ARRAY<STRING>>,
-                  `col20` STRUCT<codes:ARRAY<INT>,props:MAP<STRING, ARRAY<CHAR(16)>>>
-                )  ENGINE=hive
-                PROPERTIES (
-                  'file_format'='${file_format}'
-                )
-            """;
-
-            sql """
-            INSERT INTO unpart_tbl_${file_format} (
-              col1, col2, col3, col4, col5, col6, col7, col8, col9,
-              col10, col11, col12, col13, col14, col15, col16, col17,
-              col18, col19, col20
-            ) VALUES (
-              'a', -- CHAR
-              'b', -- CHAR(1)
-              'c', -- CHAR(16)
-              'd', -- VARCHAR
-              'e', -- VARCHAR(255)
-              1.1, -- DECIMAL(2,1)
-              12345, -- DECIMAL(5,0)
-              0.12345678, -- DECIMAL(8,8)
-              'string', -- STRING
-              ARRAY(0.001, 0.002), -- ARRAY<DECIMAL(4,3)>
-              ARRAY('char1', 'char2'), -- ARRAY<CHAR(16)>
-              ARRAY('c', 'd'), -- ARRAY<CHAR>
-              ARRAY('string1', 'string2'), -- ARRAY<STRING>
-              ARRAY(MAP(1, 'a'), MAP(2, 'b')), -- ARRAY<MAP<INT, CHAR>>
-              MAP(1234567890123456789, 'a'), -- MAP<BIGINT, CHAR>
-              MAP(1234567890123456789, 0.12345678), -- MAP<BIGINT, DECIMAL(8,8)>
-              MAP('key', ARRAY('char1', 'char2')), -- MAP<STRING, ARRAY<CHAR(16)>>
-              STRUCT(1, TRUE, 'John Doe'), -- STRUCT<id:INT,gender:BOOLEAN,name:CHAR(16)>
-              STRUCT(123.4567, ARRAY('metric1', 'metric2')), -- STRUCT<scale:DECIMAL(7,4),metric:ARRAY<STRING>>
-              STRUCT(ARRAY(123, 456), MAP('key1', ARRAY('char1', 'char2'))) -- STRUCT<codes:ARRAY<INT>,props:MAP<STRING, ARRAY<CHAR(16)>>
-            );
-        """
-
-            sql """
-            INSERT INTO unpart_tbl_${file_format} (
-              col1, col11, col12, col13, col14, col15, col16, col17,
-              col18, col19
-            ) VALUES (
-              'a', -- CHAR
-              ARRAY('char1', 'char2'), -- ARRAY<CHAR(16)>
-              ARRAY('c', 'd'), -- ARRAY<CHAR>
-              ARRAY('string1', 'string2'), -- ARRAY<STRING>
-              ARRAY(MAP(1, 'a'), MAP(2, 'b')), -- ARRAY<MAP<INT, CHAR>>
-              MAP(1234567890123456789, 'a'), -- MAP<BIGINT, CHAR>
-              MAP(1234567890123456789, 0.12345678), -- MAP<BIGINT, DECIMAL(8,8)>
-              MAP('key', ARRAY('char1', 'char2')), -- MAP<STRING, ARRAY<CHAR(16)>>
-              STRUCT(1, TRUE, 'John Doe'), -- STRUCT<id:INT,gender:BOOLEAN,name:CHAR(16)>
-              STRUCT(123.4567, ARRAY('metric1', 'metric2')) -- STRUCT<scale:DECIMAL(7,4),metric:ARRAY<STRING>>
-            );
-        """
-
-            sql """
-            INSERT INTO unpart_tbl_${file_format} (
-              col1, col2, col3, col4, col5, col6, col7, col8, col9
-            ) VALUES (
-              'a', -- CHAR
-              'b', -- CHAR(1)
-              'c', -- CHAR(16)
-              'd', -- VARCHAR
-              'e', -- VARCHAR(255)
-              1.1, -- DECIMAL(2,1)
-              12345, -- DECIMAL(5,0)
-              0.12345678, -- DECIMAL(8,8)
-              'string' -- STRING
-            );
-        """
-
-            order_qt_complex_type01 """ SELECT * FROM unpart_tbl_${file_format} """
-            order_qt_complex_type02 """ SELECT * FROM unpart_tbl_${file_format} WHERE col2='b' """
-
-            sql """ DROP TABLE unpart_tbl_${file_format} """
-            sql """ drop database if exists `test_complex_type` """;
-        }
-
-        try {
-            String hms_port = context.config.otherConfigs.get("hms_port")
-            String hdfs_port = context.config.otherConfigs.get("hdfs_port")
-            String catalog_name = "test_hive_ddl_and_ctas"
-            String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
-
-            sql """drop catalog if exists ${catalog_name}"""
-            sql """create catalog if not exists ${catalog_name} properties (
-                'type'='hms',
-                'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}',
-                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
-            );"""
-            sql """switch ${catalog_name}"""
-
-            sql """set enable_fallback_to_original_planner=false;"""
-
-            test_db(catalog_name)
-            test_loc_db(externalEnvIp, hdfs_port, catalog_name)
-            for (String file_format in file_formats) {
-                logger.info("Process file format" + file_format)
-                test_db_tbl(file_format, catalog_name)
-                test_ctas_tbl(file_format, catalog_name)
-                test_complex_type_tbl(file_format, catalog_name)
-                // todo: test bucket table: test_db_buck_tbl()
-            }
-            sql """drop catalog if exists ${catalog_name}"""
-        } finally {
-        }
-    }
-}
diff --git a/regression-test/suites/external_table_p0/hive/ddl/test_hive_write_type.groovy b/regression-test/suites/external_table_p0/hive/ddl/test_hive_write_type.groovy
new file mode 100644
index 00000000000..07953ed1575
--- /dev/null
+++ b/regression-test/suites/external_table_p0/hive/ddl/test_hive_write_type.groovy
@@ -0,0 +1,285 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_hive_write_type", 
"p0,external,hive,external_docker,external_docker_hive") {
+    String enabled = context.config.otherConfigs.get("enableHiveTest")
+    if (enabled != null && enabled.equalsIgnoreCase("true")) {
+        def file_formats = ["parquet", "orc"]
+        def test_complex_type_tbl = { String file_format, String catalog_name ->
+            sql """ switch ${catalog_name} """
+            sql """ create database if not exists `test_complex_type` """;
+            sql """ use `${catalog_name}`.`test_complex_type` """
+
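+            // cover CHAR/VARCHAR/DECIMAL together with nested ARRAY/MAP/STRUCT types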
+            sql """
+                CREATE TABLE unpart_tbl_${file_format} (
+                  `col1` CHAR,
+                  `col2` CHAR(1),
+                  `col3` CHAR(16),
+                  `col4` VARCHAR,
+                  `col5` VARCHAR(255),
+                  `col6` DECIMAL(2,1),
+                  `col7` DECIMAL(5,0),
+                  `col8` DECIMAL(8,8),
+                  `col9` STRING,
+                  `col10` ARRAY<DECIMAL(4,3)>,
+                  `col11` ARRAY<CHAR(16)>,
+                  `col12` ARRAY<CHAR>,
+                  `col13` ARRAY<STRING>,
+                  `col14` ARRAY<MAP<INT, CHAR>>,
+                  `col15` MAP<BIGINT, CHAR>,
+                  `col16` MAP<BIGINT, DECIMAL(8,8)>,
+                  `col17` MAP<STRING, ARRAY<CHAR(16)>>,
+                  `col18` STRUCT<id:INT,gender:BOOLEAN,name:CHAR(16)>,
+                  `col19` STRUCT<scale:DECIMAL(7,4),metric:ARRAY<STRING>>,
+                  `col20` STRUCT<codes:ARRAY<INT>,props:MAP<STRING, ARRAY<CHAR(16)>>>
+                )  ENGINE=hive
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+            """;
+
+            sql """
+            INSERT INTO unpart_tbl_${file_format} (
+              col1, col2, col3, col4, col5, col6, col7, col8, col9,
+              col10, col11, col12, col13, col14, col15, col16, col17,
+              col18, col19, col20
+            ) VALUES (
+              'a', -- CHAR
+              'b', -- CHAR(1)
+              'c', -- CHAR(16)
+              'd', -- VARCHAR
+              'e', -- VARCHAR(255)
+              1.1, -- DECIMAL(2,1)
+              12345, -- DECIMAL(5,0)
+              0.12345678, -- DECIMAL(8,8)
+              'string', -- STRING
+              ARRAY(0.001, 0.002), -- ARRAY<DECIMAL(4,3)>
+              ARRAY('char1', 'char2'), -- ARRAY<CHAR(16)>
+              ARRAY('c', 'd'), -- ARRAY<CHAR>
+              ARRAY('string1', 'string2'), -- ARRAY<STRING>
+              ARRAY(MAP(1, 'a'), MAP(2, 'b')), -- ARRAY<MAP<INT, CHAR>>
+              MAP(1234567890123456789, 'a'), -- MAP<BIGINT, CHAR>
+              MAP(1234567890123456789, 0.12345678), -- MAP<BIGINT, DECIMAL(8,8)>
+              MAP('key', ARRAY('char1', 'char2')), -- MAP<STRING, ARRAY<CHAR(16)>>
+              STRUCT(1, TRUE, 'John Doe'), -- STRUCT<id:INT,gender:BOOLEAN,name:CHAR(16)>
+              STRUCT(123.4567, ARRAY('metric1', 'metric2')), -- STRUCT<scale:DECIMAL(7,4),metric:ARRAY<STRING>>
+              STRUCT(ARRAY(123, 456), MAP('key1', ARRAY('char1', 'char2'))) -- STRUCT<codes:ARRAY<INT>,props:MAP<STRING, ARRAY<CHAR(16)>>>
+            );
+            """
+
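+            // INSERT OVERWRITE replaces the rows written by the insert above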
+            sql """
+            INSERT OVERWRITE TABLE unpart_tbl_${file_format} (
+              col1, col2, col3, col4, col5, col6, col7, col8, col9,
+              col10, col11, col12, col13, col14, col15, col16, col17,
+              col18, col19, col20
+            ) VALUES (
+              'a', -- CHAR
+              'b', -- CHAR(1)
+              'c', -- CHAR(16)
+              'd', -- VARCHAR
+              'e', -- VARCHAR(255)
+              1.1, -- DECIMAL(2,1)
+              12345, -- DECIMAL(5,0)
+              0.12345678, -- DECIMAL(8,8)
+              'string', -- STRING
+              ARRAY(0.001, 0.002), -- ARRAY<DECIMAL(4,3)>
+              ARRAY('char1', 'char2'), -- ARRAY<CHAR(16)>
+              ARRAY('c', 'd'), -- ARRAY<CHAR>
+              ARRAY('string1', 'string2'), -- ARRAY<STRING>
+              ARRAY(MAP(1, 'a'), MAP(2, 'b')), -- ARRAY<MAP<INT, CHAR>>
+              MAP(1234567890123456789, 'a'), -- MAP<BIGINT, CHAR>
+              MAP(1234567890123456789, 0.12345678), -- MAP<BIGINT, DECIMAL(8,8)>
+              MAP('key', ARRAY('char1', 'char2')), -- MAP<STRING, ARRAY<CHAR(16)>>
+              STRUCT(1, TRUE, 'John Doe'), -- STRUCT<id:INT,gender:BOOLEAN,name:CHAR(16)>
+              STRUCT(123.4567, ARRAY('metric1', 'metric2')), -- STRUCT<scale:DECIMAL(7,4),metric:ARRAY<STRING>>
+              STRUCT(ARRAY(123, 456), MAP('key1', ARRAY('char1', 'char2'))) -- STRUCT<codes:ARRAY<INT>,props:MAP<STRING, ARRAY<CHAR(16)>>>
+            );
+            """
+
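+            // partial-column insert: the omitted columns are expected to be NULL-filled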
+            sql """
+            INSERT INTO unpart_tbl_${file_format} (
+              col1, col11, col12, col13, col14, col15, col16, col17,
+              col18, col19
+            ) VALUES (
+              'a', -- CHAR
+              ARRAY('char1', 'char2'), -- ARRAY<CHAR(16)>
+              ARRAY('c', 'd'), -- ARRAY<CHAR>
+              ARRAY('string1', 'string2'), -- ARRAY<STRING>
+              ARRAY(MAP(1, 'a'), MAP(2, 'b')), -- ARRAY<MAP<INT, CHAR>>
+              MAP(1234567890123456789, 'a'), -- MAP<BIGINT, CHAR>
+              MAP(1234567890123456789, 0.12345678), -- MAP<BIGINT, DECIMAL(8,8)>
+              MAP('key', ARRAY('char1', 'char2')), -- MAP<STRING, ARRAY<CHAR(16)>>
+              STRUCT(1, TRUE, 'John Doe'), -- STRUCT<id:INT,gender:BOOLEAN,name:CHAR(16)>
+              STRUCT(123.4567, ARRAY('metric1', 'metric2')) -- STRUCT<scale:DECIMAL(7,4),metric:ARRAY<STRING>>
+            );
+            """
+
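+            // scalar columns only, leaving every complex-type column NULL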
+            sql """
+            INSERT INTO unpart_tbl_${file_format} (
+              col1, col2, col3, col4, col5, col6, col7, col8, col9
+            ) VALUES (
+              'a', -- CHAR
+              'b', -- CHAR(1)
+              'c', -- CHAR(16)
+              'd', -- VARCHAR
+              'e', -- VARCHAR(255)
+              1.1, -- DECIMAL(2,1)
+              12345, -- DECIMAL(5,0)
+              0.12345678, -- DECIMAL(8,8)
+              'string' -- STRING
+            );
+            """
+
+            order_qt_complex_type01 """ SELECT * FROM unpart_tbl_${file_format} """
+            order_qt_complex_type02 """ SELECT * FROM unpart_tbl_${file_format} WHERE col2='b' """
+
+            sql """ DROP TABLE unpart_tbl_${file_format} """
+            sql """ drop database if exists `test_complex_type` """;
+        }
+
+        def test_insert_exception = { String file_format, String catalog_name ->
+            sql """ switch ${catalog_name} """
+
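+            // creating an existing database again must fail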
+            sql """ create database if not exists `test_hive_ex` """;
+            test {
+                sql """ create database `test_hive_ex` """
+                exception "errCode = 2, detailMessage = Can't create database 
'test_hive_ex'; database exists"
+            }
+            sql """use `${catalog_name}`.`test_hive_ex`"""
+
+            sql """
+                CREATE TABLE IF NOT EXISTS test_hive_ex.ex_tbl_${file_format}(
+                  `col1` BOOLEAN COMMENT 'col1',
+                  `col2` INT COMMENT 'col2',
+                  `col3` BIGINT COMMENT 'col3',
+                  `col4` CHAR(10) COMMENT 'col4',
+                  `col5` FLOAT COMMENT 'col5',
+                  `col6` DOUBLE COMMENT 'col6',
+                  `col7` DECIMAL(6,4) COMMENT 'col7',
+                  `col8` VARCHAR(11) COMMENT 'col8',
+                  `col9` STRING COMMENT 'col9',
+                  `pt1` VARCHAR COMMENT 'pt1',
+                  `pt2` STRING COMMENT 'pt2',
+                  `pt3` DATE COMMENT 'pt3'
+                )  ENGINE=hive 
+                PARTITION BY LIST (pt1, pt2) ()
+                PROPERTIES (
+                  'file_format'='${file_format}'
+                )
+            """;
+
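+            // the inserts below exercise overflow, column mismatch and partition error paths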
+            try {
+                // test decimal overflow: col7 DECIMAL(6,4) cannot hold 123.4567
+                sql """ INSERT INTO ex_tbl_${file_format} (`col1`, `col2`, `col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
+                    VALUES 
+                    (true, 123, 987654321099, 'abcdefghij', 3.1214, 63.28, 123.4567, 'varcharval', 'stringval');
+                """
+            } catch (Exception e) {
+                // BE error messages must be checked with contains(), not an exact match
+                assertTrue(e.getMessage().contains("[E-124]Arithmetic overflow, convert failed from 1234567, expected data is [-999999, 999999]"))
+            }
+
+            try {
+                // test implicit type conversion; col7 DECIMAL(6,4) still overflows
+                sql """ INSERT INTO ex_tbl_${file_format} (`col1`, `col2`, `col3`, `col4`, `col5`, `col6`, `col7`, `col8`, `col9`) 
+                    VALUES 
+                    ('1', 123, 987654319, 'abcdefghij', '3.15', '6.28', 123.4567, 432, 'stringval');
+                """
+            } catch (Exception e) {
+                assertTrue(e.getMessage().contains("[E-124]Arithmetic overflow, convert failed from 1234567, expected data is [-999999, 999999]"))
+            }
+
+            test {
+                sql """
+                        CREATE TABLE test_hive_ex.ex_tbl_${file_format}(
+                          `col1` BOOLEAN COMMENT 'col1'
+                        )  ENGINE=hive 
+                        PROPERTIES (
+                          'file_format'='${file_format}'
+                        )
+                    """;
+                exception "errCode = 2, detailMessage = errCode = 2, 
detailMessage = Table 'ex_tbl_${file_format}' already exists"
+            }
+
+            test {
+                // test fewer target columns than values
+                sql """ INSERT INTO ex_tbl_${file_format} (`col1`, `col2`, `col3`, `col4`, `col5`) 
+                        VALUES 
+                        (true, 123, 9876543210, 'abcdefghij', 3.14, 6.28, 123.4567, 'varcharval', 'stringval');
+                """
+                exception "errCode = 2, detailMessage = Column count doesn't match value count"
+            }
+
+            test {
+                // test an unknown target column
+                sql """ INSERT INTO ex_tbl_${file_format} (`col1`, `col2`, `col3`, `col4`, `col5`, `pt00`) 
+                    VALUES 
+                    (true, 123, 9876543210, 'abcdefghij', 3.14, 'error');
+                """
+                exception "errCode = 2, detailMessage = Unknown column 'pt00' in target table."
+            }
+
+            test {
+                sql """ INSERT INTO ex_tbl_${file_format} 
partition(`pt1`,`pt2`) (`col1`, `col9`) 
+                    VALUES 
+                    ('abcdefghij', 'error', true, 123);
+                """
+                exception "errCode = 2, detailMessage = Column count doesn't 
match value count"
+            }
+
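+            // valid case: a subset of data columns with the partition columns declared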
+            sql """ INSERT INTO ex_tbl_${file_format} partition(`pt1`,`pt2`) 
(`col3`, `col6`, `col9`) 
+                VALUES 
+                (9876543210, 6.28, 'no_error');
+            """
+
+            test {
+                sql """ INSERT INTO ex_tbl_${file_format} partition(`pt0`, 
`pt1`,`pt3`) (`col3`, `col6`, `col9`) 
+                    VALUES 
+                    ('err', 'err', 'err', 9876543210, 6.28, 'error');
+                """
+                exception "errCode = 2, detailMessage = Column count doesn't 
match value count"
+            }
+
+            sql """ DROP TABLE 
${catalog_name}.test_hive_ex.ex_tbl_${file_format} """
+            sql """ DROP DATABASE ${catalog_name}.test_hive_ex """
+        }
+
+        try {
+            String hms_port = context.config.otherConfigs.get("hms_port")
+            String hdfs_port = context.config.otherConfigs.get("hdfs_port")
+            String catalog_name = "test_hive_write_type"
+            String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+            sql """drop catalog if exists ${catalog_name}"""
+            sql """create catalog if not exists ${catalog_name} properties (
+                'type'='hms',
+                'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}',
+                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+            );"""
+            sql """switch ${catalog_name}"""
+
+            sql """set enable_fallback_to_original_planner=false;"""
+            for (String file_format in file_formats) {
+                logger.info("Process file format" + file_format)
+                test_complex_type_tbl(file_format, catalog_name)
+                test_insert_exception(file_format, catalog_name)
+            }
+            sql """drop catalog if exists ${catalog_name}"""
+        } finally {
+        }
+    }
+}

