This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
     new 56fa9fe24b4 [feat](catalog) support drop database force in external catalog (#48943)
56fa9fe24b4 is described below

commit 56fa9fe24b43eb1978ded04e880054e11382ed26
Author: Mingyu Chen (Rayner) <morning...@163.com>
AuthorDate: Wed Mar 12 13:58:27 2025 +0800

    [feat](catalog) support drop database force in external catalog (#48943)

    ### What problem does this PR solve?

    Problem Summary:

    Support the `force` keyword when dropping a database in a Hive or Iceberg catalog:

    ```
    DROP DATABASE db FORCE;
    ```

    Previously, if a Hive or Iceberg database contained tables, `DROP DATABASE` was not allowed.
    Now, if the user adds the `force` keyword, the tables in the database are dropped first, and
    then the database itself is dropped.
---
 .../doris/datasource/hive/HiveMetadataOps.java     |  10 ++
 .../datasource/iceberg/IcebergMetadataOps.java     |  16 ++-
 .../hive/ddl/test_hive_drop_db.out                 | Bin 0 -> 317 bytes
 .../iceberg/test_iceberg_drop_db.out               | Bin 0 -> 194 bytes
 .../hive/ddl/test_hive_drop_db.groovy              | 106 +++++++++++++++++++
 .../iceberg/test_iceberg_drop_db.groovy            | 112 +++++++++++++++++++++
 6 files changed, 241 insertions(+), 3 deletions(-)
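For context, a sketch of the behavior this patch enables; the catalog, database, and table names below are illustrative, not part of the patch:

```
-- Illustrative only: hive_ctl, db1, and t1 are hypothetical names.
SWITCH hive_ctl;
CREATE DATABASE db1;
USE db1;
CREATE TABLE t1 (`col1` BOOLEAN);

DROP DATABASE db1;        -- rejected while tables still exist
DROP DATABASE db1 FORCE;  -- drops t1 first, then db1
```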
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
index 66a4162c853..633e6a72489 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
@@ -151,6 +151,16 @@ public class HiveMetadataOps implements ExternalMetadataOps {
             }
         }
         try {
+            if (force) {
+                // try to drop all tables in the database
+                List<String> tables = listTableNames(dbName);
+                for (String table : tables) {
+                    dropTableImpl(dbName, table, true);
+                }
+                if (!tables.isEmpty()) {
+                    LOG.info("drop database[{}] with force, drop all tables, num: {}", dbName, tables.size());
+                }
+            }
             client.dropDatabase(dbName);
         } catch (Exception e) {
             throw new RuntimeException(e.getMessage(), e);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java
index 0159defbae0..11451b445a2 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java
@@ -170,10 +170,10 @@ public class IcebergMetadataOps implements ExternalMetadataOps {
     }
 
     @Override
-    public void dropDbImpl(String dbName, boolean ifExists, boolean fore) throws DdlException {
+    public void dropDbImpl(String dbName, boolean ifExists, boolean force) throws DdlException {
         try {
             preExecutionAuthenticator.execute(() -> {
-                preformDropDb(dbName, ifExists);
+                preformDropDb(dbName, ifExists, force);
                 return null;
             });
         } catch (Exception e) {
@@ -182,7 +182,7 @@ public class IcebergMetadataOps implements ExternalMetadataOps {
         }
     }
 
-    private void preformDropDb(String dbName, boolean ifExists) throws DdlException {
+    private void preformDropDb(String dbName, boolean ifExists, boolean force) throws DdlException {
         if (!databaseExist(dbName)) {
             if (ifExists) {
                 LOG.info("drop database[{}] which does not exist", dbName);
@@ -191,6 +191,16 @@ public class IcebergMetadataOps implements ExternalMetadataOps {
                 ErrorReport.reportDdlException(ErrorCode.ERR_DB_DROP_EXISTS, dbName);
             }
         }
+        if (force) {
+            // try to drop all tables in the database
+            List<String> tables = listTableNames(dbName);
+            for (String table : tables) {
+                performDropTable(dbName, table, true);
+            }
+            if (!tables.isEmpty()) {
+                LOG.info("drop database[{}] with force, drop all tables, num: {}", dbName, tables.size());
+            }
+        }
         nsCatalog.dropNamespace(getNamespace(dbName));
     }
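The regression suites below verify the physical cleanup by scanning the old storage locations with table-valued functions. A condensed sketch of the Hive-side check, assuming an illustrative namenode address and warehouse path:

```
-- After the force drop, scanning the table's old HDFS location should
-- return no rows. Host, port, and path here are illustrative.
SELECT * FROM HDFS(
    "uri" = "hdfs://namenode:8020/user/hive/warehouse/db1.db/t1/*",
    "format" = "orc");
```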
+ +suite("test_hive_drop_db", "p0,external,hive,external_docker,external_docker_hive") { + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("diable Hive test.") + return; + } + + for (String hivePrefix : ["hive2", "hive3"]) { + try { + String hms_port = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String hdfs_port = context.config.otherConfigs.get(hivePrefix + "HdfsPort") + String catalog_name = "test_${hivePrefix}_drop_db_ctl" + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + String db_name = "test_${hivePrefix}_drop_db" + + sql """drop catalog if exists ${catalog_name}""" + sql """create catalog if not exists ${catalog_name} properties ( + 'type'='hms', + 'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}', + 'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}', + 'use_meta_cache' = 'true' + );""" + sql """switch ${catalog_name}""" + + sql """set enable_fallback_to_original_planner=false;""" + + sql """drop database if exists ${db_name} force""" + sql """create database ${db_name}""" + sql """use ${db_name}""" + sql """ + CREATE TABLE test_hive_drop_db_tbl1 ( + `col1` BOOLEAN, + `col2` TINYINT + ); + """ + sql """insert into test_hive_drop_db_tbl1 values(true, 1);""" + qt_sql_tbl1 """select * from test_hive_drop_db_tbl1""" + sql """ + CREATE TABLE test_hive_drop_db_tbl2 ( + `col1` BOOLEAN, + `col2` TINYINT + ); + """ + sql """insert into test_hive_drop_db_tbl2 values(false, 2);""" + qt_sql_tbl2 """select * from test_hive_drop_db_tbl2""" + sql """ + CREATE TABLE test_hive_drop_db_tbl3 ( + `col1` BOOLEAN, + `col2` TINYINT + ); + """ + qt_sql_tbl3 """select * from test_hive_drop_db_tbl3""" + + // drop db with tables + test { + sql """drop database ${db_name}""" + exception """One or more tables exist""" + } + + // drop db froce with tables + sql """drop database ${db_name} force""" + + // refresh catalog + sql """refresh catalog ${catalog_name}""" + // should be empty + test { + sql """show tables from ${db_name}""" + exception "Unknown database" + } + + // use tvf to check if table is dropped + String tbl1_path = "hdfs://${externalEnvIp}:${hdfs_port}/user/hive/warehouse/test_${hivePrefix}_drop_db.db/test_hive_drop_db_tbl1" + String tbl2_path = "hdfs://${externalEnvIp}:${hdfs_port}/user/hive/warehouse/test_${hivePrefix}_drop_db.db/test_hive_drop_db_tbl2" + String tbl3_path = "hdfs://${externalEnvIp}:${hdfs_port}/user/hive/warehouse/test_${hivePrefix}_drop_db.db/test_hive_drop_db_tbl3" + + qt_test_1 """ select * from HDFS( + "uri" = "${tbl1_path}/*", + "format" = "orc"); """ + qt_test_2 """ select * from HDFS( + "uri" = "${tbl1_path}/*", + "format" = "orc"); """ + qt_test_3 """ select * from HDFS( + "uri" = "${tbl1_path}/*", + "format" = "orc"); """ + + } finally { + } + } +} diff --git a/regression-test/suites/external_table_p0/iceberg/test_iceberg_drop_db.groovy b/regression-test/suites/external_table_p0/iceberg/test_iceberg_drop_db.groovy new file mode 100644 index 00000000000..ccc6196cd74 --- /dev/null +++ b/regression-test/suites/external_table_p0/iceberg/test_iceberg_drop_db.groovy @@ -0,0 +1,112 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
diff --git a/regression-test/suites/external_table_p0/iceberg/test_iceberg_drop_db.groovy b/regression-test/suites/external_table_p0/iceberg/test_iceberg_drop_db.groovy
new file mode 100644
index 00000000000..ccc6196cd74
--- /dev/null
+++ b/regression-test/suites/external_table_p0/iceberg/test_iceberg_drop_db.groovy
@@ -0,0 +1,112 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_iceberg_drop_db", "p0,external,doris,external_docker,external_docker_doris") {
+
+    String enabled = context.config.otherConfigs.get("enableIcebergTest")
+    if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+        logger.info("disable iceberg test.")
+        return
+    }
+
+    String catalog_name = "test_iceberg_drop_db_ctl"
+    String db_name = "test_iceberg_drop_db"
+    String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+    String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+    String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+
+    sql """drop catalog if exists ${catalog_name}"""
+    sql """
+    CREATE CATALOG ${catalog_name} PROPERTIES (
+        'type'='iceberg',
+        'iceberg.catalog.type'='rest',
+        'uri' = 'http://${externalEnvIp}:${rest_port}',
+        "s3.access_key" = "admin",
+        "s3.secret_key" = "password",
+        "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+        "s3.region" = "us-east-1"
+    );"""
+
+    sql """switch ${catalog_name}"""
+
+    sql """drop database if exists ${db_name} force"""
+    sql """create database ${db_name}"""
+    sql """use ${db_name}"""
+
+    sql """ create table test_iceberg_drop_db_tbl1 (id int) """
+    sql """insert into test_iceberg_drop_db_tbl1 values(1);"""
+    qt_sql_tbl1 """select * from test_iceberg_drop_db_tbl1"""
+
+    sql """ create table test_iceberg_drop_db_tbl2 (id int) """
+    sql """insert into test_iceberg_drop_db_tbl2 values(2);"""
+    qt_sql_tbl2 """select * from test_iceberg_drop_db_tbl2"""
+
+    sql """ create table test_iceberg_drop_db_tbl3 (id int) """
+    sql """insert into test_iceberg_drop_db_tbl3 values(3);"""
+    qt_sql_tbl3 """select * from test_iceberg_drop_db_tbl3"""
+
+    // drop db with tables
+    test {
+        sql """drop database ${db_name}"""
+        exception """is not empty"""
+    }
+
+    // drop db force with tables
+    sql """drop database ${db_name} force"""
+
+    // refresh catalog
+    sql """refresh catalog ${catalog_name}"""
+    // should be empty
+    test {
+        sql """show tables from ${db_name}"""
+        exception "Unknown database"
+    }
+
+    // table should be deleted
+    qt_test1 """
+        select * from s3(
+            "uri" = "s3://warehouse/wh/${db_name}/test_iceberg_drop_db_tbl1/data/*.parquet",
+            "s3.endpoint"="http://${externalEnvIp}:${minio_port}",
+            "s3.access_key" = "admin",
+            "s3.secret_key" = "password",
+            "s3.region" = "us-east-1",
+            "format" = "parquet",
+            "use_path_style" = "true"
+        )
+    """
+    qt_test2 """
+        select * from s3(
+            "uri" = "s3://warehouse/wh/${db_name}/test_iceberg_drop_db_tbl2/data/*.parquet",
+            "s3.endpoint"="http://${externalEnvIp}:${minio_port}",
+            "s3.access_key" = "admin",
+            "s3.secret_key" = "password",
+            "s3.region" = "us-east-1",
+            "format" = "parquet",
+            "use_path_style" = "true"
+        )
+    """
+    qt_test3 """
+        select * from s3(
+            "uri" = "s3://warehouse/wh/${db_name}/test_iceberg_drop_db_tbl3/data/*.parquet",
+            "s3.endpoint"="http://${externalEnvIp}:${minio_port}",
+            "s3.access_key" = "admin",
+            "s3.secret_key" = "password",
+            "s3.region" = "us-east-1",
+            "format" = "parquet",
+            "use_path_style" = "true"
+        )
+    """
+}
"s3.region" = "us-east-1", + "format" = "parquet", + "use_path_style" = "true" + ) + """ +} --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org For additional commands, e-mail: commits-h...@doris.apache.org