This is an automated email from the ASF dual-hosted git repository.

morningman pushed a commit to branch branch-1.2-lts
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-1.2-lts by this push:
     new efc544e1d2 Revert "[improvement](libhdfs) Use keytab and principal to login kerberos (#19841)"
efc544e1d2 is described below

commit efc544e1d287e4d0c1001c57aff382ad37b766d1
Author: morningman <morning...@163.com>
AuthorDate: Wed May 31 18:09:33 2023 +0800

    Revert "[improvement](libhdfs) Use keytab and principal to login kerberos (#19841)"

    This reverts commit 4db6aa67b38c75ecac9e10bedb662bf60ff9aa30.
---
 be/CMakeLists.txt          |  2 +-
 be/src/io/hdfs_builder.cpp | 34 ++++++----------------------------
 2 files changed, 7 insertions(+), 29 deletions(-)

diff --git a/be/CMakeLists.txt b/be/CMakeLists.txt
index 12621403fb..aa40e61040 100644
--- a/be/CMakeLists.txt
+++ b/be/CMakeLists.txt
@@ -739,7 +739,7 @@ set(COMMON_THIRDPARTY
     simdjson
 )
 
-if ((ARCH_AMD64 OR ARCH_AARCH64) AND OS_LINUX)
+if (ARCH_AMD64 AND OS_LINUX)
     add_library(hadoop_hdfs STATIC IMPORTED)
     set_target_properties(hadoop_hdfs PROPERTIES IMPORTED_LOCATION ${THIRDPARTY_DIR}/lib/hadoop_hdfs/native/libhdfs.a)

diff --git a/be/src/io/hdfs_builder.cpp b/be/src/io/hdfs_builder.cpp
index ebed8b8c5b..51a7e539b6 100644
--- a/be/src/io/hdfs_builder.cpp
+++ b/be/src/io/hdfs_builder.cpp
@@ -47,17 +47,9 @@ Status HDFSCommonBuilder::run_kinit() {
         return Status::InvalidArgument("Invalid hdfs_kerberos_principal or hdfs_kerberos_keytab");
     }
     std::string ticket_path = TICKET_CACHE_PATH + generate_uuid_string();
-    const char* krb_home = getenv("KRB_HOME");
-    std::string krb_home_str(krb_home ? krb_home : "");
     fmt::memory_buffer kinit_command;
-    if (krb_home_str.empty()) {
-        fmt::format_to(kinit_command, "kinit -c {} -R -t {} -k {}", ticket_path,
-                       hdfs_kerberos_keytab, hdfs_kerberos_principal);
-    } else {
-        // Assign kerberos home in env, get kinit in kerberos home
-        fmt::format_to(kinit_command, krb_home_str + "/bin/kinit -c {} -R -t {} -k {}",
-                       ticket_path, hdfs_kerberos_keytab, hdfs_kerberos_principal);
-    }
+    fmt::format_to(kinit_command, "kinit -c {} -R -t {} -k {}", ticket_path, hdfs_kerberos_keytab,
+                   hdfs_kerberos_principal);
     VLOG_NOTICE << "kinit command: " << fmt::to_string(kinit_command);
     std::string msg;
     AgentUtils util;
@@ -67,9 +59,8 @@ Status HDFSCommonBuilder::run_kinit() {
     }
 #ifdef USE_LIBHDFS3
     hdfsBuilderSetPrincipal(hdfs_builder, hdfs_kerberos_principal.c_str());
+    hdfsBuilderSetKerbTicketCachePath(hdfs_builder, ticket_path.c_str());
 #endif
-    hdfsBuilderConfSetStr(hdfs_builder, "hadoop.security.kerberos.ticket.cache.path",
-                          ticket_path.c_str());
     return Status::OK();
 }
 
@@ -112,37 +103,24 @@ Status createHDFSBuilder(const THdfsParams& hdfsParams, HDFSCommonBuilder* build
     if (hdfsParams.__isset.hdfs_kerberos_principal) {
         builder->need_kinit = true;
         builder->hdfs_kerberos_principal = hdfsParams.hdfs_kerberos_principal;
-        hdfsBuilderSetUserName(builder->get(), hdfsParams.hdfs_kerberos_principal.c_str());
-    } else if (hdfsParams.__isset.user) {
-        hdfsBuilderSetUserName(builder->get(), hdfsParams.user.c_str());
-#ifdef USE_HADOOP_HDFS
-        hdfsBuilderSetKerb5Conf(builder->get(), nullptr);
-        hdfsBuilderSetKeyTabFile(builder->get(), nullptr);
-#endif
     }
     if (hdfsParams.__isset.hdfs_kerberos_keytab) {
         builder->need_kinit = true;
         builder->hdfs_kerberos_keytab = hdfsParams.hdfs_kerberos_keytab;
-#ifdef USE_HADOOP_HDFS
-        hdfsBuilderSetKeyTabFile(builder->get(), hdfsParams.hdfs_kerberos_keytab.c_str());
-#endif
     }
     // set other conf
     if (hdfsParams.__isset.hdfs_conf) {
         for (const THdfsConf& conf : hdfsParams.hdfs_conf) {
             hdfsBuilderConfSetStr(builder->get(), conf.key.c_str(),
                                   conf.value.c_str());
-#ifdef USE_HADOOP_HDFS
-            // Set krb5.conf, we should define java.security.krb5.conf in catalog properties
-            if (strcmp(conf.key.c_str(), "java.security.krb5.conf") == 0) {
-                hdfsBuilderSetKerb5Conf(builder->get(), conf.value.c_str());
-            }
-#endif
         }
     }
     hdfsBuilderConfSetStr(builder->get(), "ipc.client.fallback-to-simple-auth-allowed", "true");
     if (builder->is_need_kinit()) {
         RETURN_IF_ERROR(builder->run_kinit());
+    } else if (hdfsParams.__isset.user) {
+        // set hdfs user
+        hdfsBuilderSetUserName(builder->get(), hdfsParams.user.c_str());
     }
 
     return Status::OK();
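For context: after this revert, run_kinit() builds the kinit command only from the configured principal and keytab (a KRB_HOME environment variable is no longer consulted), and under USE_LIBHDFS3 it points the hdfs builder at the generated ticket cache path. The short C++ sketch below is only a standalone illustration of that command-string assembly with the fmt library; the ticket cache path, keytab, and principal values are hypothetical placeholders, not values from this commit.

    // Minimal standalone sketch (not part of the commit): assemble the kinit
    // command string the same way the post-revert run_kinit() does, via fmt.
    // The ticket cache path, keytab, and principal below are placeholders.
    #include <fmt/format.h>

    #include <iostream>
    #include <string>

    int main() {
        const std::string ticket_path = "/tmp/doris_kerberos_ticket_example"; // placeholder
        const std::string keytab = "/etc/security/doris.keytab";              // placeholder
        const std::string principal = "doris/host@EXAMPLE.COM";               // placeholder

        // Same format string as run_kinit() after the revert; KRB_HOME is not used.
        const std::string kinit_command =
                fmt::format("kinit -c {} -R -t {} -k {}", ticket_path, keytab, principal);

        std::cout << kinit_command << std::endl;
        return 0;
    }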
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org