morningman commented on code in PR #54304:
URL: https://github.com/apache/doris/pull/54304#discussion_r2267975759


##########
be/src/runtime/s3_plugin_downloader.cpp:
##########
@@ -0,0 +1,543 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "runtime/s3_plugin_downloader.h"
+
+#include <fmt/format.h>
+#include <zlib.h>
+
+#include <algorithm>
+#include <chrono>
+#include <filesystem>
+#include <fstream>
+#include <thread>
+
+#include "common/logging.h"
+#include "io/fs/obj_storage_client.h"
+#include "io/fs/s3_obj_storage_client.h"
+#include "runtime/plugin_file_cache.h"
+#include "util/s3_util.h"
+
+namespace doris {
+
+std::string S3PluginDownloader::S3Config::to_string() const {
+    return fmt::format("S3Config{{endpoint='{}', region='{}', bucket='{}', 
access_key='{}'}}",
+                       endpoint, region, bucket, access_key.empty() ? "null" : 
"***");
+}
+
+S3PluginDownloader::S3PluginDownloader(const S3Config& config) : 
config_(config) {
+    s3_client_ = create_s3_client(config_);
+    if (!s3_client_) {
+        throw std::runtime_error("Failed to create S3 client");
+    }
+}
+
+S3PluginDownloader::~S3PluginDownloader() = default;
+
+std::string S3PluginDownloader::download_file(const std::string& 
remote_s3_path,
+                                              const std::string& local_path,
+                                              const std::string& expected_md5) 
{
+    for (int attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; ++attempt) {
+        try {
+            return do_download_file(remote_s3_path, local_path, expected_md5);
+        } catch (const std::exception& e) {
+            LOG(WARNING) << "Download attempt " << attempt << "/" << 
MAX_RETRY_ATTEMPTS
+                         << " failed for " << remote_s3_path << ": " << 
e.what();
+
+            if (attempt == MAX_RETRY_ATTEMPTS) {
+                LOG(ERROR) << "Download failed after " << MAX_RETRY_ATTEMPTS
+                           << " attempts: " << e.what();
+                return "";
+            }
+
+            
std::this_thread::sleep_for(std::chrono::milliseconds(RETRY_DELAY_MS * 
attempt));
+        }
+    }
+    return "";
+}
+
+std::string S3PluginDownloader::download_and_extract_directory(
+        const std::string& remote_s3_directory, const std::string& 
local_directory) {
+    try {
+        // Ensure directory path ends with /
+        std::string remote_dir = remote_s3_directory;
+        if (!remote_dir.ends_with("/")) {
+            remote_dir += "/";
+        }
+
+        // Create local directory
+        Status status = create_parent_directory(local_directory + "/dummy");
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to create parent directory: " + 
status.to_string());
+        }
+
+        // Parse S3 path to get prefix
+        S3PathInfo path_info = parse_s3_path(remote_dir);
+
+        // List remote tar.gz files
+        std::vector<io::FileInfo> remote_files;
+        status = list_remote_files(path_info.key, &remote_files);

Review Comment:
   are you listing a dir, or a zip file?



##########
be/src/runtime/s3_plugin_downloader.cpp:
##########
@@ -0,0 +1,543 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "runtime/s3_plugin_downloader.h"
+
+#include <fmt/format.h>
+#include <zlib.h>
+
+#include <algorithm>
+#include <chrono>
+#include <filesystem>
+#include <fstream>
+#include <thread>
+
+#include "common/logging.h"
+#include "io/fs/obj_storage_client.h"
+#include "io/fs/s3_obj_storage_client.h"
+#include "runtime/plugin_file_cache.h"
+#include "util/s3_util.h"
+
+namespace doris {
+
+std::string S3PluginDownloader::S3Config::to_string() const {
+    return fmt::format("S3Config{{endpoint='{}', region='{}', bucket='{}', 
access_key='{}'}}",
+                       endpoint, region, bucket, access_key.empty() ? "null" : 
"***");
+}
+
+S3PluginDownloader::S3PluginDownloader(const S3Config& config) : 
config_(config) {
+    s3_client_ = create_s3_client(config_);
+    if (!s3_client_) {
+        throw std::runtime_error("Failed to create S3 client");
+    }
+}
+
+S3PluginDownloader::~S3PluginDownloader() = default;
+
+std::string S3PluginDownloader::download_file(const std::string& 
remote_s3_path,
+                                              const std::string& local_path,
+                                              const std::string& expected_md5) 
{
+    for (int attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; ++attempt) {
+        try {
+            return do_download_file(remote_s3_path, local_path, expected_md5);
+        } catch (const std::exception& e) {
+            LOG(WARNING) << "Download attempt " << attempt << "/" << 
MAX_RETRY_ATTEMPTS
+                         << " failed for " << remote_s3_path << ": " << 
e.what();
+
+            if (attempt == MAX_RETRY_ATTEMPTS) {
+                LOG(ERROR) << "Download failed after " << MAX_RETRY_ATTEMPTS

Review Comment:
   ```suggestion
                   LOG(WARNING) << "Download failed after " << 
MAX_RETRY_ATTEMPTS
   ```



##########
be/src/runtime/s3_plugin_downloader.cpp:
##########
@@ -0,0 +1,543 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "runtime/s3_plugin_downloader.h"
+
+#include <fmt/format.h>
+#include <zlib.h>
+
+#include <algorithm>
+#include <chrono>
+#include <filesystem>
+#include <fstream>
+#include <thread>
+
+#include "common/logging.h"
+#include "io/fs/obj_storage_client.h"
+#include "io/fs/s3_obj_storage_client.h"
+#include "runtime/plugin_file_cache.h"
+#include "util/s3_util.h"
+
+namespace doris {
+
+std::string S3PluginDownloader::S3Config::to_string() const {
+    return fmt::format("S3Config{{endpoint='{}', region='{}', bucket='{}', 
access_key='{}'}}",
+                       endpoint, region, bucket, access_key.empty() ? "null" : 
"***");
+}
+
+S3PluginDownloader::S3PluginDownloader(const S3Config& config) : 
config_(config) {
+    s3_client_ = create_s3_client(config_);
+    if (!s3_client_) {
+        throw std::runtime_error("Failed to create S3 client");
+    }
+}
+
+S3PluginDownloader::~S3PluginDownloader() = default;
+
+std::string S3PluginDownloader::download_file(const std::string& 
remote_s3_path,
+                                              const std::string& local_path,
+                                              const std::string& expected_md5) 
{
+    for (int attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; ++attempt) {
+        try {
+            return do_download_file(remote_s3_path, local_path, expected_md5);
+        } catch (const std::exception& e) {
+            LOG(WARNING) << "Download attempt " << attempt << "/" << 
MAX_RETRY_ATTEMPTS
+                         << " failed for " << remote_s3_path << ": " << 
e.what();
+
+            if (attempt == MAX_RETRY_ATTEMPTS) {
+                LOG(ERROR) << "Download failed after " << MAX_RETRY_ATTEMPTS
+                           << " attempts: " << e.what();
+                return "";
+            }
+
+            
std::this_thread::sleep_for(std::chrono::milliseconds(RETRY_DELAY_MS * 
attempt));
+        }
+    }
+    return "";
+}
+
+std::string S3PluginDownloader::download_and_extract_directory(
+        const std::string& remote_s3_directory, const std::string& 
local_directory) {
+    try {
+        // Ensure directory path ends with /
+        std::string remote_dir = remote_s3_directory;
+        if (!remote_dir.ends_with("/")) {
+            remote_dir += "/";
+        }
+
+        // Create local directory
+        Status status = create_parent_directory(local_directory + "/dummy");
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to create parent directory: " + 
status.to_string());
+        }
+
+        // Parse S3 path to get prefix
+        S3PathInfo path_info = parse_s3_path(remote_dir);
+
+        // List remote tar.gz files
+        std::vector<io::FileInfo> remote_files;
+        status = list_remote_files(path_info.key, &remote_files);
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to list files from " + remote_dir 
+ ": " +
+                                     status.to_string());
+        }
+
+        bool has_download = false;
+        for (const auto& remote_file : remote_files) {
+            if (remote_file.is_file && 
remote_file.file_name.ends_with(".tar.gz")) {
+                std::filesystem::path file_path(remote_file.file_name);
+                std::string file_name = file_path.filename().string();
+                std::string temp_file_path = 
std::filesystem::path(local_directory) / file_name;
+
+                // Build complete S3 path
+                std::string full_s3_path =
+                        fmt::format("s3://{}/{}", config_.bucket, 
remote_file.file_name);
+
+                // Download tar.gz file
+                std::string downloaded_file = download_file(full_s3_path, 
temp_file_path);
+                if (!downloaded_file.empty()) {
+                    // Extract to target directory
+                    status = extract_tar_gz(temp_file_path, local_directory);
+                    if (status.ok()) {
+                        // Delete temporary tar.gz file
+                        std::filesystem::remove(temp_file_path);
+                        LOG(INFO) << "Extracted and cleaned up: " << file_name;
+                        has_download = true;
+                    } else {
+                        LOG(WARNING) << "Failed to extract: " << file_name
+                                     << ", error: " << status.to_string();
+                    }
+                }
+            }
+        }
+
+        return has_download ? local_directory : "";
+
+    } catch (const std::exception& e) {
+        LOG(WARNING) << "Failed to download and extract directory " << 
remote_s3_directory << ": "
+                     << e.what();
+        return "";
+    }
+}
+
+std::string S3PluginDownloader::do_download_file(const std::string& 
remote_s3_path,

Review Comment:
   Please use Status as the return value of the function.
   Same for other methods.



##########
be/src/runtime/user_function_cache.cpp:
##########
@@ -381,4 +386,40 @@ std::vector<std::string> 
UserFunctionCache::_split_string_by_checksum(const std:
 
     return result;
 }
+
+std::string UserFunctionCache::_get_real_url(const std::string& url, const 
std::string& checksum) {
+    if (url.find(":/") == std::string::npos) {
+        return _check_and_return_default_java_udf_url(url, checksum);
+    }
+    return url;
+}
+
+std::string UserFunctionCache::_check_and_return_default_java_udf_url(const 
std::string& url,
+                                                                      const 
std::string& checksum) {
+    const char* doris_home = std::getenv("DORIS_HOME");
+    std::string default_url = std::string(doris_home) + "/plugins/java_udf";
+
+    std::filesystem::path file = default_url + "/" + url;
+
+    // In cloud mode, always try cloud download first (prioritize cloud mode)
+    if (config::is_cloud_mode()) {
+        try {
+            std::string target_path = default_url + "/" + url;
+            std::string downloaded_path = 
CloudPluginDownloader::download_from_cloud(

Review Comment:
   This is a hot path, but you call `download_from_cloud` on every invocation.



##########
be/src/runtime/s3_plugin_downloader.cpp:
##########
@@ -0,0 +1,543 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "runtime/s3_plugin_downloader.h"
+
+#include <fmt/format.h>
+#include <zlib.h>
+
+#include <algorithm>
+#include <chrono>
+#include <filesystem>
+#include <fstream>
+#include <thread>
+
+#include "common/logging.h"
+#include "io/fs/obj_storage_client.h"
+#include "io/fs/s3_obj_storage_client.h"
+#include "runtime/plugin_file_cache.h"
+#include "util/s3_util.h"
+
+namespace doris {
+
+std::string S3PluginDownloader::S3Config::to_string() const {
+    return fmt::format("S3Config{{endpoint='{}', region='{}', bucket='{}', 
access_key='{}'}}",
+                       endpoint, region, bucket, access_key.empty() ? "null" : 
"***");
+}
+
+S3PluginDownloader::S3PluginDownloader(const S3Config& config) : 
config_(config) {
+    s3_client_ = create_s3_client(config_);
+    if (!s3_client_) {
+        throw std::runtime_error("Failed to create S3 client");
+    }
+}
+
+S3PluginDownloader::~S3PluginDownloader() = default;
+
+std::string S3PluginDownloader::download_file(const std::string& 
remote_s3_path,
+                                              const std::string& local_path,
+                                              const std::string& expected_md5) 
{
+    for (int attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; ++attempt) {
+        try {
+            return do_download_file(remote_s3_path, local_path, expected_md5);
+        } catch (const std::exception& e) {
+            LOG(WARNING) << "Download attempt " << attempt << "/" << 
MAX_RETRY_ATTEMPTS
+                         << " failed for " << remote_s3_path << ": " << 
e.what();
+
+            if (attempt == MAX_RETRY_ATTEMPTS) {
+                LOG(ERROR) << "Download failed after " << MAX_RETRY_ATTEMPTS
+                           << " attempts: " << e.what();
+                return "";
+            }
+
+            
std::this_thread::sleep_for(std::chrono::milliseconds(RETRY_DELAY_MS * 
attempt));
+        }
+    }
+    return "";
+}
+
+std::string S3PluginDownloader::download_and_extract_directory(
+        const std::string& remote_s3_directory, const std::string& 
local_directory) {
+    try {
+        // Ensure directory path ends with /
+        std::string remote_dir = remote_s3_directory;
+        if (!remote_dir.ends_with("/")) {
+            remote_dir += "/";
+        }
+
+        // Create local directory
+        Status status = create_parent_directory(local_directory + "/dummy");
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to create parent directory: " + 
status.to_string());
+        }
+
+        // Parse S3 path to get prefix
+        S3PathInfo path_info = parse_s3_path(remote_dir);
+
+        // List remote tar.gz files
+        std::vector<io::FileInfo> remote_files;
+        status = list_remote_files(path_info.key, &remote_files);
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to list files from " + remote_dir 
+ ": " +
+                                     status.to_string());
+        }
+
+        bool has_download = false;
+        for (const auto& remote_file : remote_files) {
+            if (remote_file.is_file && 
remote_file.file_name.ends_with(".tar.gz")) {
+                std::filesystem::path file_path(remote_file.file_name);
+                std::string file_name = file_path.filename().string();
+                std::string temp_file_path = 
std::filesystem::path(local_directory) / file_name;
+
+                // Build complete S3 path
+                std::string full_s3_path =
+                        fmt::format("s3://{}/{}", config_.bucket, 
remote_file.file_name);
+
+                // Download tar.gz file
+                std::string downloaded_file = download_file(full_s3_path, 
temp_file_path);
+                if (!downloaded_file.empty()) {
+                    // Extract to target directory
+                    status = extract_tar_gz(temp_file_path, local_directory);
+                    if (status.ok()) {
+                        // Delete temporary tar.gz file
+                        std::filesystem::remove(temp_file_path);
+                        LOG(INFO) << "Extracted and cleaned up: " << file_name;
+                        has_download = true;
+                    } else {
+                        LOG(WARNING) << "Failed to extract: " << file_name
+                                     << ", error: " << status.to_string();
+                    }
+                }
+            }
+        }
+
+        return has_download ? local_directory : "";
+
+    } catch (const std::exception& e) {
+        LOG(WARNING) << "Failed to download and extract directory " << 
remote_s3_directory << ": "
+                     << e.what();
+        return "";
+    }
+}
+
+std::string S3PluginDownloader::do_download_file(const std::string& 
remote_s3_path,
+                                                 const std::string& local_path,
+                                                 const std::string& 
expected_md5) {
+    // Enhanced validation with cache optimization
+    if (PluginFileCache::is_file_valid(local_path, expected_md5)) {
+        // If user provided MD5, file is valid locally, but still need to 
check remote consistency
+        if (!expected_md5.empty()) {
+            // User MD5 is authoritative, but we still validate remote file 
matches user expectation
+            if (!has_remote_update(remote_s3_path, local_path)) {
+                LOG(INFO) << "Local file " << local_path
+                          << " MD5 validated and remote is consistent, 
skipping download";
+                return local_path;
+            } else {
+                LOG(INFO) << "Local file " << local_path
+                          << " MD5 valid, but remote has different version, 
downloading to verify";
+            }
+        } else {
+            // No user MD5, check remote only if cache expired (optimization)
+            if (!PluginFileCache::needs_remote_check(local_path, expected_md5) 
||
+                !has_remote_update(remote_s3_path, local_path)) {
+                LOG(INFO) << "Local file " << local_path
+                          << " is up to date (cache optimization), skipping 
download";
+                return local_path;
+            }
+        }
+    }
+
+    // Create parent directory
+    Status status = create_parent_directory(local_path);
+    if (!status.ok()) {
+        throw std::runtime_error("Failed to create parent directory: " + 
status.to_string());
+    }
+
+    // Parse S3 path
+    S3PathInfo path_info = parse_s3_path(remote_s3_path);
+
+    // Prepare download parameters
+    io::ObjectStoragePathOptions opts;
+    opts.bucket = path_info.bucket;
+    opts.key = path_info.key;
+
+    // Get file size
+    auto head_response = s3_client_->head_object(opts);

Review Comment:
   There is already download logic in S3FileSystem; please reuse it instead of reimplementing it here.



##########
be/src/runtime/cloud_plugin_downloader.cpp:
##########
@@ -0,0 +1,144 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "runtime/cloud_plugin_downloader.h"
+
+#include <fmt/format.h>
+
+#include "cloud/config.h"
+#include "common/logging.h"
+#include "runtime/cloud_plugin_config_provider.h"
+
+namespace doris {
+
+std::string CloudPluginDownloader::download_plugin_if_needed(PluginType 
plugin_type,

Review Comment:
   This method is not used



##########
be/src/runtime/s3_plugin_downloader.cpp:
##########
@@ -0,0 +1,543 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "runtime/s3_plugin_downloader.h"
+
+#include <fmt/format.h>
+#include <zlib.h>
+
+#include <algorithm>
+#include <chrono>
+#include <filesystem>
+#include <fstream>
+#include <thread>
+
+#include "common/logging.h"
+#include "io/fs/obj_storage_client.h"
+#include "io/fs/s3_obj_storage_client.h"
+#include "runtime/plugin_file_cache.h"
+#include "util/s3_util.h"
+
+namespace doris {
+
+std::string S3PluginDownloader::S3Config::to_string() const {
+    return fmt::format("S3Config{{endpoint='{}', region='{}', bucket='{}', 
access_key='{}'}}",
+                       endpoint, region, bucket, access_key.empty() ? "null" : 
"***");
+}
+
+S3PluginDownloader::S3PluginDownloader(const S3Config& config) : 
config_(config) {
+    s3_client_ = create_s3_client(config_);
+    if (!s3_client_) {
+        throw std::runtime_error("Failed to create S3 client");
+    }
+}
+
+S3PluginDownloader::~S3PluginDownloader() = default;
+
+std::string S3PluginDownloader::download_file(const std::string& 
remote_s3_path,
+                                              const std::string& local_path,
+                                              const std::string& expected_md5) 
{
+    for (int attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; ++attempt) {
+        try {
+            return do_download_file(remote_s3_path, local_path, expected_md5);
+        } catch (const std::exception& e) {
+            LOG(WARNING) << "Download attempt " << attempt << "/" << 
MAX_RETRY_ATTEMPTS
+                         << " failed for " << remote_s3_path << ": " << 
e.what();
+
+            if (attempt == MAX_RETRY_ATTEMPTS) {
+                LOG(ERROR) << "Download failed after " << MAX_RETRY_ATTEMPTS
+                           << " attempts: " << e.what();
+                return "";
+            }
+
+            
std::this_thread::sleep_for(std::chrono::milliseconds(RETRY_DELAY_MS * 
attempt));
+        }
+    }
+    return "";
+}
+
+std::string S3PluginDownloader::download_and_extract_directory(
+        const std::string& remote_s3_directory, const std::string& 
local_directory) {
+    try {
+        // Ensure directory path ends with /
+        std::string remote_dir = remote_s3_directory;
+        if (!remote_dir.ends_with("/")) {
+            remote_dir += "/";
+        }
+
+        // Create local directory
+        Status status = create_parent_directory(local_directory + "/dummy");
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to create parent directory: " + 
status.to_string());
+        }
+
+        // Parse S3 path to get prefix
+        S3PathInfo path_info = parse_s3_path(remote_dir);
+
+        // List remote tar.gz files
+        std::vector<io::FileInfo> remote_files;
+        status = list_remote_files(path_info.key, &remote_files);
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to list files from " + remote_dir 
+ ": " +
+                                     status.to_string());
+        }
+
+        bool has_download = false;
+        for (const auto& remote_file : remote_files) {
+            if (remote_file.is_file && 
remote_file.file_name.ends_with(".tar.gz")) {
+                std::filesystem::path file_path(remote_file.file_name);
+                std::string file_name = file_path.filename().string();
+                std::string temp_file_path = 
std::filesystem::path(local_directory) / file_name;
+
+                // Build complete S3 path
+                std::string full_s3_path =
+                        fmt::format("s3://{}/{}", config_.bucket, 
remote_file.file_name);
+
+                // Download tar.gz file
+                std::string downloaded_file = download_file(full_s3_path, 
temp_file_path);
+                if (!downloaded_file.empty()) {
+                    // Extract to target directory
+                    status = extract_tar_gz(temp_file_path, local_directory);
+                    if (status.ok()) {
+                        // Delete temporary tar.gz file
+                        std::filesystem::remove(temp_file_path);
+                        LOG(INFO) << "Extracted and cleaned up: " << file_name;
+                        has_download = true;
+                    } else {
+                        LOG(WARNING) << "Failed to extract: " << file_name
+                                     << ", error: " << status.to_string();
+                    }
+                }
+            }
+        }
+
+        return has_download ? local_directory : "";
+
+    } catch (const std::exception& e) {
+        LOG(WARNING) << "Failed to download and extract directory " << 
remote_s3_directory << ": "
+                     << e.what();
+        return "";
+    }
+}
+
+std::string S3PluginDownloader::do_download_file(const std::string& 
remote_s3_path,
+                                                 const std::string& local_path,
+                                                 const std::string& 
expected_md5) {
+    // Enhanced validation with cache optimization
+    if (PluginFileCache::is_file_valid(local_path, expected_md5)) {
+        // If user provided MD5, file is valid locally, but still need to 
check remote consistency
+        if (!expected_md5.empty()) {
+            // User MD5 is authoritative, but we still validate remote file 
matches user expectation
+            if (!has_remote_update(remote_s3_path, local_path)) {

Review Comment:
   Here you have to fetch the object info from the remote on every call.



##########
be/src/runtime/s3_plugin_downloader.cpp:
##########
@@ -0,0 +1,543 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include "runtime/s3_plugin_downloader.h"
+
+#include <fmt/format.h>
+#include <zlib.h>
+
+#include <algorithm>
+#include <chrono>
+#include <filesystem>
+#include <fstream>
+#include <thread>
+
+#include "common/logging.h"
+#include "io/fs/obj_storage_client.h"
+#include "io/fs/s3_obj_storage_client.h"
+#include "runtime/plugin_file_cache.h"
+#include "util/s3_util.h"
+
+namespace doris {
+
+std::string S3PluginDownloader::S3Config::to_string() const {
+    return fmt::format("S3Config{{endpoint='{}', region='{}', bucket='{}', 
access_key='{}'}}",
+                       endpoint, region, bucket, access_key.empty() ? "null" : 
"***");
+}
+
+S3PluginDownloader::S3PluginDownloader(const S3Config& config) : 
config_(config) {
+    s3_client_ = create_s3_client(config_);
+    if (!s3_client_) {
+        throw std::runtime_error("Failed to create S3 client");
+    }
+}
+
+S3PluginDownloader::~S3PluginDownloader() = default;
+
+std::string S3PluginDownloader::download_file(const std::string& 
remote_s3_path,
+                                              const std::string& local_path,
+                                              const std::string& expected_md5) 
{
+    for (int attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; ++attempt) {
+        try {
+            return do_download_file(remote_s3_path, local_path, expected_md5);
+        } catch (const std::exception& e) {
+            LOG(WARNING) << "Download attempt " << attempt << "/" << 
MAX_RETRY_ATTEMPTS
+                         << " failed for " << remote_s3_path << ": " << 
e.what();
+
+            if (attempt == MAX_RETRY_ATTEMPTS) {
+                LOG(ERROR) << "Download failed after " << MAX_RETRY_ATTEMPTS
+                           << " attempts: " << e.what();
+                return "";
+            }
+
+            
std::this_thread::sleep_for(std::chrono::milliseconds(RETRY_DELAY_MS * 
attempt));
+        }
+    }
+    return "";
+}
+
+std::string S3PluginDownloader::download_and_extract_directory(
+        const std::string& remote_s3_directory, const std::string& 
local_directory) {
+    try {
+        // Ensure directory path ends with /
+        std::string remote_dir = remote_s3_directory;
+        if (!remote_dir.ends_with("/")) {
+            remote_dir += "/";
+        }
+
+        // Create local directory
+        Status status = create_parent_directory(local_directory + "/dummy");
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to create parent directory: " + 
status.to_string());
+        }
+
+        // Parse S3 path to get prefix
+        S3PathInfo path_info = parse_s3_path(remote_dir);
+
+        // List remote tar.gz files
+        std::vector<io::FileInfo> remote_files;
+        status = list_remote_files(path_info.key, &remote_files);
+        if (!status.ok()) {
+            throw std::runtime_error("Failed to list files from " + remote_dir 
+ ": " +
+                                     status.to_string());
+        }
+
+        bool has_download = false;
+        for (const auto& remote_file : remote_files) {
+            if (remote_file.is_file && 
remote_file.file_name.ends_with(".tar.gz")) {
+                std::filesystem::path file_path(remote_file.file_name);
+                std::string file_name = file_path.filename().string();
+                std::string temp_file_path = 
std::filesystem::path(local_directory) / file_name;
+
+                // Build complete S3 path
+                std::string full_s3_path =
+                        fmt::format("s3://{}/{}", config_.bucket, 
remote_file.file_name);
+
+                // Download tar.gz file
+                std::string downloaded_file = download_file(full_s3_path, 
temp_file_path);
+                if (!downloaded_file.empty()) {
+                    // Extract to target directory
+                    status = extract_tar_gz(temp_file_path, local_directory);
+                    if (status.ok()) {
+                        // Delete temporary tar.gz file
+                        std::filesystem::remove(temp_file_path);
+                        LOG(INFO) << "Extracted and cleaned up: " << file_name;
+                        has_download = true;

Review Comment:
   Here you download several files, and if any one of them succeeds, this method returns OK — which is not right.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to