This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new c7ec98f2b6f [Fix](Catalog)Ensure preExecutionAuthenticator is properly initialized (#50839)
c7ec98f2b6f is described below

commit c7ec98f2b6f7097071e12d905852def25fbfadc3
Author: Calvin Kirs <guoqi...@selectdb.com>
AuthorDate: Fri May 23 11:39:46 2025 +0800

    [Fix](Catalog)Ensure preExecutionAuthenticator is properly initialized (#50839)
    
    ### What problem does this PR solve?
    
    Related to https://github.com/apache/doris/pull/50623: optimize the
    initialization logic of preExecutionAuthenticator and remove redundant
    logic from IcebergHadoopExternalCatalog.
    
    This change improves the robustness of the code by guaranteeing that the
    required initialization logic is executed, preventing potential null
    references or incomplete setup.
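    
    At its core, the fix moves authenticator setup into the ExternalCatalog base
    class: a synchronized, lazy initializer plus a fail-fast getter. A condensed
    sketch of that pattern, excerpted from the diff below (surrounding fields and
    methods omitted):
    ```
    // Condensed from ExternalCatalog.java in the diff below; not the full class.
    public abstract class ExternalCatalog {
        protected PreExecutionAuthenticator preExecutionAuthenticator;

        // Created at most once; subclasses may override this for Kerberos-aware setup.
        protected synchronized void initPreExecutionAuthenticator() {
            if (preExecutionAuthenticator == null) {
                preExecutionAuthenticator = new PreExecutionAuthenticator();
            }
        }

        // Fail fast with a descriptive message instead of letting callers hit an NPE.
        public PreExecutionAuthenticator getPreExecutionAuthenticator() {
            if (preExecutionAuthenticator == null) {
                throw new RuntimeException(
                        "PreExecutionAuthenticator is null, please confirm it is initialized.");
            }
            return preExecutionAuthenticator;
        }
    }
    ```
    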
    fe.log
    ```
    2025-05-13 04:57:21,348 INFO (thrift-server-pool-22|191) [InternalCatalog.createDb():466] createDb dbName = regression_test_external_table_p0_broker_load, id = 1747082008077
    2025-05-13 04:57:21,351 WARN (thrift-server-pool-9|191) [FrontendServiceImpl.fetchSchemaTableData():2341] Failed to fetchSchemaTableData
    java.lang.RuntimeException: NullPointerException: Cannot invoke "org.apache.doris.common.security.authentication.PreExecutionAuthenticator.execute(java.util.concurrent.Callable)" because the return value of "org.apache.doris.datasource.ExternalCatalog.getPreExecutionAuthenticator()" is null
            at org.apache.doris.datasource.iceberg.IcebergMetadataCache.loadTable(IcebergMetadataCache.java:133) ~[doris-fe.jar:1.2-SNAPSHOT]
            at org.apache.doris.datasource.iceberg.IcebergMetadataCache.lambda$new$1(IcebergMetadataCache.java:71) ~[doris-fe.jar:1.2-SNAPSHOT]
            at com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newMappingFunction$2(LocalLoadingCache.java:145) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.LocalCache.lambda$statsAware$0(LocalCache.java:139) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.BoundedLocalCache.lambda$doComputeIfAbsent$14(BoundedLocalCache.java:2406) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at java.util.concurrent.ConcurrentHashMap.compute(ConcurrentHashMap.java:1916) ~[?:?]
            at com.github.benmanes.caffeine.cache.BoundedLocalCache.doComputeIfAbsent(BoundedLocalCache.java:2404) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.BoundedLocalCache.computeIfAbsent(BoundedLocalCache.java:2387) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.LocalCache.computeIfAbsent(LocalCache.java:108) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.LocalLoadingCache.get(LocalLoadingCache.java:56) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at org.apache.doris.datasource.iceberg.IcebergMetadataCache.getIcebergTable(IcebergMetadataCache.java:94) ~[doris-fe.jar:1.2-SNAPSHOT]
            at org.apache.doris.datasource.iceberg.IcebergMetadataCache.loadSnapshots(IcebergMetadataCache.java:113) ~[doris-fe.jar:1.2-SNAPSHOT]
            at org.apache.doris.datasource.iceberg.IcebergMetadataCache.lambda$new$0(IcebergMetadataCache.java:63) ~[doris-fe.jar:1.2-SNAPSHOT]
            at com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newMappingFunction$2(LocalLoadingCache.java:145) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.LocalCache.lambda$statsAware$0(LocalCache.java:139) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.BoundedLocalCache.lambda$doComputeIfAbsent$14(BoundedLocalCache.java:2406) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at java.util.concurrent.ConcurrentHashMap.compute(ConcurrentHashMap.java:1916) ~[?:?]
            at com.github.benmanes.caffeine.cache.BoundedLocalCache.doComputeIfAbsent(BoundedLocalCache.java:2404) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.BoundedLocalCache.computeIfAbsent(BoundedLocalCache.java:2387) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.LocalCache.computeIfAbsent(LocalCache.java:108) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at com.github.benmanes.caffeine.cache.LocalLoadingCache.get(LocalLoadingCache.java:56) ~[hive-catalog-shade-2.1.4.jar:2.1.4]
            at org.apache.doris.datasource.iceberg.IcebergMetadataCache.getSnapshotList(IcebergMetadataCache.java:89) ~[doris-fe.jar:1.2-SNAPSHOT]
            at org.apache.doris.tablefunction.MetadataGenerator.icebergMetadataResult(MetadataGenerator.java:349) ~[doris-fe.jar:1.2-SNAPSHOT]
            at org.apache.doris.tablefunction.MetadataGenerator.getMetadataTable(MetadataGenerator.java:225) ~[doris-fe.jar:1.2-SNAPSHOT]
            at org.apache.doris.service.FrontendServiceImpl.fetchSchemaTableData(FrontendServiceImpl.java:2335) ~[doris-fe.jar:1.2-SNAPSHOT]
            at jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:?]
            at jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77) ~[?:?]
            at jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:?]
            at java.lang.reflect.Method.invoke(Method.java:568) ~[?:?]
            at org.apache.doris.service.FeServer.lambda$start$0(FeServer.java:60) ~[doris-fe.jar:1.2-SNAPSHOT]
            at jdk.proxy3.$Proxy67.fetchSchemaTableData(Unknown Source) ~[?:?]
            at org.apache.doris.thrift.FrontendService$Processor$fetchSchemaTableData.getResult(FrontendService.java:4777) ~[fe-common-1.2-SNAPSHOT.jar:1.2-SNAPSHOT]
            at org.apache.doris.thrift.FrontendService$Processor$fetchSchemaTableData.getResult(FrontendService.java:4757) ~[fe-common-1.2-SNAPSHOT.jar:1.2-SNAPSHOT]
            at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) ~[libthrift-0.16.0.jar:0.16.0]
            at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:38) ~[libthrift-0.16.0.jar:0.16.0]
            at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:250) ~[libthrift-0.16.0.jar:0.16.0]
            at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) ~[?:?]
            at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) ~[?:?]
            at java.lang.Thread.run(Thread.java:833) ~[?:?]
    Caused by: java.lang.NullPointerException: Cannot invoke "org.apache.doris.common.security.authentication.PreExecutionAuthenticator.execute(java.util.concurrent.Callable)" because the return value of "org.apache.doris.datasource.ExternalCatalog.getPreExecutionAuthenticator()" is null
            at org.apache.doris.datasource.iceberg.IcebergMetadataCache.loadTable(IcebergMetadataCache.java:130) ~[doris-fe.jar:1.2-SNAPSHOT]
            ... 38 more
    ```
    be.log
    ```
    W 2025-05-13 04:57:21,496 14827 status.h:451] meet error status: [INTERNAL_ERROR]TStatus: NullPointerException: Cannot invoke "org.apache.doris.common.security.authentication.PreExecutionAuthenticator.execute(java.util.concurrent.Callable)" because the return value of "org.apache.doris.datasource.ExternalCatalog.getPreExecutionAuthenticator()" is null
    
            0#  doris::Status doris::Status::create<true>(doris::TStatus const&) at /var/local/ldb-toolchain/bin/../lib/gcc/x86_64-linux-gnu/11/../../../../include/c++/11/bits/basic_string.h:658
            1#  doris::vectorized::MetaScanner::_fetch_metadata(doris::TMetaScanRange const&) at /home/zcp/repo_center/doris_master/doris/be/src/common/status.h:517
            2#  doris::vectorized::MetaScanner::open(doris::RuntimeState*) at /home/zcp/repo_center/doris_master/doris/be/src/common/status.h:517
            3#  doris::vectorized::ScannerScheduler::_scanner_scan(std::shared_ptr<doris::vectorized::ScannerContext>, std::shared_ptr<doris::vectorized::ScanTask>) at /home/zcp/repo_center/doris_master/doris/be/src/vec/exec/scan/scanner_scheduler.cpp:0
            4#  std::_Function_handler<void (), doris::vectorized::ScannerScheduler::submit(std::shared_ptr<doris::vectorized::ScannerContext>, std::shared_ptr<doris::vectorized::ScanTask>)::$_1::operator()() const::{lambda()#1}>::_M_invoke(std::_Any_data const&) at /var/local/ldb-toolchain/bin/../lib/gcc/x86_64-linux-gnu/11/../../../../include/c++/11/bits/shared_ptr_base.h:1149
            5#  doris::ThreadPool::dispatch_thread() at /home/zcp/repo_center/doris_master/doris/be/src/util/threadpool.cpp:0
            6#  doris::Thread::supervise_thread(void*) at /var/local/ldb-toolchain/bin/../usr/include/pthread.h:563
            7#  ?
            8#  ?
    W 2025-05-13 04:57:21,496 14827 meta_scanner.cpp:295] fetch schema table data from master failed, errmsg=[INTERNAL_ERROR]TStatus: NullPointerException: Cannot invoke "org.apache.doris.common.security.authentication.PreExecutionAuthenticator.execute(java.util.concurrent.Callable)" because the return value of "org.apache.doris.datasource.ExternalCatalog.getPreExecutionAuthenticator()" is null
    
    ```
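    
    In short: Kerberos-aware catalogs (HMS, Iceberg) override the initializer with
    the new Configuration-based constructor, and initLocalObjectsImpl() now always
    runs it before anything that may call getPreExecutionAuthenticator(). A
    condensed, illustrative sketch (the subclass name is hypothetical; the method
    bodies mirror the HMSExternalCatalog/IcebergExternalCatalog changes below):
    ```
    // Hypothetical subclass name; bodies condensed from the diff below.
    public abstract class SomeKerberosAwareCatalog extends ExternalCatalog {
        @Override
        protected synchronized void initPreExecutionAuthenticator() {
            if (preExecutionAuthenticator == null) {
                // The new constructor derives the Kerberos HadoopAuthenticator from the conf.
                preExecutionAuthenticator = new PreExecutionAuthenticator(getConfiguration());
            }
        }

        @Override
        protected void initLocalObjectsImpl() {
            // Guarantee the authenticator exists before metadata ops or catalog
            // creation call getPreExecutionAuthenticator().
            initPreExecutionAuthenticator();
            // ... catalog-specific setup continues here ...
        }
    }
    ```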
---
 .../authentication/PreExecutionAuthenticator.java    | 15 +++++++++++++++
 .../org/apache/doris/datasource/ExternalCatalog.java | 19 +++++++++++++++++++
 .../doris/datasource/hive/HMSExternalCatalog.java    | 18 +++++++-----------
 .../doris/datasource/hive/HiveMetadataOps.java       |  2 +-
 .../datasource/iceberg/IcebergExternalCatalog.java   | 16 ++++++++--------
 .../iceberg/IcebergHadoopExternalCatalog.java        | 20 +++++---------------
 6 files changed, 55 insertions(+), 35 deletions(-)

diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/PreExecutionAuthenticator.java b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/PreExecutionAuthenticator.java
index a64dd4cf717..93967247330 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/PreExecutionAuthenticator.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/security/authentication/PreExecutionAuthenticator.java
@@ -17,6 +17,8 @@
 
 package org.apache.doris.common.security.authentication;
 
+import org.apache.hadoop.conf.Configuration;
+
 import java.util.concurrent.Callable;
 
 /**
@@ -39,6 +41,19 @@ public class PreExecutionAuthenticator {
     public PreExecutionAuthenticator() {
     }
 
+    /**
+     * Constructor to initialize the PreExecutionAuthenticator object.
+     * This constructor is responsible for initializing the Hadoop authenticator required for Kerberos authentication
+     * based on the provided configuration information.
+     *
+     * @param configuration Configuration information used to obtain Kerberos authentication settings
+     */
+    public PreExecutionAuthenticator(Configuration configuration) {
+        AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(configuration);
+        this.hadoopAuthenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
+    }
+
+
     /**
      * Executes the specified task with necessary authentication.
      * <p>If a HadoopAuthenticator is set, the task will be executed within a
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
index 923e2faae1d..cc0f0676796 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java
@@ -193,6 +193,17 @@ public abstract class ExternalCatalog
         this.comment = Strings.nullToEmpty(comment);
     }
 
+    /**
+     * Initializes the PreExecutionAuthenticator instance.
+     * This method ensures that the authenticator is created only once in a thread-safe manner.
+     * If additional authentication logic is required, it should be extended and implemented in subclasses.
+     */
+    protected synchronized void initPreExecutionAuthenticator() {
+        if (preExecutionAuthenticator == null) {
+            preExecutionAuthenticator = new PreExecutionAuthenticator();
+        }
+    }
+
     public Configuration getConfiguration() {
         // build configuration is costly, so we cache it.
         if (cachedConf != null) {
@@ -230,6 +241,11 @@ public abstract class ExternalCatalog
         }
     }
 
+    public ExternalMetadataOps getMetadataOps() {
+        makeSureInitialized();
+        return metadataOps;
+    }
+
     // Will be called when creating catalog(so when as replaying)
     // to add some default properties if missing.
     public void setDefaultPropsIfMissing(boolean isReplay) {
@@ -1257,6 +1273,9 @@ public abstract class ExternalCatalog
     }
 
     public PreExecutionAuthenticator getPreExecutionAuthenticator() {
+        if (null == preExecutionAuthenticator) {
+            throw new RuntimeException("PreExecutionAuthenticator is null, please confirm it is initialized.");
+        }
         return preExecutionAuthenticator;
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
index 96405bbf1e9..b9896c89972 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
@@ -23,8 +23,6 @@ import org.apache.doris.cluster.ClusterNamespace;
 import org.apache.doris.common.Config;
 import org.apache.doris.common.DdlException;
 import org.apache.doris.common.ThreadPoolManager;
-import org.apache.doris.common.security.authentication.AuthenticationConfig;
-import org.apache.doris.common.security.authentication.HadoopAuthenticator;
 import org.apache.doris.common.security.authentication.PreExecutionAuthenticator;
 import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.CatalogProperty;
@@ -46,7 +44,6 @@ import org.apache.doris.transaction.TransactionManagerFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Strings;
-import lombok.Getter;
 import org.apache.commons.lang3.math.NumberUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.iceberg.hive.HiveCatalog;
@@ -78,8 +75,6 @@ public class HMSExternalCatalog extends ExternalCatalog {
 
     private static final int FILE_SYSTEM_EXECUTOR_THREAD_NUM = 16;
     private ThreadPoolExecutor fileSystemExecutor;
-    @Getter
-    private HadoopAuthenticator authenticator;
 
     private int hmsEventsBatchSizePerRpc = -1;
     private boolean enableHmsEventsIncrementalSync = false;
@@ -157,14 +152,15 @@ public class HMSExternalCatalog extends ExternalCatalog {
     }
 
     @Override
-    protected void initLocalObjectsImpl() {
-        this.preExecutionAuthenticator = new PreExecutionAuthenticator();
-        if (this.authenticator == null) {
-            AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(getConfiguration());
-            this.authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
-            this.preExecutionAuthenticator.setHadoopAuthenticator(authenticator);
+    protected synchronized void initPreExecutionAuthenticator() {
+        if (preExecutionAuthenticator == null) {
+            preExecutionAuthenticator = new PreExecutionAuthenticator(getConfiguration());
         }
+    }
 
+    @Override
+    protected void initLocalObjectsImpl() {
+        initPreExecutionAuthenticator();
         HiveConf hiveConf = null;
         JdbcClientConfig jdbcClientConfig = null;
         String hiveMetastoreType = catalogProperty.getOrDefault(HMSProperties.HIVE_METASTORE_TYPE, "");
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
index 410d4bb9de2..d1e635f50f4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetadataOps.java
@@ -72,7 +72,7 @@ public class HiveMetadataOps implements ExternalMetadataOps {
         this(catalog, createCachedClient(hiveConf,
                 Math.max(MIN_CLIENT_POOL_SIZE, Config.max_external_cache_loader_thread_pool_size),
                 jdbcClientConfig));
-        hadoopAuthenticator = catalog.getAuthenticator();
+        hadoopAuthenticator = catalog.getPreExecutionAuthenticator().getHadoopAuthenticator();
         client.setHadoopAuthenticator(hadoopAuthenticator);
     }
 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java
index 225e14af420..bd5997ac386 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalCatalog.java
@@ -18,8 +18,6 @@
 package org.apache.doris.datasource.iceberg;
 
 import org.apache.doris.common.ThreadPoolManager;
-import org.apache.doris.common.security.authentication.AuthenticationConfig;
-import org.apache.doris.common.security.authentication.HadoopAuthenticator;
 import org.apache.doris.common.security.authentication.PreExecutionAuthenticator;
 import org.apache.doris.datasource.ExternalCatalog;
 import org.apache.doris.datasource.InitCatalogLog;
@@ -55,14 +53,16 @@ public abstract class IcebergExternalCatalog extends ExternalCatalog {
     // Create catalog based on catalog type
     protected abstract void initCatalog();
 
+    @Override
+    protected synchronized void initPreExecutionAuthenticator() {
+        if (preExecutionAuthenticator == null) {
+            preExecutionAuthenticator = new PreExecutionAuthenticator(getConfiguration());
+        }
+    }
+
     @Override
     protected void initLocalObjectsImpl() {
-        preExecutionAuthenticator = new PreExecutionAuthenticator();
-        // TODO If the storage environment does not support Kerberos (such as s3),
-        //      there is no need to generate a simple authentication information anymore.
-        AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(getConfiguration());
-        HadoopAuthenticator authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
-        preExecutionAuthenticator.setHadoopAuthenticator(authenticator);
+        initPreExecutionAuthenticator();
         initCatalog();
         IcebergMetadataOps ops = ExternalMetadataOperations.newIcebergMetadataOps(this, catalog);
         transactionManager = TransactionManagerFactory.createIcebergTransactionManager(ops);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java
index cd84093c747..56470a4d071 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergHadoopExternalCatalog.java
@@ -18,8 +18,6 @@
 package org.apache.doris.datasource.iceberg;
 
 import org.apache.doris.catalog.HdfsResource;
-import org.apache.doris.common.security.authentication.AuthenticationConfig;
-import org.apache.doris.common.security.authentication.HadoopAuthenticator;
 import org.apache.doris.datasource.CatalogProperty;
 import org.apache.doris.datasource.property.PropertyConverter;
 
@@ -57,22 +55,14 @@ public class IcebergHadoopExternalCatalog extends IcebergExternalCatalog {
 
         Configuration conf = getConfiguration();
         initS3Param(conf);
-
-        //create the authenticator first
-        if (preExecutionAuthenticator.getHadoopAuthenticator() == null) {
-            AuthenticationConfig config = AuthenticationConfig.getKerberosConfig(conf);
-            HadoopAuthenticator authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
-            preExecutionAuthenticator.setHadoopAuthenticator(authenticator);
-        }
-
         // initialize hadoop catalog
+        Map<String, String> catalogProperties = catalogProperty.getProperties();
+        String warehouse = catalogProperty.getHadoopProperties().get(CatalogProperties.WAREHOUSE_LOCATION);
+        HadoopCatalog hadoopCatalog = new HadoopCatalog();
+        hadoopCatalog.setConf(conf);
+        catalogProperties.put(CatalogProperties.WAREHOUSE_LOCATION, warehouse);
         try {
             this.catalog = preExecutionAuthenticator.execute(() -> {
-                Map<String, String> catalogProperties = catalogProperty.getProperties();
-                String warehouse = catalogProperty.getHadoopProperties().get(CatalogProperties.WAREHOUSE_LOCATION);
-                HadoopCatalog hadoopCatalog = new HadoopCatalog();
-                hadoopCatalog.setConf(conf);
-                catalogProperties.put(CatalogProperties.WAREHOUSE_LOCATION, warehouse);
                 hadoopCatalog.initialize(getName(), catalogProperties);
                 return hadoopCatalog;
             });


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@doris.apache.org
For additional commands, e-mail: commits-h...@doris.apache.org
