This is an automated email from the ASF dual-hosted git repository.

adonisling pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 3a0a79b4a0  [Improvement][SparkLoad] Use system env configs when users don't set env configs. (#21837)
3a0a79b4a0 is described below

commit 3a0a79b4a0f0897a143bbfbdee6be6477b2c6cd4
Author: Chuang Li <64473732+codecooke...@users.noreply.github.com>
AuthorDate: Wed Aug 30 15:14:40 2023 +0800

     [Improvement][SparkLoad] Use system env configs when users don't set env configs. (#21837)
---
 .../org/apache/doris/catalog/SparkResource.java    | 29 +++++++++++------
 .../apache/doris/catalog/SparkResourceTest.java    | 37 ++++++++++++++++++++++
 2 files changed, 56 insertions(+), 10 deletions(-)
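
Note for readers: the change summarized above makes getEnvConfigsWithoutPrefix() fall back to the process environment. It merges the system environment (via the new getSystemEnvConfigs() hook) with the user-supplied envConfigs, keeps only keys carrying the "env." prefix, strips that prefix, and lets a user-supplied entry override a system entry with the same key. The self-contained sketch below mirrors that pipeline so the override semantics can be run in isolation; the class name and the sample environment values are invented for illustration.

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class EnvMergeSketch {
    private static final String ENV_PREFIX = "env.";

    // Same pipeline as the new getEnvConfigsWithoutPrefix(): system entries first,
    // user entries second, keep only "env."-prefixed keys, strip the prefix, and on
    // a duplicate key keep the later (user-supplied) value.
    static Map<String, String> merge(Map<String, String> systemEnv, Map<String, String> userEnv) {
        return Stream.concat(
                        systemEnv.entrySet().stream(),
                        Optional.ofNullable(userEnv).orElse(Collections.emptyMap()).entrySet().stream())
                .filter(entry -> entry.getKey().startsWith(ENV_PREFIX))
                .collect(Collectors.toMap(
                        entry -> entry.getKey().substring(ENV_PREFIX.length()),
                        Entry::getValue,
                        (oldValue, newValue) -> newValue));
    }

    public static void main(String[] args) {
        Map<String, String> systemEnv = new LinkedHashMap<>();
        systemEnv.put("env.HADOOP_CONF_DIR", "/etc/hadoop/conf"); // hypothetical system value
        systemEnv.put("PATH", "/usr/bin");                        // no "env." prefix, dropped

        Map<String, String> userEnv = new LinkedHashMap<>();
        userEnv.put("env.HADOOP_CONF_DIR", "/opt/custom/conf");   // user value wins on conflict

        System.out.println(merge(systemEnv, userEnv));            // {HADOOP_CONF_DIR=/opt/custom/conf}
    }
}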

diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java
index 98ebe54b96..453a50ad11 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/SparkResource.java
@@ -37,7 +37,12 @@ import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 
 import java.io.File;
+import java.util.Collections;
 import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 /**
  * Spark resource for etl or query.
@@ -159,16 +164,20 @@ public class SparkResource extends Resource {
     }
 
     public Map<String, String> getEnvConfigsWithoutPrefix() {
-        Map<String, String> envConfig = Maps.newHashMap();
-        if (envConfigs != null) {
-            for (Map.Entry<String, String> entry : envConfigs.entrySet()) {
-                if (entry.getKey().startsWith(ENV_PREFIX)) {
-                    String key = entry.getKey().substring(ENV_PREFIX.length());
-                    envConfig.put(key, entry.getValue());
-                }
-            }
-        }
-        return envConfig;
+        return Stream.concat(
+                        getSystemEnvConfigs().entrySet().stream(),
+                        Optional.ofNullable(envConfigs).orElse(Collections.emptyMap()).entrySet().stream()
+                )
+                .filter(entry -> entry.getKey().startsWith(ENV_PREFIX))
+                .collect(Collectors.toMap(
+                        entry -> entry.getKey().substring(ENV_PREFIX.length()),
+                        Entry::getValue,
+                        (oldValue, newValue) -> newValue
+                ));
+    }
+
+    public Map<String, String> getSystemEnvConfigs() {
+        return System.getenv();
     }
 
     public Pair<String, String> getYarnResourcemanagerAddressPair() {
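
Two details of the new pipeline are worth calling out (reader's note, not part of the commit): Optional.ofNullable(envConfigs).orElse(Collections.emptyMap()) keeps the method null-safe when no env configs were ever set on the resource, and the third argument to Collectors.toMap is what lets a user-supplied key shadow the same key coming from the system environment. A minimal standalone illustration of that last point, with invented values:

import java.util.AbstractMap.SimpleEntry;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToMapMergeSketch {
    public static void main(String[] args) {
        // Two entries with the same key: the first stands in for the system
        // environment, the second for a user-supplied env config.
        Map<String, String> merged = Stream.of(
                        new SimpleEntry<>("JAVA_HOME", "/usr/lib/jvm/system"),
                        new SimpleEntry<>("JAVA_HOME", "/usr/lib/jvm/user"))
                .collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue,
                        (oldValue, newValue) -> newValue)); // the later (user) entry wins
        System.out.println(merged); // prints {JAVA_HOME=/usr/lib/jvm/user}

        // Without the third (merge) argument, the duplicate key would make
        // Collectors.toMap throw an IllegalStateException instead.
    }
}
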
diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java
index 0184acc5c2..ac047ed17f 100644
--- a/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java
+++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/SparkResourceTest.java
@@ -36,6 +36,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 
 public class SparkResourceTest {
@@ -195,4 +197,39 @@ public class SparkResourceTest {
         stmt.analyze(analyzer);
         Resource.fromStmt(stmt);
     }
+
+    @Test
+    public void testGetEnvConfigsWithoutPrefix() {
+        Map<String, String> envConfigs = new HashMap<>();
+        envConfigs.put("env.testKey2", "testValue2");
+        SparkResource resource = new SparkResource("test", Maps.newHashMap(), null, null, Maps.newHashMap(),
+                envConfigs) {
+            @Override
+            public Map<String, String> getSystemEnvConfigs() {
+                return Collections.singletonMap("env.testKey1", "testValue1");
+            }
+        };
+        Map<String, String> expected1 = new HashMap<>();
+        expected1.put("testKey1", "testValue1");
+        expected1.put("testKey2", "testValue2");
+        Map<String, String> actual1 = resource.getEnvConfigsWithoutPrefix();
+        Assert.assertEquals(expected1, actual1);
+
+        Map<String, String> envConfigs2 = new HashMap<>();
+        envConfigs2.put("env.testKey1", "testValue3");
+        envConfigs2.put("env.testKey2", "testValue2");
+        SparkResource resource2 = new SparkResource("test2", Maps.newHashMap(), null, null, Maps.newHashMap(),
+                envConfigs2) {
+            @Override
+            public Map<String, String> getSystemEnvConfigs() {
+                return Collections.singletonMap("env.testKey1", "testValue1");
+            }
+        };
+        Map<String, String> expected2 = new HashMap<>();
+        expected2.put("testKey1", "testValue3");
+        expected2.put("testKey2", "testValue2");
+        Map<String, String> actual2 = resource2.getEnvConfigsWithoutPrefix();
+        Assert.assertEquals(expected2, actual2);
+    }
+
 }
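
A note on the test above: routing the System.getenv() lookup through the overridable getSystemEnvConfigs() method is what lets SparkResourceTest stub the process environment with Collections.singletonMap(...) in an anonymous subclass, so the assertions do not depend on whatever variables happen to be exported on the build machine. The second case also pins down the precedence: the user-supplied "env.testKey1" value ("testValue3") wins over the stubbed system value ("testValue1").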

