This is an automated email from the ASF dual-hosted git repository. diwu pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/doris-spark-connector.git
The following commit(s) were added to refs/heads/master by this push: new a599c56 [fix](spark load) fix ut and add build spark load ci (#272) a599c56 is described below commit a599c56c063ac8f57677c3a9c4ae7ccb23e51f53 Author: gnehil <adamlee...@gmail.com> AuthorDate: Tue Feb 25 17:04:00 2025 +0800 [fix](spark load) fix ut and add build spark load ci (#272) --- .github/workflows/build-extension.yml | 4 ++++ .../java/org/apache/doris/common/DppResult.java | 25 +--------------------- .../java/org/apache/doris/config/JobConfig.java | 2 +- .../org/apache/doris/config/JobConfigTest.java | 1 + .../org/apache/doris/load/job/PullLoaderTest.java | 3 --- 5 files changed, 7 insertions(+), 28 deletions(-) diff --git a/.github/workflows/build-extension.yml b/.github/workflows/build-extension.yml index 707e311..008bc35 100644 --- a/.github/workflows/build-extension.yml +++ b/.github/workflows/build-extension.yml @@ -69,3 +69,7 @@ jobs: - name: Build spark connector 3.5 run: | cd spark-doris-connector && mvn clean install ${MVN_OPT} -Pspark-3.5 -pl spark-doris-connector-spark-3.5 -am + + - name: Build spark load + run: | + cd spark-load && mvn clean package ${MVN_OPT} -Pspark3,scala_2.12 diff --git a/spark-load/spark-load-common/src/main/java/org/apache/doris/common/DppResult.java b/spark-load/spark-load-common/src/main/java/org/apache/doris/common/DppResult.java index f839c70..b25ba39 100644 --- a/spark-load/spark-load-common/src/main/java/org/apache/doris/common/DppResult.java +++ b/spark-load/spark-load-common/src/main/java/org/apache/doris/common/DppResult.java @@ -59,7 +59,7 @@ public class DppResult implements Serializable { public long scannedBytes; public DppResult() { - isSuccess = true; + isSuccess = false; failedReason = ""; scannedRows = 0; fileNumber = 0; @@ -71,27 +71,4 @@ public class DppResult implements Serializable { scannedBytes = 0; } - // @JsonCreator - // public DppResult(@JsonProperty(value = "is_success", required = true) boolean isSuccess, - // 
@JsonProperty(value = "failed_reason", required = true) String failedReason, - // @JsonProperty(value = "scanned_rows", required = true) long scannedRows, - // @JsonProperty(value = "file_number", required = true) long fileNumber, - // @JsonProperty(value = "file_size", required = true) long fileSize, - // @JsonProperty(value = "normal_rows", required = true) long normalRows, - // @JsonProperty(value = "abnormal_rows", required = true) long abnormalRows, - // @JsonProperty(value = "unselect_rows", required = true) long unselectRows, - // @JsonProperty("partial_abnormal_rows") String partialAbnormalRows, - // @JsonProperty("scanned_bytes") long scannedBytes) { - // this.isSuccess = isSuccess; - // this.failedReason = failedReason; - // this.scannedRows = scannedRows; - // this.fileNumber = fileNumber; - // this.fileSize = fileSize; - // this.normalRows = normalRows; - // this.abnormalRows = abnormalRows; - // this.unselectRows = unselectRows; - // this.partialAbnormalRows = partialAbnormalRows; - // this.scannedBytes = scannedBytes; - // } - } diff --git a/spark-load/spark-load-core/src/main/java/org/apache/doris/config/JobConfig.java b/spark-load/spark-load-core/src/main/java/org/apache/doris/config/JobConfig.java index 831e8ac..7c2ce9c 100644 --- a/spark-load/spark-load-core/src/main/java/org/apache/doris/config/JobConfig.java +++ b/spark-load/spark-load-core/src/main/java/org/apache/doris/config/JobConfig.java @@ -240,7 +240,7 @@ public class JobConfig { if (hadoopProperties == null || hadoopProperties.isEmpty()) { return; } - if (!workingDir.startsWith("s3")) { + if (StringUtils.isNoneBlank(workingDir) && !workingDir.startsWith("s3")) { if (!hadoopProperties.containsKey("fs.defaultFS")) { throw new IllegalArgumentException("fs.defaultFS is empty"); } diff --git a/spark-load/spark-load-core/src/test/java/org/apache/doris/config/JobConfigTest.java b/spark-load/spark-load-core/src/test/java/org/apache/doris/config/JobConfigTest.java index c4e6f00..f265838 100644 
--- a/spark-load/spark-load-core/src/test/java/org/apache/doris/config/JobConfigTest.java +++ b/spark-load/spark-load-core/src/test/java/org/apache/doris/config/JobConfigTest.java @@ -178,6 +178,7 @@ public class JobConfigTest { public void checkHadoopProperties() throws IOException { JobConfig jobConfig = new JobConfig(); + jobConfig.setWorkingDir("hdfs:///xxx"); Map<String, String> hadoopProperties = new HashMap<>(); jobConfig.setHadoopProperties(hadoopProperties); diff --git a/spark-load/spark-load-core/src/test/java/org/apache/doris/load/job/PullLoaderTest.java b/spark-load/spark-load-core/src/test/java/org/apache/doris/load/job/PullLoaderTest.java index a0c56a6..010d655 100644 --- a/spark-load/spark-load-core/src/test/java/org/apache/doris/load/job/PullLoaderTest.java +++ b/spark-load/spark-load-core/src/test/java/org/apache/doris/load/job/PullLoaderTest.java @@ -133,9 +133,6 @@ class PullLoaderTest { Files.write(file3.toPath(), Collections.singletonList("test")); assertThrows(SparkLoadException.class, () -> ((Recoverable)loader).canBeRecovered()); - Files.write(file3.toPath(), Collections.singletonList("{}")); - assertThrows(SparkLoadException.class, () -> ((Recoverable)loader).canBeRecovered()); - Files.write(file3.toPath(), Collections.singletonList("{\"is_success\":false,\"failed_reason\":\"\"," + "\"scanned_rows\":0,\"file_number\":0,\"file_size\":0,\"normal_rows\":0,\"abnormal_rows\":0," + "\"unselect_rows\":0,\"partial_abnormal_rows\":\"\",\"scanned_bytes\":0}\n")); --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscribe@doris.apache.org For additional commands, e-mail: commits-help@doris.apache.org