This is an automated email from the ASF dual-hosted git repository.

kxiao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 8392e499835 [fix](hudi) fix wrong schema when query hudi table on obs (#26789)
8392e499835 is described below

commit 8392e499835829e9605015d567ed02679a8d9873
Author: Mingyu Chen <[email protected]>
AuthorDate: Sun Nov 12 11:10:30 2023 +0800

    [fix](hudi) fix wrong schema when query hudi table on obs (#26789)
---
 .../java/org/apache/doris/planner/external/hudi/HudiScanNode.java    | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java
index b0306ab19c6..4b8b5dcef0d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/planner/external/hudi/HudiScanNode.java
@@ -26,6 +26,7 @@ import org.apache.doris.catalog.Type;
 import org.apache.doris.catalog.external.ExternalTable;
 import org.apache.doris.common.AnalysisException;
 import org.apache.doris.common.UserException;
+import org.apache.doris.common.util.S3Util;
 import org.apache.doris.datasource.hive.HivePartition;
 import org.apache.doris.planner.ListPartitionPrunerV2;
 import org.apache.doris.planner.PlanNodeId;
@@ -42,6 +43,7 @@ import org.apache.doris.thrift.THudiFileDesc;
 import org.apache.doris.thrift.TTableFormatFileDesc;
 
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import org.apache.avro.Schema;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
@@ -284,7 +286,8 @@ public class HudiScanNode extends HiveScanNode {
                     noLogsSplitNum.incrementAndGet();
                     String filePath = baseFile.getPath();
                     long fileSize = baseFile.getFileSize();
-                    splits.add(new FileSplit(new Path(filePath), 0, fileSize, fileSize, new String[0],
+                    splits.add(new FileSplit(S3Util.toScanRangeLocation(filePath, Maps.newHashMap()),
+                            0, fileSize, fileSize, new String[0],
                             partition.getPartitionValues()));
                 });
             } else {
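
For reference, the core of the change is replacing new Path(filePath) with S3Util.toScanRangeLocation(filePath, Maps.newHashMap()) when building the FileSplit, so that object-storage locations (such as obs:// paths) are normalized before being placed into the scan range. The sketch below is an illustrative assumption of the kind of scheme normalization such a helper might perform; the class ScanRangeLocationSketch and its rewrite rules are hypothetical and are not Doris's actual S3Util implementation.

    import java.net.URI;
    import java.util.Map;

    import org.apache.hadoop.fs.Path;

    // Hypothetical sketch, NOT the real org.apache.doris.common.util.S3Util:
    // it only illustrates why a helper is preferred over `new Path(filePath)`
    // for object-store file locations.
    public class ScanRangeLocationSketch {

        // Assumed behavior: rewrite object-store schemes (obs, oss, cos) to an
        // S3-compatible scheme so the split location is handled by the S3 reader
        // instead of being treated as a plain HDFS path. The properties map is
        // unused here; the real helper presumably consults it (e.g. for endpoints).
        public static Path toScanRangeLocation(String filePath, Map<String, String> properties) {
            URI uri = URI.create(filePath);
            String scheme = uri.getScheme();
            if ("obs".equalsIgnoreCase(scheme)
                    || "oss".equalsIgnoreCase(scheme)
                    || "cos".equalsIgnoreCase(scheme)) {
                // e.g. "obs://bucket/key" -> "s3://bucket/key"
                return new Path("s3" + filePath.substring(scheme.length()));
            }
            return new Path(filePath);
        }
    }

Whether the real helper rewrites schemes, attaches connection properties, or both is not visible from this diff; the point of the fix is that constructing the Path directly from the raw file path bypassed that normalization for OBS locations.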


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
