This is an automated email from the ASF dual-hosted git repository.

yiguolei pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/branch-2.1 by this push:
     new 5ac3aee460c branch-2.1: [opt](max-compute) avoid repeated location path creation (#43383)
5ac3aee460c is described below

commit 5ac3aee460c43c6b5cd704df0358e6749bfea28c
Author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
AuthorDate: Sun Nov 10 10:11:37 2024 +0800

    branch-2.1: [opt](max-compute) avoid repeated location path creation (#43383)
    
    Cherry-picked from #43355
    
    Co-authored-by: Mingyu Chen (Rayner) <morning...@163.com>
---
 .../doris/datasource/maxcompute/source/MaxComputeScanNode.java    | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/source/MaxComputeScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/source/MaxComputeScanNode.java
index 4ec4319c228..e0b84b0860e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/source/MaxComputeScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/maxcompute/source/MaxComputeScanNode.java
@@ -81,8 +81,10 @@ import java.util.stream.Collectors;
 public class MaxComputeScanNode extends FileQueryScanNode {
 
     private final MaxComputeExternalTable table;
-    TableBatchReadSession tableBatchReadSession;
+    private TableBatchReadSession tableBatchReadSession;
     private Predicate filterPredicate;
+    private static final LocationPath ROW_OFFSET_PATH = new LocationPath("/row_offset", Maps.newHashMap());
+    private static final LocationPath BYTE_SIZE_PATH = new LocationPath("/byte_size", Maps.newHashMap());
 
     public MaxComputeScanNode(PlanNodeId id, TupleDescriptor desc, boolean needCheckColumnPriv) {
         this(id, desc, "MCScanNode", StatisticalType.MAX_COMPUTE_SCAN_NODE, needCheckColumnPriv);
@@ -441,7 +443,7 @@ public class MaxComputeScanNode extends FileQueryScanNode {
 
                 for (com.aliyun.odps.table.read.split.InputSplit split : assigner.getAllSplits()) {
                     MaxComputeSplit maxComputeSplit =
-                            new MaxComputeSplit(new LocationPath("/byte_size", Maps.newHashMap()),
+                            new MaxComputeSplit(BYTE_SIZE_PATH,
                                     ((IndexedInputSplit) split).getSplitIndex(), -1,
                                     mcCatalog.getSplitByteSize(),
                                     modificationTime, null,
@@ -464,7 +466,7 @@ public class MaxComputeScanNode extends FileQueryScanNode {
                             assigner.getSplitByRowOffset(offset, recordsPerSplit);
 
                     MaxComputeSplit maxComputeSplit =
-                            new MaxComputeSplit(new LocationPath("/row_offset", Maps.newHashMap()),
+                            new MaxComputeSplit(ROW_OFFSET_PATH,
                             offset, recordsPerSplit, totalRowCount, modificationTime, null,
                             Collections.emptyList());
 


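The change is a small allocation optimization: the LocationPath handed to each MaxComputeSplit is always one of two fixed values, "/row_offset" or "/byte_size", so the patch hoists them into static final fields instead of constructing a fresh LocationPath (and a new HashMap) once per split. Below is a minimal, self-contained sketch of the same pattern; SplitLocation and SplitPlanner are hypothetical stand-ins for illustration, not the actual Doris classes.

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    // Hypothetical value type standing in for LocationPath.
    final class SplitLocation {
        final String path;
        final Map<String, String> properties;

        SplitLocation(String path, Map<String, String> properties) {
            this.path = path;
            this.properties = properties;
        }
    }

    // Hypothetical planner standing in for the split-generation loop in MaxComputeScanNode.
    public class SplitPlanner {
        // Built once per class load rather than once per split.
        private static final SplitLocation BYTE_SIZE_PATH =
                new SplitLocation("/byte_size", Collections.emptyMap());

        public List<SplitLocation> planSplits(int splitCount) {
            List<SplitLocation> splits = new ArrayList<>();
            for (int i = 0; i < splitCount; i++) {
                // Before the patch, the equivalent of
                //     new SplitLocation("/byte_size", new java.util.HashMap<>())
                // ran on every iteration; reusing the shared constant avoids
                // that repeated allocation.
                splits.add(BYTE_SIZE_PATH);
            }
            return splits;
        }
    }

Sharing one instance like this assumes the hoisted object is never mutated after construction; the path string and the map it wraps are treated as read-only by every split that reuses them.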