This is an automated email from the ASF dual-hosted git repository.

panxiaolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 127987f307f [opt](arena) lazy memory allocation in arena (#36498)
127987f307f is described below

commit 127987f307f2815bd65f0be002738431b68b4ec8
Author: zhiqiang <seuhezhiqi...@163.com>
AuthorDate: Mon Jul 1 14:57:38 2024 +0800

    [opt](arena) lazy memory allocation in arena (#36498)
    
    Arena should not allocate memory in its constructor.
    
    After this PR is merged, we should revert
    https://github.com/apache/doris/pull/36299
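
    For illustration, the pattern this change applies, as a minimal
    standalone sketch (a hypothetical LazyArenaSketch type, not the Doris
    Arena; the real implementation in be/src/vec/common/arena.h keeps a
    chunk list, right padding, and ASAN poisoning):

        #include <cassert>
        #include <cstddef>

        struct LazyArenaSketch {
            size_t initial_size;
            char* begin = nullptr; // the constructor allocates nothing
            char* pos = nullptr;
            char* end = nullptr;

            explicit LazyArenaSketch(size_t initial_size_ = 4096)
                    : initial_size(initial_size_) {}
            ~LazyArenaSketch() { delete[] begin; }

            char* alloc(size_t size) {
                if (begin == nullptr) { // first use pays the allocation cost
                    begin = new char[initial_size];
                    pos = begin;
                    end = begin + initial_size;
                }
                assert(pos + size <= end); // the real Arena adds a new chunk here
                char* res = pos;
                pos += size;
                return res;
            }
        };

        int main() {
            LazyArenaSketch arena;     // constructing is free: no heap allocation
            char* p = arena.alloc(16); // memory is allocated on first use
            assert(p != nullptr);
        }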
---
 be/src/vec/common/arena.h | 59 +++++++++++++++++++++++++++++++++++++----------
 1 file changed, 47 insertions(+), 12 deletions(-)
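
For reference, here is the chunk-growth policy the patch leaves in place, as a
hedged reconstruction (the full body of next_size() is not visible in the
hunks below; this sketch follows the code comment and the default values shown
in the diff, so details may differ from the real implementation):

    #include <algorithm>
    #include <cstdio>

    // Round a size up to a whole number of 4 KiB pages, as in arena.h.
    static size_t round_up_to_page_size(size_t s) { return (s + 4096 - 1) / 4096 * 4096; }

    // Exponential growth (factor 2) while the current chunk is below the
    // 128 MiB linear_growth_threshold, linear growth afterwards; the
    // result is page-aligned. Reconstructed for illustration only.
    static size_t next_size(size_t head_size, size_t min_next_size) {
        const size_t growth_factor = 2;
        const size_t linear_growth_threshold = 128 * 1024 * 1024;
        size_t size_after_grow = 0;
        if (head_size < linear_growth_threshold) {
            size_after_grow = std::max(min_next_size, head_size * growth_factor);
        } else {
            size_after_grow = min_next_size;
        }
        return round_up_to_page_size(size_after_grow);
    }

    int main() {
        std::printf("%zu\n", next_size(4096, 100));        // 8192: doubled, page-aligned
        std::printf("%zu\n", next_size(256 << 20, 5000));  // 8192: linear, page-aligned
    }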

diff --git a/be/src/vec/common/arena.h b/be/src/vec/common/arena.h
index 4ab3ee4c606..65e8c1dfabe 100644
--- a/be/src/vec/common/arena.h
+++ b/be/src/vec/common/arena.h
@@ -84,20 +84,22 @@ private:
         size_t used() const { return pos - begin; }
     };
 
-    size_t growth_factor;
-    size_t linear_growth_threshold;
+    size_t growth_factor = 2;
+    size_t linear_growth_threshold = 128 * 1024 * 1024;
 
     /// Last contiguous chunk of memory.
     Chunk* head = nullptr;
-    size_t size_in_bytes;
+    size_t size_in_bytes = 0;
+    size_t _initial_size = 4096;
     // The memory used by all chunks, excluding head.
-    size_t _used_size_no_head;
+    size_t _used_size_no_head = 0;
 
     static size_t round_up_to_page_size(size_t s) { return (s + 4096 - 1) / 4096 * 4096; }
 
     /// If chunks size is less than 'linear_growth_threshold', then use exponential growth, otherwise - linear growth
     ///  (to not allocate too much excessive memory).
-    size_t next_size(size_t min_next_size) const {
+    size_t next_size(size_t min_next_size) {
+        DCHECK(head != nullptr);
         size_t size_after_grow = 0;
 
         if (head->size() < linear_growth_threshold) {
@@ -120,12 +122,20 @@ private:
     }
 
     /// Add next contiguous chunk of memory with size not less than specified.
-    void NO_INLINE add_chunk(size_t min_size) {
+    void NO_INLINE _add_chunk(size_t min_size) {
+        DCHECK(head != nullptr);
         _used_size_no_head += head->used();
         head = new Chunk(next_size(min_size + pad_right), head);
         size_in_bytes += head->size();
     }
 
+    void _init_head_if_needed() {
+        if (UNLIKELY(head == nullptr)) {
+            head = new Chunk(_initial_size, nullptr);
+            size_in_bytes += head->size();
+        }
+    }
+
     friend class ArenaAllocator;
     template <size_t>
     friend class AlignedArenaAllocator;
@@ -135,15 +145,18 @@ public:
           size_t linear_growth_threshold_ = 128 * 1024 * 1024)
             : growth_factor(growth_factor_),
               linear_growth_threshold(linear_growth_threshold_),
-              head(new Chunk(initial_size_, nullptr)),
-              size_in_bytes(head->size()),
+              _initial_size(initial_size_),
               _used_size_no_head(0) {}
 
     ~Arena() { delete head; }
 
     /// Get piece of memory, without alignment.
     char* alloc(size_t size) {
-        if (UNLIKELY(head->pos + size > head->end)) add_chunk(size);
+        _init_head_if_needed();
+
+        if (UNLIKELY(head->pos + size > head->end)) {
+            _add_chunk(size);
+        }
 
         char* res = head->pos;
         head->pos += size;
@@ -153,6 +166,8 @@ public:
 
     /// Get piece of memory with alignment
     char* aligned_alloc(size_t size, size_t alignment) {
+        _init_head_if_needed();
+
         do {
             void* head_pos = head->pos;
             size_t space = head->end - head->pos;
@@ -165,7 +180,7 @@ public:
                 return res;
             }
 
-            add_chunk(size + alignment);
+            _add_chunk(size + alignment);
         } while (true);
     }
 
@@ -180,6 +195,8 @@ public:
          * the allocation it intended to roll back was indeed the last one.
       */
     void* rollback(size_t size) {
+        DCHECK(head != nullptr);
+
         head->pos -= size;
         ASAN_POISON_MEMORY_REGION(head->pos, size + pad_right);
         return head->pos;
@@ -208,6 +225,8 @@ public:
             return result;
         }
 
+        DCHECK(head != nullptr);
+
         // Extend an existing memory range with 'additional_bytes'.
 
         // This method only works for extending the last allocation. For lack of
@@ -291,6 +310,10 @@ public:
     * and only 128M can be reused when you apply for 4G memory again.
     */
     void clear() {
+        if (head == nullptr) {
+            return;
+        }
+
         if (head->prev) {
             delete head->prev;
             head->prev = nullptr;
@@ -303,9 +326,21 @@ public:
     /// Size of chunks in bytes.
     size_t size() const { return size_in_bytes; }
 
-    size_t used_size() const { return _used_size_no_head + head->used(); }
+    size_t used_size() const {
+        if (head == nullptr) {
+            return _used_size_no_head;
+        }
+
+        return _used_size_no_head + head->used();
+    }
+
+    size_t remaining_space_in_current_chunk() const {
+        if (head == nullptr) {
+            return 0;
+        }
 
-    size_t remaining_space_in_current_chunk() const { return head->remaining(); }
+        return head->remaining();
+    }
 };
 
 using ArenaPtr = std::shared_ptr<Arena>;
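
The observable effect of the change, as a hedged usage sketch (editorial: it
assumes Arena lives in namespace doris::vectorized and that be/src is on the
include path, neither of which is shown in the diff; the method names are
taken from the diff above):

    #include <cassert>
    #include "vec/common/arena.h"

    int main() {
        doris::vectorized::Arena arena;  // no chunk is allocated here anymore
        assert(arena.size() == 0);       // size_in_bytes starts at 0
        assert(arena.remaining_space_in_current_chunk() == 0); // null-safe now
        arena.clear();                   // also safe before any allocation

        char* p = arena.alloc(64);       // first use creates the head chunk
        assert(p != nullptr);
        assert(arena.used_size() == 64); // head->used() after one allocation
        assert(arena.size() >= 64);      // at least the initial chunk size
    }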

