GRPC Core 9.0.0
arena.h
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

// \file Arena based allocator
// Allows very fast allocation of memory, but that memory cannot be freed
// until the arena as a whole is freed.
// Tracks the total memory allocated against it, so that future arenas can
// pre-allocate the right amount of memory.
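//
// Typical lifecycle, as an illustrative sketch (the caller and the sizes
// below are hypothetical, not part of this header):
//
//   grpc_core::Arena* arena = grpc_core::Arena::Create(1024);
//   void* buf = arena->Alloc(64);    // bump-pointer allocation; no free()
//   /* ... use buf for as long as the arena lives ... */
//   size_t used = arena->Destroy();  // frees everything, returns bytes used
//
// The byte count returned by Destroy() is what lets callers pre-size the
// next arena's initial buffer.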

#ifndef GRPC_CORE_LIB_GPRPP_ARENA_H
#define GRPC_CORE_LIB_GPRPP_ARENA_H

#include <grpc/support/port_platform.h>

#include <new>
#include <utility>

#include <grpc/support/alloc.h>
#include <grpc/support/sync.h>

#include "src/core/lib/gpr/alloc.h"
#include "src/core/lib/gpr/spinlock.h"
#include "src/core/lib/gprpp/atomic.h"

#include <stddef.h>

namespace grpc_core {

class Arena {
 public:
  // Create an arena, with \a initial_size bytes in the first allocated buffer.
  static Arena* Create(size_t initial_size);

  // Create an arena, with \a initial_size bytes in the first allocated
  // buffer, and return both a pointer to the arena and a void* to the first
  // allocation.
  static std::pair<Arena*, void*> CreateWithAlloc(size_t initial_size,
                                                  size_t alloc_size);
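  // Illustrative sketch of using the combined call (the sizes here are
  // hypothetical):
  //
  //   std::pair<Arena*, void*> p = Arena::CreateWithAlloc(1024, 128);
  //   Arena* arena = p.first;  // the arena itself
  //   void* mem = p.second;    // 128 bytes already carved out of zone 0
  //
  // This fuses Create() and the first Alloc(), saving one atomic fetch-add
  // on a hot path (see the constructor notes below).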

  // Destroy an arena, returning the total number of bytes allocated.
  size_t Destroy();
  // Allocate \a size bytes from the arena.
  void* Alloc(size_t size) {
    static constexpr size_t base_size =
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Arena));
    size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(size);
    size_t begin = total_used_.FetchAdd(size, MemoryOrder::RELAXED);
    if (begin + size <= initial_zone_size_) {
      return reinterpret_cast<char*>(this) + base_size + begin;
    } else {
      return AllocZone(size);
    }
  }
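  // Worked example with illustrative numbers: on an empty arena whose
  // initial_zone_size_ is 1024, Alloc(30) first rounds 30 up to the
  // alignment unit (e.g. 32), atomically reserves bytes [0, 32) of zone 0
  // with the relaxed fetch-add, and returns this + base_size + 0, i.e. the
  // address just past the aligned Arena header. Once begin + size would
  // exceed zone 0, the call falls through to AllocZone() instead.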

  // TODO(roth): We currently assume that all callers need alignment of 16
  // bytes, which may be wrong in some cases. When we have time, we should
  // change this to instead use the alignment of the type being allocated by
  // this method.
  template <typename T, typename... Args>
  T* New(Args&&... args) {
    T* t = static_cast<T*>(Alloc(sizeof(T)));
    new (t) T(std::forward<Args>(args)...);
    return t;
  }
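  // Illustrative sketch ('CallData' stands in for any hypothetical type):
  //
  //   CallData* cd = arena->New<CallData>(arg1, arg2);
  //
  // Note that New() only placement-constructs: nothing here records the
  // object for later destruction, so ~CallData() will not run when the
  // arena is destroyed. Types allocated this way should be trivially
  // destructible, or have their cleanup arranged elsewhere.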

 private:
  struct Zone {
    Zone* prev;
  };

  // Initialize an arena.
  // Parameters:
  //   initial_size: The initial size of the whole arena in bytes. These bytes
  //   are contained within 'zone 0'. If the arena user ends up requiring more
  //   memory than the arena contains in zone 0, subsequent zones are allocated
  //   on demand and maintained in a tail-linked list.
  //
  //   initial_alloc: Optionally, construct the arena as though a call to
  //   Alloc() had already been made for initial_alloc bytes. This provides a
  //   quick optimization (avoiding an atomic fetch-add) for the common case
  //   where we wish to create an arena and then perform an immediate
  //   allocation.
  explicit Arena(size_t initial_size, size_t initial_alloc = 0)
      : total_used_(initial_alloc), initial_zone_size_(initial_size) {}

  ~Arena();

  void* AllocZone(size_t size);

  // Keep track of the total used size. We use this in our call sizing
  // hysteresis.
  Atomic<size_t> total_used_;
  size_t initial_zone_size_;
  gpr_spinlock arena_growth_spinlock_ = GPR_SPINLOCK_STATIC_INITIALIZER;
  // If the initial arena allocation wasn't enough, we allocate additional
  // zones in a reverse-linked list. Each additional zone consists of (1) a
  // pointer to the zone added before this zone (null if this is the first
  // additional zone) and (2) the allocated memory. The arena itself maintains
  // a pointer to the last zone; the zone list is reverse-walked during arena
  // destruction only.
  Zone* last_zone_ = nullptr;
};
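// Illustrative sketch of how a zone-growing allocator like AllocZone() can
// maintain the list described above. This is a minimal sketch, assuming the
// real definition lives in arena.cc; everything beyond the members declared
// above is an assumption, not the actual implementation:
//
//   void* Arena::AllocZone(size_t size) {
//     static constexpr size_t zone_base_size =
//         GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Zone));
//     // Allocate header + payload in one block; the payload follows the
//     // Zone header, mirroring how zone 0 follows the Arena header.
//     Zone* z = static_cast<Zone*>(
//         gpr_malloc_aligned(zone_base_size + size, GPR_MAX_ALIGNMENT));
//     gpr_spinlock_lock(&arena_growth_spinlock_);
//     z->prev = last_zone_;  // push onto the reverse-linked list
//     last_zone_ = z;
//     gpr_spinlock_unlock(&arena_growth_spinlock_);
//     return reinterpret_cast<char*>(z) + zone_base_size;
//   }
//
// Destruction can then walk last_zone_->prev back to null, freeing each
// zone before freeing the arena block itself.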

}  // namespace grpc_core

#endif /* GRPC_CORE_LIB_GPRPP_ARENA_H */