aboutsummaryrefslogtreecommitdiff
path: root/pw_allocator/public/pw_allocator/split_free_list_allocator.h
blob: 46fd9d9714615af61abd3d815f04c0e12b1935ce (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
// Copyright 2023 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#pragma once

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <optional>

#include "pw_allocator/allocator.h"
#include "pw_allocator/block.h"
#include "pw_bytes/alignment.h"
#include "pw_bytes/span.h"
#include "pw_result/result.h"
#include "pw_status/status.h"

namespace pw::allocator {

/// Block-independent base class of SplitFreeListAllocator.
///
/// This class contains static methods which do not depend on the template
/// parameters of ``SplitFreeListAllocator`` that are used to determine block
/// type. This allows the methods to be defined in a separate source file and
/// use macros that cannot be used in headers, e.g. PW_CHECK.
///
/// This class should not be used directly. Instead, see
/// ``SplitFreeListAllocator``.
class BaseSplitFreeListAllocator : public Allocator {
 protected:
  constexpr BaseSplitFreeListAllocator() = default;

  /// Crashes with an informational message that the given block is allocated.
  ///
  /// This method is meant to be called by ``SplitFreeListAllocator``'s
  /// destructor. There must not be any outstanding allocations from an
  /// allocator when it is destroyed.
  ///
  /// @param[in]  allocated   The block that is still in use.
  static void CrashOnAllocated(void* allocated);
};

/// This memory allocator uses a free list to track unallocated blocks, with a
/// twist: Allocations above or below a given threshold are taken from
/// respectively lower or higher addresses from within the allocator's memory
/// region. This has the effect of decreasing fragmentation as similarly-sized
/// allocations are grouped together.
///
/// NOTE!! Do NOT use memory returned from this allocator as the backing for
/// another allocator. If this is done, the `Query` method will incorrectly
/// think pointers returned by that allocator were created by this one, and
/// report that this allocator can de/reallocate them.
template <typename BlockType = Block<>>
class SplitFreeListAllocator : public BaseSplitFreeListAllocator {
 public:
  using Range = typename BlockType::Range;

  constexpr SplitFreeListAllocator() = default;
  ~SplitFreeListAllocator() override;

  // Not copyable.
  SplitFreeListAllocator(const SplitFreeListAllocator&) = delete;
  SplitFreeListAllocator& operator=(const SplitFreeListAllocator&) = delete;

  /// Sets the memory region to be used by this allocator, and the threshold at
  /// which allocations are considered "large" or "small". Large and small
  /// allocations return lower and higher addresses, respectively.
  ///
  /// @param[in]  region              The memory region for this allocator.
  /// @param[in]  threshold           Allocations of this size or larger are
  ///                                 "large" and come from lower addresses.
  /// @retval     OK                  The allocator is initialized.
  /// @retval     INVALID_ARGUMENT    The memory region is null.
  /// @retval     RESOURCE_EXHAUSTED  The region is too small for `BlockType`.
  /// @retval     OUT_OF_RANGE        The region too large for `BlockType`.
  Status Init(ByteSpan region, size_t threshold);

  /// Returns an iterable range of blocks tracking the memory of this allocator.
  Range blocks() const;

 private:
  using ReverseRange = typename BlockType::ReverseRange;

  /// @copydoc Allocator::Dispatch
  Status DoQuery(const void* ptr, Layout layout) const override;

  /// @copydoc Allocator::Allocate
  void* DoAllocate(Layout layout) override;

  /// Allocate a large chunk of memory.
  ///
  /// Allocations larger than the threshold will be allocated from lower
  /// addresses. If a block needs to be fragmented, the returned pointer will be
  /// from the lower-addressed part of the block.
  ///
  /// @param[in]  layout      Describes the memory to be allocated.
  void* DoAllocateLarge(Layout layout);

  /// Allocate a small chunk of memory.
  ///
  /// Allocations smaller than the threshold will be allocated from higher
  /// addresses. If a block needs to be fragmented, the returned pointer will be
  /// from the higher-addressed part of the block.
  ///
  /// @param[in]  layout      Describes the memory to be allocated.
  void* DoAllocateSmall(Layout layout);

  /// @copydoc Allocator::Deallocate
  void DoDeallocate(void* ptr, Layout layout) override;

  /// @copydoc Allocator::Resize
  bool DoResize(void* ptr, Layout layout, size_t new_size) override;

  // Represents the entire memory region for this allocator.
  void* begin_ = nullptr;
  void* end_ = nullptr;

  // Represents the range of blocks that include free blocks. Blocks outside
  // this range are guaranteed to be in use. These are effectively cached values
  // used to speed up searching for free blocks.
  BlockType* first_free_ = nullptr;
  BlockType* last_free_ = nullptr;

  // The boundary between what are considered "small" and "large" allocations.
  size_t threshold_ = 0;
};

// Template method implementations

/// Crashes if any memory is still allocated when the allocator is destroyed.
///
/// An allocator that was default-constructed but never successfully
/// initialized has `begin_ == nullptr`; calling `FromUsableSpace` on a null
/// pointer would be undefined behavior, so such allocators are skipped.
template <typename BlockType>
SplitFreeListAllocator<BlockType>::~SplitFreeListAllocator() {
  if (begin_ == nullptr) {
    return;
  }
  auto* begin = BlockType::FromUsableSpace(static_cast<std::byte*>(begin_));
  for (auto* block : Range(begin)) {
    if (block->Used()) {
      CrashOnAllocated(block);
    }
  }
}

/// Returns an iterable range over every block in this allocator's region.
template <typename BlockType>
typename BlockType::Range SplitFreeListAllocator<BlockType>::blocks() const {
  // `begin_` points at the usable space of the first block; recover the block
  // header from it to anchor the range.
  return Range(BlockType::FromUsableSpace(static_cast<std::byte*>(begin_)));
}

/// Sets the memory region and the small/large threshold for this allocator.
///
/// @param[in]  region     Memory to be managed; must be non-null.
/// @param[in]  threshold  Allocations of this size or larger are "large".
/// @retval     OK                  Initialization succeeded.
/// @retval     INVALID_ARGUMENT    The memory region is null.
/// @retval     RESOURCE_EXHAUSTED  The region is too small for `BlockType`.
/// @retval     OUT_OF_RANGE        The region is too large for `BlockType`.
template <typename BlockType>
Status SplitFreeListAllocator<BlockType>::Init(ByteSpan region,
                                               size_t threshold) {
  if (region.data() == nullptr) {
    return Status::InvalidArgument();
  }
  if (BlockType::kCapacity < region.size()) {
    return Status::OutOfRange();
  }

  // Blocks must start at an aligned address; skip any leading bytes needed to
  // reach the first aligned offset, keeping as much of the region as possible.
  auto start = reinterpret_cast<uintptr_t>(region.data());
  size_t padding = AlignUp(start, BlockType::kAlignment) - start;
  Result<BlockType*> block_result = BlockType::Init(region.subspan(padding));
  if (!block_result.ok()) {
    return block_result.status();
  }

  // The allocator starts out with its entire memory as one free block.
  BlockType* block = *block_result;
  begin_ = block->UsableSpace();
  end_ = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(begin_) +
                                 block->InnerSize());
  first_free_ = block;
  last_free_ = block;
  threshold_ = threshold;
  return OkStatus();
}

/// Reports whether `ptr` lies within this allocator's memory region.
///
/// Returns OK for pointers in [begin_, end_) and OUT_OF_RANGE otherwise.
/// Note that an uninitialized allocator has begin_ == end_ == nullptr and
/// rejects every pointer.
template <typename BlockType>
Status SplitFreeListAllocator<BlockType>::DoQuery(const void* ptr,
                                                  Layout) const {
  if (begin_ <= ptr && ptr < end_) {
    return OkStatus();
  }
  return Status::OutOfRange();
}

/// Allocates memory, dispatching to the small or large strategy.
///
/// Returns null if the allocator is uninitialized or the request is empty.
/// The requested alignment is raised to at least `BlockType::kAlignment`
/// before dispatching.
template <typename BlockType>
void* SplitFreeListAllocator<BlockType>::DoAllocate(Layout layout) {
  if (begin_ == nullptr || layout.size() == 0) {
    return nullptr;
  }
  // Blocks impose a minimum alignment; honor the stricter of the two.
  Layout adjusted(layout.size(),
                  std::max(layout.alignment(), BlockType::kAlignment));
  if (adjusted.size() < threshold_) {
    return DoAllocateSmall(adjusted);
  }
  return DoAllocateLarge(adjusted);
}

/// Allocates a "small" chunk from the high-address end of the region.
///
/// Returns the usable-space pointer of the allocated block, or null if no
/// free block can satisfy the layout.
template <typename BlockType>
void* SplitFreeListAllocator<BlockType>::DoAllocateSmall(Layout layout) {
  // Update the cached last free block. Earlier allocations may have used the
  // block `last_free_` points at without refreshing the cache, so walk
  // backward until a free block (or `first_free_`) is reached.
  while (last_free_->Used() && first_free_ != last_free_) {
    last_free_ = last_free_->Prev();
  }
  // Search backwards for the first block that can hold this allocation, so
  // small allocations cluster at higher addresses.
  for (auto* block : ReverseRange(last_free_, first_free_)) {
    // AllocLast carves the allocation from the high-addressed end of `block`.
    if (BlockType::AllocLast(block, layout.size(), layout.alignment()).ok()) {
      return block->UsableSpace();
    }
  }
  // No valid block found.
  return nullptr;
}

/// Allocates a "large" chunk from the low-address end of the region.
///
/// Returns the usable-space pointer of the allocated block, or null if no
/// free block can satisfy the layout.
template <typename BlockType>
void* SplitFreeListAllocator<BlockType>::DoAllocateLarge(Layout layout) {
  // Update the cached first free block. Earlier allocations may have used the
  // block `first_free_` points at without refreshing the cache, so walk
  // forward until a free block (or `last_free_`) is reached.
  while (first_free_->Used() && first_free_ != last_free_) {
    first_free_ = first_free_->Next();
  }
  // Search forwards for the first block that can hold this allocation, so
  // large allocations cluster at lower addresses.
  for (auto* block : Range(first_free_, last_free_)) {
    // AllocFirst carves the allocation from the low-addressed end of `block`.
    if (BlockType::AllocFirst(block, layout.size(), layout.alignment()).ok()) {
      // A new last free block may be split off the end of the allocated block.
      if (last_free_ <= block) {
        last_free_ = block->Last() ? block : block->Next();
      }
      return block->UsableSpace();
    }
  }
  // No valid block found.
  return nullptr;
}

/// Returns a block of memory to the allocator.
///
/// Pointers outside this allocator's region (including null) are silently
/// ignored. Crashes if `ptr` is in range but does not point at a valid block.
template <typename BlockType>
void SplitFreeListAllocator<BlockType>::DoDeallocate(void* ptr, Layout) {
  // Do nothing if uninitialized or no memory block pointer.
  if (begin_ == nullptr || ptr < begin_ || end_ <= ptr) {
    return;
  }
  auto* block = BlockType::FromUsableSpace(static_cast<std::byte*>(ptr));
  block->CrashIfInvalid();

  // Free the block and merge it with its neighbors, if possible.
  BlockType::Free(block);

  // Update the first and/or last free block pointers. The freed (and possibly
  // merged) block extends the cached free range if it falls outside it.
  if (block < first_free_) {
    first_free_ = block;
  }
  if (block->Last() || last_free_ < block->Next()) {
    last_free_ = block;
  }
}

/// Attempts to resize an allocation in place.
///
/// Returns true on success; returns false if the allocator is uninitialized,
/// `ptr` is outside this allocator's region, or the block cannot be resized.
/// Crashes if `ptr` is in range but does not point at a valid block.
template <typename BlockType>
bool SplitFreeListAllocator<BlockType>::DoResize(void* ptr,
                                                 Layout layout,
                                                 size_t new_size) {
  // Fail to resize if uninitialized or invalid parameters.
  if (begin_ == nullptr || !DoQuery(ptr, layout).ok()) {
    return false;
  }

  // Ensure that this allocation came from this object.
  auto* block = BlockType::FromUsableSpace(static_cast<std::byte*>(ptr));
  block->CrashIfInvalid();

  // Record whether the following block holds either cached free-list pointer
  // before resizing, since resizing may move, create, or consume that block.
  bool next_is_first_free = !block->Last() && block->Next() == first_free_;
  bool next_is_last_free = !block->Last() && block->Next() == last_free_;
  if (!BlockType::Resize(block, new_size).ok()) {
    return false;
  }

  // The block after this one may have grown or shrunk.
  if (next_is_first_free) {
    first_free_ = block->Last() ? block : block->Next();
  }
  if (next_is_last_free) {
    last_free_ = block->Last() ? block : block->Next();
  }
  return true;
}

}  // namespace pw::allocator