aboutsummaryrefslogtreecommitdiff
path: root/pw_allocator/block.cc
blob: bbf1a34b6dc013bb99a0da77bdc749466626ae24 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
// Copyright 2020 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.

#include "pw_allocator/block.h"

#include <cstdint>
#include <cstring>

namespace pw::allocator {

// Initializes a single free Block spanning `region` and returns it via
// `block`. The region must be aligned to alignof(Block) and large enough to
// hold at least the Block header; otherwise InvalidArgument is returned.
Status Block::Init(const std::span<std::byte> region, Block** block) {
  // Ensure the region we're given is aligned and sized accordingly
  if (reinterpret_cast<uintptr_t>(region.data()) % alignof(Block) != 0) {
    return Status::InvalidArgument();
  }

  if (region.size() < sizeof(Block)) {
    return Status::InvalidArgument();
  }

  // Place the block header at the start of the region.
  Block* first = reinterpret_cast<Block*>(region.data());

  // Make "next" point just past the end of this block; forming a linked list
  // with the following storage. Since the space between this block and the
  // next are implicitly part of the raw data, size can be computed by
  // subtracting the pointers.
  //
  // Note: use data() + size() rather than end() — std::span::end() returns an
  // iterator that is not guaranteed to be a raw pointer, so it cannot be
  // portably reinterpret_cast to Block*.
  first->next = reinterpret_cast<Block*>(region.data() + region.size());
  first->MarkLast();

  first->prev = nullptr;
  *block = first;
#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  (*block)->PoisonBlock();
#endif  // PW_ALLOCATOR_POISON_ENABLE
  return Status::Ok();
}

// Splits this (free) block so its usable space is `head_block_inner_size`
// bytes (rounded up to alignof(Block)), carving a new free block out of the
// remainder. On success `*new_block` points at the newly created block.
//
// Errors:
//   InvalidArgument    - new_block is null.
//   FailedPrecondition - this block is in use.
//   OutOfRange         - the requested (aligned) size exceeds this block.
//   ResourceExhausted  - the remainder is too small to hold a Block header.
Status Block::Split(size_t head_block_inner_size, Block** new_block) {
  if (new_block == nullptr) {
    return Status::InvalidArgument();
  }

  // Don't split used blocks.
  // TODO: Relax this restriction? Flag to enable/disable this check?
  if (Used()) {
    return Status::FailedPrecondition();
  }

  // First round head_block_inner_size up to an alignof(Block) boundary.
  // This ensures that the next block header is aligned accordingly.
  // Alignment must be a power of two, hence alignof(Block) - 1 is a mask of
  // the remainder bits.
  auto align_bit_mask = alignof(Block) - 1;
  size_t aligned_head_block_inner_size = head_block_inner_size;
  if ((head_block_inner_size & align_bit_mask) != 0) {
    aligned_head_block_inner_size =
        (head_block_inner_size & ~align_bit_mask) + alignof(Block);
  }

  // (1) Are we trying to allocate a head block larger than the current head
  // block? This may happen because of the alignment above.
  if (aligned_head_block_inner_size > InnerSize()) {
    return Status::OutOfRange();
  }

  // (2) Does the resulting block have enough space to store the header?
  // TODO: What to do if the returned section is empty (i.e. remaining
  // size == sizeof(Block))?
  if (InnerSize() - aligned_head_block_inner_size <
      sizeof(Block) + 2 * PW_ALLOCATOR_POISON_OFFSET) {
    return Status::ResourceExhausted();
  }

  // Create the new block inside the current one. Pointer-to-integer math is
  // done with uintptr_t (the unsigned type intended for this); intptr_t would
  // risk signed arithmetic on large addresses.
  Block* new_next = reinterpret_cast<Block*>(
      // From the current position...
      reinterpret_cast<uintptr_t>(this) +
      // skip past the current header...
      sizeof(*this) +
      // add the poison bytes before usable space ...
      PW_ALLOCATOR_POISON_OFFSET +
      // into the usable bytes by the new inner size...
      aligned_head_block_inner_size +
      // add the poison bytes after the usable space ...
      PW_ALLOCATOR_POISON_OFFSET);

  // If we're inserting in the middle, we need to update the current next
  // block to point to what we're inserting
  if (!Last()) {
    Next()->prev = new_next;
  }

  // Copy next verbatim so the next block also gets the "last"-ness
  new_next->next = next;
  new_next->prev = this;

  // Update the current block to point to the new head.
  next = new_next;

  *new_block = next;

#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  PoisonBlock();
  (*new_block)->PoisonBlock();
#endif  // PW_ALLOCATOR_POISON_ENABLE

  return Status::Ok();
}

// Merges this free block with the free block that immediately follows it.
// Fails with OutOfRange when this is the last block, and with
// FailedPrecondition when either block is in use.
Status Block::MergeNext() {
  // Nothing follows the last block, so there is nothing to merge with.
  if (Last()) {
    return Status::OutOfRange();
  }

  Block* const successor = Next();

  // Merging is only legal when both neighbors are free.
  if (Used() || successor->Used()) {
    return Status::FailedPrecondition();
  }

  // Absorb the successor: our next pointer takes over the successor's next
  // pointer. The block after the successor (if any) must then be re-wired so
  // its prev pointer points back at us.
  next = successor->next;

  // Copying the pointer also copied the "last" status, so Last() is accurate
  // here and guards the re-wire.
  if (!Last()) {
    Next()->prev = this;
  }

  return Status::Ok();
}

// Merges this free block into the free block immediately before it.
// Implemented as MergeNext() on the predecessor.
Status Block::MergePrev() {
  Block* const predecessor = prev;

  // No predecessor means nothing to merge into.
  if (predecessor == nullptr) {
    return Status::OutOfRange();
  }

  // WARNING: on success this object is absorbed into the predecessor. The
  // storage still exists but `this` is no longer a valid block header —
  // don't touch `this` after this call.
  return predecessor->MergeNext();
}

// TODO(pwbug/234): Add stack tracing to locate which call to the heap operation
// caused the corruption.
// TODO: Add detailed information to log report and leave succinct messages
// in the crash message.
// Validates this block via CheckStatus() and triggers a PW_DCHECK failure
// describing the specific corruption found. A VALID block is a no-op.
void Block::CrashIfInvalid() {
  switch (CheckStatus()) {
    case VALID:
      break;
    case MISALIGNED:
      PW_DCHECK(false, "The block at address %p is not aligned.", this);
      break;
    case NEXT_MISMATCHED:
      // Doubly-linked-list invariant broken: next block's back-pointer is
      // stale or corrupted.
      PW_DCHECK(false,
                "The 'prev' field in the next block (%p) does not match the "
                "address of the current block (%p).",
                Next()->Prev(),
                this);
      break;
    case PREV_MISMATCHED:
      // Mirror case: previous block's forward pointer does not reach us.
      PW_DCHECK(false,
                "The 'next' field in the previous block (%p) does not match "
                "the address of the current block (%p).",
                Prev()->Next(),
                this);
      break;
    case POISON_CORRUPTED:
      // Guard bytes around the usable space were overwritten — likely a
      // buffer overflow or underflow by the block's user.
      PW_DCHECK(
          false, "The poisoned pattern in the block at %p is corrupted.", this);
      break;
  }
}

// This function will return a Block::BlockStatus that is either VALID or
// indicates the reason why the Block is invalid. If the Block is invalid at
// multiple points, this function will only return one of the reasons.
Block::BlockStatus Block::CheckStatus() const {
  // The header must sit on an alignof(Block) boundary.
  if (reinterpret_cast<uintptr_t>(this) % alignof(Block) != 0) {
    return BlockStatus::MISALIGNED;
  }

  // Forward link check: the next block must lie after us and point back here.
  if (!Last()) {
    if (Next() <= this || Next()->Prev() != this) {
      return BlockStatus::NEXT_MISMATCHED;
    }
  }

  // Backward link check: the previous block must lie before us and point
  // forward to us.
  if (Prev() != nullptr) {
    if (Prev() >= this || Prev()->Next() != this) {
      return BlockStatus::PREV_MISMATCHED;
    }
  }

#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  // Guard-byte check (only compiled in when poisoning is enabled).
  if (!CheckPoisonBytes()) {
    return BlockStatus::POISON_CORRUPTED;
  }
#endif  // PW_ALLOCATOR_POISON_ENABLE
  return BlockStatus::VALID;
}

// Paint sizeof(void*) bytes before and after the usable space in Block as the
// randomized function pattern. No-op unless PW_ALLOCATOR_POISON_ENABLE is set.
void Block::PoisonBlock() {
#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  // Guard region immediately after the header, before the usable space.
  // Use std::memcpy: only <cstring> is included, and the C++ standard only
  // guarantees the std::-qualified name (matches std::memcmp usage in
  // CheckPoisonBytes).
  std::byte* front_region = reinterpret_cast<std::byte*>(this) + sizeof(*this);
  std::memcpy(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET);

  // Guard region immediately before the next block header, after the usable
  // space.
  std::byte* end_region =
      reinterpret_cast<std::byte*>(Next()) - PW_ALLOCATOR_POISON_OFFSET;
  std::memcpy(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET);
#endif  // PW_ALLOCATOR_POISON_ENABLE
}

// Returns true if both guard regions around the usable space still contain
// the poison pattern (always true when poisoning is compiled out).
bool Block::CheckPoisonBytes() const {
#if defined(PW_ALLOCATOR_POISON_ENABLE) && PW_ALLOCATOR_POISON_ENABLE
  // Use direct const pointer arithmetic (mirroring PoisonBlock) instead of
  // round-tripping through intptr_t, which needlessly laundered away the
  // const on `this`.
  const std::byte* front_region =
      reinterpret_cast<const std::byte*>(this) + sizeof(*this);
  if (std::memcmp(front_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) {
    return false;
  }
  const std::byte* end_region =
      reinterpret_cast<const std::byte*>(this->Next()) -
      PW_ALLOCATOR_POISON_OFFSET;
  if (std::memcmp(end_region, POISON_PATTERN, PW_ALLOCATOR_POISON_OFFSET)) {
    return false;
  }
#endif  // PW_ALLOCATOR_POISON_ENABLE
  return true;
}

}  // namespace pw::allocator