µOS++ IIIe Reference 7.0.0
The third edition of µOS++, a POSIX-inspired open-source framework, written in C++
Loading...
Searching...
No Matches
first-fit-top.cpp
Go to the documentation of this file.
1/*
2 * This file is part of the µOS++ distribution.
3 * (https://github.com/micro-os-plus)
4 * Copyright (c) 2016 Liviu Ionescu.
5 *
6 * Permission is hereby granted, free of charge, to any person
7 * obtaining a copy of this software and associated documentation
8 * files (the "Software"), to deal in the Software without
9 * restriction, including without limitation the rights to use,
10 * copy, modify, merge, publish, distribute, sublicense, and/or
11 * sell copies of the Software, and to permit persons to whom
12 * the Software is furnished to do so, subject to the following
13 * conditions:
14 *
15 * The above copyright notice and this permission notice shall be
16 * included in all copies or substantial portions of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
19 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
20 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
21 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
22 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
23 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
24 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
25 * OTHER DEALINGS IN THE SOFTWARE.
26 */
27
29#include <memory>
30
31// ----------------------------------------------------------------------------
32
33#if defined(__clang__)
34#pragma clang diagnostic ignored "-Wc++98-compat"
35#endif
36
37// ----------------------------------------------------------------------------
38
39namespace os
40{
41 namespace memory
42 {
43
44 // ========================================================================
45
47 {
48 trace::printf ("first_fit_top::%s() @%p %s\n", __func__, this, name ());
49 }
50
51 void
52 first_fit_top::internal_construct_ (void* addr, std::size_t bytes)
53 {
54 assert(bytes > chunk_minsize);
55
56 arena_addr_ = addr;
57 total_bytes_ = bytes;
58
59 // Align address for first chunk.
60 void* res;
61 // Possibly adjust the last two parameters.
62 res = std::align (chunk_align, chunk_minsize, arena_addr_, total_bytes_);
63 // std::align() will fail if it cannot fit the min chunk.
64 if (res == nullptr)
65 {
66 assert(res != nullptr);
67 }
68 assert((total_bytes_ % chunk_align) == 0);
69
71 }
72
73 void
75 {
76 // Fill it with the first chunk.
77 chunk_t* chunk = reinterpret_cast<chunk_t*> (arena_addr_);
78 // Entire arena is a big free chunk.
79 chunk->size = total_bytes_;
80 // Mark the end of the list with a null pointer.
81 chunk->next = nullptr;
82
83 allocated_bytes_ = 0;
84 max_allocated_bytes_ = 0;
85 free_bytes_ = total_bytes_;
86 allocated_chunks_ = 0;
87 free_chunks_ = 1;
88
89 // Remember first chunk as list head.
90 free_list_ = chunk;
91 }
92
93 void
95 {
96#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
97 trace::printf ("first_fit_top::%s() @%p %s\n", __func__, this, name ());
98#endif
99
100 internal_reset_ ();
101 }
102
103#pragma GCC diagnostic push
104// Needed because 'alignment' is used only in trace calls.
105#pragma GCC diagnostic ignored "-Wunused-parameter"
106
    /**
     * @brief Implementation of the memory allocator.
     * @param bytes Number of bytes requested by the caller.
     * @param alignment Requested payload alignment, in bytes.
     * @return Pointer to the aligned payload, or nullptr when out of
     *  memory and no out-of-memory handler is installed.
     *
     * First-fit scan of the address-ordered free list. When a chunk is
     * split, the HIGH end (top) of the chunk is returned — hence the
     * "first-fit-top" name; the low remainder stays on the free list.
     * If no chunk fits, the out-of-memory handler (if any) is invoked
     * and the scan is retried, assuming the handler freed some memory.
     */
    void*
    first_fit_top::do_allocate (std::size_t bytes, std::size_t alignment)
    {
      // Padding needed so the payload can be aligned inside the chunk.
      std::size_t block_padding = calc_block_padding (alignment);
      // Round the request up to the chunk granularity, then add
      // room for the padding and the chunk header.
      std::size_t alloc_size = rtos::memory::align_size (bytes, chunk_align);
      alloc_size += block_padding;
      alloc_size += chunk_offset;

      // Never allocate less than the minimum viable chunk.
      std::size_t block_minchunk = calc_block_minchunk (block_padding);
      alloc_size = os::rtos::memory::max (alloc_size, block_minchunk);

      chunk_t* chunk;

      // Outer loop: retried only after the out-of-memory handler runs.
      while (true)
        {
          chunk_t* prev_chunk = free_list_;
          chunk = prev_chunk;

          // Inner loop: first-fit scan of the free list.
          while (chunk)
            {
              // Signed so a too-small chunk yields a negative remainder.
              int rem = static_cast<int> (chunk->size - alloc_size);
              if (rem >= 0)
                {
                  if ((static_cast<std::size_t> (rem)) >= block_minchunk)
                    {
                      // Found a chunk that is much larger than required size
                      // (at least one more chunk is available);
                      // break it into two chunks and return the second one.

                      // The low part keeps the remainder and stays listed.
                      chunk->size = static_cast<std::size_t> (rem);
                      // The returned chunk is the top (high-address) part.
                      chunk =
                          reinterpret_cast<chunk_t *> (reinterpret_cast<char *> (chunk)
                              + rem);
                      chunk->size = alloc_size;

                      // Splitting one chunk creates one more chunk.
                      ++free_chunks_;
                    }
                  else
                    {
                      // Found a chunk that is exactly the size or slightly
                      // larger than the requested size; return this chunk.

                      if (prev_chunk == chunk)
                        {
                          // This implies p==r==free_list, i.e. the list head.
                          // The next chunk becomes the first list element.
                          free_list_ = chunk->next;

                          // If this was the last chunk, the free list is empty.
                        }
                      else
                        {
                          // Normal case. Remove it from the free_list.
                          prev_chunk->next = chunk->next;
                        }
                    }
                  break;
                }
              prev_chunk = chunk;
              chunk = chunk->next;
            }

          // A non-null chunk means the scan succeeded.
          if (chunk != nullptr)
            {
              break;
            }

          // No fitting chunk and no recovery possible: fail the allocation.
          if (out_of_memory_handler_ == nullptr)
            {
#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
              trace::printf ("first_fit_top::%s(%u,%u)=0 @%p %s\n", __func__,
                             bytes, alignment, this, name ());
#endif

              return nullptr;
            }

#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
          trace::printf ("first_fit_top::%s(%u,%u) @%p %s out of memory\n",
                         __func__, bytes, alignment, this, name ());
#endif
          out_of_memory_handler_ ();

          // If the handler returned, assume it freed some memory
          // and try again to allocate.
        }

      // Align the payload inside the chunk and update statistics.
      void* aligned_payload = internal_align_ (chunk, bytes, alignment);

#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
      trace::printf ("first_fit_top::%s(%u,%u)=%p,%u @%p %s\n", __func__, bytes,
                     alignment, aligned_payload, alloc_size, this, name ());
#endif

      return aligned_payload;
    }
220
    /**
     * @brief Implementation of the memory deallocator.
     * @param addr Payload address previously returned by do_allocate().
     * @param bytes Size of the block, or 0 when unknown (e.g. free()).
     * @param alignment Alignment used at allocation (used for tracing only).
     * @par Returns
     *  Nothing.
     *
     * Recovers the chunk header from the payload address, updates the
     * statistics and re-inserts the chunk into the address-ordered free
     * list, coalescing with adjacent free chunks where possible.
     * Invalid or already-freed addresses are rejected (assert + return),
     * never thrown.
     */
    void
    first_fit_top::do_deallocate (void* addr, std::size_t bytes,
                                  std::size_t alignment) noexcept
    {
#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
      trace::printf ("first_fit_top::%s(%p,%u,%u) @%p %s\n", __func__, addr,
                     bytes, alignment, this, name ());
#endif

      // The address must be inside the arena; no exceptions.
      if ((addr < arena_addr_)
          || (addr > (static_cast<char*> (arena_addr_) + total_bytes_)))
        {
          assert(false);
          return;
        }

      // Compute the chunk address from the user address.
      chunk_t* chunk = reinterpret_cast<chunk_t *> (static_cast<char *> (addr)
          - chunk_offset);

      // If the block was aligned, the offset appears as size; adjust back.
      // (internal_align_() stored a negative offset in the word before
      // the aligned payload.)
      if (static_cast<std::ptrdiff_t> (chunk->size) < 0)
        {
          chunk = reinterpret_cast<chunk_t *> (reinterpret_cast<char *> (chunk)
              + static_cast<std::ptrdiff_t> (chunk->size));
        }

      if (bytes)
        {
          // If size is known, validate.
          // (when called from free(), the size is not known).
          if (bytes + chunk_offset > chunk->size)
            {
              assert(false);
              return;
            }
        }

      // Update statistics.
      // What is subtracted from allocated is added to free.
      internal_decrease_allocated_statistics (chunk->size);

      // If the free list is empty, create it with the current chunk, alone.
      if (free_list_ == nullptr)
        {
          // Mark the end of the list with a null pointer.
          chunk->next = nullptr;

          // The chunk becomes the first list element.
          free_list_ = chunk;
          assert(free_chunks_ == 1);

          return;
        }

      // The free list exists; is the chunk before the list head?
      if (chunk < free_list_)
        {
          // Is the chunk *right* before the list head?
          if (reinterpret_cast<char *> (chunk) + chunk->size
              == reinterpret_cast<char *> (free_list_))
            {
              // Coalesce chunk to the list head.
              chunk->size += free_list_->size;
              chunk->next = free_list_->next; // May be nullptr

              // Coalescing means one less chunk.
              --free_chunks_;
            }
          else
            {
              // Insert before the list head.
              chunk->next = free_list_;
            }
          // The chunk becomes the new list head.
          free_list_ = chunk;

          return;
        }

      // Walk the free list to find the place to insert,
      // (the list must be ordered by addresses).
      // Warning: not deterministic!

      chunk_t* next_chunk = free_list_;
      chunk_t* prev_chunk;
      do
        {
          prev_chunk = next_chunk;
          next_chunk = next_chunk->next;
        }
      while (next_chunk != nullptr && next_chunk <= chunk);

      // Now prev_chunk <= chunk and either next_chunk == nullptr or
      // next_chunk > chunk.
      // Try to merge with chunks immediately before/after it.

      if (reinterpret_cast<char *> (prev_chunk) + prev_chunk->size
          == reinterpret_cast<char *> (chunk))
        {
          // Chunk to be freed is adjacent to a free chunk before it.
          prev_chunk->size += chunk->size;

          // Coalescing means one less chunk.
          --free_chunks_;

          // If the merged chunk is also adjacent to the chunk after it,
          // merge again. Does not match if next_chunk == nullptr.
          if (reinterpret_cast<char *> (prev_chunk) + prev_chunk->size
              == reinterpret_cast<char *> (next_chunk))
            {
              prev_chunk->size += next_chunk->size;
              prev_chunk->next = next_chunk->next;

              // Coalescing means one less chunk.
              --free_chunks_;
            }
        }
      else if (reinterpret_cast<char *> (prev_chunk) + prev_chunk->size
          > reinterpret_cast<char *> (chunk))
        {
          // Already freed: the chunk lies inside a free chunk.

          // Revert statistics.
          // What is subtracted from free is added to allocated.
          allocated_bytes_ += chunk->size;
          free_bytes_ -= chunk->size;
          ++allocated_chunks_;
          --free_chunks_;

          trace::printf ("first_fit_top::%s(%p,%u,%u) @%p %s already freed\n",
                         __func__, addr, bytes, alignment, this, name ());

          return;
        }
      // Does not match if next_chunk == nullptr.
      else if (reinterpret_cast<char *> (chunk) + chunk->size
          == reinterpret_cast<char *> (next_chunk))
        {
          // The chunk to be freed is adjacent to a free chunk after it.
          chunk->size += next_chunk->size;
          chunk->next = next_chunk->next; // May be nullptr.
          prev_chunk->next = chunk;

          // Coalescing means one less chunk.
          --free_chunks_;
        }
      else
        {
          // Not adjacent to any chunk. Just insert it.
          // The result is a new fragment. Not great...
          chunk->next = next_chunk; // May be nullptr.
          prev_chunk->next = chunk;
        }
    }
391
392 std::size_t
393 first_fit_top::do_max_size (void) const noexcept
394 {
395 return total_bytes_;
396 }
397
398 void*
399 first_fit_top::internal_align_ (chunk_t* chunk, std::size_t bytes,
400 std::size_t alignment)
401 {
402 // Update statistics.
403 // The value subtracted from free is added to allocated.
405
406 // Compute pointer to payload area.
407 char* payload = reinterpret_cast<char *> (chunk) + chunk_offset;
408
409 // Align it to user provided alignment.
410 void* aligned_payload = payload;
411 std::size_t aligned_size = chunk->size - chunk_offset;
412
413 void* res;
414 res = std::align (alignment, bytes, aligned_payload, aligned_size);
415 if (res == nullptr)
416 {
417 assert(res != nullptr);
418 }
419
420 // Compute the possible alignment offset.
421 std::ptrdiff_t offset = static_cast<char *> (aligned_payload) - payload;
422 if (offset)
423 {
424 // If non-zero, store it in the gap left by alignment in the
425 // chunk header.
426
427 chunk_t* adj_chunk =
428 reinterpret_cast<chunk_t *> (static_cast<char *> (aligned_payload)
429 - chunk_offset);
430 adj_chunk->size = static_cast<std::size_t> (-offset);
431 }
432
433 assert(
434 (reinterpret_cast<uintptr_t> (aligned_payload) & (alignment - 1))
435 == 0);
436
437 return aligned_payload;
438 }
439
440#pragma GCC diagnostic pop
441
442 // --------------------------------------------------------------------------
443 } /* namespace memory */
444} /* namespace os */
445
446// ----------------------------------------------------------------------------
virtual ~first_fit_top() override
Destruct the memory resource object instance.
virtual void do_reset(void) noexcept override
Implementation of the function to reset the memory manager.
void * internal_align_(chunk_t *chunk, std::size_t bytes, std::size_t alignment)
Internal function to align a chunk.
virtual void do_deallocate(void *addr, std::size_t bytes, std::size_t alignment) noexcept override
Implementation of the memory deallocator.
void internal_reset_(void) noexcept
Internal function to reset the memory resource.
virtual void * do_allocate(std::size_t bytes, std::size_t alignment) override
Implementation of the memory allocator.
virtual std::size_t do_max_size(void) const noexcept override
Implementation of the function to get max size.
void internal_construct_(void *addr, std::size_t bytes)
Internal function to construct the memory resource.
const char * name(void) const
Get object name.
Definition os-decls.h:774
void internal_increase_allocated_statistics(std::size_t bytes) noexcept
Update statistics after allocation.
int printf(const char *format,...)
Write a formatted string to the trace device.
Definition trace.cpp:74
constexpr std::size_t align_size(std::size_t size, std::size_t align) noexcept
Helper function to align size values.
Definition os-memory.h:97
constexpr std::size_t max(std::size_t a, std::size_t b)
Definition os-memory.h:85
System namespace.