µOS++ IIIe Reference  v6.3.15
“Perfekt ist nicht gut genug”
The third edition of µOS++, a POSIX-inspired open source system, written in C++.
first-fit-top.cpp
Go to the documentation of this file.
1 /*
2  * This file is part of the µOS++ distribution.
3  * (https://github.com/micro-os-plus)
4  * Copyright (c) 2016 Liviu Ionescu.
5  *
6  * Permission is hereby granted, free of charge, to any person
7  * obtaining a copy of this software and associated documentation
8  * files (the "Software"), to deal in the Software without
9  * restriction, including without limitation the rights to use,
10  * copy, modify, merge, publish, distribute, sublicense, and/or
11  * sell copies of the Software, and to permit persons to whom
12  * the Software is furnished to do so, subject to the following
13  * conditions:
14  *
15  * The above copyright notice and this permission notice shall be
16  * included in all copies or substantial portions of the Software.
17  *
18  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
19  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
20  * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
21  * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
22  * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
23  * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
24  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
25  * OTHER DEALINGS IN THE SOFTWARE.
26  */
27 
29 #include <memory>
30 
31 // ----------------------------------------------------------------------------
32 
33 namespace os
34 {
35  namespace memory
36  {
37 
38  // ========================================================================
39 
44  {
45  trace::printf ("first_fit_top::%s() @%p %s\n", __func__, this, name ());
46  }
47 
51  void
52  first_fit_top::internal_construct_ (void* addr, std::size_t bytes)
53  {
54  assert(bytes > chunk_minsize);
55 
56  arena_addr_ = addr;
57  total_bytes_ = bytes;
58 
59  // Align address for first chunk.
60  void* res;
61  // Possibly adjust the last two parameters.
62  res = std::align (chunk_align, chunk_minsize, arena_addr_, total_bytes_);
63  // std::align() will fail if it cannot fit the min chunk.
64  if (res == nullptr)
65  {
66  assert(res != nullptr);
67  }
68  assert((total_bytes_ % chunk_align) == 0);
69 
70  internal_reset_ ();
71  }
72 
76  void
78  {
79  // Fill it with the first chunk.
80  chunk_t* chunk = reinterpret_cast<chunk_t*> (arena_addr_);
81  // Entire arena is a big free chunk.
82  chunk->size = total_bytes_;
83  // Mark the end of the list with a null pointer.
84  chunk->next = nullptr;
85 
86  allocated_bytes_ = 0;
87  max_allocated_bytes_ = 0;
88  free_bytes_ = total_bytes_;
89  allocated_chunks_ = 0;
90  free_chunks_ = 1;
91 
92  // Remember first chunk as list head.
93  free_list_ = chunk;
94  }
95 
    // Public reset entry point: optionally trace, then rebuild the free
    // list via internal_reset_().
    void
    first_fit_top::do_reset (void) noexcept
    {
#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
      trace::printf ("first_fit_top::%s() @%p %s\n", __func__, this, name ());
#endif

      internal_reset_ ();
    }
108 
109 #pragma GCC diagnostic push
110 // Needed because 'alignment' is used only in trace calls.
111 #pragma GCC diagnostic ignored "-Wunused-parameter"
112 
    // Allocate a block of `bytes` bytes for the given `alignment`.
    //
    // First-fit strategy, taking from the *top*: the free list is
    // scanned in order and, when a larger chunk is split, the high
    // (top) part is returned while the low part stays linked in place.
    // If no chunk fits and an out-of-memory handler is installed, the
    // handler is invoked and the scan retried; otherwise returns nullptr.
    void*
    first_fit_top::do_allocate (std::size_t bytes, std::size_t alignment)
    {
      // Extra room needed so the payload can be aligned inside the chunk.
      std::size_t block_padding = calc_block_padding (alignment);
      // Round the request up to the chunk granularity.
      std::size_t alloc_size = rtos::memory::align_size (bytes, chunk_align);
      alloc_size += block_padding;
      // Room for the chunk header in front of the payload.
      alloc_size += chunk_offset;

      // Never carve a chunk smaller than the minimal viable chunk.
      std::size_t block_minchunk = calc_block_minchunk (block_padding);
      alloc_size = os::rtos::memory::max (alloc_size, block_minchunk);

      chunk_t* chunk;

      // Outer loop: retry after the out-of-memory handler (if any)
      // frees some memory.
      while (true)
        {
          chunk_t* prev_chunk = free_list_;
          chunk = prev_chunk;

          // Inner loop: first-fit scan of the free list.
          while (chunk)
            {
              // Negative when this chunk is too small for the request.
              int rem = static_cast<int> (chunk->size - alloc_size);
              if (rem >= 0)
                {
                  if ((static_cast<std::size_t> (rem)) >= block_minchunk)
                    {
                      // Found a chunk that is much larger than required size
                      // (at least one more chunk is available);
                      // break it into two chunks and return the second one.

                      // The low part keeps the remainder and stays in the
                      // free list without relinking.
                      chunk->size = static_cast<std::size_t> (rem);
                      // The returned chunk is the top (high address) part.
                      chunk =
                          reinterpret_cast<chunk_t *> (reinterpret_cast<char *> (chunk)
                              + rem);
                      chunk->size = alloc_size;

                      // Splitting one chunk creates one more chunk.
                      ++free_chunks_;
                    }
                  else
                    {
                      // Found a chunk that is exactly the size or slightly
                      // larger than the requested size; return this chunk.

                      if (prev_chunk == chunk)
                        {
                          // This implies p==r==free_list, i.e. the list head.
                          // The next chunk becomes the first list element.
                          free_list_ = chunk->next;

                          // If this was the last chunk, the free list is empty.
                        }
                      else
                        {
                          // Normal case. Remove it from the free_list.
                          prev_chunk->next = chunk->next;
                        }
                    }
                  break;
                }
              prev_chunk = chunk;
              chunk = chunk->next;
            }

          if (chunk != nullptr)
            {
              // A suitable chunk was found; stop retrying.
              break;
            }

          if (out_of_memory_handler_ == nullptr)
            {
#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
              trace::printf ("first_fit_top::%s(%u,%u)=0 @%p %s\n", __func__,
                             bytes, alignment, this, name ());
#endif

              // No handler to reclaim memory; fail the allocation.
              return nullptr;
            }

#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
          trace::printf ("first_fit_top::%s(%u,%u) @%p %s out of memory\n",
                         __func__, bytes, alignment, this, name ());
#endif
          out_of_memory_handler_ ();

          // If the handler returned, assume it freed some memory
          // and try again to allocate.
        }

      // Align the payload inside the selected chunk (also updates
      // the allocation statistics).
      void* aligned_payload = internal_align_ (chunk, bytes, alignment);

#if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
      trace::printf ("first_fit_top::%s(%u,%u)=%p,%u @%p %s\n", __func__, bytes,
                     alignment, aligned_payload, alloc_size, this, name ());
#endif

      return aligned_payload;
    }
226 
242  void
243  first_fit_top::do_deallocate (void* addr, std::size_t bytes,
244  std::size_t alignment) noexcept
245  {
246 #if defined(OS_TRACE_LIBCPP_MEMORY_RESOURCE)
247  trace::printf ("first_fit_top::%s(%p,%u,%u) @%p %s\n", __func__, addr,
248  bytes, alignment, this, name ());
249 #endif
250 
251  // The address must be inside the arena; no exceptions.
252  if ((addr < arena_addr_)
253  || (addr > (static_cast<char*> (arena_addr_) + total_bytes_)))
254  {
255  assert(false);
256  return;
257  }
258 
259  // Compute the chunk address from the user address.
260  chunk_t* chunk = reinterpret_cast<chunk_t *> (static_cast<char *> (addr)
261  - chunk_offset);
262 
263  // If the block was aligned, the offset appears as size; adjust back.
264  if (static_cast<std::ptrdiff_t> (chunk->size) < 0)
265  {
266  chunk = reinterpret_cast<chunk_t *> (reinterpret_cast<char *> (chunk)
267  + static_cast<std::ptrdiff_t> (chunk->size));
268  }
269 
270  if (bytes)
271  {
272  // If size is known, validate.
273  // (when called from free(), the size is not known).
274  if (bytes + chunk_offset > chunk->size)
275  {
276  assert(false);
277  return;
278  }
279  }
280 
281  // Update statistics.
282  // What is subtracted from allocated is added to free.
284 
285  // If the free list is empty, create it with the current chunk, alone.
286  if (free_list_ == nullptr)
287  {
288  // Mark the end of the list with a null pointer.
289  chunk->next = nullptr;
290 
291  // The chunk becomes the first list element.
292  free_list_ = chunk;
293  assert(free_chunks_ == 1);
294 
295  return;
296  }
297 
298  // The free list exists; is the chunk before the list head?
299  if (chunk < free_list_)
300  {
301  // Is the chunk *right* before the list head?
302  if (reinterpret_cast<char *> (chunk) + chunk->size
303  == reinterpret_cast<char *> (free_list_))
304  {
305  // Coalesce chunk to the list head.
306  chunk->size += free_list_->size;
307  chunk->next = free_list_->next; // May be nullptr
308 
309  // Coalescing means one less chunk.
310  --free_chunks_;
311  }
312  else
313  {
314  // Insert before the list head.
315  chunk->next = free_list_;
316  }
317  // The chunk becomes the new list head.
318  free_list_ = chunk;
319 
320  return;
321  }
322 
323  // Walk the free list to find the place to insert,
324  // (the list must be ordered by addresses).
325  // Warning: not deterministic!
326 
327  chunk_t* next_chunk = free_list_;
328  chunk_t* prev_chunk;
329  do
330  {
331  prev_chunk = next_chunk;
332  next_chunk = next_chunk->next;
333  }
334  while (next_chunk != nullptr && next_chunk <= chunk);
335 
336  // Now prev_chunk <= chunk and either next_chunk == nullptr or
337  // next_chunk > chunk.
338  // Try to merge with chunks immediately before/after it.
339 
340  if (reinterpret_cast<char *> (prev_chunk) + prev_chunk->size
341  == reinterpret_cast<char *> (chunk))
342  {
343  // Chunk to be freed is adjacent to a free chunk before it.
344  prev_chunk->size += chunk->size;
345 
346  // Coalescing means one less chunk.
347  --free_chunks_;
348 
349  // If the merged chunk is also adjacent to the chunk after it,
350  // merge again. Does not match if next_chunk == nullptr.
351  if (reinterpret_cast<char *> (prev_chunk) + prev_chunk->size
352  == reinterpret_cast<char *> (next_chunk))
353  {
354  prev_chunk->size += next_chunk->size;
355  prev_chunk->next = next_chunk->next;
356 
357  // Coalescing means one less chunk.
358  --free_chunks_;
359  }
360  }
361  else if (reinterpret_cast<char *> (prev_chunk) + prev_chunk->size
362  > reinterpret_cast<char *> (chunk))
363  {
364  // Already freed.
365 
366  // Revert statistics.
367  // What is subtracted from free is added to allocated.
368  allocated_bytes_ += chunk->size;
369  free_bytes_ -= chunk->size;
370  ++allocated_chunks_;
371  --free_chunks_;
372 
373  trace::printf ("first_fit_top::%s(%p,%u,%u) @%p %s already freed\n",
374  __func__, addr, bytes, alignment, this, name ());
375 
376  return;
377  }
378  // Does not match if next_chunk == nullptr.
379  else if (reinterpret_cast<char *> (chunk) + chunk->size
380  == reinterpret_cast<char *> (next_chunk))
381  {
382  // The chunk to be freed is adjacent to a free chunk after it.
383  chunk->size += next_chunk->size;
384  chunk->next = next_chunk->next; // May be nullptr.
385  prev_chunk->next = chunk;
386 
387  // Coalescing means one less chunk.
388  --free_chunks_;
389  }
390  else
391  {
392  // Not adjacent to any chunk. Just insert it.
393  // The result is a new fragment. Not great...
394  chunk->next = next_chunk; // May be nullptr.
395  prev_chunk->next = chunk;
396  }
397  }
398 
    // The largest request this resource could ever satisfy is bounded
    // by the arena size (header/alignment overhead not subtracted).
    std::size_t
    first_fit_top::do_max_size (void) const noexcept
    {
      return total_bytes_;
    }
407 
411  void*
412  first_fit_top::internal_align_ (chunk_t* chunk, std::size_t bytes,
413  std::size_t alignment)
414  {
415  // Update statistics.
416  // The value subtracted from free is added to allocated.
418 
419  // Compute pointer to payload area.
420  char* payload = reinterpret_cast<char *> (chunk) + chunk_offset;
421 
422  // Align it to user provided alignment.
423  void* aligned_payload = payload;
424  std::size_t aligned_size = chunk->size - chunk_offset;
425 
426  void* res;
427  res = std::align (alignment, bytes, aligned_payload, aligned_size);
428  if (res == nullptr)
429  {
430  assert(res != nullptr);
431  }
432 
433  // Compute the possible alignment offset.
434  std::ptrdiff_t offset = static_cast<char *> (aligned_payload) - payload;
435  if (offset)
436  {
437  // If non-zero, store it in the gap left by alignment in the
438  // chunk header.
439 
440  chunk_t* adj_chunk =
441  reinterpret_cast<chunk_t *> (static_cast<char *> (aligned_payload)
442  - chunk_offset);
443  adj_chunk->size = static_cast<std::size_t> (-offset);
444  }
445 
446  assert(
447  (reinterpret_cast<uintptr_t> (aligned_payload) & (alignment - 1))
448  == 0);
449 
450  return aligned_payload;
451  }
452 
453 #pragma GCC diagnostic pop
454 
455  // --------------------------------------------------------------------------
456  } /* namespace memory */
457 } /* namespace os */
458 
459 // ----------------------------------------------------------------------------
constexpr std::size_t max(std::size_t a, std::size_t b)
Definition: os-memory.h:74
void internal_decrease_allocated_statistics(std::size_t bytes) noexcept
Update statistics after deallocation.
Definition: os-memory.cpp:492
virtual void * do_allocate(std::size_t bytes, std::size_t alignment) override
Implementation of the memory allocator.
void * internal_align_(chunk_t *chunk, std::size_t bytes, std::size_t alignment)
Internal function to align a chunk.
void internal_construct_(void *addr, std::size_t bytes)
Internal function to construct the memory resource.
System namespace.
virtual std::size_t do_max_size(void) const noexcept override
Implementation of the function to get max size.
constexpr std::size_t align_size(std::size_t size, std::size_t align) noexcept
Helper function to align size values.
Definition: os-memory.h:86
const char * name(void) const
Get object name.
Definition: os-decls.h:760
virtual void do_deallocate(void *addr, std::size_t bytes, std::size_t alignment) noexcept override
Implementation of the memory deallocator.
virtual void do_reset(void) noexcept override
Implementation of the function to reset the memory manager.
int printf(const char *format,...)
Write a formatted string to the trace device.
Definition: trace.cpp:74
virtual ~first_fit_top() override
Destruct the memory resource object instance.
void internal_reset_(void) noexcept
Internal function to reset the memory resource.
void internal_increase_allocated_statistics(std::size_t bytes) noexcept
Update statistics after allocation.
Definition: os-memory.cpp:476