1use crate::prelude::*;
2use alloc::collections::BTreeMap;
3use core::cmp;
4use core::{alloc::Layout, num::NonZeroU32, ops::Bound};
5
/// A simple first-fit free-list allocator over a contiguous region of backing
/// storage, handing out `u32` byte indices into that storage.
pub(crate) struct FreeList {
    /// Total size, in bytes, of the backing storage. May exceed the
    /// addressable range; usable space is clamped to `u32::MAX` (see
    /// `max_size` and `reset`).
    capacity: usize,
    /// Map from each free block's start index to its length in bytes.
    /// Invariants (checked by `check_integrity`): keys and lengths are
    /// multiples of `ALIGN_U32`, blocks are disjoint and non-adjacent
    /// (adjacent blocks are eagerly merged on `dealloc`).
    free_block_index_to_len: BTreeMap<u32, u32>,
}
14
/// The alignment (and minimum granularity) of every block in the free list,
/// as a `u32` for index arithmetic.
const ALIGN_U32: u32 = 16;
/// The same alignment as `ALIGN_U32`, as a `usize` for `Layout` computations.
const ALIGN_USIZE: usize = ALIGN_U32 as usize;
20
21impl FreeList {
22 pub fn layout(size: usize) -> Layout {
25 Layout::from_size_align(size, ALIGN_USIZE).unwrap()
26 }
27
28 pub fn new(capacity: usize) -> Self {
31 log::trace!("FreeList::new({capacity})");
32 let mut free_list = FreeList {
33 capacity,
34 free_block_index_to_len: BTreeMap::new(),
35 };
36 free_list.reset();
37 free_list
38 }
39
40 fn max_size(&self) -> usize {
41 let cap = cmp::min(self.capacity, usize::try_from(u32::MAX).unwrap());
42 round_usize_down_to_pow2(cap.saturating_sub(ALIGN_USIZE), ALIGN_USIZE)
43 }
44
45 fn check_layout(&self, layout: Layout) -> Result<u32> {
48 ensure!(
49 layout.align() <= ALIGN_USIZE,
50 "requested allocation's alignment of {} is greater than max supported alignment of {ALIGN_USIZE}",
51 layout.align(),
52 );
53
54 ensure!(
55 layout.size() <= self.max_size(),
56 "requested allocation's size of {} is greater than the max supported size of {}",
57 layout.size(),
58 self.max_size(),
59 );
60
61 let alloc_size = u32::try_from(layout.size())
62 .context("requested allocation's size does not fit in a u32")?;
63 alloc_size
64 .checked_next_multiple_of(ALIGN_U32)
65 .ok_or_else(|| {
66 anyhow!(
67 "failed to round allocation size of {alloc_size} up to next \
68 multiple of {ALIGN_USIZE}"
69 )
70 })
71 }
72
73 fn first_fit(&mut self, alloc_size: u32) -> Option<(u32, u32)> {
76 debug_assert_eq!(alloc_size % ALIGN_U32, 0);
77
78 let (&block_index, &block_len) = self
79 .free_block_index_to_len
80 .iter()
81 .find(|(_idx, len)| **len >= alloc_size)?;
82
83 debug_assert_eq!(block_index % ALIGN_U32, 0);
84 debug_assert_eq!(block_len % ALIGN_U32, 0);
85
86 let entry = self.free_block_index_to_len.remove(&block_index);
87 debug_assert!(entry.is_some());
88
89 Some((block_index, block_len))
90 }
91
92 fn maybe_split(&mut self, alloc_size: u32, block_index: u32, block_len: u32) -> u32 {
97 debug_assert_eq!(alloc_size % ALIGN_U32, 0);
98 debug_assert_eq!(block_index % ALIGN_U32, 0);
99 debug_assert_eq!(block_len % ALIGN_U32, 0);
100
101 if block_len - alloc_size < ALIGN_U32 {
102 return block_len;
104 }
105
106 let new_block_len = alloc_size;
109 let split_start = block_index + alloc_size;
110 let split_len = block_len - alloc_size;
111
112 debug_assert_eq!(new_block_len % ALIGN_U32, 0);
113 debug_assert_eq!(split_start % ALIGN_U32, 0);
114 debug_assert_eq!(split_len % ALIGN_U32, 0);
115
116 self.free_block_index_to_len.insert(split_start, split_len);
117
118 new_block_len
119 }
120
121 pub fn alloc(&mut self, layout: Layout) -> Result<Option<NonZeroU32>> {
132 log::trace!("FreeList::alloc({layout:?})");
133 let alloc_size = self.check_layout(layout)?;
134 debug_assert_eq!(alloc_size % ALIGN_U32, 0);
135
136 let (block_index, block_len) = match self.first_fit(alloc_size) {
137 None => return Ok(None),
138 Some(tup) => tup,
139 };
140 debug_assert_ne!(block_index, 0);
141 debug_assert_eq!(block_index % ALIGN_U32, 0);
142 debug_assert!(block_len >= alloc_size);
143 debug_assert_eq!(block_len % ALIGN_U32, 0);
144
145 let block_len = self.maybe_split(alloc_size, block_index, block_len);
146 debug_assert!(block_len >= alloc_size);
147 debug_assert_eq!(block_len % ALIGN_U32, 0);
148
149 #[cfg(debug_assertions)]
151 self.check_integrity();
152
153 log::trace!("FreeList::alloc({layout:?}) -> {block_index:#x}");
154 Ok(Some(unsafe { NonZeroU32::new_unchecked(block_index) }))
155 }
156
157 pub fn dealloc(&mut self, index: NonZeroU32, layout: Layout) {
159 log::trace!("FreeList::dealloc({index:#x}, {layout:?})");
160
161 let index = index.get();
162 debug_assert_eq!(index % ALIGN_U32, 0);
163
164 let alloc_size = self.check_layout(layout).unwrap();
165 debug_assert_eq!(alloc_size % ALIGN_U32, 0);
166
167 let prev_block = self
168 .free_block_index_to_len
169 .range((Bound::Unbounded, Bound::Excluded(index)))
170 .next_back()
171 .map(|(idx, len)| (*idx, *len));
172
173 let next_block = self
174 .free_block_index_to_len
175 .range((Bound::Excluded(index), Bound::Unbounded))
176 .next()
177 .map(|(idx, len)| (*idx, *len));
178
179 match (prev_block, next_block) {
182 (Some((prev_index, prev_len)), Some((next_index, next_len)))
185 if blocks_are_contiguous(prev_index, prev_len, index)
186 && blocks_are_contiguous(index, alloc_size, next_index) =>
187 {
188 log::trace!(
189 "merging blocks {prev_index:#x}..{prev_len:#x}, {index:#x}..{index_end:#x}, {next_index:#x}..{next_end:#x}",
190 prev_len = prev_index + prev_len,
191 index_end = index + u32::try_from(layout.size()).unwrap(),
192 next_end = next_index + next_len,
193 );
194 self.free_block_index_to_len.remove(&next_index);
195 let merged_block_len = next_index + next_len - prev_index;
196 debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
197 *self.free_block_index_to_len.get_mut(&prev_index).unwrap() = merged_block_len;
198 }
199
200 (Some((prev_index, prev_len)), _)
202 if blocks_are_contiguous(prev_index, prev_len, index) =>
203 {
204 log::trace!(
205 "merging blocks {prev_index:#x}..{prev_len:#x}, {index:#x}..{index_end:#x}",
206 prev_len = prev_index + prev_len,
207 index_end = index + u32::try_from(layout.size()).unwrap(),
208 );
209 let merged_block_len = index + alloc_size - prev_index;
210 debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
211 *self.free_block_index_to_len.get_mut(&prev_index).unwrap() = merged_block_len;
212 }
213
214 (_, Some((next_index, next_len)))
216 if blocks_are_contiguous(index, alloc_size, next_index) =>
217 {
218 log::trace!(
219 "merging blocks {index:#x}..{index_end:#x}, {next_index:#x}..{next_end:#x}",
220 index_end = index + u32::try_from(layout.size()).unwrap(),
221 next_end = next_index + next_len,
222 );
223 self.free_block_index_to_len.remove(&next_index);
224 let merged_block_len = next_index + next_len - index;
225 debug_assert_eq!(merged_block_len % ALIGN_U32, 0);
226 self.free_block_index_to_len.insert(index, merged_block_len);
227 }
228
229 (_, _) => {
232 log::trace!("cannot merge blocks");
233 self.free_block_index_to_len.insert(index, alloc_size);
234 }
235 }
236
237 #[cfg(debug_assertions)]
240 self.check_integrity();
241 }
242
243 #[cfg(debug_assertions)]
253 fn check_integrity(&self) {
254 let mut prev_end = None;
255 for (&index, &len) in self.free_block_index_to_len.iter() {
256 let end = index + len;
258 assert!(usize::try_from(end).unwrap() <= self.capacity);
259
260 if let Some(prev_end) = prev_end {
262 assert!(prev_end < index);
268 }
269
270 assert_eq!(index % ALIGN_U32, 0);
272
273 assert_eq!(len % ALIGN_U32, 0);
275
276 prev_end = Some(end);
277 }
278 }
279
280 pub fn reset(&mut self) {
282 let end = u32::try_from(self.capacity).unwrap_or_else(|_| {
283 assert!(self.capacity > usize::try_from(u32::MAX).unwrap());
284 u32::MAX
285 });
286
287 let start = ALIGN_U32;
291
292 let len = round_u32_down_to_pow2(end.saturating_sub(start), ALIGN_U32);
293
294 let entire_range = if len >= ALIGN_U32 {
295 Some((start, len))
296 } else {
297 None
298 };
299
300 self.free_block_index_to_len.clear();
301 self.free_block_index_to_len.extend(entire_range);
302 }
303}
304
305#[inline]
306fn blocks_are_contiguous(prev_index: u32, prev_len: u32, next_index: u32) -> bool {
307 let end_of_prev = prev_index + prev_len;
313 debug_assert!(
314 next_index >= end_of_prev,
315 "overlapping blocks: \n\
316 \t prev_index = {prev_index:#x}\n\
317 \t prev_len = {prev_len:#x}\n\
318 \tend_of_prev = {end_of_prev:#x}\n\
319 \t next_index = {next_index:#x}\n\
320 `next_index` should be >= `end_of_prev`"
321 );
322 let delta_to_next = next_index - end_of_prev;
323 delta_to_next < ALIGN_U32
324}
325
/// Rounds `value` down to the nearest multiple of `divisor`, which must be a
/// nonzero power of two.
#[inline]
fn round_u32_down_to_pow2(value: u32, divisor: u32) -> u32 {
    debug_assert!(divisor > 0);
    debug_assert!(divisor.is_power_of_two());
    // For a power-of-two divisor the compiler lowers this to a mask.
    let remainder = value % divisor;
    value - remainder
}
332
/// Rounds `value` down to the nearest multiple of `divisor`, which must be a
/// nonzero power of two.
#[inline]
fn round_usize_down_to_pow2(value: usize, divisor: usize) -> usize {
    debug_assert!(divisor > 0);
    debug_assert!(divisor.is_power_of_two());
    // For a power-of-two divisor the compiler lowers this to a mask.
    let remainder = value % divisor;
    value - remainder
}
339
#[cfg(test)]
mod tests {
    use super::*;
    use crate::hash_map::HashMap;
    use proptest::prelude::*;
    use std::num::NonZeroUsize;

    // Returns the number of free blocks and the size of the first free block
    // (if any), for asserting on the free list's internal state.
    fn free_list_block_len_and_size(free_list: &FreeList) -> (usize, Option<usize>) {
        let len = free_list.free_block_index_to_len.len();
        let size = free_list
            .free_block_index_to_len
            .values()
            .next()
            .map(|s| usize::try_from(*s).unwrap());
        (len, size)
    }

    proptest! {
        // Property: after every successful allocation is eventually
        // deallocated, the free list returns to its initial state — i.e. all
        // adjacent free blocks were merged and no fragmentation remains.
        #[test]
        #[cfg_attr(miri, ignore)]
        fn check_no_fragmentation((capacity, ops) in ops()) {
            // Live allocations: op id -> allocated index.
            let mut live = HashMap::new();

            // Deallocs whose matching alloc hasn't happened yet (the op
            // stream is shuffled), replayed after the main loop.
            let mut deferred = vec![];

            let mut free_list = FreeList::new(capacity.get());

            let (initial_len, initial_size) = free_list_block_len_and_size(&free_list);
            assert!(initial_len == 0 || initial_len == 1);
            assert!(initial_size.unwrap_or(0) <= capacity.get());
            assert_eq!(initial_size.unwrap_or(0), free_list.max_size());

            for (id, op) in ops {
                match op {
                    Op::Alloc(layout) => {
                        if let Ok(Some(ptr)) = free_list.alloc(layout) {
                            live.insert(id, ptr);
                        }
                    }
                    Op::Dealloc(layout) => {
                        if let Some(ptr) = live.remove(&id) {
                            free_list.dealloc(ptr, layout);
                        } else {
                            deferred.push((id, layout));
                        }
                    }
                }
            }

            // Flush deferred deallocs now that all allocs have run.
            for (id, layout) in deferred {
                if let Some(ptr) = live.remove(&id) {
                    free_list.dealloc(ptr, layout);
                }
            }

            assert!(live.is_empty());

            let (final_len, final_size) = free_list_block_len_and_size(&free_list);

            assert_eq!(final_len, initial_len);

            assert_eq!(final_size, initial_size);
        }
    }

    // A single allocator operation, tagged with the layout it uses.
    #[derive(Clone, Debug)]
    enum Op {
        Alloc(Layout),
        Dealloc(Layout),
    }

    // Map an arbitrary `x` onto a power of two in `1..=max`.
    fn clamp_to_pow2_in_range(x: usize, max: usize) -> usize {
        let log_x = max.ilog2() as usize;
        if log_x == 0 {
            return 1;
        }
        let divisor = usize::MAX / log_x;
        let y = 1_usize << (x / divisor);
        assert!(y.is_power_of_two(), "{y} is not a power of two");
        assert!(y <= max, "{y} is larger than {max}");
        y
    }

    // Build a valid `Layout` from arbitrary `size`/`align` inputs, bounded by
    // `max_size` and the free list's supported alignment.
    fn arbitrary_layout(max_size: NonZeroUsize, size: usize, align: usize) -> Layout {
        let max_size = std::cmp::min(max_size.get(), usize::try_from(isize::MAX).unwrap());

        let align = clamp_to_pow2_in_range(align, super::ALIGN_USIZE);

        let size = size % (max_size + 1);

        let size = round_usize_down_to_pow2(size, align);
        assert!(size <= max_size);

        assert_ne!(align, 0);
        assert!(align.is_power_of_two());
        assert_eq!(size % align, 0);
        assert!(size <= usize::try_from(isize::MAX).unwrap());

        Layout::from_size_align(size, align).unwrap()
    }

    // Strategy producing a capacity plus a shuffled stream of paired
    // alloc/dealloc operations sharing an id and layout.
    fn ops() -> impl Strategy<Value = (NonZeroUsize, Vec<(usize, Op)>)> {
        any::<usize>().prop_flat_map(|capacity| {
            let capacity =
                NonZeroUsize::new(capacity).unwrap_or_else(|| NonZeroUsize::new(1 << 31).unwrap());

            (
                Just(capacity),
                (any::<usize>(), any::<usize>(), any::<usize>())
                    .prop_flat_map(move |(id, size, align)| {
                        let layout = arbitrary_layout(capacity, size, align);
                        vec![
                            Just((id, Op::Alloc(layout))),
                            Just((id, Op::Dealloc(layout))),
                        ]
                    })
                    .prop_shuffle(),
            )
        })
    }

    // An allocation that consumes the whole free block (leftover < ALIGN)
    // should not split it.
    #[test]
    fn allocate_no_split() {
        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 2);

        assert_eq!(free_list.free_block_index_to_len.len(), 1);
        assert_eq!(
            free_list.max_size(),
            usize::try_from(ALIGN_U32).unwrap() * 2
        );

        free_list
            .alloc(
                Layout::from_size_align(
                    usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
                    ALIGN_USIZE,
                )
                .unwrap(),
            )
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");

        assert_eq!(free_list.free_block_index_to_len.len(), 0);
    }

    // An allocation smaller than the free block should split it, leaving the
    // tail on the free list.
    #[test]
    fn allocate_and_split() {
        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 3);

        assert_eq!(free_list.free_block_index_to_len.len(), 1);
        assert_eq!(
            free_list.max_size(),
            usize::try_from(ALIGN_U32).unwrap() * 3
        );

        free_list
            .alloc(
                Layout::from_size_align(
                    usize::try_from(ALIGN_U32).unwrap() + ALIGN_USIZE,
                    ALIGN_USIZE,
                )
                .unwrap(),
            )
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");

        assert_eq!(free_list.free_block_index_to_len.len(), 1);
    }

    // Freeing a block whose neighbors on both sides are free should merge
    // all three into one block.
    #[test]
    fn dealloc_merge_prev_and_next() {
        let layout =
            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`"
        );

        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `b`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(b, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have merged `a` and `b` blocks with the rest to form a \
             single, contiguous free block after deallocating `b`"
        );
    }

    // Freeing a block whose previous neighbor (only) is free should merge
    // backward but not forward.
    #[test]
    fn dealloc_merge_with_prev_and_not_next() {
        let layout =
            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let c = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`, `b`, and `c`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(b, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have merged `a` and `b` blocks, but not merged with the \
             rest of the free space"
        );

        let _ = c;
    }

    // Freeing a block whose next neighbor (only) is free should merge
    // forward but not backward.
    #[test]
    fn dealloc_merge_with_next_and_not_prev() {
        let layout =
            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let c = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`, `b`, and `c`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(c, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have merged `c` block with rest of the free space, but not \
             with `a` block"
        );

        let _ = b;
    }

    // Freeing a block with no free neighbors should just add a new entry.
    #[test]
    fn dealloc_no_merge() {
        let layout =
            Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE).unwrap();

        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 100);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "initially one big free block"
        );

        let a = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let b = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let c = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        let d = free_list
            .alloc(layout)
            .expect("allocation within 'static' free list limits")
            .expect("have free space available for allocation");
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            1,
            "should have split the block to allocate `a`, `b`, `c`, and `d`"
        );

        free_list.dealloc(a, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            2,
            "should have two non-contiguous free blocks after deallocating `a`"
        );

        free_list.dealloc(c, layout);
        assert_eq!(
            free_list.free_block_index_to_len.len(),
            3,
            "should not have merged `c` block `a` block or rest of the free \
             space"
        );

        let _ = (b, d);
    }

    // Requesting more than `max_size` must be an `Err`, not just `Ok(None)`.
    #[test]
    fn alloc_size_too_large() {
        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
        assert_eq!(
            free_list.max_size(),
            usize::try_from(ALIGN_U32).unwrap() * 10
        );

        assert!(free_list
            .alloc(
                Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap() * 20, ALIGN_USIZE)
                    .unwrap(),
            )
            .is_err());
    }

    // Requesting alignment above the supported maximum must be an `Err`.
    #[test]
    fn alloc_align_too_large() {
        let mut free_list = FreeList::new(ALIGN_USIZE + usize::try_from(ALIGN_U32).unwrap() * 10);
        assert_eq!(
            free_list.max_size(),
            usize::try_from(ALIGN_U32).unwrap() * 10
        );

        assert!(free_list
            .alloc(
                Layout::from_size_align(usize::try_from(ALIGN_U32).unwrap(), ALIGN_USIZE * 2)
                    .unwrap(),
            )
            .is_err());
    }

    // Smoke-test every interleaving of two alloc/dealloc pairs.
    #[test]
    fn all_pairwise_alloc_dealloc_orderings() {
        let tests: &[fn(&mut FreeList, Layout)] = &[
            |f, l| {
                let a = f.alloc(l).unwrap().unwrap();
                let b = f.alloc(l).unwrap().unwrap();
                f.dealloc(a, l);
                f.dealloc(b, l);
            },
            |f, l| {
                let a = f.alloc(l).unwrap().unwrap();
                let b = f.alloc(l).unwrap().unwrap();
                f.dealloc(b, l);
                f.dealloc(a, l);
            },
            |f, l| {
                let a = f.alloc(l).unwrap().unwrap();
                f.dealloc(a, l);
                let b = f.alloc(l).unwrap().unwrap();
                f.dealloc(b, l);
            },
        ];

        let l = Layout::from_size_align(16, 8).unwrap();
        for test in tests {
            let mut f = FreeList::new(0x100);
            test(&mut f, l);
        }
    }
}