From 58d3a89cd5a28116d200f1c4e79a48d3228a7ac2 Mon Sep 17 00:00:00 2001 From: Jan Niehusmann Date: Wed, 28 Jan 2026 23:27:47 +0000 Subject: [PATCH 1/3] Work around https://github.com/yvt/rlsf/pull/21 --- src/tlsf.rs | 36 +++++++++++++++++++++++++++++------- 1 file changed, 29 insertions(+), 7 deletions(-) diff --git a/src/tlsf.rs b/src/tlsf.rs index 7155aaa..a95bad7 100644 --- a/src/tlsf.rs +++ b/src/tlsf.rs @@ -70,21 +70,43 @@ impl Heap { /// This function will panic if either of the following are true: /// /// - this function is called more than ONCE. - /// - `size == 0`. + /// - `size`, after aligning start and end to `rlsf::GRANULARITY`, is smaller than `rlsf::GRANULARITY * 2`. pub unsafe fn init(&self, start_addr: usize, size: usize) { assert!(size > 0); critical_section::with(|cs| { let mut heap = self.heap.borrow_ref_mut(cs); assert!(!heap.initialized); - heap.initialized = true; - let block: NonNull<[u8]> = - NonNull::slice_from_raw_parts(NonNull::new_unchecked(start_addr as *mut u8), size); - heap.tlsf.insert_free_block_ptr(block); - heap.raw_block = Some(block); - heap.raw_block_size = size; + // Work around https://github.com/yvt/rlsf/pull/21 by aligning block before passing + // it to `Tlsf::insert_free_block_ptr`. + if let Some((aligned_start_addr, usable_size)) = Self::align(start_addr, size) { + let block: NonNull<[u8]> = NonNull::slice_from_raw_parts( + NonNull::new_unchecked(aligned_start_addr as *mut u8), + usable_size, + ); + if heap.tlsf.insert_free_block_ptr(block).is_some() { + heap.initialized = true; + heap.raw_block = Some(block); + heap.raw_block_size = size; + } + } + if !heap.initialized { + panic!("Allocation too small for heap"); + } }); } + /// Align `start_addr` to `rlsf::GRANULARITY` and make + /// `size` a multiple of `2*rlsf::GRANULARITY`. 
+ fn align(start_addr: usize, size: usize) -> Option<(usize, usize)> { + let align_offset: usize = (start_addr as *const u8).align_offset(rlsf::GRANULARITY); + if align_offset >= size { + return None; + } + let reduced_size: usize = size - align_offset; + let usable_size: usize = reduced_size - (reduced_size % (rlsf::GRANULARITY * 2)); + Some((start_addr + align_offset, usable_size)) + } + fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> { critical_section::with(|cs| self.heap.borrow_ref_mut(cs).tlsf.allocate(layout)) } From a10552aa6c63b0eabdb0cf69fa49eadd6f8f17d2 Mon Sep 17 00:00:00 2001 From: Jan Niehusmann Date: Wed, 28 Jan 2026 23:28:48 +0000 Subject: [PATCH 2/3] Add changelog entry --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4d2f5c2..120d41c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ### Fixed - Fix panic in `tlsf::Heap::used`. +- Work around https://github.com/yvt/rlsf/pull/21 by aligning block before passing it + to `Tlsf::insert_free_block_ptr`. ## [v0.7.0] - 2026-01-03 From 65df6d7c1787e94890053a53871ab820ade03eb0 Mon Sep 17 00:00:00 2001 From: Jan Niehusmann Date: Sat, 31 Jan 2026 19:04:12 +0000 Subject: [PATCH 3/3] Properly fix calls to the rlsf library --- CHANGELOG.md | 4 ++-- src/tlsf.rs | 30 ++++++++---------------------- 2 files changed, 10 insertions(+), 24 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 120d41c..0be11f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,8 +10,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/). ### Fixed - Fix panic in `tlsf::Heap::used`. -- Work around https://github.com/yvt/rlsf/pull/21 by aligning block before passing it - to `Tlsf::insert_free_block_ptr`. +- Fix panic in `tlsf::Heap::free` in case the value returned from `insert_free_block_ptr` + does not cover the full memory range passed in. 
## [v0.7.0] - 2026-01-03 diff --git a/src/tlsf.rs b/src/tlsf.rs index a95bad7..57f91a0 100644 --- a/src/tlsf.rs +++ b/src/tlsf.rs @@ -76,18 +76,16 @@ impl Heap { critical_section::with(|cs| { let mut heap = self.heap.borrow_ref_mut(cs); assert!(!heap.initialized); - // Work around https://github.com/yvt/rlsf/pull/21 by aligning block before passing - // it to `Tlsf::insert_free_block_ptr`. - if let Some((aligned_start_addr, usable_size)) = Self::align(start_addr, size) { + let block: NonNull<[u8]> = + NonNull::slice_from_raw_parts(NonNull::new_unchecked(start_addr as *mut u8), size); + if let Some(actual_size) = heap.tlsf.insert_free_block_ptr(block) { let block: NonNull<[u8]> = NonNull::slice_from_raw_parts( - NonNull::new_unchecked(aligned_start_addr as *mut u8), - usable_size, + NonNull::new_unchecked(start_addr as *mut u8), + actual_size.get(), ); - if heap.tlsf.insert_free_block_ptr(block).is_some() { - heap.initialized = true; - heap.raw_block = Some(block); - heap.raw_block_size = size; - } + heap.initialized = true; + heap.raw_block = Some(block); + heap.raw_block_size = size; } if !heap.initialized { panic!("Allocation too small for heap"); @@ -95,18 +93,6 @@ impl Heap { }); } - /// Align `start_addr` to `rlsf::GRANULARITY` and make - /// `size` a multiple of `2*rlsf::GRANULARITY`. - fn align(start_addr: usize, size: usize) -> Option<(usize, usize)> { - let align_offset: usize = (start_addr as *const u8).align_offset(rlsf::GRANULARITY); - if align_offset >= size { - return None; - } - let reduced_size: usize = size - align_offset; - let usable_size: usize = reduced_size - (reduced_size % (rlsf::GRANULARITY * 2)); - Some((start_addr + align_offset, usable_size)) - } - fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> { critical_section::with(|cs| self.heap.borrow_ref_mut(cs).tlsf.allocate(layout)) }