
Commit 06f1379

Add choice of Two-Level Segregated Fit and Linked List First Fit

1 parent e37e935 · commit 06f1379

File tree: Cargo.toml · README.md · examples/global_alloc.rs · src/lib.rs

4 files changed: +238 -103 lines

Cargo.toml (+9 -3)

@@ -24,14 +24,20 @@ name = "embedded-alloc"
 version = "0.5.0"
 
 [features]
+default = ["llff"]
 allocator_api = []
 
+# Use the Two-Level Segregated Fit allocator
+tlsf = ["rlsf", "const-default"]
+# Use the LinkedList first-fit allocator
+llff = ["linked_list_allocator"]
+
 [dependencies]
 critical-section = "1.0"
+linked_list_allocator = { version = "0.10.5", default-features = false, optional = true }
+rlsf = { version = "0.2.1", default-features = false, optional = true }
+const-default = { version = "1.0.0", default-features = false, optional = true }
 
-[dependencies.linked_list_allocator]
-default-features = false
-version = "0.10.5"
 
 [dev-dependencies]
 cortex-m = { version = "0.7.6", features = ["critical-section-single-core"] }
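With this change the allocator is selected through Cargo features, and `llff` is the default, so existing users keep the linked-list first-fit behavior without touching their manifests. A sketch of the two options as they would appear in a hypothetical downstream Cargo.toml (the exact version requirement depends on the release that ships this commit):

[dependencies]
# Default feature set: Linked-List First Fit heap
embedded-alloc = "0.5"

# Alternatively, opt into the Two-Level Segregated Fit heap:
# embedded-alloc = { version = "0.5", default-features = false, features = ["tlsf"] }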

README.md (+1 -1)

@@ -23,7 +23,7 @@ Starting with Rust 1.68, this crate can be used as a global allocator on stable
 extern crate alloc;
 
 use cortex_m_rt::entry;
-use embedded_alloc::Heap;
+use embedded_alloc::LlffHeap as Heap;
 
 #[global_allocator]
 static HEAP: Heap = Heap::empty();

examples/global_alloc.rs (+4 -1)

@@ -6,7 +6,10 @@ extern crate alloc;
 use alloc::vec::Vec;
 use core::panic::PanicInfo;
 use cortex_m_rt::entry;
-use embedded_alloc::Heap;
+// Linked-List First Fit Heap allocator (feature = "llff")
+use embedded_alloc::LlffHeap as Heap;
+// Two-Level Segregated Fit Heap allocator (feature = "tlsf")
+// use embedded_alloc::TlsfHeap as Heap;
 
 #[global_allocator]
 static HEAP: Heap = Heap::empty();
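Both heap types expose the same `empty()`/`init()` surface, so swapping allocators is a one-line import change. For context, a minimal entry point in the style of the crate's README (the heap size and `static mut` arena are illustrative, and a real binary still needs a panic handler):

#![no_std]
#![no_main]

extern crate alloc;

use alloc::vec::Vec;
use cortex_m_rt::entry;
use embedded_alloc::LlffHeap as Heap; // or TlsfHeap with `--features tlsf`

#[global_allocator]
static HEAP: Heap = Heap::empty();

#[entry]
fn main() -> ! {
    // Initialize the allocator BEFORE the first allocation, and exactly once.
    {
        use core::mem::MaybeUninit;
        const HEAP_SIZE: usize = 1024; // illustrative size
        static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
        unsafe { HEAP.init(HEAP_MEM.as_ptr() as usize, HEAP_SIZE) }
    }

    // `alloc` types now work as usual.
    let mut xs = Vec::new();
    xs.push(1);

    loop { /* .. */ }
}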

src/lib.rs (+224 -98)

@@ -7,121 +7,247 @@ use core::cell::RefCell;
 use core::ptr::{self, NonNull};
 
 use critical_section::Mutex;
-use linked_list_allocator::Heap as LLHeap;
 
-pub struct Heap {
-    heap: Mutex<RefCell<LLHeap>>,
-}
+#[cfg(feature = "llff")]
+pub use llff::Heap as LlffHeap;
+#[cfg(feature = "tlsf")]
+pub use tlsf::Heap as TlsfHeap;
 
-impl Heap {
-    /// Crate a new UNINITIALIZED heap allocator
-    ///
-    /// You must initialize this heap using the
-    /// [`init`](Self::init) method before using the allocator.
-    pub const fn empty() -> Heap {
-        Heap {
-            heap: Mutex::new(RefCell::new(LLHeap::empty())),
-        }
+#[cfg(feature = "llff")]
+mod llff {
+    use super::*;
+    use linked_list_allocator::Heap as LLHeap;
+
+    pub struct Heap {
+        heap: Mutex<RefCell<LLHeap>>,
     }
 
-    /// Initializes the heap
-    ///
-    /// This function must be called BEFORE you run any code that makes use of the
-    /// allocator.
-    ///
-    /// `start_addr` is the address where the heap will be located.
-    ///
-    /// `size` is the size of the heap in bytes.
-    ///
-    /// Note that:
-    ///
-    /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
-    ///   be the smallest address used.
-    ///
-    /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
-    ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
-    ///   addresses `0x31000` and larger.
-    ///
-    /// # Safety
-    ///
-    /// Obey these or Bad Stuff will happen.
-    ///
-    /// - This function must be called exactly ONCE.
-    /// - `size > 0`
-    pub unsafe fn init(&self, start_addr: usize, size: usize) {
-        critical_section::with(|cs| {
-            self.heap
-                .borrow(cs)
-                .borrow_mut()
-                .init(start_addr as *mut u8, size);
-        });
+    impl Heap {
+        /// Crate a new UNINITIALIZED heap allocator
+        ///
+        /// You must initialize this heap using the
+        /// [`init`](Self::init) method before using the allocator.
+        pub const fn empty() -> Heap {
+            Heap {
+                heap: Mutex::new(RefCell::new(LLHeap::empty())),
+            }
+        }
+
+        /// Initializes the heap
+        ///
+        /// This function must be called BEFORE you run any code that makes use of the
+        /// allocator.
+        ///
+        /// `start_addr` is the address where the heap will be located.
+        ///
+        /// `size` is the size of the heap in bytes.
+        ///
+        /// Note that:
+        ///
+        /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
+        ///   be the smallest address used.
+        ///
+        /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
+        ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
+        ///   addresses `0x31000` and larger.
+        ///
+        /// # Safety
+        ///
+        /// Obey these or Bad Stuff will happen.
+        ///
+        /// - This function must be called exactly ONCE.
+        /// - `size > 0`
+        pub unsafe fn init(&self, start_addr: usize, size: usize) {
+            critical_section::with(|cs| {
+                self.heap
+                    .borrow(cs)
+                    .borrow_mut()
+                    .init(start_addr as *mut u8, size);
+            });
+        }
+
+        /// Returns an estimate of the amount of bytes in use.
+        pub fn used(&self) -> usize {
+            critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().used())
+        }
+
+        /// Returns an estimate of the amount of bytes available.
+        pub fn free(&self) -> usize {
+            critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().free())
+        }
     }
 
-    /// Returns an estimate of the amount of bytes in use.
-    pub fn used(&self) -> usize {
-        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().used())
+    unsafe impl GlobalAlloc for Heap {
+        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+            critical_section::with(|cs| {
+                self.heap
+                    .borrow(cs)
+                    .borrow_mut()
+                    .allocate_first_fit(layout)
+                    .ok()
+                    .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
+            })
+        }
+
+        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+            critical_section::with(|cs| {
+                self.heap
+                    .borrow(cs)
+                    .borrow_mut()
+                    .deallocate(NonNull::new_unchecked(ptr), layout)
+            });
+        }
     }
 
-    /// Returns an estimate of the amount of bytes available.
-    pub fn free(&self) -> usize {
-        critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().free())
+    #[cfg(feature = "allocator_api")]
+    mod allocator_api {
+        use super::*;
+        use core::{
+            alloc::{AllocError, Allocator, Layout},
+            ptr::NonNull,
+        };
+
+        unsafe impl Allocator for Heap {
+            fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+                match layout.size() {
+                    0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+                    size => critical_section::with(|cs| {
+                        self.heap
+                            .borrow(cs)
+                            .borrow_mut()
+                            .allocate_first_fit(layout)
+                            .map(|allocation| NonNull::slice_from_raw_parts(allocation, size))
+                            .map_err(|_| AllocError)
+                    }),
+                }
+            }
+
+            unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+                if layout.size() != 0 {
+                    critical_section::with(|cs| {
+                        self.heap
+                            .borrow(cs)
+                            .borrow_mut()
+                            .deallocate(NonNull::new_unchecked(ptr.as_ptr()), layout)
+                    });
+                }
+            }
+        }
     }
 }
 
-unsafe impl GlobalAlloc for Heap {
-    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-        critical_section::with(|cs| {
-            self.heap
-                .borrow(cs)
-                .borrow_mut()
-                .allocate_first_fit(layout)
-                .ok()
-                .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
-        })
-    }
+#[cfg(feature = "tlsf")]
+mod tlsf {
+    use super::*;
+    use const_default::ConstDefault;
+    use rlsf::Tlsf;
+
+    type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;
 
-    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
-        critical_section::with(|cs| {
-            self.heap
-                .borrow(cs)
-                .borrow_mut()
-                .deallocate(NonNull::new_unchecked(ptr), layout)
-        });
+    pub struct Heap {
+        heap: Mutex<RefCell<TlsfHeap>>,
     }
-}
 
-#[cfg(feature = "allocator_api")]
-mod allocator_api {
-    use core::{
-        alloc::{AllocError, Allocator, Layout},
-        ptr::NonNull,
-    };
-
-    use crate::Heap;
-
-    unsafe impl Allocator for Heap {
-        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-            match layout.size() {
-                0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
-                size => critical_section::with(|cs| {
-                    self.heap
-                        .borrow(cs)
-                        .borrow_mut()
-                        .allocate_first_fit(layout)
-                        .map(|allocation| NonNull::slice_from_raw_parts(allocation, size))
-                        .map_err(|_| AllocError)
-                }),
+    impl Heap {
+        /// Crate a new UNINITIALIZED heap allocator
+        ///
+        /// You must initialize this heap using the
+        /// [`init`](Self::init) method before using the allocator.
+        pub const fn empty() -> Heap {
+            Heap {
+                heap: Mutex::new(RefCell::new(ConstDefault::DEFAULT)),
             }
         }
 
-        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-            if layout.size() != 0 {
-                critical_section::with(|cs| {
-                    self.heap
-                        .borrow(cs)
-                        .borrow_mut()
-                        .deallocate(NonNull::new_unchecked(ptr.as_ptr()), layout)
-                });
+        /// Initializes the heap
+        ///
+        /// This function must be called BEFORE you run any code that makes use of the
+        /// allocator.
+        ///
+        /// `start_addr` is the address where the heap will be located.
+        ///
+        /// `size` is the size of the heap in bytes.
+        ///
+        /// Note that:
+        ///
+        /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
+        ///   be the smallest address used.
+        ///
+        /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
+        ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
+        ///   addresses `0x31000` and larger.
+        ///
+        /// # Safety
+        ///
+        /// Obey these or Bad Stuff will happen.
+        ///
+        /// - This function must be called exactly ONCE.
+        /// - `size > 0`
+        pub unsafe fn init(&self, start_addr: usize, size: usize) {
+            critical_section::with(|cs| {
+                let block: &[u8] = core::slice::from_raw_parts(start_addr as *const u8, size);
+                self.heap
+                    .borrow(cs)
+                    .borrow_mut()
+                    .insert_free_block_ptr(block.into());
+            });
+        }
+    }
+
+    unsafe impl GlobalAlloc for Heap {
+        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+            critical_section::with(|cs| {
+                self.heap
+                    .borrow(cs)
+                    .borrow_mut()
+                    .allocate(layout)
+                    .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
+            })
+        }
+
+        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+            critical_section::with(|cs| {
+                self.heap
+                    .borrow(cs)
+                    .borrow_mut()
+                    .deallocate(NonNull::new_unchecked(ptr), layout.align())
+            });
+        }
+    }
+
+    #[cfg(feature = "allocator_api")]
+    mod allocator_api {
+        use super::*;
+        use core::{
+            alloc::{AllocError, Allocator, Layout},
+            ptr::NonNull,
+        };
+
+        unsafe impl Allocator for Heap {
+            fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+                match layout.size() {
+                    0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+                    size => critical_section::with(|cs| {
+                        self.heap
+                            .borrow(cs)
+                            .borrow_mut()
+                            .allocate(layout)
+                            .map_or(Err(AllocError), |allocation| {
+                                Ok(NonNull::slice_from_raw_parts(allocation, size))
+                            })
+                    }),
+                }
+            }
+
+            unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+                if layout.size() != 0 {
+                    critical_section::with(|cs| {
+                        self.heap
+                            .borrow(cs)
+                            .borrow_mut()
+                            .deallocate(NonNull::new_unchecked(ptr.as_ptr()), layout.align())
+                    });
+                }
             }
         }
     }
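Two notes on the TLSF side of this diff. The concrete `Tlsf` type uses `usize` bitmaps with `usize::BITS` first- and second-level lists (32 on 32-bit targets, 64 on 64-bit), and `ConstDefault::DEFAULT` from the `const-default` crate is what lets `empty()` stay a `const fn`; note also that the `used()`/`free()` statistics methods exist only on the llff heap in this commit. Under the `allocator_api` feature, either heap can additionally serve as a non-global allocator on nightly. A minimal sketch, assuming the heap was already initialized via `init` (`Box::new_in` and `Vec::new_in` are unstable APIs):

#![feature(allocator_api)]
#![no_std]

extern crate alloc;

use alloc::{boxed::Box, vec::Vec};
use embedded_alloc::LlffHeap as Heap; // TlsfHeap gets the same impls

static SCRATCH: Heap = Heap::empty();

fn demo() {
    // `&SCRATCH` implements `Allocator` via core's blanket impl for `&A`,
    // so these allocations land in SCRATCH rather than the global allocator.
    let x = Box::new_in(42u32, &SCRATCH);
    let mut buf: Vec<u8, &Heap> = Vec::new_in(&SCRATCH);
    buf.push(*x as u8);
}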
