Commit 6e51fc7

Merge pull request #88 from rust-embedded/update-docs
Update documentation for TLSF and LLFF heaps
2 parents b52091a + 69187bf

6 files changed (+256 -237 lines)

CHANGELOG.md (+10)

@@ -5,6 +5,16 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](http://keepachangelog.com/)
 and this project adheres to [Semantic Versioning](http://semver.org/).
 
+## [Unreleased]
+
+### Added
+
+- Added a Two-Level Segregated Fit heap with the `tlsf` feature.
+
+### Changed
+
+- The `Heap` struct has been renamed to `LlffHeap` and requires the `llff` feature.
+
 ## [v0.5.1] - 2023-11-04
 
 ### Added
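
For a downstream crate, the `Changed` entry above amounts to a one-line rename plus a Cargo feature switch: the `llff` feature of `embedded-alloc` has to be enabled, and the old `Heap` import becomes `LlffHeap`. A minimal, hypothetical sketch of the source-side change (the Cargo.toml side is not shown; the `empty()` API is the one visible in the `src/lib.rs` diff further down):

// Hypothetical downstream code, sketching the rename described in the changelog.
// Before: `use embedded_alloc::Heap;`
// After, with the `llff` feature enabled on embedded-alloc:
use embedded_alloc::LlffHeap as Heap;

// The allocator declaration itself is unchanged by the rename.
#[global_allocator]
static HEAP: Heap = Heap::empty();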

Cargo.toml (-1)

@@ -38,7 +38,6 @@ linked_list_allocator = { version = "0.10.5", default-features = false, optional
 rlsf = { version = "0.2.1", default-features = false, optional = true }
 const-default = { version = "1.0.0", default-features = false, optional = true }
 
-
 [dev-dependencies]
 cortex-m = { version = "0.7.6", features = ["critical-section-single-core"] }
 cortex-m-rt = "0.7"

README.md (+9 -1)

@@ -48,9 +48,17 @@ For a full usage example, see [`examples/global_alloc.rs`](https://github.com/ru
 
 For this to work, an implementation of [`critical-section`](https://github.com/rust-embedded/critical-section) must be provided.
 
-For simple use cases you may enable the `critical-section-single-core` feature in the [cortex-m](https://github.com/rust-embedded/cortex-m) crate.
+For simple use cases with Cortex-M CPUs you may enable the `critical-section-single-core` feature in the [cortex-m](https://github.com/rust-embedded/cortex-m) crate.
 Please refer to the documentation of [`critical-section`](https://docs.rs/critical-section) for further guidance.
 
+## Features
+
+There are two heaps available to use:
+
+* `llff`: Provides `LlffHeap`, a Linked List First Fit heap.
+* `tlsf`: Provides `TlsfHeap`, a Two-Level Segregated Fit heap.
+
+The best heap to use will depend on your application, see [#78](https://github.com/rust-embedded/embedded-alloc/pull/78) for more discussion.
 
 ## License
 
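The two heaps listed in the new Features section share the same construction and initialization flow, so choosing between them is mostly a feature-flag decision. A minimal sketch of bringing one up on a bare-metal target, assuming a static buffer as heap memory and omitting the target-specific entry point and panic handler; `HEAP_SIZE`, `HEAP_MEM`, and `init_heap` are illustrative names, not part of this commit:

#![no_std]

extern crate alloc;

use core::mem::MaybeUninit;

// Either heap works here; `LlffHeap` is used purely as an example.
use embedded_alloc::LlffHeap as Heap;

#[global_allocator]
static HEAP: Heap = Heap::empty();

// Must run exactly once, before the first allocation.
fn init_heap() {
    const HEAP_SIZE: usize = 1024;
    static mut HEAP_MEM: [MaybeUninit<u8>; HEAP_SIZE] = [MaybeUninit::uninit(); HEAP_SIZE];
    // Safety: called once, with a non-zero size, on memory reserved for the heap.
    unsafe { HEAP.init(core::ptr::addr_of_mut!(HEAP_MEM) as usize, HEAP_SIZE) }
}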

src/lib.rs (+5 -235)

@@ -1,244 +1,14 @@
 #![doc = include_str!("../README.md")]
 #![no_std]
 #![cfg_attr(feature = "allocator_api", feature(allocator_api, alloc_layout_extra))]
-
-use core::alloc::{GlobalAlloc, Layout};
-use core::cell::RefCell;
-use core::ptr::{self, NonNull};
-
-use critical_section::Mutex;
+#![warn(missing_docs)]
 
 #[cfg(feature = "llff")]
-pub use llff::Heap as LlffHeap;
+mod llff;
 #[cfg(feature = "tlsf")]
-pub use tlsf::Heap as TlsfHeap;
+mod tlsf;
 
 #[cfg(feature = "llff")]
-mod llff {
-    use super::*;
-    use linked_list_allocator::Heap as LLHeap;
-
-    pub struct Heap {
-        heap: Mutex<RefCell<LLHeap>>,
-    }
-
-    impl Heap {
-        /// Create a new UNINITIALIZED heap allocator
-        ///
-        /// You must initialize this heap using the
-        /// [`init`](Self::init) method before using the allocator.
-        pub const fn empty() -> Heap {
-            Heap {
-                heap: Mutex::new(RefCell::new(LLHeap::empty())),
-            }
-        }
-
-        /// Initializes the heap
-        ///
-        /// This function must be called BEFORE you run any code that makes use of the
-        /// allocator.
-        ///
-        /// `start_addr` is the address where the heap will be located.
-        ///
-        /// `size` is the size of the heap in bytes.
-        ///
-        /// Note that:
-        ///
-        /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
-        ///   be the smallest address used.
-        ///
-        /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
-        ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
-        ///   addresses `0x31000` and larger.
-        ///
-        /// # Safety
-        ///
-        /// Obey these or Bad Stuff will happen.
-        ///
-        /// - This function must be called exactly ONCE.
-        /// - `size > 0`
-        pub unsafe fn init(&self, start_addr: usize, size: usize) {
-            critical_section::with(|cs| {
-                self.heap
-                    .borrow(cs)
-                    .borrow_mut()
-                    .init(start_addr as *mut u8, size);
-            });
-        }
-
-        /// Returns an estimate of the amount of bytes in use.
-        pub fn used(&self) -> usize {
-            critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().used())
-        }
-
-        /// Returns an estimate of the amount of bytes available.
-        pub fn free(&self) -> usize {
-            critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().free())
-        }
-
-        fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
-            critical_section::with(|cs| {
-                self.heap
-                    .borrow(cs)
-                    .borrow_mut()
-                    .allocate_first_fit(layout)
-                    .ok()
-            })
-        }
-
-        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
-            critical_section::with(|cs| {
-                self.heap
-                    .borrow(cs)
-                    .borrow_mut()
-                    .deallocate(NonNull::new_unchecked(ptr), layout)
-            });
-        }
-    }
-
-    unsafe impl GlobalAlloc for Heap {
-        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-            self.alloc(layout)
-                .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
-        }
-
-        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
-            self.dealloc(ptr, layout);
-        }
-    }
-
-    #[cfg(feature = "allocator_api")]
-    mod allocator_api {
-        use super::*;
-        use core::{
-            alloc::{AllocError, Allocator, Layout},
-            ptr::NonNull,
-        };
-
-        unsafe impl Allocator for Heap {
-            fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-                match layout.size() {
-                    0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
-                    size => self.alloc(layout).map_or(Err(AllocError), |allocation| {
-                        Ok(NonNull::slice_from_raw_parts(allocation, size))
-                    }),
-                }
-            }
-
-            unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-                if layout.size() != 0 {
-                    self.dealloc(ptr.as_ptr(), layout);
-                }
-            }
-        }
-    }
-}
-
+pub use llff::Heap as LlffHeap;
 #[cfg(feature = "tlsf")]
-mod tlsf {
-    use super::*;
-    use const_default::ConstDefault;
-    use rlsf::Tlsf;
-
-    type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;
-
-    pub struct Heap {
-        heap: Mutex<RefCell<TlsfHeap>>,
-    }
-
-    impl Heap {
-        /// Create a new UNINITIALIZED heap allocator
-        ///
-        /// You must initialize this heap using the
-        /// [`init`](Self::init) method before using the allocator.
-        pub const fn empty() -> Heap {
-            Heap {
-                heap: Mutex::new(RefCell::new(ConstDefault::DEFAULT)),
-            }
-        }
-
-        /// Initializes the heap
-        ///
-        /// This function must be called BEFORE you run any code that makes use of the
-        /// allocator.
-        ///
-        /// `start_addr` is the address where the heap will be located.
-        ///
-        /// `size` is the size of the heap in bytes.
-        ///
-        /// Note that:
-        ///
-        /// - The heap grows "upwards", towards larger addresses. Thus `start_addr` will
-        ///   be the smallest address used.
-        ///
-        /// - The largest address used is `start_addr + size - 1`, so if `start_addr` is
-        ///   `0x1000` and `size` is `0x30000` then the allocator won't use memory at
-        ///   addresses `0x31000` and larger.
-        ///
-        /// # Safety
-        ///
-        /// Obey these or Bad Stuff will happen.
-        ///
-        /// - This function must be called exactly ONCE.
-        /// - `size > 0`
-        pub unsafe fn init(&self, start_addr: usize, size: usize) {
-            critical_section::with(|cs| {
-                let block: &[u8] = core::slice::from_raw_parts(start_addr as *const u8, size);
-                self.heap
-                    .borrow(cs)
-                    .borrow_mut()
-                    .insert_free_block_ptr(block.into());
-            });
-        }
-
-        fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
-            critical_section::with(|cs| self.heap.borrow(cs).borrow_mut().allocate(layout))
-        }
-
-        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
-            critical_section::with(|cs| {
-                self.heap
-                    .borrow(cs)
-                    .borrow_mut()
-                    .deallocate(NonNull::new_unchecked(ptr), layout.align())
-            })
-        }
-    }
-
-    unsafe impl GlobalAlloc for Heap {
-        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
-            self.alloc(layout)
-                .map_or(ptr::null_mut(), |allocation| allocation.as_ptr())
-        }
-
-        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
-            self.dealloc(ptr, layout)
-        }
-    }
-
-    #[cfg(feature = "allocator_api")]
-    mod allocator_api {
-        use super::*;
-        use core::{
-            alloc::{AllocError, Allocator, Layout},
-            ptr::NonNull,
-        };
-
-        unsafe impl Allocator for Heap {
-            fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-                match layout.size() {
-                    0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
-                    size => self.alloc(layout).map_or(Err(AllocError), |allocation| {
-                        Ok(NonNull::slice_from_raw_parts(allocation, size))
-                    }),
-                }
-            }
-
-            unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-                if layout.size() != 0 {
-                    self.dealloc(ptr.as_ptr(), layout);
-                }
-            }
-        }
-    }
-}
+pub use tlsf::Heap as TlsfHeap;
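
After this hunk, `lib.rs` keeps only the crate attributes, the feature-gated `mod` declarations, and the crate-root re-exports; the removed module bodies presumably move into the new module files counted in the 6-file total but not shown in this excerpt. One detail those bodies make visible is the `Allocator` impl behind the `allocator_api` feature, which the retained `cfg_attr` line still enables on nightly. A rough sketch of what that impl allows a user to do, assuming nightly Rust with the `llff` and `allocator_api` features enabled, and assuming the heap has been initialized before this code runs:

#![feature(allocator_api)]
#![no_std]

extern crate alloc;

use alloc::boxed::Box;
use alloc::vec::Vec;
use embedded_alloc::LlffHeap as Heap;

// A non-global heap used directly through the `Allocator` trait.
// `HEAP.init(...)` must still be called once before the allocations below.
static HEAP: Heap = Heap::empty();

fn allocator_api_demo() {
    // Allocate from this specific heap rather than the global allocator.
    let boxed = Box::new_in(42u32, &HEAP);

    // Collections can borrow the same heap via core's blanket `&A: Allocator` impl.
    let mut values: Vec<u32, _> = Vec::new_in(&HEAP);
    values.push(*boxed);
}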
