// allocator.rs — RT-Thread GlobalAlloc allocator
/*
 * Copyright (c) 2006-2025, RT-Thread Development Team
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Change Logs:
 * Date         Author     Notes
 * 2025-10-10   foxglove   RT-Thread GlobalAlloc implementation
 */
use core::alloc::{GlobalAlloc, Layout};
use core::ffi::c_void;
use core::ptr;

use crate::api::mem::{mem_alloc, mem_alloc_aligned, mem_free, mem_free_aligned, mem_realloc};
use crate::panic::panic_on_atomic_context;
  15. pub struct RttAlloc;
  16. unsafe impl GlobalAlloc for RttAlloc {
  17. unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
  18. panic_on_atomic_context("alloc");
  19. let size = layout.size();
  20. let align = layout.align();
  21. /* Handle zero-sized allocations */
  22. if size == 0 {
  23. return ptr::null_mut();
  24. }
  25. /* Use aligned allocation if alignment is greater than default
  26. * RT-Thread's default alignment is typically 4 or 8 bytes */
  27. if align > 8 {
  28. match mem_alloc_aligned(size, align) {
  29. Some(ptr) => ptr as *mut u8,
  30. None => ptr::null_mut(),
  31. }
  32. } else {
  33. match mem_alloc(size) {
  34. Some(ptr) => ptr as *mut u8,
  35. None => ptr::null_mut(),
  36. }
  37. }
  38. }
  39. unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
  40. panic_on_atomic_context("dealloc");
  41. if ptr.is_null() {
  42. return;
  43. }
  44. let align = layout.align();
  45. /* Use aligned deallocation if the original allocation was aligned */
  46. if align > 8 {
  47. mem_free_aligned(ptr as *mut c_void);
  48. } else {
  49. mem_free(ptr as *mut c_void);
  50. }
  51. }
  52. unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
  53. panic_on_atomic_context("realloc");
  54. /* Handle zero-sized new allocation */
  55. if new_size == 0 {
  56. self.dealloc(ptr, layout);
  57. return ptr::null_mut();
  58. }
  59. /* Handle null pointer (equivalent to alloc) */
  60. if ptr.is_null() {
  61. let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
  62. return self.alloc(new_layout);
  63. }
  64. /* For aligned allocations, we need to handle realloc manually
  65. * since RT-Thread's rt_realloc may not preserve alignment */
  66. let align = layout.align();
  67. if align > 8 {
  68. let new_ptr = match mem_alloc_aligned(new_size, align) {
  69. Some(ptr) => ptr as *mut u8,
  70. None => return ptr::null_mut(),
  71. };
  72. /* Copy data from old to new */
  73. let copy_size = core::cmp::min(layout.size(), new_size);
  74. ptr::copy_nonoverlapping(ptr, new_ptr, copy_size);
  75. mem_free_aligned(ptr as *mut c_void);
  76. new_ptr
  77. } else {
  78. match mem_realloc(ptr as *mut c_void, new_size) {
  79. Some(new_ptr) => new_ptr as *mut u8,
  80. None => ptr::null_mut(),
  81. }
  82. }
  83. }
  84. }