alloc/alloc.rs

//! Memory allocation APIs

#![stable(feature = "alloc_module", since = "1.28.0")]

#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
pub use core::alloc::*;
#[cfg(not(test))]
use core::hint;
#[cfg(not(test))]
use core::ptr::{self, NonNull};

unsafe extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    // The rustc fork of LLVM 14 and earlier also special-cases these function names to be able to optimize them
    // like `malloc`, `realloc`, and `free`, respectively.
    #[rustc_allocator]
    #[rustc_nounwind]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    static __rust_no_alloc_shim_is_unstable: u8;
}
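
// For illustration (a sketch, not part of this module): a crate replaces the default
// global allocator by applying `#[global_allocator]` to a `static` whose type
// implements `GlobalAlloc`, for example std's `System` allocator:
//
//     use std::alloc::System;
//
//     #[global_allocator]
//     static GLOBAL: System = System;
//
// When such an item is present, rustc routes the `__rust_alloc` family of symbols
// declared above to that allocator; otherwise they fall back to std's default.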

/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
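///
/// # Examples
///
/// An illustrative sketch of allocating through `Global` directly (requires the
/// unstable `allocator_api` feature):
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::{handle_alloc_error, Allocator, Global, Layout};
///
/// let layout = Layout::new::<u64>();
/// // `allocate` returns a `NonNull<[u8]>` spanning at least `layout.size()` bytes.
/// let Ok(block) = Global.allocate(layout) else {
///     handle_alloc_error(layout);
/// };
/// // SAFETY: `block` was just allocated by `Global` with this layout.
/// unsafe { Global.deallocate(block.cast(), layout) };
/// ```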
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
#[cfg(not(test))]
// the compiler needs to know when a Box uses the global allocator vs a custom one
#[lang = "global_alloc_ty"]
pub struct Global;

#[cfg(test)]
pub use std::alloc::Global;

/// Allocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc(layout.size(), layout.align())
    }
}

/// Deallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `deallocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
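///
/// # Examples
///
/// A minimal sketch of pairing an allocation with its deallocation (illustrative):
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// let layout = Layout::array::<u8>(16).unwrap();
/// unsafe {
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///     // Every successful `alloc` must eventually be paired with exactly one
///     // `dealloc` using the same layout.
///     dealloc(ptr, layout);
/// }
/// ```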
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}

/// Reallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `grow` and `shrink` methods
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
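///
/// # Examples
///
/// A minimal sketch of growing an allocation (illustrative):
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, realloc, Layout};
///
/// unsafe {
///     let layout = Layout::array::<u8>(16).unwrap();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     // Grow the block from 16 to 32 bytes; the first 16 bytes are preserved.
///     let new_size = 32;
///     let new_layout = Layout::from_size_align(new_size, layout.align()).unwrap();
///     let new_ptr = realloc(ptr, layout, new_size);
///     if new_ptr.is_null() {
///         handle_alloc_error(new_layout);
///     }
///
///     // After a successful `realloc`, the block must be deallocated with the
///     // layout of the *new* size and the original alignment.
///     dealloc(new_ptr, new_layout);
/// }
/// ```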
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}

/// Allocates zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        core::ptr::read_volatile(&__rust_no_alloc_shim_is_unstable);

        __rust_alloc_zeroed(layout.size(), layout.align())
    }
}

#[cfg(not(test))]
impl Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    // SAFETY: Same as `Allocator::grow`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero because it is greater than or equal to `old_size`
            // (as required by the safety conditions) and `old_size` is non-zero in this arm.
            // Other conditions must be upheld by the caller.
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` implementations typically check `new_size >= old_layout.size()` (or
                // something similar); this hint lets such a check be optimized out.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            // SAFETY:
            // * We have checked that `layout` is non-zero in size.
            // * The caller is obligated to provide a layout that "fits", and in this case,
            //   "fit" always means a layout that is equal to the original, because our
            //   `allocate()`, `grow()`, and `shrink()` implementations never return a larger
            //   allocation than requested.
            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
            //   safety documentation.
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` implementations typically check `new_size <= old_layout.size()` (or
                // something similar); this hint lets such a check be optimized out.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}

/// The allocator for `Box`.
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[lang = "exchange_malloc"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
    match Global.allocate(layout) {
        Ok(ptr) => ptr.as_mut_ptr(),
        Err(_) => handle_alloc_error(layout),
    }
}

// # Allocation error handler

#[cfg(not(no_global_oom_handling))]
unsafe extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementation below (`__rdl_oom`) otherwise.
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}

/// Signals a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
/// * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies, as with any other panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
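///
/// # Examples
///
/// An illustrative sketch of diverging on allocation failure instead of panicking directly:
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// let layout = Layout::new::<[u8; 64]>();
/// let ptr = unsafe { alloc(layout) };
/// if ptr.is_null() {
///     // Diverges, reporting the failed `layout` to the registered error handler.
///     handle_alloc_error(layout);
/// }
/// // ... use the memory ...
/// unsafe { dealloc(ptr, layout) };
/// ```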
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[cold]
#[optimize(size)]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    #[inline]
    fn rt_error(layout: Layout) -> ! {
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(feature = "panic_immediate_abort"))]
    {
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    #[cfg(feature = "panic_immediate_abort")]
    ct_error(layout)
}

// For alloc test `std::alloc::handle_alloc_error` can be used directly.
#[cfg(all(not(no_global_oom_handling), test))]
pub use std::alloc::handle_alloc_error;

#[cfg(all(not(no_global_oom_handling), not(test)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        unsafe extern "Rust" {
            // This symbol is emitted by rustc next to __rust_alloc_error_handler.
            // Its value depends on the -Zoom={panic,abort} compiler option.
            static __rust_alloc_error_handler_should_panic: u8;
        }

        if unsafe { __rust_alloc_error_handler_should_panic != 0 } {
            panic!("memory allocation of {size} bytes failed")
        } else {
            core::panicking::panic_nounwind_fmt(
                format_args!("memory allocation of {size} bytes failed"),
                /* force_no_backtrace */ false,
            )
        }
    }
}