// alloc/alloc.rs
1//! Memory allocation APIs
2
3#![stable(feature = "alloc_module", since = "1.28.0")]
4
5#[stable(feature = "alloc_module", since = "1.28.0")]
6#[doc(inline)]
7pub use core::alloc::*;
8use core::hint;
9use core::ptr::{self, NonNull};
10
// SAFETY: the declared signatures match the allocator symbols rustc generates
// (see the comment inside the block for where they come from).
unsafe extern "Rust" {
    // These are the magic symbols to call the global allocator. rustc generates
    // them to call the global allocator if there is a `#[global_allocator]` attribute
    // (the code expanding that attribute macro generates those functions), or to call
    // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
    // otherwise.
    #[rustc_allocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    #[rustc_allocator_zeroed_variant = "__rust_alloc_zeroed"]
    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
    #[rustc_deallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    #[rustc_reallocator]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
    #[rustc_allocator_zeroed]
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;

    // Called from the stable `alloc`/`alloc_zeroed` wrappers below, so that
    // omitting the allocator shim cannot go unnoticed in stable code.
    #[rustc_nounwind]
    #[rustc_std_internal_symbol]
    fn __rust_no_alloc_shim_is_unstable_v2();
}
39
/// The global memory allocator.
///
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// Note: while this type is unstable, the functionality it provides can be
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
// the compiler needs to know when a Box uses the global allocator vs a custom one
#[lang = "global_alloc_ty"]
// Zero-sized marker type: all actual state lives in the registered global allocator.
pub struct Global;
53
/// Allocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     *(ptr as *mut u16) = 42;
///     assert_eq!(*(ptr as *mut u16), 42);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
    // SAFETY: the caller upholds `GlobalAlloc::alloc`'s contract (see above);
    // the shim-check symbol takes no arguments and has no preconditions.
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        __rust_alloc(layout.size(), layout.align())
    }
}
98
/// Deallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `deallocate` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::dealloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
    // SAFETY: the caller upholds `GlobalAlloc::dealloc`'s contract (see above).
    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}
117
/// Reallocates memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::realloc`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `grow` and `shrink` methods
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::realloc`].
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
    // SAFETY: the caller upholds `GlobalAlloc::realloc`'s contract (see above).
    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}
137
/// Allocates zero-initialized memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
/// of the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate’s default.
///
/// This function is expected to be deprecated in favor of the `allocate_zeroed` method
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
/// See [`GlobalAlloc::alloc_zeroed`].
///
/// # Examples
///
/// ```
/// use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error, Layout};
///
/// unsafe {
///     let layout = Layout::new::<u16>();
///     let ptr = alloc_zeroed(layout);
///     if ptr.is_null() {
///         handle_alloc_error(layout);
///     }
///
///     assert_eq!(*(ptr as *mut u16), 0);
///
///     dealloc(ptr, layout);
/// }
/// ```
#[stable(feature = "global_alloc", since = "1.28.0")]
#[must_use = "losing the pointer will leak memory"]
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
    // SAFETY: the caller upholds `GlobalAlloc::alloc_zeroed`'s contract (see above);
    // the shim-check symbol takes no arguments and has no preconditions.
    unsafe {
        // Make sure we don't accidentally allow omitting the allocator shim in
        // stable code until it is actually stabilized.
        __rust_no_alloc_shim_is_unstable_v2();

        __rust_alloc_zeroed(layout.size(), layout.align())
    }
}
181
impl Global {
    /// Shared implementation of `allocate` and `allocate_zeroed`: zero-size
    /// layouts get a well-aligned dangling pointer (never hitting the
    /// allocator); all other sizes go through the global allocator.
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
        match layout.size() {
            // Zero-sized requests are not passed to the allocator; a dangling
            // but aligned pointer stands in for them.
            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
            // SAFETY: `layout` is non-zero in size,
            size => unsafe {
                let raw_ptr = if zeroed { alloc_zeroed(layout) } else { alloc(layout) };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, size))
            },
        }
    }

    /// Shared implementation of `grow` and `grow_zeroed`.
    ///
    // SAFETY: Same as `Allocator::grow`
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_impl(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
        zeroed: bool,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        match old_layout.size() {
            // The old allocation is zero-sized (i.e. a dangling pointer from
            // `alloc_impl`), so growing is just a fresh allocation.
            0 => self.alloc_impl(new_layout, zeroed),

            // SAFETY: `new_size` is non-zero as `old_size` is greater than or equal to `new_size`
            // as required by safety conditions. Other conditions must be upheld by the caller
            old_size if old_layout.align() == new_layout.align() => unsafe {
                let new_size = new_layout.size();

                // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size >= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                if zeroed {
                    // `realloc` does not zero the newly grown tail; do it here.
                    raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                }
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
            // both the old and new memory allocation are valid for reads and writes for `old_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            old_size => unsafe {
                // The alignment changed, so `realloc` cannot be used:
                // allocate fresh, copy, then free the old block.
                let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
245
#[unstable(feature = "allocator_api", issue = "32838")]
// SAFETY: memory returned by `alloc_impl` stays valid until deallocated, and
// `Global` is a stateless ZST, so all clones behave identically.
unsafe impl Allocator for Global {
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, false)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.alloc_impl(layout, true)
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Zero-sized "allocations" were dangling pointers that never touched
        // the allocator, so there is nothing to free for them.
        if layout.size() != 0 {
            // SAFETY:
            // * We have checked that `layout` is non-zero in size.
            // * The caller is obligated to provide a layout that "fits", and in this case,
            //   "fit" always means a layout that is equal to the original, because our
            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
            //   allocation than requested.
            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
            //   safety documentation.
            unsafe { dealloc(ptr.as_ptr(), layout) }
        }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: all conditions must be upheld by the caller
        unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
    }

    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        match new_layout.size() {
            // Shrinking to zero: free the old block and hand back a dangling
            // pointer, mirroring `alloc_impl`'s zero-size convention.
            // SAFETY: conditions must be upheld by the caller
            0 => unsafe {
                self.deallocate(ptr, old_layout);
                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
            },

            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
            new_size if old_layout.align() == new_layout.align() => unsafe {
                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
                hint::assert_unchecked(new_size <= old_layout.size());

                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
            },

            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
            // both the old and new memory allocation are valid for reads and writes for `new_size`
            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
            // for `dealloc` must be upheld by the caller.
            new_size => unsafe {
                // The alignment changed, so `realloc` cannot be used:
                // allocate fresh, copy the surviving prefix, free the old block.
                let new_ptr = self.allocate(new_layout)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
                self.deallocate(ptr, old_layout);
                Ok(new_ptr)
            },
        }
    }
}
344
345/// The allocator for `Box`.
346#[cfg(not(no_global_oom_handling))]
347#[lang = "exchange_malloc"]
348#[inline]
349#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
350unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
351 let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
352 match Global.allocate(layout) {
353 Ok(ptr) => ptr.as_mut_ptr(),
354 Err(_) => handle_alloc_error(layout),
355 }
356}
357
358// # Allocation error handler
359
#[cfg(not(no_global_oom_handling))]
// SAFETY: the declared signature matches the handler symbol rustc generates.
unsafe extern "Rust" {
    // This is the magic symbol to call the global alloc error handler. rustc generates
    // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
    // default implementations below (`__rdl_oom`) otherwise.
    #[rustc_std_internal_symbol]
    fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
}
368
/// Signals a memory allocation error.
///
/// Callers of memory allocation APIs wishing to cease execution
/// in response to an allocation error are encouraged to call this function,
/// rather than directly invoking [`panic!`] or similar.
///
/// This function is guaranteed to diverge (not return normally with a value), but depending on
/// global configuration, it may either panic (resulting in unwinding or aborting as per
/// configuration for all panics), or abort the process (with no unwinding).
///
/// The default behavior is:
///
///  * If the binary links against `std` (typically the case), then
///   print a message to standard error and abort the process.
///   This behavior can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
///   Future versions of Rust may panic by default instead.
///
/// * If the binary does not link against `std` (all of its crates are marked
///   [`#![no_std]`][no_std]), then call [`panic!`] with a message.
///   [The panic handler] applies as to any panic.
///
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
/// [The panic handler]: https://doc.rust-lang.org/reference/runtime.html#the-panic_handler-attribute
/// [no_std]: https://doc.rust-lang.org/reference/names/preludes.html#the-no_std-attribute
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_alloc_error", issue = "92523")]
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
pub const fn handle_alloc_error(layout: Layout) -> ! {
    // Compile-time path: during const evaluation an allocation failure is
    // reported as a plain (const-evaluable) panic.
    const fn ct_error(_: Layout) -> ! {
        panic!("allocation failed");
    }

    // Runtime path: forward to the registered (or default) alloc error handler.
    #[inline]
    fn rt_error(layout: Layout) -> ! {
        // SAFETY: the handler symbol is provided by rustc (see the extern
        // block above) and diverges.
        unsafe {
            __rust_alloc_error_handler(layout.size(), layout.align());
        }
    }

    #[cfg(not(feature = "panic_immediate_abort"))]
    {
        // Dispatch to `ct_error` in const contexts and `rt_error` at runtime.
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
    }

    // With `panic_immediate_abort`, skip the handler machinery entirely.
    #[cfg(feature = "panic_immediate_abort")]
    ct_error(layout)
}
419
#[cfg(not(no_global_oom_handling))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
pub mod __alloc_error_handler {
    // called via generated `__rust_alloc_error_handler` if there is no
    // `#[alloc_error_handler]`.
    #[rustc_std_internal_symbol]
    pub unsafe fn __rdl_oom(size: usize, _align: usize) -> ! {
        unsafe extern "Rust" {
            // This symbol is emitted by rustc next to __rust_alloc_error_handler.
            // Its value depends on the -Zoom={panic,abort} compiler option.
            #[rustc_std_internal_symbol]
            fn __rust_alloc_error_handler_should_panic_v2() -> u8;
        }

        // SAFETY: the symbol above is a nullary function provided by rustc.
        if unsafe { __rust_alloc_error_handler_should_panic_v2() != 0 } {
            // OOM configured to panic: a normal, potentially unwinding panic.
            panic!("memory allocation of {size} bytes failed")
        } else {
            // OOM configured to abort: panic without unwinding.
            core::panicking::panic_nounwind_fmt(
                format_args!("memory allocation of {size} bytes failed"),
                /* force_no_backtrace */ false,
            )
        }
    }
}