// core/slice/cmp.rs

//! Comparison traits for `[T]`.

#[cfg(not(feature = "ferrocene_subset"))]
use super::{from_raw_parts, memchr};
#[cfg(not(feature = "ferrocene_subset"))]
use crate::ascii;
#[cfg(not(feature = "ferrocene_subset"))]
use crate::cmp::{self, BytewiseEq, Ordering};
#[cfg(not(feature = "ferrocene_subset"))]
use crate::intrinsics::compare_bytes;
#[cfg(not(feature = "ferrocene_subset"))]
use crate::num::NonZero;
#[cfg(not(feature = "ferrocene_subset"))]
use crate::ops::ControlFlow;

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T, U> const PartialEq<[U]> for [T]
where
    T: [const] PartialEq<U>,
{
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        SlicePartialEq::equal(self, other)
    }
}
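
// For illustration, this is the impl behind cross-type slice equality such as the
// following (a sketch using std types, relying on the existing `String: PartialEq<&str>`
// impl):
//
//     let owned: &[String] = &["a".to_string(), "b".to_string()];
//     let borrowed: &[&str] = &["a", "b"];
//     assert!(owned == borrowed); // dispatches to `SlicePartialEq::equal`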

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] Eq> const Eq for [T] {}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}
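
// For illustration, the lexicographic behaviour documented above (an informal sketch):
//
//     [1, 2, 3][..].cmp(&[1, 2, 4][..]) == Ordering::Less     // first unequal element decides
//     [1, 2, 3][..].cmp(&[1, 2][..])    == Ordering::Greater  // a strict prefix is smaller
//     [1, 2, 3][..].cmp(&[1, 2, 3][..]) == Ordering::Equal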

#[inline]
#[cfg(not(feature = "ferrocene_subset"))]
const fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
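
// For illustration, the mapping this relies on, following the reasoning in the SAFETY
// comment above (not a separately guaranteed layout):
//
//     as_underlying(ControlFlow::Break(false)) == 0
//     as_underlying(ControlFlow::Break(true))  == 1
//     as_underlying(ControlFlow::Continue(())) // the niche value, neither 0 nor 1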

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<T: PartialOrd> PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}
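
// For illustration (a sketch assuming the standard `PartialOrd` for `f64`, which is not
// `Ord` and therefore takes the unspecialized path): the comparison short-circuits on
// the first non-equal pair, and an incomparable pair makes the whole result `None`:
//
//     [1.0, 2.0][..].partial_cmp(&[1.0, 3.0][..])      == Some(Ordering::Less)
//     [1.0, f64::NAN][..].partial_cmp(&[1.0, 2.0][..]) == None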

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;
}

// Generic slice equality
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: [const] PartialEq<B>,
{
    // It's not worth trying to inline the loops underneath here *in MIR*,
    // and preventing it encourages more useful inlining upstream,
    // such as in `<str as PartialEq>::eq`.
    // The codegen backend can still inline it later if needed.
    #[rustc_no_mir_inline]
    default fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..self.len()` loop.
        let mut idx = 0;
        while idx < self.len() {
            // bound checks are optimized away
            if self[idx] != other[idx] {
                return false;
            }
            idx += 1;
        }

        true
    }
}
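
// For reference, the shape the FIXME(const_hack) above is aiming for once `for` loops
// are usable in this const context (an equivalent sketch, not a drop-in change today):
//
//     for idx in 0..self.len() {
//         if self[idx] != other[idx] {
//             return false;
//         }
//     }
//     true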

// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: [const] BytewiseEq<B>,
{
    // This is usually a pretty good backend inlining candidate because the
    // intrinsic tends to just be `memcmp`. However, as of 2025-12, letting
    // MIR inline this makes reuse worse because it means that, for example,
    // `String::eq` doesn't inline, whereas by keeping this out of MIR inlining
    // all the wrappers up to the call to this one disappear. If the heuristics
    // have changed and this is no longer fruitful, though, please do remove it.
    // In the meantime, it's fine to not inline it in MIR because the backend
    // will still inline it if it thinks it's important to do so.
    #[rustc_no_mir_inline]
    #[inline]
    fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
        // The two slices have been checked to have the same size above.
        unsafe {
            let size = size_of_val(self);
            compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
        }
    }
}
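
// For illustration: with this specialization, an equality check such as
//
//     let a: &[u8] = b"hello";
//     let b: &[u8] = b"hello";
//     assert!(a == b);
//
// lowers to a single `compare_bytes` call over the five bytes rather than an
// element-by-element loop (assuming `u8: BytewiseEq<u8>`, as for the other
// byte-wise-comparable primitives).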

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
// intermediate trait for specialization of slice's PartialOrd
const trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
// intermediate trait for specialization of slice's PartialOrd chaining methods
const trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}

#[cfg(not(feature = "ferrocene_subset"))]
type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;

#[cfg(not(feature = "ferrocene_subset"))]
impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
impl<A: PartialOrd> SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}

#[inline]
#[cfg(not(feature = "ferrocene_subset"))]
fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow<B>,
    len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C>,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bound check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    for i in 0..l {
        elem_chain(&lhs[i], &rhs[i])?;
    }

    len_chain(&left.len(), &right.len())
}
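
// For illustration, how `chaining_impl` behaves (an informal trace):
//
//     // equal common prefix: every `elem_chain` call is `Continue(())`, so the result
//     // comes from `len_chain(&3, &2)`:
//     chaining_impl(&[1.0, 2.0, 3.0], &[1.0, 2.0], elem_chain, len_chain)
//     // mismatch at index 1: `elem_chain` breaks and the `?` operator propagates that
//     // `Break` before the lengths are ever consulted:
//     chaining_impl(&[1.0, 9.0], &[1.0, 2.0, 3.0], elem_chain, len_chain)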

// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
    A: Ord,
{
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
*/

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<A: [const] AlwaysApplicableOrd> const SlicePartialOrd for A {
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}

#[rustc_specialization_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
const trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {}

#[cfg(not(feature = "ferrocene_subset"))]
macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}
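
// For illustration, the invocation above expands to one impl per entry, with the
// bracketed tokens becoming the generic parameters (two representative expansions):
//
//     impl<> AlwaysApplicableOrd for u8 {}
//     impl<T: ?Sized> AlwaysApplicableOrd for *const T {}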

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
// intermediate trait for specialization of slice's Ord
const trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

#[cfg(not(feature = "ferrocene_subset"))]
impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let elem_chain = |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as a `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord::cmp(x, y)` must return the same
///   value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
#[cfg(not(feature = "ferrocene_subset"))]
const unsafe trait UnsignedBytewiseOrd: [const] Ord {}

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for bool {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for u8 {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for NonZero<u8> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for Option<NonZero<u8>> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for ascii::Char {}
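
// For illustration of the safety contract above (informal reasoning): `bool` qualifies
// because `false` is the byte 0 and `true` is the byte 1, so its `Ord` agrees with
// unsigned byte order. A byte-sized type like `i8` does *not* qualify, because its
// signed order disagrees with the order of the underlying bytes:
//
//     (-1i8).cmp(&1i8)  == Ordering::Less     // signed comparison
//     (255u8).cmp(&1u8) == Ordering::Greater  // the same byte pattern, unsigned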

// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<A: [const] Ord + [const] UnsignedBytewiseOrd> const SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // isize::MAX, this never underflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
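
// For illustration, a trace of the prefix/length logic above (informal sketch): for
// `b"ab"` vs `b"abc"`, `len` is 2 and `compare_bytes` returns 0 over the equal prefix,
// so `order` falls back to `diff = 2 - 3 = -1` and the result is `Ordering::Less`. For
// `b"az"` vs `b"abc"`, `compare_bytes` is already positive at the second byte, so the
// length difference never matters.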

// Don't generate our own chaining loops for `memcmp`-able things either.

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<A: [const] PartialOrd + [const] UnsignedBytewiseOrd> const SliceChain for A {
    #[inline]
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_lt()),
        }
    }
    #[inline]
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_le()),
        }
    }
    #[inline]
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_gt()),
        }
    }
    #[inline]
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_ge()),
        }
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}

#[cfg(not(feature = "ferrocene_subset"))]
impl<T> SliceContains for T
where
    T: PartialEq,
{
    default fn slice_contains(&self, x: &[Self]) -> bool {
        x.iter().any(|y| *y == *self)
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        memchr::memchr(*self, x).is_some()
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}
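
// For illustration (assuming `<[T]>::contains` forwards to this `pub(super)` trait from
// elsewhere in the module): a byte search such as
//
//     b"hello".contains(&b'l')
//
// goes through the `u8` impl above and becomes a single `memchr` call instead of a
// per-element equality loop; the `i8` impl reuses that path by reinterpreting the slice
// as bytes.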

#[cfg(not(feature = "ferrocene_subset"))]
macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128 bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

#[cfg(not(feature = "ferrocene_subset"))]
impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);
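
// For illustration of the LANE_COUNT arithmetic above (assuming the usual primitive
// sizes): for `u32`, `LANE_COUNT = 4 * (128 / (4 * 8)) = 16`, so the loop examines 16
// elements per chunk with a branch-free `|` fold, and the tail of fewer than 16
// elements is handled by the scalar `any` over `chunks.remainder()`.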