//! Comparison traits for `[T]`.

#[cfg(not(feature = "ferrocene_subset"))]
use super::{from_raw_parts, memchr};
#[cfg(not(feature = "ferrocene_subset"))]
use crate::ascii;
#[cfg(not(feature = "ferrocene_subset"))]
use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::mem::SizedTypeProperties;
#[cfg(not(feature = "ferrocene_subset"))]
use crate::num::NonZero;
#[cfg(not(feature = "ferrocene_subset"))]
use crate::ops::ControlFlow;

// Ferrocene addition: Imports for the certified subset
#[rustfmt::skip]
#[cfg(feature = "ferrocene_subset")]
use crate::cmp::BytewiseEq;
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T, U> const PartialEq<[U]> for [T]
where
    T: [const] PartialEq<U>,
{
    // Slices of different lengths are never equal; only when the lengths match
    // are the elements compared (via the specializable `SlicePartialEq`).
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        let len = self.len();
        if len == other.len() {
            // SAFETY: Just checked that they're the same length, and the pointers
            // come from references-to-slices so they're guaranteed readable.
            unsafe { SlicePartialEq::equal_same_length(self.as_ptr(), other.as_ptr(), len) }
        } else {
            false
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// `Eq` is a marker trait with no methods, so the empty body is the whole impl.
impl<T: [const] Eq> const Eq for [T] {}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        // Delegate to the specializable `SliceOrd` helper so bytewise-comparable
        // element types can take the `compare_bytes` fast path.
        SliceOrd::compare(self, other)
    }
}

// Extracts the single-byte representation of a `ControlFlow<bool>` so the
// `PartialOrd` methods below can test for the raw `Break(false)`/`Break(true)`
// byte values (`0`/`1`) without a discriminant check.
#[inline]
#[cfg(not(feature = "ferrocene_subset"))]
const fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<T: PartialOrd> PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        // A raw byte of `1` can only be `Break(true)` (see `as_underlying`).
        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        // A raw byte of `0` can only be `Break(false)`, so `!= 0` accepts both
        // `Break(true)` and `Continue(())` (everything compared equal).
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    // The `__chaining_*` hooks forward to the specializable `SliceChain` helper
    // so bytewise-comparable element types get the single-`memcmp` fast path.
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
    /// Compares two equal-length element runs for element-wise equality.
    ///
    /// # Safety
    /// `lhs` and `rhs` are both readable for `len` elements
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool;
}

// Generic slice equality: the fallback for any `PartialEq` element pairing,
// comparing one element at a time with an early exit on the first mismatch.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] PartialEq<B>,
{
    // It's not worth trying to inline the loops underneath here *in MIR*,
    // and preventing it encourages more useful inlining upstream,
    // such as in `<str as PartialEq>::eq`.
    // The codegen backend can still inline it later if needed.
    #[rustc_no_mir_inline]
    default unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..len` loop.
        let mut idx = 0;
        while idx < len {
            // SAFETY: idx < len, so both are in-bounds and readable
            if unsafe { *lhs.add(idx) != *rhs.add(idx) } {
                // First mismatching pair decides: not equal.
                return false;
            }
            idx += 1;
        }

        true
    }
}

// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for A
where
    A: [const] BytewiseEq<B>,
{
    #[inline]
    unsafe fn equal_same_length(lhs: *const Self, rhs: *const B, len: usize) -> bool {
        // SAFETY: by our precondition, `lhs` and `rhs` are guaranteed to be valid
        // for reading `len` values, which also means the size is guaranteed
        // not to overflow because it exists in memory;
        unsafe {
            // Total byte count = element count * element size.
            let size = crate::intrinsics::unchecked_mul(len, Self::SIZE);
            // `compare_bytes` has memcmp semantics: zero means the runs are equal.
            compare_bytes(lhs as _, rhs as _, size) == 0
        }
    }
}

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
// intermediate trait for specialization of slice's PartialOrd
const trait SlicePartialOrd: Sized {
    // Lexicographic comparison; `None` when some element pair is unordered.
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
// intermediate trait for specialization of slice's PartialOrd chaining methods
// (`Continue(())` means "all equal so far"; `Break(b)` is the final answer).
const trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}

// A `ControlFlow` whose continue type is uninhabited, so any value must be
// `Break`; this makes `let AlwaysBreak::Break(b) = …` an irrefutable pattern.
#[cfg(not(feature = "ferrocene_subset"))]
type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;

#[cfg(not(feature = "ferrocene_subset"))]
impl<A: PartialOrd> SlicePartialOrd for A {
    // Generic fallback: element-by-element `partial_cmp`, then lengths break ties.
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        // Keep going only while element pairs compare equal.
        let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        // If all shared elements were equal, the length comparison decides.
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
impl<A: PartialOrd> SliceChain for A {
    // Generic fallbacks: walk the elements with the matching `__chaining_*`
    // hook, then resolve ties by chaining on the two lengths the same way.
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}

// Shared driver for the lexicographic comparisons above: runs `elem_chain`
// over the common prefix and, if every pair continued, lets `len_chain`
// decide based on the two lengths.
#[inline]
#[cfg(not(feature = "ferrocene_subset"))]
fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow<B>,
    len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C>,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bound check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    for i in 0..l {
        // `?` propagates the first `Break`, i.e. the first decisive element pair.
        elem_chain(&lhs[i], &rhs[i])?;
    }

    len_chain(&left.len(), &right.len())
}

// This is the impl that we would like to have. Unfortunately it's not sound.
// See `partial_ord_slice.rs`.
/*
impl<A> SlicePartialOrd for A
where
    A: Ord,
{
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}
*/

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<A: [const] AlwaysApplicableOrd> const SlicePartialOrd for A {
    // For the vetted total orders this is the sound version of the
    // commented-out impl above: defer to the total `SliceOrd::compare`.
    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        Some(SliceOrd::compare(left, right))
    }
}

// Types for which deriving `partial_cmp` from the total `Ord` is known to be
// sound; implementors are enumerated by the macro invocation below.
#[rustc_specialization_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
const trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {}

#[cfg(not(feature = "ferrocene_subset"))]
// Implements `AlwaysApplicableOrd` for each `$t`, where `$p` is the (possibly
// empty) generic-parameter list of the corresponding impl.
macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(impl<$($p)*> AlwaysApplicableOrd for $t {})*
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: AlwaysApplicableOrd] &T,
    [T: AlwaysApplicableOrd] &mut T,
    [T: AlwaysApplicableOrd] Option<T>,
}

#[doc(hidden)]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
// intermediate trait for specialization of slice's Ord
const trait SliceOrd: Sized {
    // Total lexicographic comparison of two slices.
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

#[cfg(not(feature = "ferrocene_subset"))]
impl<A: Ord> SliceOrd for A {
    // Generic fallback: element-by-element `Ord::cmp`, with lengths as the
    // tie-breaker (so a shared prefix makes the shorter slice compare less).
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Keep going only while element pairs compare equal.
        let elem_chain = |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        // If the common prefix was entirely equal, compare the lengths.
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as an `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `Ord(x, y)` must return the same
///   value as `Ord::cmp(transmute::<_, u8>(x), transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
#[cfg(not(feature = "ferrocene_subset"))]
const unsafe trait UnsignedBytewiseOrd: [const] Ord {}

// Each impl below is a single-byte type whose `Ord` agrees with the unsigned
// order of its byte representation, satisfying the trait's safety contract.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for bool {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for u8 {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for NonZero<u8> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for Option<NonZero<u8>> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
unsafe impl const UnsignedBytewiseOrd for ascii::Char {}

// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
#[cfg(not(feature = "ferrocene_subset"))]
impl<A: [const] Ord + [const] UnsignedBytewiseOrd> const SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // isize::MAX, this never underflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        // (Elements have `u8` layout, so `len` elements is exactly `len` bytes.)
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            // Common prefix is equal: the length difference decides, so the
            // shorter slice compares less, matching lexicographic order.
            order = diff;
        }
        // Collapse the signed result into `Less`/`Equal`/`Greater`.
        order.cmp(&0)
    }
}

365// Don't generate our own chaining loops for `memcmp`-able things either.
366
367#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
368#[cfg(not(feature = "ferrocene_subset"))]
369impl<A: [const] PartialOrd + [const] UnsignedBytewiseOrd> const SliceChain for A {
370    #[inline]
371    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
372        match SliceOrd::compare(left, right) {
373            Ordering::Equal => ControlFlow::Continue(()),
374            ne => ControlFlow::Break(ne.is_lt()),
375        }
376    }
377    #[inline]
378    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
379        match SliceOrd::compare(left, right) {
380            Ordering::Equal => ControlFlow::Continue(()),
381            ne => ControlFlow::Break(ne.is_le()),
382        }
383    }
384    #[inline]
385    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
386        match SliceOrd::compare(left, right) {
387            Ordering::Equal => ControlFlow::Continue(()),
388            ne => ControlFlow::Break(ne.is_gt()),
389        }
390    }
391    #[inline]
392    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
393        match SliceOrd::compare(left, right) {
394            Ordering::Equal => ControlFlow::Continue(()),
395            ne => ControlFlow::Break(ne.is_ge()),
396        }
397    }
398}
399
#[cfg(not(feature = "ferrocene_subset"))]
// Specializable membership test: `self` is the needle, `x` the haystack.
// Exposed `pub(super)` — presumably backing `[T]::contains` in the parent
// module (TODO confirm against the caller).
pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}

405#[cfg(not(feature = "ferrocene_subset"))]
406impl<T> SliceContains for T
407where
408    T: PartialEq,
409{
410    default fn slice_contains(&self, x: &[Self]) -> bool {
411        x.iter().any(|y| *y == *self)
412    }
413}
414
#[cfg(not(feature = "ferrocene_subset"))]
impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        // Specialization: defer to the optimized byte search.
        memchr::memchr(*self, x).is_some()
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        // Reinterpret needle and haystack as unsigned bytes: `i8` equality is
        // bit-pattern equality, so `memchr` over the bytes gives the same answer.
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}

#[cfg(not(feature = "ferrocene_subset"))]
// Implements `SliceContains` for primitives with cheap equality, scanning in
// fixed-size chunks shaped for auto-vectorization rather than one element at
// a time.
macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128 bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        // `fold` with `|` (instead of short-circuiting `any`)
                        // keeps the chunk check branch-free so it can vectorize.
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

#[cfg(not(feature = "ferrocene_subset"))]
impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);