core/slice/cmp.rs

//! Comparison traits for `[T]`.

use super::{from_raw_parts, memchr};
use crate::ascii;
use crate::cmp::{self, BytewiseEq, Ordering};
use crate::intrinsics::compare_bytes;
use crate::num::NonZero;
use crate::ops::ControlFlow;

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T, U> const PartialEq<[U]> for [T]
where
    T: [const] PartialEq<U>,
{
    fn eq(&self, other: &[U]) -> bool {
        SlicePartialEq::equal(self, other)
    }

    fn ne(&self, other: &[U]) -> bool {
        SlicePartialEq::not_equal(self, other)
    }
}
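
// Illustratively (a sketch, not part of this module): the cross-type impl
// above is what lets a slice of `String` compare against a slice of `&str`,
// since `String: PartialEq<&str>`:
//
//     let owned = [String::from("a"), String::from("b")];
//     assert_eq!(owned[..], ["a", "b"][..]);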

#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<T: [const] Eq> const Eq for [T] {}

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
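///
/// Slices compare element by element; if one slice is a prefix of the
/// other, the shorter slice compares less. For example:
///
/// ```
/// use std::cmp::Ordering;
///
/// assert_eq!([1, 2, 3].cmp(&[1, 2, 4]), Ordering::Less);
/// assert_eq!([1, 2].as_slice().cmp([1, 2, 3].as_slice()), Ordering::Less);
/// ```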
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Ord for [T] {
    fn cmp(&self, other: &[T]) -> Ordering {
        SliceOrd::compare(self, other)
    }
}

#[inline]
const fn as_underlying(x: ControlFlow<bool>) -> u8 {
    // SAFETY: This will only compile if `bool` and `ControlFlow<bool>` have the same
    // size (which isn't guaranteed but this is libcore). Because they have the same
    // size, it's a niched implementation, which in one byte means there can't be
    // any uninitialized memory. The callers then only check for `0` or `1` from this,
    // which must necessarily match the `Break` variant, and we're fine no matter
    // what ends up getting picked as the value representing `Continue(())`.
    unsafe { crate::mem::transmute(x) }
}
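
// A minimal compile-time sketch of the size assumption above; the exact
// niche value chosen for `Continue(())` is still up to the compiler.
const _: () = assert!(size_of::<ControlFlow<bool>>() == size_of::<u8>());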

/// Implements comparison of slices [lexicographically](Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd> PartialOrd for [T] {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<Ordering> {
        SlicePartialOrd::partial_compare(self, other)
    }
    #[inline]
    fn lt(&self, other: &Self) -> bool {
        // This is certainly not the obvious way to implement these methods.
        // Unfortunately, using anything that looks at the discriminant means that
        // LLVM sees a check for `2` (aka `ControlFlow<bool>::Continue(())`) and
        // gets very distracted by that, ending up generating extraneous code.
        // This should be changed to something simpler once either LLVM is smarter,
        // see <https://github.com/llvm/llvm-project/issues/132678>, or we generate
        // niche discriminant checks in a way that doesn't trigger it.

        as_underlying(self.__chaining_lt(other)) == 1
    }
    #[inline]
    fn le(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_le(other)) != 0
    }
    #[inline]
    fn gt(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_gt(other)) == 1
    }
    #[inline]
    fn ge(&self, other: &Self) -> bool {
        as_underlying(self.__chaining_ge(other)) != 0
    }
    #[inline]
    fn __chaining_lt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_lt(self, other)
    }
    #[inline]
    fn __chaining_le(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_le(self, other)
    }
    #[inline]
    fn __chaining_gt(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_gt(self, other)
    }
    #[inline]
    fn __chaining_ge(&self, other: &Self) -> ControlFlow<bool> {
        SliceChain::chaining_ge(self, other)
    }
}
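
// Concretely (a sketch of the mapping the transmute relies on):
// `Break(false)` reads back as 0, `Break(true)` as 1, and `Continue(())` as
// the compiler-chosen niche value. So `== 1` asks "did the chain decide
// `true`?", while `!= 0` accepts either `Break(true)` or the fully-equal
// `Continue` case, which is exactly when `le`/`ge` should also hold.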

#[doc(hidden)]
// intermediate trait for specialization of slice's PartialEq
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
const trait SlicePartialEq<B> {
    fn equal(&self, other: &[B]) -> bool;

    fn not_equal(&self, other: &[B]) -> bool {
        !self.equal(other)
    }
}

// Generic slice equality
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: [const] PartialEq<B>,
{
    default fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // Implemented as explicit indexing rather
        // than zipped iterators for performance reasons.
        // See PR https://github.com/rust-lang/rust/pull/116846
        // FIXME(const_hack): make this a `for idx in 0..self.len()` loop.
        let mut idx = 0;
        while idx < self.len() {
            // bounds checks are optimized away
            if self[idx] != other[idx] {
                return false;
            }
            idx += 1;
        }

        true
    }
}

// When each element can be compared byte-wise, we can compare all the bytes
// from the whole size in one call to the intrinsics.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A, B> const SlicePartialEq<B> for [A]
where
    A: [const] BytewiseEq<B>,
{
    fn equal(&self, other: &[B]) -> bool {
        if self.len() != other.len() {
            return false;
        }

        // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
        // The two slices have been checked to have the same size above.
        unsafe {
            let size = size_of_val(self);
            compare_bytes(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
        }
    }
}
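
// For instance (assuming `u16: BytewiseEq<u16>`, as for the other primitive
// integers), two `&[u16]` slices of length 3 are compared with a single
// `compare_bytes` call over `size_of_val(self) == 3 * size_of::<u16>() == 6`
// bytes instead of an element-by-element loop.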

#[doc(hidden)]
#[const_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's PartialOrd
trait SlicePartialOrd: Sized {
    fn partial_compare(left: &[Self], right: &[Self]) -> Option<Ordering>;
}

#[doc(hidden)]
#[const_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's PartialOrd chaining methods
trait SliceChain: Sized {
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool>;
}
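
// Convention for the chaining methods: `Continue(())` means "equal so far,
// keep chaining", while `Break(b)` carries the final answer `b` for the
// comparison being evaluated.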

type AlwaysBreak<B> = ControlFlow<B, crate::convert::Infallible>;

impl<A: PartialOrd> SlicePartialOrd for A {
    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
        let elem_chain = |a, b| match PartialOrd::partial_cmp(a, b) {
            Some(Ordering::Equal) => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::partial_cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

impl<A: PartialOrd> SliceChain for A {
    default fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_lt, usize::__chaining_lt)
    }
    default fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_le, usize::__chaining_le)
    }
    default fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_gt, usize::__chaining_gt)
    }
    default fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        chaining_impl(left, right, PartialOrd::__chaining_ge, usize::__chaining_ge)
    }
}

#[inline]
fn chaining_impl<'l, 'r, A: PartialOrd, B, C>(
    left: &'l [A],
    right: &'r [A],
    elem_chain: impl Fn(&'l A, &'r A) -> ControlFlow<B>,
    len_chain: impl for<'a> FnOnce(&'a usize, &'a usize) -> ControlFlow<B, C>,
) -> ControlFlow<B, C> {
    let l = cmp::min(left.len(), right.len());

    // Slice to the loop iteration range to enable bounds-check
    // elimination in the compiler
    let lhs = &left[..l];
    let rhs = &right[..l];

    for i in 0..l {
        elem_chain(&lhs[i], &rhs[i])?;
    }

    len_chain(&left.len(), &right.len())
}
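
// Worked example (a sketch): `partial_compare(&[1, 2], &[1, 3])` runs
// `elem_chain` on `(1, 1)` (Continue), then on `(2, 3)`, which breaks with
// `Some(Less)` before `len_chain` is consulted. For `&[1, 2]` versus
// `&[1, 2, 9]` the shared prefix is exhausted and `len_chain(&2, &3)`
// decides `Some(Less)`.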
225
226// This is the impl that we would like to have. Unfortunately it's not sound.
227// See `partial_ord_slice.rs`.
228/*
229impl<A> SlicePartialOrd for A
230where
231    A: Ord,
232{
233    default fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
234        Some(SliceOrd::compare(left, right))
235    }
236}
237*/
238
239#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
240impl<A: [const] AlwaysApplicableOrd> const SlicePartialOrd for A {
241    fn partial_compare(left: &[A], right: &[A]) -> Option<Ordering> {
242        Some(SliceOrd::compare(left, right))
243    }
244}
245
246#[rustc_specialization_trait]
247#[const_trait]
248#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
249trait AlwaysApplicableOrd: [const] SliceOrd + [const] Ord {}
250
macro_rules! always_applicable_ord {
    ($([$($p:tt)*] $t:ty,)*) => {
        $(
            #[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
            impl<$($p)*> const AlwaysApplicableOrd for $t {}
        )*
    }
}

always_applicable_ord! {
    [] u8, [] u16, [] u32, [] u64, [] u128, [] usize,
    [] i8, [] i16, [] i32, [] i64, [] i128, [] isize,
    [] bool, [] char,
    [T: ?Sized] *const T, [T: ?Sized] *mut T,
    [T: [const] AlwaysApplicableOrd] &T,
    [T: [const] AlwaysApplicableOrd] &mut T,
    [T: [const] AlwaysApplicableOrd] Option<T>,
}
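
// For reference, the first arm of the invocation above expands to roughly:
//
//     #[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
//     impl const AlwaysApplicableOrd for u8 {}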

#[doc(hidden)]
#[const_trait]
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
// intermediate trait for specialization of slice's Ord
trait SliceOrd: Sized {
    fn compare(left: &[Self], right: &[Self]) -> Ordering;
}

impl<A: Ord> SliceOrd for A {
    default fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let elem_chain = |a, b| match Ord::cmp(a, b) {
            Ordering::Equal => ControlFlow::Continue(()),
            non_eq => ControlFlow::Break(non_eq),
        };
        let len_chain = |a: &_, b: &_| ControlFlow::Break(usize::cmp(a, b));
        let AlwaysBreak::Break(b) = chaining_impl(left, right, elem_chain, len_chain);
        b
    }
}

/// Marks that a type should be treated as an unsigned byte for comparisons.
///
/// # Safety
/// * The type must be readable as a `u8`, meaning it has to have the same
///   layout as `u8` and always be initialized.
/// * For every `x` and `y` of this type, `x.cmp(&y)` must return the same
///   value as `transmute::<_, u8>(x).cmp(&transmute::<_, u8>(y))`.
#[rustc_specialization_trait]
#[const_trait]
unsafe trait UnsignedBytewiseOrd: [const] Ord {}

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for bool {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for u8 {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for NonZero<u8> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for Option<NonZero<u8>> {}
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
unsafe impl const UnsignedBytewiseOrd for ascii::Char {}
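
// `Option<NonZero<u8>>` qualifies because the niche layout stores `None` as 0
// and `Some(n)` as `n >= 1`, and `Option`'s derived order (`None` before
// `Some`, then by inner value) coincides with unsigned byte order on those
// representations; `bool` (`false` = 0, `true` = 1) works out the same way.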

// `compare_bytes` compares a sequence of unsigned bytes lexicographically, so
// use it if the requirements for `UnsignedBytewiseOrd` are fulfilled.
#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] Ord + [const] UnsignedBytewiseOrd> const SliceOrd for A {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        // Since the length of a slice is always less than or equal to
        // `isize::MAX`, this subtraction never overflows.
        let diff = left.len() as isize - right.len() as isize;
        // This comparison gets optimized away (on x86_64 and ARM) because the
        // subtraction updates flags.
        let len = if left.len() < right.len() { left.len() } else { right.len() };
        let left = left.as_ptr().cast();
        let right = right.as_ptr().cast();
        // SAFETY: `left` and `right` are references and are thus guaranteed to
        // be valid. `UnsignedBytewiseOrd` is only implemented for types that
        // are valid u8s and can be compared the same way. We use the minimum
        // of both lengths which guarantees that both regions are valid for
        // reads in that interval.
        let mut order = unsafe { compare_bytes(left, right, len) as isize };
        if order == 0 {
            order = diff;
        }
        order.cmp(&0)
    }
}
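
// Worked sketch: with `left.len() == 2` and `right.len() == 4` but equal
// leading bytes, `compare_bytes` over `len == 2` returns 0, so `order` falls
// back to `diff == -2` and `order.cmp(&0)` yields `Less`. Any byte mismatch
// in the shared prefix decides the ordering before lengths are consulted.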

// Don't generate our own chaining loops for `memcmp`-able things either.

#[rustc_const_unstable(feature = "const_cmp", issue = "143800")]
impl<A: [const] PartialOrd + [const] UnsignedBytewiseOrd> const SliceChain for A {
    #[inline]
    fn chaining_lt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_lt()),
        }
    }
    #[inline]
    fn chaining_le(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_le()),
        }
    }
    #[inline]
    fn chaining_gt(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_gt()),
        }
    }
    #[inline]
    fn chaining_ge(left: &[Self], right: &[Self]) -> ControlFlow<bool> {
        match SliceOrd::compare(left, right) {
            Ordering::Equal => ControlFlow::Continue(()),
            ne => ControlFlow::Break(ne.is_ge()),
        }
    }
}

pub(super) trait SliceContains: Sized {
    fn slice_contains(&self, x: &[Self]) -> bool;
}
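
// `SliceContains` backs `<[T]>::contains`: the blanket impl below is the
// generic path, while the impls after it route `u8`/`i8` through `memchr`
// and the wider primitives through the unrolled chunked loop generated at
// the bottom of this file.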

impl<T> SliceContains for T
where
    T: PartialEq,
{
    default fn slice_contains(&self, x: &[Self]) -> bool {
        x.iter().any(|y| *y == *self)
    }
}

impl SliceContains for u8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        memchr::memchr(*self, x).is_some()
    }
}

impl SliceContains for i8 {
    #[inline]
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }
}

macro_rules! impl_slice_contains {
    ($($t:ty),*) => {
        $(
            impl SliceContains for $t {
                #[inline]
                fn slice_contains(&self, arr: &[$t]) -> bool {
                    // Make our LANE_COUNT 4x the normal lane count (aiming for 128-bit vectors).
                    // The compiler will nicely unroll it.
                    const LANE_COUNT: usize = 4 * (128 / (size_of::<$t>() * 8));
                    // SIMD
                    let mut chunks = arr.chunks_exact(LANE_COUNT);
                    for chunk in &mut chunks {
                        if chunk.iter().fold(false, |acc, x| acc | (*x == *self)) {
                            return true;
                        }
                    }
                    // Scalar remainder
                    return chunks.remainder().iter().any(|x| *x == *self);
                }
            }
        )*
    };
}

impl_slice_contains!(u16, u32, u64, i16, i32, i64, f32, f64, usize, isize, char);
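
// Worked sketch of `LANE_COUNT` above: for `u32` it is `4 * (128 / 32) == 16`
// elements per chunk (four 128-bit vectors' worth), so `chunks_exact(16)`
// gives the compiler a fixed-size inner loop it can unroll and vectorize,
// with the trailing `any` handling the scalar remainder.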