#![unstable(
    feature = "core_intrinsics_fallbacks",
    reason = "The fallbacks will never be stable, as they exist only to be called \
              by the fallback MIR, but they're exported so they can be tested on \
              platforms where the fallback MIR isn't actually used",
    issue = "none"
)]
#![allow(missing_docs)]

#[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
pub const trait CarryingMulAdd: Copy + 'static {
    type Unsigned: Copy + 'static;
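    /// Computes `self * multiplicand + addend + carry` as a double-width
    /// value, returned as `(low half, high half)`. The sum always fits:
    /// even `MAX * MAX + MAX + MAX` is exactly the double-width `MAX`.
    ///
    /// An illustrative sketch of the contract, not from the original source
    /// (`ignore`d since the feature is unstable):
    ///
    /// ```ignore (illustrative-sketch-of-an-unstable-internal-trait)
    /// // 200 * 200 + 100 + 50 = 40150 = 156 * 256 + 214
    /// let (low, high) = 200_u8.carrying_mul_add(200, 100, 50);
    /// assert_eq!((low, high), (214, 156));
    /// ```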
    fn carrying_mul_add(
        self,
        multiplicand: Self,
        addend: Self,
        carry: Self,
    ) -> (Self::Unsigned, Self);
}

macro_rules! impl_carrying_mul_add_by_widening {
    ($($t:ident $u:ident $w:ident,)+) => {$(
        #[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
        impl const CarryingMulAdd for $t {
            type Unsigned = $u;
            #[inline]
            fn carrying_mul_add(self, a: Self, b: Self, c: Self) -> ($u, $t) {
                let wide = (self as $w) * (a as $w) + (b as $w) + (c as $w);
                (wide as _, (wide >> Self::BITS) as _)
            }
        }
    )+};
}
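
// For reference, a sketch (not in the original source) of what the macro
// expands to for the `i8 u8 i16` row. The widened product plus the two
// widened addends always fits in the double-width type, so the arithmetic
// below cannot overflow:
//
//     impl const CarryingMulAdd for i8 {
//         type Unsigned = u8;
//         #[inline]
//         fn carrying_mul_add(self, a: Self, b: Self, c: Self) -> (u8, i8) {
//             let wide = (self as i16) * (a as i16) + (b as i16) + (c as i16);
//             (wide as _, (wide >> Self::BITS) as _)
//         }
//     }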
impl_carrying_mul_add_by_widening! {
    u8 u8 u16,
    u16 u16 u32,
    u32 u32 u64,
    u64 u64 u128,
    usize usize UDoubleSize,
    i8 u8 i16,
    i16 u16 i32,
    i32 u32 i64,
    i64 u64 i128,
    isize usize IDoubleSize,
}

#[cfg(target_pointer_width = "16")]
type UDoubleSize = u32;
#[cfg(target_pointer_width = "32")]
type UDoubleSize = u64;
#[cfg(target_pointer_width = "64")]
type UDoubleSize = u128;

// The `isize` row needs a *signed* double-width type: the operands must be
// sign-extended and the high half recovered with an arithmetic shift, which
// an unsigned widening type would get wrong (and the widened multiplication
// of two sign-extended values could overflow it).
#[cfg(target_pointer_width = "16")]
type IDoubleSize = i32;
#[cfg(target_pointer_width = "32")]
type IDoubleSize = i64;
#[cfg(target_pointer_width = "64")]
type IDoubleSize = i128;

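/// Multiplies two `u128`s via schoolbook multiplication on 64-bit limbs,
/// returning the 256-bit result as `(low half, high half)`.
///
/// An illustrative check, not from the original source (`ignore`d since
/// this is a private helper):
///
/// ```ignore (illustrative-sketch-of-a-private-helper)
/// // (2^128 - 1)^2 = 2^256 - 2^129 + 1, so the low half is 1 and the
/// // high half is 2^128 - 2.
/// assert_eq!(wide_mul_u128(u128::MAX, u128::MAX), (1, u128::MAX - 1));
/// ```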
#[inline]
const fn wide_mul_u128(a: u128, b: u128) -> (u128, u128) {
    // Splits a value into its 64-bit low and high limbs, each stored in a
    // `u128` so that limb products and carries never overflow.
    #[inline]
    const fn to_low_high(x: u128) -> [u128; 2] {
        const MASK: u128 = u64::MAX as _;
        [x & MASK, x >> 64]
    }
    // Recombines two limbs into a single `u128`.
    #[inline]
    const fn from_low_high(x: [u128; 2]) -> u128 {
        x[0] | (x[1] << 64)
    }
    // Multiplies a two-limb value by a single limb `k`, producing three
    // limbs with all carries already propagated.
    #[inline]
    const fn scalar_mul(low_high: [u128; 2], k: u128) -> [u128; 3] {
        let [x, c] = to_low_high(k * low_high[0]);
        let [y, z] = to_low_high(k * low_high[1] + c);
        [x, y, z]
    }
    let a = to_low_high(a);
    let b = to_low_high(b);
    // Schoolbook multiplication: one partial product per limb of `b`, with
    // the second row offset by one limb, then summed with carry propagation.
    let low = scalar_mul(a, b[0]);
    let high = scalar_mul(a, b[1]);
    let r0 = low[0];
    let [r1, c] = to_low_high(low[1] + high[0]);
    let [r2, c] = to_low_high(low[2] + high[1] + c);
    let r3 = high[2] + c;
    (from_low_high([r0, r1]), from_low_high([r2, r3]))
}

#[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
impl const CarryingMulAdd for u128 {
    type Unsigned = u128;
    #[inline]
    fn carrying_mul_add(self, b: u128, c: u128, d: u128) -> (u128, u128) {
        let (low, mut high) = wide_mul_u128(self, b);
        // Each addend can carry at most 1 into the high half, and the high
        // half of a `u128` product is at most `u128::MAX - 1`, so these
        // increments cannot overflow.
        let (low, carry) = u128::overflowing_add(low, c);
        high += carry as u128;
        let (low, carry) = u128::overflowing_add(low, d);
        high += carry as u128;
        (low, high)
    }
}

#[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
impl const CarryingMulAdd for i128 {
    type Unsigned = u128;
    #[inline]
    fn carrying_mul_add(self, b: i128, c: i128, d: i128) -> (u128, i128) {
        let (low, high) = wide_mul_u128(self as u128, b as u128);
        let mut high = high as i128;
        // Correct the unsigned product for signedness: `x >> 127` is `-1`
        // when `x` is negative and `0` otherwise, so each step subtracts
        // the other operand from the high half exactly when this operand
        // was sign-extended. E.g. (-1) * (-1) + 0 + 0 yields (1, 0).
        high = high.wrapping_add(i128::wrapping_mul(self >> 127, b));
        high = high.wrapping_add(i128::wrapping_mul(self, b >> 127));
        // Add the sign-extended addends: the carry out of the low half and
        // the sign extension (`0` or `-1`) both adjust the high half.
        let (low, carry) = u128::overflowing_add(low, c as u128);
        high = high.wrapping_add((carry as i128) + (c >> 127));
        let (low, carry) = u128::overflowing_add(low, d as u128);
        high = high.wrapping_add((carry as i128) + (d >> 127));
        (low, high)
    }
}

#[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
pub const trait DisjointBitOr: Copy + 'static {
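    /// Computes `self | other`, where the caller must guarantee that the
    /// two values share no set bits, i.e. that `(self & other)` is zero.
    ///
    /// An illustrative sketch, not from the original source (`ignore`d
    /// since the feature is unstable):
    ///
    /// ```ignore (illustrative-sketch-of-an-unstable-internal-trait)
    /// // 0b0101 and 0b1010 are disjoint, so their `|` equals their sum.
    /// let x = unsafe { 0b0101_u8.disjoint_bitor(0b1010) };
    /// assert_eq!(x, 0b1111);
    /// ```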
    unsafe fn disjoint_bitor(self, other: Self) -> Self;
}

macro_rules! zero {
    (bool) => {
        false
    };
    ($t:ident) => {
        0
    };
}
macro_rules! impl_disjoint_bitor {
    ($($t:ident,)+) => {$(
        #[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
        impl const DisjointBitOr for $t {
            #[cfg_attr(miri, track_caller)]
            #[inline]
            unsafe fn disjoint_bitor(self, other: Self) -> Self {
                // SAFETY: the caller promises that `self` and `other` share
                // no set bits, so this only forwards that precondition to
                // the backend as an optimization hint.
                unsafe { super::assume((self & other) == zero!($t)) };
                self | other
            }
        }
    )+};
}
impl_disjoint_bitor! {
    bool,
    u8, u16, u32, u64, u128, usize,
    i8, i16, i32, i64, i128, isize,
}

#[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
pub const trait FunnelShift: Copy + 'static {
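    /// Shifts the double-width value `self:rhs` (with `self` as the high
    /// half) left by `shift` bits and returns the new high half. The
    /// caller must guarantee `shift < Self::BITS`.
    ///
    /// An illustrative sketch, not from the original source (`ignore`d
    /// since the feature is unstable):
    ///
    /// ```ignore (illustrative-sketch-of-an-unstable-internal-trait)
    /// // 0xAB:0xCD shifted left by 4 bits has 0xBC as its high byte.
    /// let x = unsafe { 0xAB_u8.unchecked_funnel_shl(0xCD, 4) };
    /// assert_eq!(x, 0xBC);
    /// ```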
    unsafe fn unchecked_funnel_shl(self, rhs: Self, shift: u32) -> Self;

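    /// Shifts the double-width value `self:rhs` (with `self` as the high
    /// half) right by `shift` bits and returns the new low half. The
    /// caller must guarantee `shift < Self::BITS`.
    ///
    /// An illustrative sketch, not from the original source:
    ///
    /// ```ignore (illustrative-sketch-of-an-unstable-internal-trait)
    /// // 0xAB:0xCD shifted right by 4 bits has 0xBC as its low byte.
    /// let x = unsafe { 0xAB_u8.unchecked_funnel_shr(0xCD, 4) };
    /// assert_eq!(x, 0xBC);
    /// ```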
    unsafe fn unchecked_funnel_shr(self, rhs: Self, shift: u32) -> Self;
}

macro_rules! impl_funnel_shifts {
    ($($type:ident),*) => {$(
        #[rustc_const_unstable(feature = "core_intrinsics_fallbacks", issue = "none")]
        impl const FunnelShift for $type {
            #[cfg_attr(miri, track_caller)]
            #[inline]
            unsafe fn unchecked_funnel_shl(self, rhs: Self, shift: u32) -> Self {
                // SAFETY: the caller guarantees `shift < $type::BITS`.
                unsafe { super::assume(shift < $type::BITS) };
                if shift == 0 {
                    self
                } else {
                    // SAFETY: `shift` is in `1..$type::BITS`, so both shift
                    // amounts are in range, and the two shifted values have
                    // no overlapping bits.
                    unsafe {
                        super::disjoint_bitor(
                            super::unchecked_shl(self, shift),
                            super::unchecked_shr(rhs, $type::BITS - shift),
                        )
                    }
                }
            }

            #[cfg_attr(miri, track_caller)]
            #[inline]
            unsafe fn unchecked_funnel_shr(self, rhs: Self, shift: u32) -> Self {
                // SAFETY: the caller guarantees `shift < $type::BITS`.
                unsafe { super::assume(shift < $type::BITS) };
                if shift == 0 {
                    rhs
                } else {
                    // SAFETY: `shift` is in `1..$type::BITS`, so both shift
                    // amounts are in range, and the two shifted values have
                    // no overlapping bits.
                    unsafe {
                        super::disjoint_bitor(
                            super::unchecked_shl(self, $type::BITS - shift),
                            super::unchecked_shr(rhs, shift),
                        )
                    }
                }
            }
        }
    )*};
}

impl_funnel_shifts! {
    u8, u16, u32, u64, u128, usize
}