Trait sp_std::ops::Shr

1.0.0 · source ·
pub trait Shr<Rhs = Self> {
    type Output;

    fn shr(self, rhs: Rhs) -> Self::Output;
}
Expand description

The right shift operator >>. Note that because this trait is implemented for all integer types with multiple right-hand-side types, Rust’s type checker has special handling for _ >> _, setting the result type for integer operations to the type of the left-hand-side operand. This means that though a >> b and a.shr(b) are one and the same from an evaluation standpoint, they are different when it comes to type inference.

Examples

An implementation of Shr that lifts the >> operation on integers to a wrapper around usize.

use std::ops::Shr;

#[derive(PartialEq, Debug)]
struct Scalar(usize);

impl Shr<Scalar> for Scalar {
    type Output = Self;

    fn shr(self, Self(rhs): Self) -> Self::Output {
        let Self(lhs) = self;
        Self(lhs >> rhs)
    }
}

assert_eq!(Scalar(16) >> Scalar(2), Scalar(4));

An implementation of Shr that spins a vector rightward by a given amount.

use std::ops::Shr;

#[derive(PartialEq, Debug)]
struct SpinVector<T: Clone> {
    vec: Vec<T>,
}

impl<T: Clone> Shr<usize> for SpinVector<T> {
    type Output = Self;

    fn shr(self, rhs: usize) -> Self::Output {
        // Rotate the vector by `rhs` places.
        let (a, b) = self.vec.split_at(self.vec.len() - rhs);
        let mut spun_vector = vec![];
        spun_vector.extend_from_slice(b);
        spun_vector.extend_from_slice(a);
        Self { vec: spun_vector }
    }
}

assert_eq!(SpinVector { vec: vec![0, 1, 2, 3, 4] } >> 2,
           SpinVector { vec: vec![3, 4, 0, 1, 2] });

Required Associated Types§

type Output

The resulting type after applying the >> operator.

Required Methods§

fn shr(self, rhs: Rhs) -> Self::Output

Performs the >> operation.

Examples
assert_eq!(5u8 >> 1, 2);
assert_eq!(2u8 >> 1, 1);

Implementors§

impl<const LIMBS: usize> Shr<usize> for UInt<LIMBS>

impl<const LIMBS: usize> Shr<usize> for &UInt<LIMBS>

impl<Frac> Shr<i8> for FixedU8<Frac>

impl<Frac> Shr<i8> for &FixedU8<Frac>

impl<Frac> Shr<&i8> for FixedU8<Frac>

impl<Frac> Shr<&i8> for &FixedU8<Frac>

impl<Frac> Shr<i16> for FixedU8<Frac>

impl<Frac> Shr<i16> for &FixedU8<Frac>

impl<Frac> Shr<&i16> for FixedU8<Frac>

impl<Frac> Shr<&i16> for &FixedU8<Frac>

impl<Frac> Shr<i32> for FixedU8<Frac>

impl<Frac> Shr<i32> for &FixedU8<Frac>

impl<Frac> Shr<&i32> for FixedU8<Frac>

impl<Frac> Shr<&i32> for &FixedU8<Frac>

impl<Frac> Shr<i64> for FixedU8<Frac>

impl<Frac> Shr<i64> for &FixedU8<Frac>

impl<Frac> Shr<&i64> for FixedU8<Frac>

impl<Frac> Shr<&i64> for &FixedU8<Frac>

impl<Frac> Shr<i128> for FixedU8<Frac>

impl<Frac> Shr<i128> for &FixedU8<Frac>

impl<Frac> Shr<&i128> for FixedU8<Frac>

impl<Frac> Shr<&i128> for &FixedU8<Frac>

impl<Frac> Shr<isize> for FixedU8<Frac>

impl<Frac> Shr<isize> for &FixedU8<Frac>

impl<Frac> Shr<&isize> for FixedU8<Frac>

impl<Frac> Shr<&isize> for &FixedU8<Frac>

impl<Frac> Shr<u8> for FixedU8<Frac>

impl<Frac> Shr<u8> for &FixedU8<Frac>

impl<Frac> Shr<&u8> for FixedU8<Frac>

impl<Frac> Shr<&u8> for &FixedU8<Frac>

impl<Frac> Shr<u16> for FixedU8<Frac>

impl<Frac> Shr<u16> for &FixedU8<Frac>

impl<Frac> Shr<&u16> for FixedU8<Frac>

impl<Frac> Shr<&u16> for &FixedU8<Frac>

impl<Frac> Shr<u32> for FixedU8<Frac>

impl<Frac> Shr<u32> for &FixedU8<Frac>

impl<Frac> Shr<&u32> for FixedU8<Frac>

impl<Frac> Shr<&u32> for &FixedU8<Frac>

impl<Frac> Shr<u64> for FixedU8<Frac>

impl<Frac> Shr<u64> for &FixedU8<Frac>

impl<Frac> Shr<&u64> for FixedU8<Frac>

impl<Frac> Shr<&u64> for &FixedU8<Frac>

impl<Frac> Shr<u128> for FixedU8<Frac>

impl<Frac> Shr<u128> for &FixedU8<Frac>

impl<Frac> Shr<&u128> for FixedU8<Frac>

impl<Frac> Shr<&u128> for &FixedU8<Frac>

impl<Frac> Shr<usize> for FixedU8<Frac>

impl<Frac> Shr<usize> for &FixedU8<Frac>

impl<Frac> Shr<&usize> for FixedU8<Frac>

impl<Frac> Shr<&usize> for &FixedU8<Frac>

impl<Frac> Shr<i8> for FixedU16<Frac>

impl<Frac> Shr<i8> for &FixedU16<Frac>

impl<Frac> Shr<&i8> for FixedU16<Frac>

impl<Frac> Shr<&i8> for &FixedU16<Frac>

impl<Frac> Shr<i16> for FixedU16<Frac>

impl<Frac> Shr<i16> for &FixedU16<Frac>

impl<Frac> Shr<&i16> for FixedU16<Frac>

impl<Frac> Shr<&i16> for &FixedU16<Frac>

impl<Frac> Shr<i32> for FixedU16<Frac>

impl<Frac> Shr<i32> for &FixedU16<Frac>

impl<Frac> Shr<&i32> for FixedU16<Frac>

impl<Frac> Shr<&i32> for &FixedU16<Frac>

impl<Frac> Shr<i64> for FixedU16<Frac>

impl<Frac> Shr<i64> for &FixedU16<Frac>

impl<Frac> Shr<&i64> for FixedU16<Frac>

impl<Frac> Shr<&i64> for &FixedU16<Frac>

impl<Frac> Shr<i128> for FixedU16<Frac>

impl<Frac> Shr<i128> for &FixedU16<Frac>

impl<Frac> Shr<&i128> for FixedU16<Frac>

impl<Frac> Shr<&i128> for &FixedU16<Frac>

impl<Frac> Shr<isize> for FixedU16<Frac>

impl<Frac> Shr<isize> for &FixedU16<Frac>

impl<Frac> Shr<&isize> for FixedU16<Frac>

impl<Frac> Shr<&isize> for &FixedU16<Frac>

impl<Frac> Shr<u8> for FixedU16<Frac>

impl<Frac> Shr<u8> for &FixedU16<Frac>

impl<Frac> Shr<&u8> for FixedU16<Frac>

impl<Frac> Shr<&u8> for &FixedU16<Frac>

impl<Frac> Shr<u16> for FixedU16<Frac>

impl<Frac> Shr<u16> for &FixedU16<Frac>

impl<Frac> Shr<&u16> for FixedU16<Frac>

impl<Frac> Shr<&u16> for &FixedU16<Frac>

impl<Frac> Shr<u32> for FixedU16<Frac>

impl<Frac> Shr<u32> for &FixedU16<Frac>

impl<Frac> Shr<&u32> for FixedU16<Frac>

impl<Frac> Shr<&u32> for &FixedU16<Frac>

impl<Frac> Shr<u64> for FixedU16<Frac>

impl<Frac> Shr<u64> for &FixedU16<Frac>

impl<Frac> Shr<&u64> for FixedU16<Frac>

impl<Frac> Shr<&u64> for &FixedU16<Frac>

impl<Frac> Shr<u128> for FixedU16<Frac>

impl<Frac> Shr<u128> for &FixedU16<Frac>

impl<Frac> Shr<&u128> for FixedU16<Frac>

impl<Frac> Shr<&u128> for &FixedU16<Frac>

impl<Frac> Shr<usize> for FixedU16<Frac>

impl<Frac> Shr<usize> for &FixedU16<Frac>

impl<Frac> Shr<&usize> for FixedU16<Frac>

impl<Frac> Shr<&usize> for &FixedU16<Frac>

impl<Frac> Shr<i8> for FixedU32<Frac>

impl<Frac> Shr<i8> for &FixedU32<Frac>

impl<Frac> Shr<&i8> for FixedU32<Frac>

impl<Frac> Shr<&i8> for &FixedU32<Frac>

impl<Frac> Shr<i16> for FixedU32<Frac>

impl<Frac> Shr<i16> for &FixedU32<Frac>

impl<Frac> Shr<&i16> for FixedU32<Frac>

impl<Frac> Shr<&i16> for &FixedU32<Frac>

impl<Frac> Shr<i32> for FixedU32<Frac>

impl<Frac> Shr<i32> for &FixedU32<Frac>

impl<Frac> Shr<&i32> for FixedU32<Frac>

impl<Frac> Shr<&i32> for &FixedU32<Frac>

impl<Frac> Shr<i64> for FixedU32<Frac>

impl<Frac> Shr<i64> for &FixedU32<Frac>

impl<Frac> Shr<&i64> for FixedU32<Frac>

impl<Frac> Shr<&i64> for &FixedU32<Frac>

impl<Frac> Shr<i128> for FixedU32<Frac>

impl<Frac> Shr<i128> for &FixedU32<Frac>

impl<Frac> Shr<&i128> for FixedU32<Frac>

impl<Frac> Shr<&i128> for &FixedU32<Frac>

impl<Frac> Shr<isize> for FixedU32<Frac>

impl<Frac> Shr<isize> for &FixedU32<Frac>

impl<Frac> Shr<&isize> for FixedU32<Frac>

impl<Frac> Shr<&isize> for &FixedU32<Frac>

impl<Frac> Shr<u8> for FixedU32<Frac>

impl<Frac> Shr<u8> for &FixedU32<Frac>

impl<Frac> Shr<&u8> for FixedU32<Frac>

impl<Frac> Shr<&u8> for &FixedU32<Frac>

impl<Frac> Shr<u16> for FixedU32<Frac>

impl<Frac> Shr<u16> for &FixedU32<Frac>

impl<Frac> Shr<&u16> for FixedU32<Frac>

impl<Frac> Shr<&u16> for &FixedU32<Frac>

impl<Frac> Shr<u32> for FixedU32<Frac>

impl<Frac> Shr<u32> for &FixedU32<Frac>

impl<Frac> Shr<&u32> for FixedU32<Frac>

impl<Frac> Shr<&u32> for &FixedU32<Frac>

impl<Frac> Shr<u64> for FixedU32<Frac>

impl<Frac> Shr<u64> for &FixedU32<Frac>

impl<Frac> Shr<&u64> for FixedU32<Frac>

impl<Frac> Shr<&u64> for &FixedU32<Frac>

impl<Frac> Shr<u128> for FixedU32<Frac>

impl<Frac> Shr<u128> for &FixedU32<Frac>

impl<Frac> Shr<&u128> for FixedU32<Frac>

impl<Frac> Shr<&u128> for &FixedU32<Frac>

impl<Frac> Shr<usize> for FixedU32<Frac>

impl<Frac> Shr<usize> for &FixedU32<Frac>

impl<Frac> Shr<&usize> for FixedU32<Frac>

impl<Frac> Shr<&usize> for &FixedU32<Frac>

impl<Frac> Shr<i8> for FixedU64<Frac>

impl<Frac> Shr<i8> for &FixedU64<Frac>

impl<Frac> Shr<&i8> for FixedU64<Frac>

impl<Frac> Shr<&i8> for &FixedU64<Frac>

impl<Frac> Shr<i16> for FixedU64<Frac>

impl<Frac> Shr<i16> for &FixedU64<Frac>

impl<Frac> Shr<&i16> for FixedU64<Frac>

impl<Frac> Shr<&i16> for &FixedU64<Frac>

impl<Frac> Shr<i32> for FixedU64<Frac>

impl<Frac> Shr<i32> for &FixedU64<Frac>

impl<Frac> Shr<&i32> for FixedU64<Frac>

impl<Frac> Shr<&i32> for &FixedU64<Frac>

impl<Frac> Shr<i64> for FixedU64<Frac>

impl<Frac> Shr<i64> for &FixedU64<Frac>

impl<Frac> Shr<&i64> for FixedU64<Frac>

impl<Frac> Shr<&i64> for &FixedU64<Frac>

impl<Frac> Shr<i128> for FixedU64<Frac>

impl<Frac> Shr<i128> for &FixedU64<Frac>

impl<Frac> Shr<&i128> for FixedU64<Frac>

impl<Frac> Shr<&i128> for &FixedU64<Frac>

impl<Frac> Shr<isize> for FixedU64<Frac>

impl<Frac> Shr<isize> for &FixedU64<Frac>

impl<Frac> Shr<&isize> for FixedU64<Frac>

impl<Frac> Shr<&isize> for &FixedU64<Frac>

impl<Frac> Shr<u8> for FixedU64<Frac>

impl<Frac> Shr<u8> for &FixedU64<Frac>

impl<Frac> Shr<&u8> for FixedU64<Frac>

impl<Frac> Shr<&u8> for &FixedU64<Frac>

impl<Frac> Shr<u16> for FixedU64<Frac>

impl<Frac> Shr<u16> for &FixedU64<Frac>

impl<Frac> Shr<&u16> for FixedU64<Frac>

impl<Frac> Shr<&u16> for &FixedU64<Frac>

impl<Frac> Shr<u32> for FixedU64<Frac>

impl<Frac> Shr<u32> for &FixedU64<Frac>

impl<Frac> Shr<&u32> for FixedU64<Frac>

impl<Frac> Shr<&u32> for &FixedU64<Frac>

impl<Frac> Shr<u64> for FixedU64<Frac>

impl<Frac> Shr<u64> for &FixedU64<Frac>

impl<Frac> Shr<&u64> for FixedU64<Frac>

impl<Frac> Shr<&u64> for &FixedU64<Frac>

impl<Frac> Shr<u128> for FixedU64<Frac>

impl<Frac> Shr<u128> for &FixedU64<Frac>

impl<Frac> Shr<&u128> for FixedU64<Frac>

impl<Frac> Shr<&u128> for &FixedU64<Frac>

impl<Frac> Shr<usize> for FixedU64<Frac>

impl<Frac> Shr<usize> for &FixedU64<Frac>

impl<Frac> Shr<&usize> for FixedU64<Frac>

impl<Frac> Shr<&usize> for &FixedU64<Frac>

impl<Frac> Shr<i8> for FixedU128<Frac>

impl<Frac> Shr<i8> for &FixedU128<Frac>

impl<Frac> Shr<&i8> for FixedU128<Frac>

impl<Frac> Shr<&i8> for &FixedU128<Frac>

impl<Frac> Shr<i16> for FixedU128<Frac>

impl<Frac> Shr<i16> for &FixedU128<Frac>

impl<Frac> Shr<&i16> for FixedU128<Frac>

impl<Frac> Shr<&i16> for &FixedU128<Frac>

impl<Frac> Shr<i32> for FixedU128<Frac>

impl<Frac> Shr<i32> for &FixedU128<Frac>

impl<Frac> Shr<&i32> for FixedU128<Frac>

impl<Frac> Shr<&i32> for &FixedU128<Frac>

impl<Frac> Shr<i64> for FixedU128<Frac>

impl<Frac> Shr<i64> for &FixedU128<Frac>

impl<Frac> Shr<&i64> for FixedU128<Frac>

impl<Frac> Shr<&i64> for &FixedU128<Frac>

impl<Frac> Shr<i128> for FixedU128<Frac>

impl<Frac> Shr<i128> for &FixedU128<Frac>

impl<Frac> Shr<&i128> for FixedU128<Frac>

impl<Frac> Shr<&i128> for &FixedU128<Frac>

impl<Frac> Shr<isize> for FixedU128<Frac>

impl<Frac> Shr<isize> for &FixedU128<Frac>

impl<Frac> Shr<&isize> for FixedU128<Frac>

impl<Frac> Shr<&isize> for &FixedU128<Frac>

impl<Frac> Shr<u8> for FixedU128<Frac>

impl<Frac> Shr<u8> for &FixedU128<Frac>

impl<Frac> Shr<&u8> for FixedU128<Frac>

impl<Frac> Shr<&u8> for &FixedU128<Frac>

impl<Frac> Shr<u16> for FixedU128<Frac>

impl<Frac> Shr<u16> for &FixedU128<Frac>

impl<Frac> Shr<&u16> for FixedU128<Frac>

impl<Frac> Shr<&u16> for &FixedU128<Frac>

impl<Frac> Shr<u32> for FixedU128<Frac>

impl<Frac> Shr<u32> for &FixedU128<Frac>

impl<Frac> Shr<&u32> for FixedU128<Frac>

impl<Frac> Shr<&u32> for &FixedU128<Frac>

impl<Frac> Shr<u64> for FixedU128<Frac>

impl<Frac> Shr<u64> for &FixedU128<Frac>

impl<Frac> Shr<&u64> for FixedU128<Frac>

impl<Frac> Shr<&u64> for &FixedU128<Frac>

impl<Frac> Shr<u128> for FixedU128<Frac>

impl<Frac> Shr<u128> for &FixedU128<Frac>

impl<Frac> Shr<&u128> for FixedU128<Frac>

impl<Frac> Shr<&u128> for &FixedU128<Frac>

impl<Frac> Shr<usize> for FixedU128<Frac>

impl<Frac> Shr<usize> for &FixedU128<Frac>

impl<Frac> Shr<&usize> for FixedU128<Frac>

impl<Frac> Shr<&usize> for &FixedU128<Frac>

impl<Frac> Shr<i8> for FixedI8<Frac>

impl<Frac> Shr<i8> for &FixedI8<Frac>

impl<Frac> Shr<&i8> for FixedI8<Frac>

impl<Frac> Shr<&i8> for &FixedI8<Frac>

impl<Frac> Shr<i16> for FixedI8<Frac>

impl<Frac> Shr<i16> for &FixedI8<Frac>

impl<Frac> Shr<&i16> for FixedI8<Frac>

impl<Frac> Shr<&i16> for &FixedI8<Frac>

impl<Frac> Shr<i32> for FixedI8<Frac>

impl<Frac> Shr<i32> for &FixedI8<Frac>

impl<Frac> Shr<&i32> for FixedI8<Frac>

impl<Frac> Shr<&i32> for &FixedI8<Frac>

impl<Frac> Shr<i64> for FixedI8<Frac>

impl<Frac> Shr<i64> for &FixedI8<Frac>

impl<Frac> Shr<&i64> for FixedI8<Frac>

impl<Frac> Shr<&i64> for &FixedI8<Frac>

impl<Frac> Shr<i128> for FixedI8<Frac>

impl<Frac> Shr<i128> for &FixedI8<Frac>

impl<Frac> Shr<&i128> for FixedI8<Frac>

impl<Frac> Shr<&i128> for &FixedI8<Frac>

impl<Frac> Shr<isize> for FixedI8<Frac>

impl<Frac> Shr<isize> for &FixedI8<Frac>

impl<Frac> Shr<&isize> for FixedI8<Frac>

impl<Frac> Shr<&isize> for &FixedI8<Frac>

impl<Frac> Shr<u8> for FixedI8<Frac>

impl<Frac> Shr<u8> for &FixedI8<Frac>

impl<Frac> Shr<&u8> for FixedI8<Frac>

impl<Frac> Shr<&u8> for &FixedI8<Frac>

impl<Frac> Shr<u16> for FixedI8<Frac>

impl<Frac> Shr<u16> for &FixedI8<Frac>

impl<Frac> Shr<&u16> for FixedI8<Frac>

impl<Frac> Shr<&u16> for &FixedI8<Frac>

impl<Frac> Shr<u32> for FixedI8<Frac>

impl<Frac> Shr<u32> for &FixedI8<Frac>

impl<Frac> Shr<&u32> for FixedI8<Frac>

impl<Frac> Shr<&u32> for &FixedI8<Frac>

impl<Frac> Shr<u64> for FixedI8<Frac>

impl<Frac> Shr<u64> for &FixedI8<Frac>

impl<Frac> Shr<&u64> for FixedI8<Frac>

impl<Frac> Shr<&u64> for &FixedI8<Frac>

impl<Frac> Shr<u128> for FixedI8<Frac>

impl<Frac> Shr<u128> for &FixedI8<Frac>

impl<Frac> Shr<&u128> for FixedI8<Frac>

impl<Frac> Shr<&u128> for &FixedI8<Frac>

impl<Frac> Shr<usize> for FixedI8<Frac>

impl<Frac> Shr<usize> for &FixedI8<Frac>

impl<Frac> Shr<&usize> for FixedI8<Frac>

impl<Frac> Shr<&usize> for &FixedI8<Frac>

impl<Frac> Shr<i8> for FixedI16<Frac>

impl<Frac> Shr<i8> for &FixedI16<Frac>

impl<Frac> Shr<&i8> for FixedI16<Frac>

impl<Frac> Shr<&i8> for &FixedI16<Frac>

impl<Frac> Shr<i16> for FixedI16<Frac>

impl<Frac> Shr<i16> for &FixedI16<Frac>

impl<Frac> Shr<&i16> for FixedI16<Frac>

impl<Frac> Shr<&i16> for &FixedI16<Frac>

impl<Frac> Shr<i32> for FixedI16<Frac>

impl<Frac> Shr<i32> for &FixedI16<Frac>

impl<Frac> Shr<&i32> for FixedI16<Frac>

impl<Frac> Shr<&i32> for &FixedI16<Frac>

impl<Frac> Shr<i64> for FixedI16<Frac>

impl<Frac> Shr<i64> for &FixedI16<Frac>

impl<Frac> Shr<&i64> for FixedI16<Frac>

impl<Frac> Shr<&i64> for &FixedI16<Frac>

impl<Frac> Shr<i128> for FixedI16<Frac>

impl<Frac> Shr<i128> for &FixedI16<Frac>

impl<Frac> Shr<&i128> for FixedI16<Frac>

impl<Frac> Shr<&i128> for &FixedI16<Frac>

impl<Frac> Shr<isize> for FixedI16<Frac>

impl<Frac> Shr<isize> for &FixedI16<Frac>

impl<Frac> Shr<&isize> for FixedI16<Frac>

impl<Frac> Shr<&isize> for &FixedI16<Frac>

impl<Frac> Shr<u8> for FixedI16<Frac>

impl<Frac> Shr<u8> for &FixedI16<Frac>

impl<Frac> Shr<&u8> for FixedI16<Frac>

impl<Frac> Shr<&u8> for &FixedI16<Frac>

impl<Frac> Shr<u16> for FixedI16<Frac>

impl<Frac> Shr<u16> for &FixedI16<Frac>

impl<Frac> Shr<&u16> for FixedI16<Frac>

impl<Frac> Shr<&u16> for &FixedI16<Frac>

impl<Frac> Shr<u32> for FixedI16<Frac>

impl<Frac> Shr<u32> for &FixedI16<Frac>

impl<Frac> Shr<&u32> for FixedI16<Frac>

impl<Frac> Shr<&u32> for &FixedI16<Frac>

impl<Frac> Shr<u64> for FixedI16<Frac>

impl<Frac> Shr<u64> for &FixedI16<Frac>

impl<Frac> Shr<&u64> for FixedI16<Frac>

impl<Frac> Shr<&u64> for &FixedI16<Frac>

impl<Frac> Shr<u128> for FixedI16<Frac>

impl<Frac> Shr<u128> for &FixedI16<Frac>

impl<Frac> Shr<&u128> for FixedI16<Frac>

impl<Frac> Shr<&u128> for &FixedI16<Frac>

impl<Frac> Shr<usize> for FixedI16<Frac>

impl<Frac> Shr<usize> for &FixedI16<Frac>

impl<Frac> Shr<&usize> for FixedI16<Frac>

impl<Frac> Shr<&usize> for &FixedI16<Frac>

impl<Frac> Shr<i8> for FixedI32<Frac>

impl<Frac> Shr<i8> for &FixedI32<Frac>

impl<Frac> Shr<&i8> for FixedI32<Frac>

impl<Frac> Shr<&i8> for &FixedI32<Frac>

impl<Frac> Shr<i16> for FixedI32<Frac>

impl<Frac> Shr<i16> for &FixedI32<Frac>

impl<Frac> Shr<&i16> for FixedI32<Frac>

impl<Frac> Shr<&i16> for &FixedI32<Frac>

impl<Frac> Shr<i32> for FixedI32<Frac>

impl<Frac> Shr<i32> for &FixedI32<Frac>

impl<Frac> Shr<&i32> for FixedI32<Frac>

impl<Frac> Shr<&i32> for &FixedI32<Frac>

impl<Frac> Shr<i64> for FixedI32<Frac>

impl<Frac> Shr<i64> for &FixedI32<Frac>

impl<Frac> Shr<&i64> for FixedI32<Frac>

impl<Frac> Shr<&i64> for &FixedI32<Frac>

impl<Frac> Shr<i128> for FixedI32<Frac>

impl<Frac> Shr<i128> for &FixedI32<Frac>

impl<Frac> Shr<&i128> for FixedI32<Frac>

impl<Frac> Shr<&i128> for &FixedI32<Frac>

impl<Frac> Shr<isize> for FixedI32<Frac>

impl<Frac> Shr<isize> for &FixedI32<Frac>

impl<Frac> Shr<&isize> for FixedI32<Frac>

impl<Frac> Shr<&isize> for &FixedI32<Frac>

impl<Frac> Shr<u8> for FixedI32<Frac>

impl<Frac> Shr<u8> for &FixedI32<Frac>

impl<Frac> Shr<&u8> for FixedI32<Frac>

impl<Frac> Shr<&u8> for &FixedI32<Frac>

impl<Frac> Shr<u16> for FixedI32<Frac>

impl<Frac> Shr<u16> for &FixedI32<Frac>

impl<Frac> Shr<&u16> for FixedI32<Frac>

impl<Frac> Shr<&u16> for &FixedI32<Frac>

impl<Frac> Shr<u32> for FixedI32<Frac>

impl<Frac> Shr<u32> for &FixedI32<Frac>

impl<Frac> Shr<&u32> for FixedI32<Frac>

impl<Frac> Shr<&u32> for &FixedI32<Frac>

impl<Frac> Shr<u64> for FixedI32<Frac>

impl<Frac> Shr<u64> for &FixedI32<Frac>

impl<Frac> Shr<&u64> for FixedI32<Frac>

impl<Frac> Shr<&u64> for &FixedI32<Frac>

impl<Frac> Shr<u128> for FixedI32<Frac>

impl<Frac> Shr<u128> for &FixedI32<Frac>

impl<Frac> Shr<&u128> for FixedI32<Frac>

impl<Frac> Shr<&u128> for &FixedI32<Frac>

impl<Frac> Shr<usize> for FixedI32<Frac>

impl<Frac> Shr<usize> for &FixedI32<Frac>

impl<Frac> Shr<&usize> for FixedI32<Frac>

impl<Frac> Shr<&usize> for &FixedI32<Frac>

impl<Frac> Shr<i8> for FixedI64<Frac>

impl<Frac> Shr<i8> for &FixedI64<Frac>

impl<Frac> Shr<&i8> for FixedI64<Frac>

impl<Frac> Shr<&i8> for &FixedI64<Frac>

impl<Frac> Shr<i16> for FixedI64<Frac>

impl<Frac> Shr<i16> for &FixedI64<Frac>

impl<Frac> Shr<&i16> for FixedI64<Frac>

impl<Frac> Shr<&i16> for &FixedI64<Frac>

impl<Frac> Shr<i32> for FixedI64<Frac>

impl<Frac> Shr<i32> for &FixedI64<Frac>

impl<Frac> Shr<&i32> for FixedI64<Frac>

impl<Frac> Shr<&i32> for &FixedI64<Frac>

impl<Frac> Shr<i64> for FixedI64<Frac>

impl<Frac> Shr<i64> for &FixedI64<Frac>

impl<Frac> Shr<&i64> for FixedI64<Frac>

impl<Frac> Shr<&i64> for &FixedI64<Frac>

impl<Frac> Shr<i128> for FixedI64<Frac>

impl<Frac> Shr<i128> for &FixedI64<Frac>

impl<Frac> Shr<&i128> for FixedI64<Frac>

impl<Frac> Shr<&i128> for &FixedI64<Frac>

impl<Frac> Shr<isize> for FixedI64<Frac>

impl<Frac> Shr<isize> for &FixedI64<Frac>

impl<Frac> Shr<&isize> for FixedI64<Frac>

impl<Frac> Shr<&isize> for &FixedI64<Frac>

impl<Frac> Shr<u8> for FixedI64<Frac>

impl<Frac> Shr<u8> for &FixedI64<Frac>

impl<Frac> Shr<&u8> for FixedI64<Frac>

impl<Frac> Shr<&u8> for &FixedI64<Frac>

impl<Frac> Shr<u16> for FixedI64<Frac>

impl<Frac> Shr<u16> for &FixedI64<Frac>

impl<Frac> Shr<&u16> for FixedI64<Frac>

impl<Frac> Shr<&u16> for &FixedI64<Frac>

impl<Frac> Shr<u32> for FixedI64<Frac>

impl<Frac> Shr<u32> for &FixedI64<Frac>

impl<Frac> Shr<&u32> for FixedI64<Frac>

impl<Frac> Shr<&u32> for &FixedI64<Frac>

impl<Frac> Shr<u64> for FixedI64<Frac>

impl<Frac> Shr<u64> for &FixedI64<Frac>

impl<Frac> Shr<&u64> for FixedI64<Frac>

impl<Frac> Shr<&u64> for &FixedI64<Frac>

impl<Frac> Shr<u128> for FixedI64<Frac>

impl<Frac> Shr<u128> for &FixedI64<Frac>

impl<Frac> Shr<&u128> for FixedI64<Frac>

impl<Frac> Shr<&u128> for &FixedI64<Frac>

impl<Frac> Shr<usize> for FixedI64<Frac>

impl<Frac> Shr<usize> for &FixedI64<Frac>

impl<Frac> Shr<&usize> for FixedI64<Frac>

impl<Frac> Shr<&usize> for &FixedI64<Frac>

impl<Frac> Shr<i8> for FixedI128<Frac>

impl<Frac> Shr<i8> for &FixedI128<Frac>

impl<Frac> Shr<&i8> for FixedI128<Frac>

impl<Frac> Shr<&i8> for &FixedI128<Frac>

impl<Frac> Shr<i16> for FixedI128<Frac>

impl<Frac> Shr<i16> for &FixedI128<Frac>

impl<Frac> Shr<&i16> for FixedI128<Frac>

impl<Frac> Shr<&i16> for &FixedI128<Frac>

impl<Frac> Shr<i32> for FixedI128<Frac>

impl<Frac> Shr<i32> for &FixedI128<Frac>

impl<Frac> Shr<&i32> for FixedI128<Frac>

impl<Frac> Shr<&i32> for &FixedI128<Frac>

impl<Frac> Shr<i64> for FixedI128<Frac>

impl<Frac> Shr<i64> for &FixedI128<Frac>

impl<Frac> Shr<&i64> for FixedI128<Frac>

impl<Frac> Shr<&i64> for &FixedI128<Frac>

impl<Frac> Shr<i128> for FixedI128<Frac>

impl<Frac> Shr<i128> for &FixedI128<Frac>

impl<Frac> Shr<&i128> for FixedI128<Frac>

impl<Frac> Shr<&i128> for &FixedI128<Frac>

impl<Frac> Shr<isize> for FixedI128<Frac>

impl<Frac> Shr<isize> for &FixedI128<Frac>

impl<Frac> Shr<&isize> for FixedI128<Frac>

impl<Frac> Shr<&isize> for &FixedI128<Frac>

impl<Frac> Shr<u8> for FixedI128<Frac>

impl<Frac> Shr<u8> for &FixedI128<Frac>

impl<Frac> Shr<&u8> for FixedI128<Frac>

impl<Frac> Shr<&u8> for &FixedI128<Frac>

impl<Frac> Shr<u16> for FixedI128<Frac>

impl<Frac> Shr<u16> for &FixedI128<Frac>

impl<Frac> Shr<&u16> for FixedI128<Frac>

impl<Frac> Shr<&u16> for &FixedI128<Frac>

impl<Frac> Shr<u32> for FixedI128<Frac>

impl<Frac> Shr<u32> for &FixedI128<Frac>

impl<Frac> Shr<&u32> for FixedI128<Frac>

impl<Frac> Shr<&u32> for &FixedI128<Frac>

impl<Frac> Shr<u64> for FixedI128<Frac>

impl<Frac> Shr<u64> for &FixedI128<Frac>

impl<Frac> Shr<&u64> for FixedI128<Frac>

impl<Frac> Shr<&u64> for &FixedI128<Frac>

impl<Frac> Shr<u128> for FixedI128<Frac>

impl<Frac> Shr<u128> for &FixedI128<Frac>

impl<Frac> Shr<&u128> for FixedI128<Frac>

impl<Frac> Shr<&u128> for &FixedI128<Frac>

impl<Frac> Shr<usize> for FixedI128<Frac>

impl<Frac> Shr<usize> for &FixedI128<Frac>

impl<Frac> Shr<&usize> for FixedI128<Frac>

impl<Frac> Shr<&usize> for &FixedI128<Frac>

impl<F> Shr<i8> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i8> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i8> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i8> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i16> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i16> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i16> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i16> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i32> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i32> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i32> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i32> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i64> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i64> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i64> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i64> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i128> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i128> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i128> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i128> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<isize> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<isize> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&isize> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&isize> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u8> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u8> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u8> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u8> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u16> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u16> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u16> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u16> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u32> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u32> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u32> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u32> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u64> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u64> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u64> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u64> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u128> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u128> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u128> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u128> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<usize> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<usize> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&usize> for Unwrapped<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&usize> for &Unwrapped<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i8> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i8> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i8> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i8> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i16> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i16> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i16> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i16> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i32> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i32> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i32> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i32> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i64> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i64> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i64> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i64> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<i128> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<i128> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&i128> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&i128> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<isize> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<isize> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&isize> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&isize> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u8> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u8> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u8> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u8> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u16> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u16> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u16> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u16> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u32> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u32> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u32> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u32> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u64> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u64> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u64> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u64> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<u128> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<u128> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&u128> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&u128> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<usize> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<usize> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl<F> Shr<&usize> for Wrapping<F>where
    F: Shr<u32, Output = F>,

impl<F> Shr<&usize> for &Wrapping<F>where
    for<'a> &'a F: Shr<u32, Output = F>,

impl Shr<&BigInt> for &BigInt

impl<'a> Shr<BigInt> for &'a BigInt

impl Shr<&BigInt> for BigInt

impl Shr<BigInt> for BigInt

impl Shr<&Number> for &Number

impl<'a> Shr<Number> for &'a Number

impl Shr<&Number> for Number

impl Shr<Number> for Number

impl Shr<usize> for Scalar

impl Shr<usize> for &Scalar

impl Shr<usize> for BigInt

impl<'a> Shr<usize> for &'a BigInt

impl Shr<usize> for BigUint

impl<'a> Shr<usize> for &'a BigUint

impl<T> Shr<T> for U128where
    T: Into<U128>,

impl<'a, T> Shr<T> for &'a U128where
    T: Into<U128>,

impl<T> Shr<T> for U256where
    T: Into<U256>,

impl<'a, T> Shr<T> for &'a U256where
    T: Into<U256>,

impl<T> Shr<T> for U512where
    T: Into<U512>,

impl<'a, T> Shr<T> for &'a U512where
    T: Into<U512>,

impl<U: Unsigned> Shr<U> for UTerm

impl<U: Unsigned, B: Bit> Shr<UTerm> for UInt<U, B>

impl Shr<B0> for UTerm

impl Shr<B1> for UTerm

impl<U: Unsigned, B: Bit> Shr<B0> for UInt<U, B>

impl<U: Unsigned, B: Bit> Shr<B1> for UInt<U, B>

impl<U, B: Bit, Ur: Unsigned, Br: Bit> Shr<UInt<Ur, Br>> for UInt<U, B>where
    UInt<Ur, Br>: Sub<B1>,
    U: Shr<Sub1<UInt<Ur, Br>>> + Unsigned,

impl Shr<&JsValue> for &JsValue

impl<'a> Shr<JsValue> for &'a JsValue

impl Shr<&JsValue> for JsValue

impl Shr<JsValue> for JsValue