Trait sp_std::ops::Shl

Stable since Rust 1.0.0
pub trait Shl<Rhs = Self> {
    type Output;

    fn shl(self, rhs: Rhs) -> Self::Output;
}
Expand description

The left shift operator <<. Note that because this trait is implemented for all integer types with multiple right-hand-side types, Rust’s type checker has special handling for _ << _, setting the result type for integer operations to the type of the left-hand-side operand. This means that though a << b and a.shl(b) are one and the same from an evaluation standpoint, they are different when it comes to type inference.

Examples

An implementation of Shl that lifts the << operation on integers to a wrapper around usize.

use std::ops::Shl;

#[derive(PartialEq, Debug)]
struct Scalar(usize);

impl Shl<Scalar> for Scalar {
    type Output = Self;

    fn shl(self, Self(rhs): Self) -> Self::Output {
        let Self(lhs) = self;
        Self(lhs << rhs)
    }
}

assert_eq!(Scalar(4) << Scalar(2), Scalar(16));

An implementation of Shl that spins a vector leftward by a given amount.

use std::ops::Shl;

#[derive(PartialEq, Debug)]
struct SpinVector<T: Clone> {
    vec: Vec<T>,
}

impl<T: Clone> Shl<usize> for SpinVector<T> {
    type Output = Self;

    fn shl(self, rhs: usize) -> Self::Output {
        // Rotate the vector by `rhs` places.
        let (a, b) = self.vec.split_at(rhs);
        let mut spun_vector = vec![];
        spun_vector.extend_from_slice(b);
        spun_vector.extend_from_slice(a);
        Self { vec: spun_vector }
    }
}

assert_eq!(SpinVector { vec: vec![0, 1, 2, 3, 4] } << 2,
           SpinVector { vec: vec![2, 3, 4, 0, 1] });

Required Associated Types§

type Output

The resulting type after applying the << operator.

Required Methods§

fn shl(self, rhs: Rhs) -> Self::Output

Performs the << operation.

Examples
assert_eq!(5u8 << 1, 10);
assert_eq!(1u8 << 1, 2);

Implementors§

impl<const LIMBS: usize> Shl<usize> for UInt<LIMBS>

impl<const LIMBS: usize> Shl<usize> for &UInt<LIMBS>

impl<Frac> Shl<i8> for FixedU8<Frac>

impl<Frac> Shl<i8> for &FixedU8<Frac>

impl<Frac> Shl<&i8> for FixedU8<Frac>

impl<Frac> Shl<&i8> for &FixedU8<Frac>

impl<Frac> Shl<i16> for FixedU8<Frac>

impl<Frac> Shl<i16> for &FixedU8<Frac>

impl<Frac> Shl<&i16> for FixedU8<Frac>

impl<Frac> Shl<&i16> for &FixedU8<Frac>

impl<Frac> Shl<i32> for FixedU8<Frac>

impl<Frac> Shl<i32> for &FixedU8<Frac>

impl<Frac> Shl<&i32> for FixedU8<Frac>

impl<Frac> Shl<&i32> for &FixedU8<Frac>

impl<Frac> Shl<i64> for FixedU8<Frac>

impl<Frac> Shl<i64> for &FixedU8<Frac>

impl<Frac> Shl<&i64> for FixedU8<Frac>

impl<Frac> Shl<&i64> for &FixedU8<Frac>

impl<Frac> Shl<i128> for FixedU8<Frac>

impl<Frac> Shl<i128> for &FixedU8<Frac>

impl<Frac> Shl<&i128> for FixedU8<Frac>

impl<Frac> Shl<&i128> for &FixedU8<Frac>

impl<Frac> Shl<isize> for FixedU8<Frac>

impl<Frac> Shl<isize> for &FixedU8<Frac>

impl<Frac> Shl<&isize> for FixedU8<Frac>

impl<Frac> Shl<&isize> for &FixedU8<Frac>

impl<Frac> Shl<u8> for FixedU8<Frac>

impl<Frac> Shl<u8> for &FixedU8<Frac>

impl<Frac> Shl<&u8> for FixedU8<Frac>

impl<Frac> Shl<&u8> for &FixedU8<Frac>

impl<Frac> Shl<u16> for FixedU8<Frac>

impl<Frac> Shl<u16> for &FixedU8<Frac>

impl<Frac> Shl<&u16> for FixedU8<Frac>

impl<Frac> Shl<&u16> for &FixedU8<Frac>

impl<Frac> Shl<u32> for FixedU8<Frac>

impl<Frac> Shl<u32> for &FixedU8<Frac>

impl<Frac> Shl<&u32> for FixedU8<Frac>

impl<Frac> Shl<&u32> for &FixedU8<Frac>

impl<Frac> Shl<u64> for FixedU8<Frac>

impl<Frac> Shl<u64> for &FixedU8<Frac>

impl<Frac> Shl<&u64> for FixedU8<Frac>

impl<Frac> Shl<&u64> for &FixedU8<Frac>

impl<Frac> Shl<u128> for FixedU8<Frac>

impl<Frac> Shl<u128> for &FixedU8<Frac>

impl<Frac> Shl<&u128> for FixedU8<Frac>

impl<Frac> Shl<&u128> for &FixedU8<Frac>

impl<Frac> Shl<usize> for FixedU8<Frac>

impl<Frac> Shl<usize> for &FixedU8<Frac>

impl<Frac> Shl<&usize> for FixedU8<Frac>

impl<Frac> Shl<&usize> for &FixedU8<Frac>

impl<Frac> Shl<i8> for FixedU16<Frac>

impl<Frac> Shl<i8> for &FixedU16<Frac>

impl<Frac> Shl<&i8> for FixedU16<Frac>

impl<Frac> Shl<&i8> for &FixedU16<Frac>

impl<Frac> Shl<i16> for FixedU16<Frac>

impl<Frac> Shl<i16> for &FixedU16<Frac>

impl<Frac> Shl<&i16> for FixedU16<Frac>

impl<Frac> Shl<&i16> for &FixedU16<Frac>

impl<Frac> Shl<i32> for FixedU16<Frac>

impl<Frac> Shl<i32> for &FixedU16<Frac>

impl<Frac> Shl<&i32> for FixedU16<Frac>

impl<Frac> Shl<&i32> for &FixedU16<Frac>

impl<Frac> Shl<i64> for FixedU16<Frac>

impl<Frac> Shl<i64> for &FixedU16<Frac>

impl<Frac> Shl<&i64> for FixedU16<Frac>

impl<Frac> Shl<&i64> for &FixedU16<Frac>

impl<Frac> Shl<i128> for FixedU16<Frac>

impl<Frac> Shl<i128> for &FixedU16<Frac>

impl<Frac> Shl<&i128> for FixedU16<Frac>

impl<Frac> Shl<&i128> for &FixedU16<Frac>

impl<Frac> Shl<isize> for FixedU16<Frac>

impl<Frac> Shl<isize> for &FixedU16<Frac>

impl<Frac> Shl<&isize> for FixedU16<Frac>

impl<Frac> Shl<&isize> for &FixedU16<Frac>

impl<Frac> Shl<u8> for FixedU16<Frac>

impl<Frac> Shl<u8> for &FixedU16<Frac>

impl<Frac> Shl<&u8> for FixedU16<Frac>

impl<Frac> Shl<&u8> for &FixedU16<Frac>

impl<Frac> Shl<u16> for FixedU16<Frac>

impl<Frac> Shl<u16> for &FixedU16<Frac>

impl<Frac> Shl<&u16> for FixedU16<Frac>

impl<Frac> Shl<&u16> for &FixedU16<Frac>

impl<Frac> Shl<u32> for FixedU16<Frac>

impl<Frac> Shl<u32> for &FixedU16<Frac>

impl<Frac> Shl<&u32> for FixedU16<Frac>

impl<Frac> Shl<&u32> for &FixedU16<Frac>

impl<Frac> Shl<u64> for FixedU16<Frac>

impl<Frac> Shl<u64> for &FixedU16<Frac>

impl<Frac> Shl<&u64> for FixedU16<Frac>

impl<Frac> Shl<&u64> for &FixedU16<Frac>

impl<Frac> Shl<u128> for FixedU16<Frac>

impl<Frac> Shl<u128> for &FixedU16<Frac>

impl<Frac> Shl<&u128> for FixedU16<Frac>

impl<Frac> Shl<&u128> for &FixedU16<Frac>

impl<Frac> Shl<usize> for FixedU16<Frac>

impl<Frac> Shl<usize> for &FixedU16<Frac>

impl<Frac> Shl<&usize> for FixedU16<Frac>

impl<Frac> Shl<&usize> for &FixedU16<Frac>

impl<Frac> Shl<i8> for FixedU32<Frac>

impl<Frac> Shl<i8> for &FixedU32<Frac>

impl<Frac> Shl<&i8> for FixedU32<Frac>

impl<Frac> Shl<&i8> for &FixedU32<Frac>

impl<Frac> Shl<i16> for FixedU32<Frac>

impl<Frac> Shl<i16> for &FixedU32<Frac>

impl<Frac> Shl<&i16> for FixedU32<Frac>

impl<Frac> Shl<&i16> for &FixedU32<Frac>

impl<Frac> Shl<i32> for FixedU32<Frac>

impl<Frac> Shl<i32> for &FixedU32<Frac>

impl<Frac> Shl<&i32> for FixedU32<Frac>

impl<Frac> Shl<&i32> for &FixedU32<Frac>

impl<Frac> Shl<i64> for FixedU32<Frac>

impl<Frac> Shl<i64> for &FixedU32<Frac>

impl<Frac> Shl<&i64> for FixedU32<Frac>

impl<Frac> Shl<&i64> for &FixedU32<Frac>

impl<Frac> Shl<i128> for FixedU32<Frac>

impl<Frac> Shl<i128> for &FixedU32<Frac>

impl<Frac> Shl<&i128> for FixedU32<Frac>

impl<Frac> Shl<&i128> for &FixedU32<Frac>

impl<Frac> Shl<isize> for FixedU32<Frac>

impl<Frac> Shl<isize> for &FixedU32<Frac>

impl<Frac> Shl<&isize> for FixedU32<Frac>

impl<Frac> Shl<&isize> for &FixedU32<Frac>

impl<Frac> Shl<u8> for FixedU32<Frac>

impl<Frac> Shl<u8> for &FixedU32<Frac>

impl<Frac> Shl<&u8> for FixedU32<Frac>

impl<Frac> Shl<&u8> for &FixedU32<Frac>

impl<Frac> Shl<u16> for FixedU32<Frac>

impl<Frac> Shl<u16> for &FixedU32<Frac>

impl<Frac> Shl<&u16> for FixedU32<Frac>

impl<Frac> Shl<&u16> for &FixedU32<Frac>

impl<Frac> Shl<u32> for FixedU32<Frac>

impl<Frac> Shl<u32> for &FixedU32<Frac>

impl<Frac> Shl<&u32> for FixedU32<Frac>

impl<Frac> Shl<&u32> for &FixedU32<Frac>

impl<Frac> Shl<u64> for FixedU32<Frac>

impl<Frac> Shl<u64> for &FixedU32<Frac>

impl<Frac> Shl<&u64> for FixedU32<Frac>

impl<Frac> Shl<&u64> for &FixedU32<Frac>

impl<Frac> Shl<u128> for FixedU32<Frac>

impl<Frac> Shl<u128> for &FixedU32<Frac>

impl<Frac> Shl<&u128> for FixedU32<Frac>

impl<Frac> Shl<&u128> for &FixedU32<Frac>

impl<Frac> Shl<usize> for FixedU32<Frac>

impl<Frac> Shl<usize> for &FixedU32<Frac>

impl<Frac> Shl<&usize> for FixedU32<Frac>

impl<Frac> Shl<&usize> for &FixedU32<Frac>

impl<Frac> Shl<i8> for FixedU64<Frac>

impl<Frac> Shl<i8> for &FixedU64<Frac>

impl<Frac> Shl<&i8> for FixedU64<Frac>

impl<Frac> Shl<&i8> for &FixedU64<Frac>

impl<Frac> Shl<i16> for FixedU64<Frac>

impl<Frac> Shl<i16> for &FixedU64<Frac>

impl<Frac> Shl<&i16> for FixedU64<Frac>

impl<Frac> Shl<&i16> for &FixedU64<Frac>

impl<Frac> Shl<i32> for FixedU64<Frac>

impl<Frac> Shl<i32> for &FixedU64<Frac>

impl<Frac> Shl<&i32> for FixedU64<Frac>

impl<Frac> Shl<&i32> for &FixedU64<Frac>

impl<Frac> Shl<i64> for FixedU64<Frac>

impl<Frac> Shl<i64> for &FixedU64<Frac>

impl<Frac> Shl<&i64> for FixedU64<Frac>

impl<Frac> Shl<&i64> for &FixedU64<Frac>

impl<Frac> Shl<i128> for FixedU64<Frac>

impl<Frac> Shl<i128> for &FixedU64<Frac>

impl<Frac> Shl<&i128> for FixedU64<Frac>

impl<Frac> Shl<&i128> for &FixedU64<Frac>

impl<Frac> Shl<isize> for FixedU64<Frac>

impl<Frac> Shl<isize> for &FixedU64<Frac>

impl<Frac> Shl<&isize> for FixedU64<Frac>

impl<Frac> Shl<&isize> for &FixedU64<Frac>

impl<Frac> Shl<u8> for FixedU64<Frac>

impl<Frac> Shl<u8> for &FixedU64<Frac>

impl<Frac> Shl<&u8> for FixedU64<Frac>

impl<Frac> Shl<&u8> for &FixedU64<Frac>

impl<Frac> Shl<u16> for FixedU64<Frac>

impl<Frac> Shl<u16> for &FixedU64<Frac>

impl<Frac> Shl<&u16> for FixedU64<Frac>

impl<Frac> Shl<&u16> for &FixedU64<Frac>

impl<Frac> Shl<u32> for FixedU64<Frac>

impl<Frac> Shl<u32> for &FixedU64<Frac>

impl<Frac> Shl<&u32> for FixedU64<Frac>

impl<Frac> Shl<&u32> for &FixedU64<Frac>

impl<Frac> Shl<u64> for FixedU64<Frac>

impl<Frac> Shl<u64> for &FixedU64<Frac>

impl<Frac> Shl<&u64> for FixedU64<Frac>

impl<Frac> Shl<&u64> for &FixedU64<Frac>

impl<Frac> Shl<u128> for FixedU64<Frac>

impl<Frac> Shl<u128> for &FixedU64<Frac>

impl<Frac> Shl<&u128> for FixedU64<Frac>

impl<Frac> Shl<&u128> for &FixedU64<Frac>

impl<Frac> Shl<usize> for FixedU64<Frac>

impl<Frac> Shl<usize> for &FixedU64<Frac>

impl<Frac> Shl<&usize> for FixedU64<Frac>

impl<Frac> Shl<&usize> for &FixedU64<Frac>

impl<Frac> Shl<i8> for FixedU128<Frac>

impl<Frac> Shl<i8> for &FixedU128<Frac>

impl<Frac> Shl<&i8> for FixedU128<Frac>

impl<Frac> Shl<&i8> for &FixedU128<Frac>

impl<Frac> Shl<i16> for FixedU128<Frac>

impl<Frac> Shl<i16> for &FixedU128<Frac>

impl<Frac> Shl<&i16> for FixedU128<Frac>

impl<Frac> Shl<&i16> for &FixedU128<Frac>

impl<Frac> Shl<i32> for FixedU128<Frac>

impl<Frac> Shl<i32> for &FixedU128<Frac>

impl<Frac> Shl<&i32> for FixedU128<Frac>

impl<Frac> Shl<&i32> for &FixedU128<Frac>

impl<Frac> Shl<i64> for FixedU128<Frac>

impl<Frac> Shl<i64> for &FixedU128<Frac>

impl<Frac> Shl<&i64> for FixedU128<Frac>

impl<Frac> Shl<&i64> for &FixedU128<Frac>

impl<Frac> Shl<i128> for FixedU128<Frac>

impl<Frac> Shl<i128> for &FixedU128<Frac>

impl<Frac> Shl<&i128> for FixedU128<Frac>

impl<Frac> Shl<&i128> for &FixedU128<Frac>

impl<Frac> Shl<isize> for FixedU128<Frac>

impl<Frac> Shl<isize> for &FixedU128<Frac>

impl<Frac> Shl<&isize> for FixedU128<Frac>

impl<Frac> Shl<&isize> for &FixedU128<Frac>

impl<Frac> Shl<u8> for FixedU128<Frac>

impl<Frac> Shl<u8> for &FixedU128<Frac>

impl<Frac> Shl<&u8> for FixedU128<Frac>

impl<Frac> Shl<&u8> for &FixedU128<Frac>

impl<Frac> Shl<u16> for FixedU128<Frac>

impl<Frac> Shl<u16> for &FixedU128<Frac>

impl<Frac> Shl<&u16> for FixedU128<Frac>

impl<Frac> Shl<&u16> for &FixedU128<Frac>

impl<Frac> Shl<u32> for FixedU128<Frac>

impl<Frac> Shl<u32> for &FixedU128<Frac>

impl<Frac> Shl<&u32> for FixedU128<Frac>

impl<Frac> Shl<&u32> for &FixedU128<Frac>

impl<Frac> Shl<u64> for FixedU128<Frac>

impl<Frac> Shl<u64> for &FixedU128<Frac>

impl<Frac> Shl<&u64> for FixedU128<Frac>

impl<Frac> Shl<&u64> for &FixedU128<Frac>

impl<Frac> Shl<u128> for FixedU128<Frac>

impl<Frac> Shl<u128> for &FixedU128<Frac>

impl<Frac> Shl<&u128> for FixedU128<Frac>

impl<Frac> Shl<&u128> for &FixedU128<Frac>

impl<Frac> Shl<usize> for FixedU128<Frac>

impl<Frac> Shl<usize> for &FixedU128<Frac>

impl<Frac> Shl<&usize> for FixedU128<Frac>

impl<Frac> Shl<&usize> for &FixedU128<Frac>

impl<Frac> Shl<i8> for FixedI8<Frac>

impl<Frac> Shl<i8> for &FixedI8<Frac>

impl<Frac> Shl<&i8> for FixedI8<Frac>

impl<Frac> Shl<&i8> for &FixedI8<Frac>

impl<Frac> Shl<i16> for FixedI8<Frac>

impl<Frac> Shl<i16> for &FixedI8<Frac>

impl<Frac> Shl<&i16> for FixedI8<Frac>

impl<Frac> Shl<&i16> for &FixedI8<Frac>

impl<Frac> Shl<i32> for FixedI8<Frac>

impl<Frac> Shl<i32> for &FixedI8<Frac>

impl<Frac> Shl<&i32> for FixedI8<Frac>

impl<Frac> Shl<&i32> for &FixedI8<Frac>

impl<Frac> Shl<i64> for FixedI8<Frac>

impl<Frac> Shl<i64> for &FixedI8<Frac>

impl<Frac> Shl<&i64> for FixedI8<Frac>

impl<Frac> Shl<&i64> for &FixedI8<Frac>

impl<Frac> Shl<i128> for FixedI8<Frac>

impl<Frac> Shl<i128> for &FixedI8<Frac>

impl<Frac> Shl<&i128> for FixedI8<Frac>

impl<Frac> Shl<&i128> for &FixedI8<Frac>

impl<Frac> Shl<isize> for FixedI8<Frac>

impl<Frac> Shl<isize> for &FixedI8<Frac>

impl<Frac> Shl<&isize> for FixedI8<Frac>

impl<Frac> Shl<&isize> for &FixedI8<Frac>

impl<Frac> Shl<u8> for FixedI8<Frac>

impl<Frac> Shl<u8> for &FixedI8<Frac>

impl<Frac> Shl<&u8> for FixedI8<Frac>

impl<Frac> Shl<&u8> for &FixedI8<Frac>

impl<Frac> Shl<u16> for FixedI8<Frac>

impl<Frac> Shl<u16> for &FixedI8<Frac>

impl<Frac> Shl<&u16> for FixedI8<Frac>

impl<Frac> Shl<&u16> for &FixedI8<Frac>

impl<Frac> Shl<u32> for FixedI8<Frac>

impl<Frac> Shl<u32> for &FixedI8<Frac>

impl<Frac> Shl<&u32> for FixedI8<Frac>

impl<Frac> Shl<&u32> for &FixedI8<Frac>

impl<Frac> Shl<u64> for FixedI8<Frac>

impl<Frac> Shl<u64> for &FixedI8<Frac>

impl<Frac> Shl<&u64> for FixedI8<Frac>

impl<Frac> Shl<&u64> for &FixedI8<Frac>

impl<Frac> Shl<u128> for FixedI8<Frac>

impl<Frac> Shl<u128> for &FixedI8<Frac>

impl<Frac> Shl<&u128> for FixedI8<Frac>

impl<Frac> Shl<&u128> for &FixedI8<Frac>

impl<Frac> Shl<usize> for FixedI8<Frac>

impl<Frac> Shl<usize> for &FixedI8<Frac>

impl<Frac> Shl<&usize> for FixedI8<Frac>

impl<Frac> Shl<&usize> for &FixedI8<Frac>

impl<Frac> Shl<i8> for FixedI16<Frac>

impl<Frac> Shl<i8> for &FixedI16<Frac>

impl<Frac> Shl<&i8> for FixedI16<Frac>

impl<Frac> Shl<&i8> for &FixedI16<Frac>

impl<Frac> Shl<i16> for FixedI16<Frac>

impl<Frac> Shl<i16> for &FixedI16<Frac>

impl<Frac> Shl<&i16> for FixedI16<Frac>

impl<Frac> Shl<&i16> for &FixedI16<Frac>

impl<Frac> Shl<i32> for FixedI16<Frac>

impl<Frac> Shl<i32> for &FixedI16<Frac>

impl<Frac> Shl<&i32> for FixedI16<Frac>

impl<Frac> Shl<&i32> for &FixedI16<Frac>

impl<Frac> Shl<i64> for FixedI16<Frac>

impl<Frac> Shl<i64> for &FixedI16<Frac>

impl<Frac> Shl<&i64> for FixedI16<Frac>

impl<Frac> Shl<&i64> for &FixedI16<Frac>

impl<Frac> Shl<i128> for FixedI16<Frac>

impl<Frac> Shl<i128> for &FixedI16<Frac>

impl<Frac> Shl<&i128> for FixedI16<Frac>

impl<Frac> Shl<&i128> for &FixedI16<Frac>

impl<Frac> Shl<isize> for FixedI16<Frac>

impl<Frac> Shl<isize> for &FixedI16<Frac>

impl<Frac> Shl<&isize> for FixedI16<Frac>

impl<Frac> Shl<&isize> for &FixedI16<Frac>

impl<Frac> Shl<u8> for FixedI16<Frac>

impl<Frac> Shl<u8> for &FixedI16<Frac>

impl<Frac> Shl<&u8> for FixedI16<Frac>

impl<Frac> Shl<&u8> for &FixedI16<Frac>

impl<Frac> Shl<u16> for FixedI16<Frac>

impl<Frac> Shl<u16> for &FixedI16<Frac>

impl<Frac> Shl<&u16> for FixedI16<Frac>

impl<Frac> Shl<&u16> for &FixedI16<Frac>

impl<Frac> Shl<u32> for FixedI16<Frac>

impl<Frac> Shl<u32> for &FixedI16<Frac>

impl<Frac> Shl<&u32> for FixedI16<Frac>

impl<Frac> Shl<&u32> for &FixedI16<Frac>

impl<Frac> Shl<u64> for FixedI16<Frac>

impl<Frac> Shl<u64> for &FixedI16<Frac>

impl<Frac> Shl<&u64> for FixedI16<Frac>

impl<Frac> Shl<&u64> for &FixedI16<Frac>

impl<Frac> Shl<u128> for FixedI16<Frac>

impl<Frac> Shl<u128> for &FixedI16<Frac>

impl<Frac> Shl<&u128> for FixedI16<Frac>

impl<Frac> Shl<&u128> for &FixedI16<Frac>

impl<Frac> Shl<usize> for FixedI16<Frac>

impl<Frac> Shl<usize> for &FixedI16<Frac>

impl<Frac> Shl<&usize> for FixedI16<Frac>

impl<Frac> Shl<&usize> for &FixedI16<Frac>

impl<Frac> Shl<i8> for FixedI32<Frac>

impl<Frac> Shl<i8> for &FixedI32<Frac>

impl<Frac> Shl<&i8> for FixedI32<Frac>

impl<Frac> Shl<&i8> for &FixedI32<Frac>

impl<Frac> Shl<i16> for FixedI32<Frac>

impl<Frac> Shl<i16> for &FixedI32<Frac>

impl<Frac> Shl<&i16> for FixedI32<Frac>

impl<Frac> Shl<&i16> for &FixedI32<Frac>

impl<Frac> Shl<i32> for FixedI32<Frac>

impl<Frac> Shl<i32> for &FixedI32<Frac>

impl<Frac> Shl<&i32> for FixedI32<Frac>

impl<Frac> Shl<&i32> for &FixedI32<Frac>

impl<Frac> Shl<i64> for FixedI32<Frac>

impl<Frac> Shl<i64> for &FixedI32<Frac>

impl<Frac> Shl<&i64> for FixedI32<Frac>

impl<Frac> Shl<&i64> for &FixedI32<Frac>

impl<Frac> Shl<i128> for FixedI32<Frac>

impl<Frac> Shl<i128> for &FixedI32<Frac>

impl<Frac> Shl<&i128> for FixedI32<Frac>

impl<Frac> Shl<&i128> for &FixedI32<Frac>

impl<Frac> Shl<isize> for FixedI32<Frac>

impl<Frac> Shl<isize> for &FixedI32<Frac>

impl<Frac> Shl<&isize> for FixedI32<Frac>

impl<Frac> Shl<&isize> for &FixedI32<Frac>

impl<Frac> Shl<u8> for FixedI32<Frac>

impl<Frac> Shl<u8> for &FixedI32<Frac>

impl<Frac> Shl<&u8> for FixedI32<Frac>

impl<Frac> Shl<&u8> for &FixedI32<Frac>

impl<Frac> Shl<u16> for FixedI32<Frac>

impl<Frac> Shl<u16> for &FixedI32<Frac>

impl<Frac> Shl<&u16> for FixedI32<Frac>

impl<Frac> Shl<&u16> for &FixedI32<Frac>

impl<Frac> Shl<u32> for FixedI32<Frac>

impl<Frac> Shl<u32> for &FixedI32<Frac>

impl<Frac> Shl<&u32> for FixedI32<Frac>

impl<Frac> Shl<&u32> for &FixedI32<Frac>

impl<Frac> Shl<u64> for FixedI32<Frac>

impl<Frac> Shl<u64> for &FixedI32<Frac>

impl<Frac> Shl<&u64> for FixedI32<Frac>

impl<Frac> Shl<&u64> for &FixedI32<Frac>

impl<Frac> Shl<u128> for FixedI32<Frac>

impl<Frac> Shl<u128> for &FixedI32<Frac>

impl<Frac> Shl<&u128> for FixedI32<Frac>

impl<Frac> Shl<&u128> for &FixedI32<Frac>

impl<Frac> Shl<usize> for FixedI32<Frac>

impl<Frac> Shl<usize> for &FixedI32<Frac>

impl<Frac> Shl<&usize> for FixedI32<Frac>

impl<Frac> Shl<&usize> for &FixedI32<Frac>

impl<Frac> Shl<i8> for FixedI64<Frac>

impl<Frac> Shl<i8> for &FixedI64<Frac>

impl<Frac> Shl<&i8> for FixedI64<Frac>

impl<Frac> Shl<&i8> for &FixedI64<Frac>

impl<Frac> Shl<i16> for FixedI64<Frac>

impl<Frac> Shl<i16> for &FixedI64<Frac>

impl<Frac> Shl<&i16> for FixedI64<Frac>

impl<Frac> Shl<&i16> for &FixedI64<Frac>

impl<Frac> Shl<i32> for FixedI64<Frac>

impl<Frac> Shl<i32> for &FixedI64<Frac>

impl<Frac> Shl<&i32> for FixedI64<Frac>

impl<Frac> Shl<&i32> for &FixedI64<Frac>

impl<Frac> Shl<i64> for FixedI64<Frac>

impl<Frac> Shl<i64> for &FixedI64<Frac>

impl<Frac> Shl<&i64> for FixedI64<Frac>

impl<Frac> Shl<&i64> for &FixedI64<Frac>

impl<Frac> Shl<i128> for FixedI64<Frac>

impl<Frac> Shl<i128> for &FixedI64<Frac>

impl<Frac> Shl<&i128> for FixedI64<Frac>

impl<Frac> Shl<&i128> for &FixedI64<Frac>

impl<Frac> Shl<isize> for FixedI64<Frac>

impl<Frac> Shl<isize> for &FixedI64<Frac>

impl<Frac> Shl<&isize> for FixedI64<Frac>

impl<Frac> Shl<&isize> for &FixedI64<Frac>

impl<Frac> Shl<u8> for FixedI64<Frac>

impl<Frac> Shl<u8> for &FixedI64<Frac>

impl<Frac> Shl<&u8> for FixedI64<Frac>

impl<Frac> Shl<&u8> for &FixedI64<Frac>

impl<Frac> Shl<u16> for FixedI64<Frac>

impl<Frac> Shl<u16> for &FixedI64<Frac>

impl<Frac> Shl<&u16> for FixedI64<Frac>

impl<Frac> Shl<&u16> for &FixedI64<Frac>

impl<Frac> Shl<u32> for FixedI64<Frac>

impl<Frac> Shl<u32> for &FixedI64<Frac>

impl<Frac> Shl<&u32> for FixedI64<Frac>

impl<Frac> Shl<&u32> for &FixedI64<Frac>

impl<Frac> Shl<u64> for FixedI64<Frac>

impl<Frac> Shl<u64> for &FixedI64<Frac>

impl<Frac> Shl<&u64> for FixedI64<Frac>

impl<Frac> Shl<&u64> for &FixedI64<Frac>

impl<Frac> Shl<u128> for FixedI64<Frac>

impl<Frac> Shl<u128> for &FixedI64<Frac>

impl<Frac> Shl<&u128> for FixedI64<Frac>

impl<Frac> Shl<&u128> for &FixedI64<Frac>

impl<Frac> Shl<usize> for FixedI64<Frac>

impl<Frac> Shl<usize> for &FixedI64<Frac>

impl<Frac> Shl<&usize> for FixedI64<Frac>

impl<Frac> Shl<&usize> for &FixedI64<Frac>

impl<Frac> Shl<i8> for FixedI128<Frac>

impl<Frac> Shl<i8> for &FixedI128<Frac>

impl<Frac> Shl<&i8> for FixedI128<Frac>

impl<Frac> Shl<&i8> for &FixedI128<Frac>

impl<Frac> Shl<i16> for FixedI128<Frac>

impl<Frac> Shl<i16> for &FixedI128<Frac>

impl<Frac> Shl<&i16> for FixedI128<Frac>

impl<Frac> Shl<&i16> for &FixedI128<Frac>

impl<Frac> Shl<i32> for FixedI128<Frac>

impl<Frac> Shl<i32> for &FixedI128<Frac>

impl<Frac> Shl<&i32> for FixedI128<Frac>

impl<Frac> Shl<&i32> for &FixedI128<Frac>

impl<Frac> Shl<i64> for FixedI128<Frac>

impl<Frac> Shl<i64> for &FixedI128<Frac>

impl<Frac> Shl<&i64> for FixedI128<Frac>

impl<Frac> Shl<&i64> for &FixedI128<Frac>

impl<Frac> Shl<i128> for FixedI128<Frac>

impl<Frac> Shl<i128> for &FixedI128<Frac>

impl<Frac> Shl<&i128> for FixedI128<Frac>

impl<Frac> Shl<&i128> for &FixedI128<Frac>

impl<Frac> Shl<isize> for FixedI128<Frac>

impl<Frac> Shl<isize> for &FixedI128<Frac>

impl<Frac> Shl<&isize> for FixedI128<Frac>

impl<Frac> Shl<&isize> for &FixedI128<Frac>

impl<Frac> Shl<u8> for FixedI128<Frac>

impl<Frac> Shl<u8> for &FixedI128<Frac>

impl<Frac> Shl<&u8> for FixedI128<Frac>

impl<Frac> Shl<&u8> for &FixedI128<Frac>

impl<Frac> Shl<u16> for FixedI128<Frac>

impl<Frac> Shl<u16> for &FixedI128<Frac>

impl<Frac> Shl<&u16> for FixedI128<Frac>

impl<Frac> Shl<&u16> for &FixedI128<Frac>

impl<Frac> Shl<u32> for FixedI128<Frac>

impl<Frac> Shl<u32> for &FixedI128<Frac>

impl<Frac> Shl<&u32> for FixedI128<Frac>

impl<Frac> Shl<&u32> for &FixedI128<Frac>

impl<Frac> Shl<u64> for FixedI128<Frac>

impl<Frac> Shl<u64> for &FixedI128<Frac>

impl<Frac> Shl<&u64> for FixedI128<Frac>

impl<Frac> Shl<&u64> for &FixedI128<Frac>

impl<Frac> Shl<u128> for FixedI128<Frac>

impl<Frac> Shl<u128> for &FixedI128<Frac>

impl<Frac> Shl<&u128> for FixedI128<Frac>

impl<Frac> Shl<&u128> for &FixedI128<Frac>

impl<Frac> Shl<usize> for FixedI128<Frac>

impl<Frac> Shl<usize> for &FixedI128<Frac>

impl<Frac> Shl<&usize> for FixedI128<Frac>

impl<Frac> Shl<&usize> for &FixedI128<Frac>

impl<F> Shl<i8> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i8> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i8> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i8> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i16> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i16> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i16> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i16> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i32> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i32> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i32> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i32> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i64> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i64> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i64> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i64> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i128> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i128> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i128> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i128> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<isize> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<isize> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&isize> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&isize> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u8> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u8> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u8> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u8> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u16> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u16> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u16> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u16> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u32> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u32> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u32> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u32> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u64> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u64> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u64> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u64> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u128> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u128> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u128> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u128> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<usize> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<usize> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&usize> for Unwrapped<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&usize> for &Unwrapped<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i8> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i8> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i8> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i8> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i16> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i16> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i16> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i16> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i32> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i32> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i32> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i32> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i64> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i64> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i64> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i64> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<i128> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<i128> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&i128> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&i128> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<isize> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<isize> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&isize> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&isize> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u8> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u8> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u8> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u8> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u16> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u16> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u16> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u16> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u32> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u32> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u32> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u32> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u64> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u64> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u64> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u64> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<u128> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<u128> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&u128> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&u128> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<usize> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<usize> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl<F> Shl<&usize> for Wrapping<F> where
    F: Shl<u32, Output = F>,

impl<F> Shl<&usize> for &Wrapping<F> where
    for<'a> &'a F: Shl<u32, Output = F>,

impl Shl<&BigInt> for &BigInt

impl<'a> Shl<BigInt> for &'a BigInt

impl Shl<&BigInt> for BigInt

impl Shl<BigInt> for BigInt

impl Shl<&Number> for &Number

impl<'a> Shl<Number> for &'a Number

impl Shl<&Number> for Number

impl Shl<Number> for Number

impl Shl<usize> for BigInt

impl<'a> Shl<usize> for &'a BigInt

impl Shl<usize> for BigUint

impl<'a> Shl<usize> for &'a BigUint

impl<T> Shl<T> for U128 where
    T: Into<U128>,

impl<'a, T> Shl<T> for &'a U128 where
    T: Into<U128>,

impl<T> Shl<T> for U256 where
    T: Into<U256>,

impl<'a, T> Shl<T> for &'a U256 where
    T: Into<U256>,

impl<T> Shl<T> for U512 where
    T: Into<U512>,

impl<'a, T> Shl<T> for &'a U512 where
    T: Into<U512>,

impl Shl<B0> for UTerm

impl Shl<B1> for UTerm

impl<U: Unsigned, B: Bit> Shl<B0> for UInt<U, B>

impl<U: Unsigned, B: Bit> Shl<B1> for UInt<U, B>

impl<U: Unsigned, B: Bit> Shl<UTerm> for UInt<U, B>

impl<U: Unsigned> Shl<U> for UTerm

impl<U: Unsigned, B: Bit, Ur: Unsigned, Br: Bit> Shl<UInt<Ur, Br>> for UInt<U, B>where
    UInt<Ur, Br>: Sub<B1>,
    UInt<UInt<U, B>, B0>: Shl<Sub1<UInt<Ur, Br>>>,

impl Shl<&JsValue> for &JsValue

impl<'a> Shl<JsValue> for &'a JsValue

impl Shl<&JsValue> for JsValue

impl Shl<JsValue> for JsValue