pub enum Op {
    Ret(Ret),
    Call(Call),
    Call1(Call1),
    Call2(Call2),
    Call3(Call3),
    Call4(Call4),
    CallIndirect(CallIndirect),
    Jump(Jump),
    XJump(XJump),
    BrIf(BrIf),
    BrIfNot(BrIfNot),
    BrIfXeq32(BrIfXeq32),
    BrIfXneq32(BrIfXneq32),
    BrIfXslt32(BrIfXslt32),
    BrIfXslteq32(BrIfXslteq32),
    BrIfXult32(BrIfXult32),
    BrIfXulteq32(BrIfXulteq32),
    BrIfXeq64(BrIfXeq64),
    BrIfXneq64(BrIfXneq64),
    BrIfXslt64(BrIfXslt64),
    BrIfXslteq64(BrIfXslteq64),
    BrIfXult64(BrIfXult64),
    BrIfXulteq64(BrIfXulteq64),
    BrIfXeq32I8(BrIfXeq32I8),
    BrIfXeq32I32(BrIfXeq32I32),
    BrIfXneq32I8(BrIfXneq32I8),
    BrIfXneq32I32(BrIfXneq32I32),
    BrIfXslt32I8(BrIfXslt32I8),
    BrIfXslt32I32(BrIfXslt32I32),
    BrIfXsgt32I8(BrIfXsgt32I8),
    BrIfXsgt32I32(BrIfXsgt32I32),
    BrIfXslteq32I8(BrIfXslteq32I8),
    BrIfXslteq32I32(BrIfXslteq32I32),
    BrIfXsgteq32I8(BrIfXsgteq32I8),
    BrIfXsgteq32I32(BrIfXsgteq32I32),
    BrIfXult32U8(BrIfXult32U8),
    BrIfXult32U32(BrIfXult32U32),
    BrIfXulteq32U8(BrIfXulteq32U8),
    BrIfXulteq32U32(BrIfXulteq32U32),
    BrIfXugt32U8(BrIfXugt32U8),
    BrIfXugt32U32(BrIfXugt32U32),
    BrIfXugteq32U8(BrIfXugteq32U8),
    BrIfXugteq32U32(BrIfXugteq32U32),
    BrIfXeq64I8(BrIfXeq64I8),
    BrIfXeq64I32(BrIfXeq64I32),
    BrIfXneq64I8(BrIfXneq64I8),
    BrIfXneq64I32(BrIfXneq64I32),
    BrIfXslt64I8(BrIfXslt64I8),
    BrIfXslt64I32(BrIfXslt64I32),
    BrIfXsgt64I8(BrIfXsgt64I8),
    BrIfXsgt64I32(BrIfXsgt64I32),
    BrIfXslteq64I8(BrIfXslteq64I8),
    BrIfXslteq64I32(BrIfXslteq64I32),
    BrIfXsgteq64I8(BrIfXsgteq64I8),
    BrIfXsgteq64I32(BrIfXsgteq64I32),
    BrIfXult64U8(BrIfXult64U8),
    BrIfXult64U32(BrIfXult64U32),
    BrIfXulteq64U8(BrIfXulteq64U8),
    BrIfXulteq64U32(BrIfXulteq64U32),
    BrIfXugt64U8(BrIfXugt64U8),
    BrIfXugt64U32(BrIfXugt64U32),
    BrIfXugteq64U8(BrIfXugteq64U8),
    BrIfXugteq64U32(BrIfXugteq64U32),
    BrTable32(BrTable32),
    Xmov(Xmov),
    Xzero(Xzero),
    Xone(Xone),
    Xconst8(Xconst8),
    Xconst16(Xconst16),
    Xconst32(Xconst32),
    Xconst64(Xconst64),
    Xadd32(Xadd32),
    Xadd32U8(Xadd32U8),
    Xadd32U32(Xadd32U32),
    Xadd64(Xadd64),
    Xadd64U8(Xadd64U8),
    Xadd64U32(Xadd64U32),
    Xmadd32(Xmadd32),
    Xmadd64(Xmadd64),
    Xsub32(Xsub32),
    Xsub32U8(Xsub32U8),
    Xsub32U32(Xsub32U32),
    Xsub64(Xsub64),
    Xsub64U8(Xsub64U8),
    Xsub64U32(Xsub64U32),
    XMul32(XMul32),
    Xmul32S8(Xmul32S8),
    Xmul32S32(Xmul32S32),
    XMul64(XMul64),
    Xmul64S8(Xmul64S8),
    Xmul64S32(Xmul64S32),
    Xctz32(Xctz32),
    Xctz64(Xctz64),
    Xclz32(Xclz32),
    Xclz64(Xclz64),
    Xpopcnt32(Xpopcnt32),
    Xpopcnt64(Xpopcnt64),
    Xrotl32(Xrotl32),
    Xrotl64(Xrotl64),
    Xrotr32(Xrotr32),
    Xrotr64(Xrotr64),
    Xshl32(Xshl32),
    Xshr32S(Xshr32S),
    Xshr32U(Xshr32U),
    Xshl64(Xshl64),
    Xshr64S(Xshr64S),
    Xshr64U(Xshr64U),
    Xshl32U6(Xshl32U6),
    Xshr32SU6(Xshr32SU6),
    Xshr32UU6(Xshr32UU6),
    Xshl64U6(Xshl64U6),
    Xshr64SU6(Xshr64SU6),
    Xshr64UU6(Xshr64UU6),
    Xneg32(Xneg32),
    Xneg64(Xneg64),
    Xeq64(Xeq64),
    Xneq64(Xneq64),
    Xslt64(Xslt64),
    Xslteq64(Xslteq64),
    Xult64(Xult64),
    Xulteq64(Xulteq64),
    Xeq32(Xeq32),
    Xneq32(Xneq32),
    Xslt32(Xslt32),
    Xslteq32(Xslteq32),
    Xult32(Xult32),
    Xulteq32(Xulteq32),
    XLoad8U32O32(XLoad8U32O32),
    XLoad8S32O32(XLoad8S32O32),
    XLoad16LeU32O32(XLoad16LeU32O32),
    XLoad16LeS32O32(XLoad16LeS32O32),
    XLoad32LeO32(XLoad32LeO32),
    XLoad64LeO32(XLoad64LeO32),
    XStore8O32(XStore8O32),
    XStore16LeO32(XStore16LeO32),
    XStore32LeO32(XStore32LeO32),
    XStore64LeO32(XStore64LeO32),
    XLoad8U32Z(XLoad8U32Z),
    XLoad8S32Z(XLoad8S32Z),
    XLoad16LeU32Z(XLoad16LeU32Z),
    XLoad16LeS32Z(XLoad16LeS32Z),
    XLoad32LeZ(XLoad32LeZ),
    XLoad64LeZ(XLoad64LeZ),
    XStore8Z(XStore8Z),
    XStore16LeZ(XStore16LeZ),
    XStore32LeZ(XStore32LeZ),
    XStore64LeZ(XStore64LeZ),
    XLoad8U32G32(XLoad8U32G32),
    XLoad8S32G32(XLoad8S32G32),
    XLoad16LeU32G32(XLoad16LeU32G32),
    XLoad16LeS32G32(XLoad16LeS32G32),
    XLoad32LeG32(XLoad32LeG32),
    XLoad64LeG32(XLoad64LeG32),
    XStore8G32(XStore8G32),
    XStore16LeG32(XStore16LeG32),
    XStore32LeG32(XStore32LeG32),
    XStore64LeG32(XStore64LeG32),
    XLoad8U32G32Bne(XLoad8U32G32Bne),
    XLoad8S32G32Bne(XLoad8S32G32Bne),
    XLoad16LeU32G32Bne(XLoad16LeU32G32Bne),
    XLoad16LeS32G32Bne(XLoad16LeS32G32Bne),
    XLoad32LeG32Bne(XLoad32LeG32Bne),
    XLoad64LeG32Bne(XLoad64LeG32Bne),
    XStore8G32Bne(XStore8G32Bne),
    XStore16LeG32Bne(XStore16LeG32Bne),
    XStore32LeG32Bne(XStore32LeG32Bne),
    XStore64LeG32Bne(XStore64LeG32Bne),
    PushFrame(PushFrame),
    PopFrame(PopFrame),
    PushFrameSave(PushFrameSave),
    PopFrameRestore(PopFrameRestore),
    StackAlloc32(StackAlloc32),
    StackFree32(StackFree32),
    Zext8(Zext8),
    Zext16(Zext16),
    Zext32(Zext32),
    Sext8(Sext8),
    Sext16(Sext16),
    Sext32(Sext32),
    XAbs32(XAbs32),
    XAbs64(XAbs64),
    XDiv32S(XDiv32S),
    XDiv64S(XDiv64S),
    XDiv32U(XDiv32U),
    XDiv64U(XDiv64U),
    XRem32S(XRem32S),
    XRem64S(XRem64S),
    XRem32U(XRem32U),
    XRem64U(XRem64U),
    XBand32(XBand32),
    Xband32S8(Xband32S8),
    Xband32S32(Xband32S32),
    XBand64(XBand64),
    Xband64S8(Xband64S8),
    Xband64S32(Xband64S32),
    XBor32(XBor32),
    Xbor32S8(Xbor32S8),
    Xbor32S32(Xbor32S32),
    XBor64(XBor64),
    Xbor64S8(Xbor64S8),
    Xbor64S32(Xbor64S32),
    XBxor32(XBxor32),
    Xbxor32S8(Xbxor32S8),
    Xbxor32S32(Xbxor32S32),
    XBxor64(XBxor64),
    Xbxor64S8(Xbxor64S8),
    Xbxor64S32(Xbxor64S32),
    XBnot32(XBnot32),
    XBnot64(XBnot64),
    Xmin32U(Xmin32U),
    Xmin32S(Xmin32S),
    Xmax32U(Xmax32U),
    Xmax32S(Xmax32S),
    Xmin64U(Xmin64U),
    Xmin64S(Xmin64S),
    Xmax64U(Xmax64U),
    Xmax64S(Xmax64S),
    XSelect32(XSelect32),
    XSelect64(XSelect64),
    ExtendedOp(ExtendedOp),
}
A complete, materialized operation/instruction.
This type is useful for debugging, writing tests, etc., but is never actually used by the interpreter, encoder, or decoder, all of which avoid materializing ops.
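Because every variant carries its fully decoded payload, Op is convenient to construct and pattern-match in tests. A minimal sketch, assuming the enum is reachable at pulley_interpreter::op::Op (the path is an assumption; payload fields are left unmatched since they vary per instruction):

use pulley_interpreter::op::Op;

// Hypothetical helper: does this op transfer control?
fn is_control_flow(op: &Op) -> bool {
    matches!(
        op,
        Op::Ret(_)
            | Op::Call(_)
            | Op::CallIndirect(_)
            | Op::Jump(_)
            | Op::XJump(_)
            | Op::BrIf(_)
            | Op::BrIfNot(_)
            | Op::BrTable32(_)
    )
}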
Variants
Ret(Ret)
Transfer control to the address in the lr register.
Call(Call)
Transfer control to the PC at the given offset and set the lr
register to the PC just after this instruction.
This instruction generally assumes that the Pulley ABI is being respected, where arguments are in argument registers (starting at x0 for integer arguments) and results are in result registers. This instruction itself assumes that all arguments are already in their registers; the call1 through call4 variants below move arguments into the correct registers as part of the same call instruction (see the sketch after those variants).
Call1(Call1)
Like call, but also x0 = arg1
Call2(Call2)
Like call, but also x0, x1 = arg1, arg2
Call3(Call3)
Like call, but also x0, x1, x2 = arg1, arg2, arg3
Call4(Call4)
Like call, but also x0, x1, x2, x3 = arg1, arg2, arg3, arg4
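As a hedged illustration of the fused-argument variants above, call2 behaves as if it were the sequence below, written in Pulley-style pseudo-assembly (illustrative only, not actual encoder output):

// call2 offset, a, b is semantically:
//   xmov x0, a    ; arg1 into the first integer argument register
//   xmov x1, b    ; arg2 into the second
//   call offset   ; lr = PC after this instruction; jump to PC + offset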
CallIndirect(CallIndirect)
Transfer control to the PC in reg and set lr to the PC just
after this instruction.
Jump(Jump)
Unconditionally transfer control to the PC at the given offset.
XJump(XJump)
Unconditionally transfer control to the PC in the specified register.
BrIf(BrIf)
Conditionally transfer control to the given PC offset if
low32(cond) contains a non-zero value.
BrIfNot(BrIfNot)
Conditionally transfer control to the given PC offset if
low32(cond) contains a zero value.
BrIfXeq32(BrIfXeq32)
Branch if a == b.
BrIfXneq32(BrIfXneq32)
Branch if a != b.
BrIfXslt32(BrIfXslt32)
Branch if signed a < b.
BrIfXslteq32(BrIfXslteq32)
Branch if signed a <= b.
BrIfXult32(BrIfXult32)
Branch if unsigned a < b.
BrIfXulteq32(BrIfXulteq32)
Branch if unsigned a <= b.
BrIfXeq64(BrIfXeq64)
Branch if a == b.
BrIfXneq64(BrIfXneq64)
Branch if a != b.
BrIfXslt64(BrIfXslt64)
Branch if signed a < b.
BrIfXslteq64(BrIfXslteq64)
Branch if signed a <= b.
BrIfXult64(BrIfXult64)
Branch if unsigned a < b.
BrIfXulteq64(BrIfXulteq64)
Branch if unsigned a <= b.
BrIfXeq32I8(BrIfXeq32I8)
Branch if a == b.
BrIfXeq32I32(BrIfXeq32I32)
Branch if a == b.
BrIfXneq32I8(BrIfXneq32I8)
Branch if a != b.
BrIfXneq32I32(BrIfXneq32I32)
Branch if a != b.
BrIfXslt32I8(BrIfXslt32I8)
Branch if signed a < b.
BrIfXslt32I32(BrIfXslt32I32)
Branch if signed a < b.
BrIfXsgt32I8(BrIfXsgt32I8)
Branch if signed a > b.
BrIfXsgt32I32(BrIfXsgt32I32)
Branch if signed a > b.
BrIfXslteq32I8(BrIfXslteq32I8)
Branch if signed a <= b.
BrIfXslteq32I32(BrIfXslteq32I32)
Branch if signed a <= b.
BrIfXsgteq32I8(BrIfXsgteq32I8)
Branch if signed a >= b.
BrIfXsgteq32I32(BrIfXsgteq32I32)
Branch if signed a >= b.
BrIfXult32U8(BrIfXult32U8)
Branch if unsigned a < b.
BrIfXult32U32(BrIfXult32U32)
Branch if unsigned a < b.
BrIfXulteq32U8(BrIfXulteq32U8)
Branch if unsigned a <= b.
BrIfXulteq32U32(BrIfXulteq32U32)
Branch if unsigned a <= b.
BrIfXugt32U8(BrIfXugt32U8)
Branch if unsigned a > b.
BrIfXugt32U32(BrIfXugt32U32)
Branch if unsigned a > b.
BrIfXugteq32U8(BrIfXugteq32U8)
Branch if unsigned a >= b.
BrIfXugteq32U32(BrIfXugteq32U32)
Branch if unsigned a >= b.
BrIfXeq64I8(BrIfXeq64I8)
Branch if a == b.
BrIfXeq64I32(BrIfXeq64I32)
Branch if a == b.
BrIfXneq64I8(BrIfXneq64I8)
Branch if a != b.
BrIfXneq64I32(BrIfXneq64I32)
Branch if a != b.
BrIfXslt64I8(BrIfXslt64I8)
Branch if signed a < b.
BrIfXslt64I32(BrIfXslt64I32)
Branch if signed a < b.
BrIfXsgt64I8(BrIfXsgt64I8)
Branch if signed a > b.
BrIfXsgt64I32(BrIfXsgt64I32)
Branch if signed a > b.
BrIfXslteq64I8(BrIfXslteq64I8)
Branch if signed a <= b.
BrIfXslteq64I32(BrIfXslteq64I32)
Branch if signed a <= b.
BrIfXsgteq64I8(BrIfXsgteq64I8)
Branch if signed a >= b.
BrIfXsgteq64I32(BrIfXsgteq64I32)
Branch if signed a >= b.
BrIfXult64U8(BrIfXult64U8)
Branch if unsigned a < b.
BrIfXult64U32(BrIfXult64U32)
Branch if unsigned a < b.
BrIfXulteq64U8(BrIfXulteq64U8)
Branch if unsigned a <= b.
BrIfXulteq64U32(BrIfXulteq64U32)
Branch if unsigned a <= b.
BrIfXugt64U8(BrIfXugt64U8)
Branch if unsigned a > b.
BrIfXugt64U32(BrIfXugt64U32)
Branch if unsigned a > b.
BrIfXugteq64U8(BrIfXugteq64U8)
Branch if unsigned a >= b.
BrIfXugteq64U32(BrIfXugteq64U32)
Branch if unsigned a >= b.
BrTable32(BrTable32)
Branch to the label indicated by low32(idx).
After this instruction are amt instances of PcRelOffset
and idx selects which one will be branched to. The value
of idx is clamped to amt - 1 (i.e. the last offset is the
“default” one).
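A minimal sketch of this clamping behavior, assuming the amt decoded PcRelOffset entries are available as i32 displacements in a slice (names and decode path here are illustrative, not the interpreter's actual code):

// Hypothetical helper showing br_table32's index clamping (assumes amt >= 1).
fn br_table_target(idx: u64, offsets: &[i32]) -> i32 {
    let idx = idx as u32 as usize; // low32(idx)
    let last = offsets.len() - 1;  // amt - 1
    offsets[idx.min(last)]         // out-of-range indices take the final, "default" offset
}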
Xmov(Xmov)
Move between x registers.
Xzero(Xzero)
Set dst = 0
Xone(Xone)
Set dst = 1
Xconst8(Xconst8)
Set dst = sign_extend(imm8).
Xconst16(Xconst16)
Set dst = sign_extend(imm16).
Xconst32(Xconst32)
Set dst = sign_extend(imm32).
Xconst64(Xconst64)
Set dst = imm64.
Xadd32(Xadd32)
32-bit wrapping addition: low32(dst) = low32(src1) + low32(src2).
The upper 32 bits of dst are unmodified.
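A minimal sketch of this low32 write discipline, modeling an x register as a bare u64 (an assumption for illustration; the interpreter's actual register representation differs):

// 32-bit wrapping add that leaves the upper 32 bits of dst untouched.
fn xadd32(dst: u64, src1: u64, src2: u64) -> u64 {
    let sum = (src1 as u32).wrapping_add(src2 as u32);
    (dst & 0xffff_ffff_0000_0000) | u64::from(sum)
}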
Xadd32U8(Xadd32U8)
Same as xadd32 but src2 is a zero-extended 8-bit immediate.
Xadd32U32(Xadd32U32)
Same as xadd32 but src2 is a 32-bit immediate.
Xadd64(Xadd64)
64-bit wrapping addition: dst = src1 + src2.
Xadd64U8(Xadd64U8)
Same as xadd64 but src2 is a zero-extended 8-bit immediate.
Xadd64U32(Xadd64U32)
Same as xadd64 but src2 is a zero-extended 32-bit immediate.
Xmadd32(Xmadd32)
low32(dst) = low32(src1) * low32(src2) + low32(src3)
Xmadd64(Xmadd64)
dst = src1 * src2 + src3
Xsub32(Xsub32)
32-bit wrapping subtraction: low32(dst) = low32(src1) - low32(src2).
The upper 32 bits of dst are unmodified.
Xsub32U8(Xsub32U8)
Same as xsub32 but src2 is a zero-extended 8-bit immediate.
Xsub32U32(Xsub32U32)
Same as xsub32 but src2 is a 32-bit immediate.
Xsub64(Xsub64)
64-bit wrapping subtraction: dst = src1 - src2.
Xsub64U8(Xsub64U8)
Same as xsub64 but src2 is a zero-extended 8-bit immediate.
Xsub64U32(Xsub64U32)
Same as xsub64 but src2 is a zero-extended 32-bit immediate.
XMul32(XMul32)
low32(dst) = low32(src1) * low32(src2)
Xmul32S8(Xmul32S8)
Same as xmul32 but src2 is a sign-extended 8-bit immediate.
Xmul32S32(Xmul32S32)
Same as xmul32 but src2 is a sign-extended 32-bit immediate.
XMul64(XMul64)
dst = src1 * src2
Xmul64S8(Xmul64S8)
Same as xmul64 but src2 is a sign-extended 8-bit immediate.
Xmul64S32(Xmul64S32)
Same as xmul64 but src2 is a sign-extended 32-bit immediate.
Xctz32(Xctz32)
low32(dst) = trailing_zeros(low32(src))
Xctz64(Xctz64)
dst = trailing_zeros(src)
Xclz32(Xclz32)
low32(dst) = leading_zeros(low32(src))
Xclz64(Xclz64)
dst = leading_zeros(src)
Xpopcnt32(Xpopcnt32)
low32(dst) = count_ones(low32(src))
Xpopcnt64(Xpopcnt64)
dst = count_ones(src)
Xrotl32(Xrotl32)
low32(dst) = rotate_left(low32(src1), low32(src2))
Xrotl64(Xrotl64)
dst = rotate_left(src1, src2)
Xrotr32(Xrotr32)
low32(dst) = rotate_right(low32(src1), low32(src2))
Xrotr64(Xrotr64)
dst = rotate_right(src1, src2)
Xshl32(Xshl32)
low32(dst) = low32(src1) << low5(src2)
Xshr32S(Xshr32S)
low32(dst) = low32(src1) >> low5(src2) (signed)
Xshr32U(Xshr32U)
low32(dst) = low32(src1) >> low5(src2) (unsigned)
Xshl64(Xshl64)
dst = src1 << low6(src2)
Xshr64S(Xshr64S)
dst = src1 >> low6(src2) (signed)
Xshr64U(Xshr64U)
dst = src1 >> low6(src2) (unsigned)
Xshl32U6(Xshl32U6)
low32(dst) = low32(src1) << low5(src2)
Xshr32SU6(Xshr32SU6)
low32(dst) = low32(src1) >> low5(src2) (signed)
Xshr32UU6(Xshr32UU6)
low32(dst) = low32(src1) >> low5(src2) (unsigned)
Xshl64U6(Xshl64U6)
dst = src1 << low6(src2)
Xshr64SU6(Xshr64SU6)
dst = src1 >> low6(src2) (signed)
Xshr64UU6(Xshr64UU6)
dst = src1 >> low6(src2) (unsigned)
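The low5/low6 operators above mask the shift amount instead of trapping on oversized counts. A minimal sketch under the same bare-u64 register model as earlier (illustrative only):

fn xshl32(src1: u64, src2: u64) -> u32 {
    let amt = (src2 as u32) & 0x1f; // low5: 32-bit shifts use amounts 0..=31
    (src1 as u32) << amt
}

fn xshl64(src1: u64, src2: u64) -> u64 {
    let amt = (src2 as u32) & 0x3f; // low6: 64-bit shifts use amounts 0..=63
    src1 << amt
}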
Xneg32(Xneg32)
low32(dst) = -low32(src)
Xneg64(Xneg64)
dst = -src
Xeq64(Xeq64)
low32(dst) = src1 == src2
Xneq64(Xneq64)
low32(dst) = src1 != src2
Xslt64(Xslt64)
low32(dst) = src1 < src2 (signed)
Xslteq64(Xslteq64)
low32(dst) = src1 <= src2 (signed)
Xult64(Xult64)
low32(dst) = src1 < src2 (unsigned)
Xulteq64(Xulteq64)
low32(dst) = src1 <= src2 (unsigned)
Xeq32(Xeq32)
low32(dst) = low32(src1) == low32(src2)
Xneq32(Xneq32)
low32(dst) = low32(src1) != low32(src2)
Xslt32(Xslt32)
low32(dst) = low32(src1) < low32(src2) (signed)
Xslteq32(Xslteq32)
low32(dst) = low32(src1) <= low32(src2) (signed)
Xult32(Xult32)
low32(dst) = low32(src1) < low32(src2) (unsigned)
Xulteq32(Xulteq32)
low32(dst) = low32(src1) <= low32(src2) (unsigned)
XLoad8U32O32(XLoad8U32O32)
low32(dst) = zext_8_32(*addr)
XLoad8S32O32(XLoad8S32O32)
low32(dst) = sext_8_32(*addr)
XLoad16LeU32O32(XLoad16LeU32O32)
low32(dst) = zext_16_32(*addr)
XLoad16LeS32O32(XLoad16LeS32O32)
low32(dst) = sext_16_32(*addr)
XLoad32LeO32(XLoad32LeO32)
low32(dst) = *addr
XLoad64LeO32(XLoad64LeO32)
dst = *addr
XStore8O32(XStore8O32)
*addr = low8(src)
XStore16LeO32(XStore16LeO32)
*addr = low16(src)
XStore32LeO32(XStore32LeO32)
*addr = low32(src)
XStore64LeO32(XStore64LeO32)
*addr = src
XLoad8U32Z(XLoad8U32Z)
low32(dst) = zext_8_32(*addr)
XLoad8S32Z(XLoad8S32Z)
low32(dst) = sext_8_32(*addr)
XLoad16LeU32Z(XLoad16LeU32Z)
low32(dst) = zext_16_32(*addr)
XLoad16LeS32Z(XLoad16LeS32Z)
low32(dst) = sext_16_32(*addr)
XLoad32LeZ(XLoad32LeZ)
low32(dst) = *addr
XLoad64LeZ(XLoad64LeZ)
dst = *addr
XStore8Z(XStore8Z)
*addr = low8(src)
XStore16LeZ(XStore16LeZ)
*addr = low16(src)
XStore32LeZ(XStore32LeZ)
*addr = low32(src)
XStore64LeZ(XStore64LeZ)
*addr = src
XLoad8U32G32(XLoad8U32G32)
low32(dst) = zext_8_32(*addr)
XLoad8S32G32(XLoad8S32G32)
low32(dst) = sext_8_32(*addr)
XLoad16LeU32G32(XLoad16LeU32G32)
low32(dst) = zext_16_32(*addr)
XLoad16LeS32G32(XLoad16LeS32G32)
low32(dst) = sext_16_32(*addr)
XLoad32LeG32(XLoad32LeG32)
low32(dst) = *addr
XLoad64LeG32(XLoad64LeG32)
dst = *addr
XStore8G32(XStore8G32)
*addr = low8(src)
XStore16LeG32(XStore16LeG32)
*addr = low16(src)
XStore32LeG32(XStore32LeG32)
*addr = low32(src)
XStore64LeG32(XStore64LeG32)
*addr = src
XLoad8U32G32Bne(XLoad8U32G32Bne)
low32(dst) = zext_8_32(*addr)
XLoad8S32G32Bne(XLoad8S32G32Bne)
low32(dst) = sext_8_32(*addr)
XLoad16LeU32G32Bne(XLoad16LeU32G32Bne)
low32(dst) = zext_16_32(*addr)
XLoad16LeS32G32Bne(XLoad16LeS32G32Bne)
low32(dst) = sext_16_32(*addr)
XLoad32LeG32Bne(XLoad32LeG32Bne)
low32(dst) = *addr
XLoad64LeG32Bne(XLoad64LeG32Bne)
dst = *addr
XStore8G32Bne(XStore8G32Bne)
*addr = low8(src)
XStore16LeG32Bne(XStore16LeG32Bne)
*addr = low16(src)
XStore32LeG32Bne(XStore32LeG32Bne)
*addr = low32(src)
XStore64LeG32Bne(XStore64LeG32Bne)
*addr = src
PushFrame(PushFrame)
push lr; push fp; fp = sp
PopFrame(PopFrame)
sp = fp; pop fp; pop lr
PushFrameSave(PushFrameSave)
Macro-instruction to enter a function, allocate some stack, and then save some registers.
This is equivalent to push_frame, stack_alloc32 amt, then
saving all of regs to the top of the stack just allocated.
PopFrameRestore(PopFrameRestore)
Inverse of push_frame_save. Restores regs from the top of
the stack, then runs stack_free32 amt, then runs pop_frame.
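A hedged expansion of this macro pair in Pulley-style pseudo-assembly (illustrative; the precise register save/restore order is not specified here):

// push_frame_save amt, regs ≈
//   push_frame          ; push lr; push fp; fp = sp
//   stack_alloc32 amt   ; sp = sp.checked_sub(amt)
//   <store each register in regs to the top of the new allocation>
//
// pop_frame_restore amt, regs ≈
//   <reload each register in regs from the top of the stack>
//   stack_free32 amt    ; sp = sp + amt
//   pop_frame           ; sp = fp; pop fp; pop lr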
StackAlloc32(StackAlloc32)
sp = sp.checked_sub(amt)
StackFree32(StackFree32)
sp = sp + amt
Zext8(Zext8)
dst = zext(low8(src))
Zext16(Zext16)
dst = zext(low16(src))
Zext32(Zext32)
dst = zext(low32(src))
Sext8(Sext8)
dst = sext(low8(src))
Sext16(Sext16)
dst = sext(low16(src))
Sext32(Sext32)
dst = sext(low32(src))
XAbs32(XAbs32)
low32(dst) = |low32(src)|
XAbs64(XAbs64)
dst = |src|
XDiv32S(XDiv32S)
low32(dst) = low32(src1) / low32(src2) (signed)
XDiv64S(XDiv64S)
dst = src1 / src2 (signed)
XDiv32U(XDiv32U)
low32(dst) = low32(src1) / low32(src2) (unsigned)
XDiv64U(XDiv64U)
dst = src1 / src2 (unsigned)
XRem32S(XRem32S)
low32(dst) = low32(src1) % low32(src2) (signed)
XRem64S(XRem64S)
dst = src1 % src2 (signed)
XRem32U(XRem32U)
low32(dst) = low32(src1) % low32(src2) (unsigned)
XRem64U(XRem64U)
dst = src1 % src2 (unsigned)
XBand32(XBand32)
low32(dst) = low32(src1) & low32(src2)
Xband32S8(Xband32S8)
Same as xband32 but src2 is a sign-extended 8-bit immediate.
Xband32S32(Xband32S32)
Same as xband32 but src2 is a sign-extended 32-bit immediate.
XBand64(XBand64)
dst = src1 & src2
Xband64S8(Xband64S8)
Same as xband64 but src2 is a sign-extended 8-bit immediate.
Xband64S32(Xband64S32)
Same as xband64 but src2 is a sign-extended 32-bit immediate.
XBor32(XBor32)
low32(dst) = low32(src1) | low32(src2)
Xbor32S8(Xbor32S8)
Same as xbor32 but src2 is a sign-extended 8-bit immediate.
Xbor32S32(Xbor32S32)
Same as xbor32 but src2 is a sign-extended 32-bit immediate.
XBor64(XBor64)
dst = src1 | src2
Xbor64S8(Xbor64S8)
Same as xbor64 but src2 is a sign-extended 8-bit immediate.
Xbor64S32(Xbor64S32)
Same as xbor64 but src2 is a sign-extended 32-bit immediate.
XBxor32(XBxor32)
low32(dst) = low32(src1) ^ low32(src2)
Xbxor32S8(Xbxor32S8)
Same as xbxor32 but src2 is a sign-extended 8-bit immediate.
Xbxor32S32(Xbxor32S32)
Same as xbxor32 but src2 is a sign-extended 32-bit immediate.
XBxor64(XBxor64)
dst = src1 ^ src2
Xbxor64S8(Xbxor64S8)
Same as xbxor64 but src2 is a sign-extended 8-bit immediate.
Xbxor64S32(Xbxor64S32)
Same as xbxor64 but src2 is a sign-extended 32-bit immediate.
XBnot32(XBnot32)
low32(dst) = !low32(src1)
XBnot64(XBnot64)
dst = !src1
Xmin32U(Xmin32U)
low32(dst) = min(low32(src1), low32(src2)) (unsigned)
Xmin32S(Xmin32S)
low32(dst) = min(low32(src1), low32(src2)) (signed)
Xmax32U(Xmax32U)
low32(dst) = max(low32(src1), low32(src2)) (unsigned)
Xmax32S(Xmax32S)
low32(dst) = max(low32(src1), low32(src2)) (signed)
Xmin64U(Xmin64U)
dst = min(src1, src2) (unsigned)
Xmin64S(Xmin64S)
dst = min(src1, src2) (signed)
Xmax64U(Xmax64U)
dst = max(src1, src2) (unsigned)
Xmax64S(Xmax64S)
dst = max(src1, src2) (signed)
XSelect32(XSelect32)
low32(dst) = low32(cond) ? low32(if_nonzero) : low32(if_zero)
XSelect64(XSelect64)
dst = low32(cond) ? if_nonzero : if_zero
ExtendedOp(ExtendedOp)
An extended operation/instruction.
Trait Implementations

impl<'arbitrary> Arbitrary<'arbitrary> for Op

fn arbitrary(u: &mut Unstructured<'arbitrary>) -> Result<Self>
Generate an arbitrary value of Self from the given unstructured data.

fn arbitrary_take_rest(u: Unstructured<'arbitrary>) -> Result<Self>
Generate an arbitrary value of Self from the entirety of the given unstructured data.
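A short sketch of using this impl to generate random ops, e.g. for fuzzing, assuming Op is reachable at pulley_interpreter::op::Op (the path is an assumption; the arbitrary calls are the crate's standard API):

use arbitrary::{Arbitrary, Unstructured};
use pulley_interpreter::op::Op;

// Decode one pseudo-random Op from raw fuzz input.
fn random_op(raw: &[u8]) -> arbitrary::Result<Op> {
    let mut u = Unstructured::new(raw);
    Op::arbitrary(&mut u)
}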