diff --git a/src/interpreter/ByteCode.cpp b/src/interpreter/ByteCode.cpp index 3794f75ba..a657ff392 100644 --- a/src/interpreter/ByteCode.cpp +++ b/src/interpreter/ByteCode.cpp @@ -17,6 +17,8 @@ #include "Walrus.h" #include "interpreter/ByteCode.h" #include "runtime/ObjectType.h" +#include "runtime/Value.h" +#include namespace Walrus { @@ -53,38 +55,38 @@ size_t ByteCode::getSize() const { switch (this->opcode()) { case BrTableOpcode: { - const BrTable* brTable = reinterpret_cast(this); + const BrTable *brTable = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(BrTable) + sizeof(int32_t) * brTable->tableSize()); } case CallOpcode: { - const Call* call = reinterpret_cast(this); + const Call *call = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(Call) + sizeof(ByteCodeStackOffset) * call->parameterOffsetsSize() + sizeof(ByteCodeStackOffset) * call->resultOffsetsSize()); } case CallIndirectOpcode: { - const CallIndirect* callIndirect = reinterpret_cast(this); + const CallIndirect *callIndirect = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(CallIndirect) + sizeof(ByteCodeStackOffset) * callIndirect->parameterOffsetsSize() + sizeof(ByteCodeStackOffset) * callIndirect->resultOffsetsSize()); } case CallRefOpcode: { - const CallRef* callRef = reinterpret_cast(this); + const CallRef *callRef = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(CallRef) + sizeof(ByteCodeStackOffset) * callRef->parameterOffsetsSize() + sizeof(ByteCodeStackOffset) * callRef->resultOffsetsSize()); } case EndOpcode: { - const End* end = reinterpret_cast(this); + const End *end = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(End) + sizeof(ByteCodeStackOffset) * end->offsetsSize()); } case ThrowOpcode: { - const Throw* throwCode = reinterpret_cast(this); + const Throw *throwCode = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(Throw) + sizeof(ByteCodeStackOffset) * 
throwCode->offsetsSize()); } case ArrayNewFixedOpcode: { - const ArrayNewFixed* arrayNewFixedCode = reinterpret_cast(this); + const ArrayNewFixed *arrayNewFixedCode = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(ArrayNewFixed) + sizeof(ByteCodeStackOffset) * arrayNewFixedCode->offsetsSize()); } case StructNewOpcode: { - const StructNew* structNewCode = reinterpret_cast(this); + const StructNew *structNewCode = reinterpret_cast(this); return ByteCode::pointerAlignedSize(sizeof(StructNew) + sizeof(ByteCodeStackOffset) * structNewCode->offsetsSize()); } default: { @@ -94,5 +96,1318 @@ size_t ByteCode::getSize() const RELEASE_ASSERT_NOT_REACHED(); } +std::vector ByteCode::getByteCodeStackOffsets(FunctionType *funcType) const +{ + std::vector offsets; + + switch (this->opcode()) { +#define GENERATE_BINARY_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_BINARY_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_BINARY_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_BINARY_SHIFT_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_BINARY_OTHER(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_BINARY_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_BINARY_OTHER(GENERATE_BINARY_CODE_CASE) +#undef GENERATE_BINARY_CODE_CASE + case Walrus::ByteCode::MemoryCopyOpcode: { + ByteCodeOffset3 *binOp = reinterpret_cast(const_cast(this)); + offsets.push_back(binOp->stackOffset1()); + offsets.push_back(binOp->stackOffset2()); + offsets.push_back(binOp->stackOffset3()); + break; + } +#define GENERATE_UNARY_CODE_CASE(name, ...) 
case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_UNARY_OP(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_UNARY_OP_2(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_UNARY_OP(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_UNARY_CONVERT_OP(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_UNARY_OTHER(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_UNARY_OTHER(GENERATE_UNARY_CODE_CASE) +#undef GENERATE_UNARY_CODE_CASE + case Walrus::ByteCode::MoveI32Opcode: + case Walrus::ByteCode::MoveI64Opcode: + case Walrus::ByteCode::MoveV128Opcode: + case Walrus::ByteCode::MemoryGrowOpcode: + case Walrus::ByteCode::RefI31Opcode: + case Walrus::ByteCode::Store32Opcode: + case Walrus::ByteCode::Store64Opcode: + case Walrus::ByteCode::Load32Opcode: + case Walrus::ByteCode::Load64Opcode: { + ByteCodeOffset2 *unOp = reinterpret_cast(const_cast(this)); + offsets.push_back(unOp->stackOffset1()); + offsets.push_back(unOp->stackOffset2()); + break; + } + case Walrus::ByteCode::MoveF32Opcode: + case Walrus::ByteCode::MoveF64Opcode: { + MoveFloat *move = reinterpret_cast(const_cast(this)); + offsets.push_back(move->srcOffset()); + offsets.push_back(move->dstOffset()); + break; + } +#define GENERATE_TERNARY_CODE_CASE(name, ...) 
case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_RELAXED_SIMD_TERNARY_OP(GENERATE_TERNARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_TERNARY_OTHER(GENERATE_TERNARY_CODE_CASE) +#undef GENERATE_TERNARY_CODE_CASE + { + ByteCodeOffset4 *ternary = reinterpret_cast(const_cast(this)); + offsets.push_back(ternary->src0Offset()); + offsets.push_back(ternary->src1Offset()); + offsets.push_back(ternary->src2Offset()); + offsets.push_back(ternary->dstOffset()); + break; + } + case Walrus::ByteCode::F32LoadOpcode: + case Walrus::ByteCode::F64LoadOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::F32StoreOpcode: + case Walrus::ByteCode::F64StoreOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->valueOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } +#define GENERATE_MEMORY_LOAD_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_LOAD_INT_OP(GENERATE_MEMORY_LOAD_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_LOAD_EXTEND_OP(GENERATE_MEMORY_LOAD_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_LOAD_SPLAT_OP(GENERATE_MEMORY_LOAD_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_ETC_MEMIDX_OP(GENERATE_MEMORY_LOAD_CODE_CASE) + FOR_EACH_BYTECODE_ATOMIC_LOAD_OP(GENERATE_MEMORY_LOAD_CODE_CASE) +#undef GENERATE_MEMORY_LOAD_CODE_CASE + case Walrus::ByteCode::V128Load32ZeroOpcode: + case Walrus::ByteCode::V128Load64ZeroOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } +#define GENERATE_SIMD_MEMORY_LOAD_CASE(name, ...) 
\ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_LOAD_LANE_OP(GENERATE_SIMD_MEMORY_LOAD_CASE) +#undef GENERATE_SIMD_MEMORY_LOAD_CASE + { + offsets.push_back(reinterpret_cast(const_cast(this))->index()); + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } +#define GENERATE_MEMORY_STORE_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_STORE_INT_OP(GENERATE_MEMORY_STORE_CODE_CASE) + FOR_EACH_BYTECODE_ATOMIC_STORE_OP(GENERATE_MEMORY_STORE_CODE_CASE) +#undef GENERATE_MEMORY_STORE_CODE_CASE + { + offsets.push_back(reinterpret_cast(const_cast(this))->valueOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } +#define GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_STORE_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) + FOR_EACH_BYTECODE_LOAD_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) + FOR_EACH_BYTECODE_ATOMIC_STORE_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) + FOR_EACH_BYTECODE_ATOMIC_LOAD_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) +#undef GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE + { + offsets.push_back(reinterpret_cast(const_cast(this))->stackOffset1()); + offsets.push_back(reinterpret_cast(const_cast(this))->stackOffset2()); + break; + } +#define GENERATE_SIMD_MEMORY_STORE_CASE(name, ...) 
\ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_STORE_LANE_OP(GENERATE_SIMD_MEMORY_STORE_CASE) +#undef GENERATE_SIMD_MEMORY_STORE_CASE + { + offsets.push_back(reinterpret_cast(const_cast(this))->index()); + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + break; + } +#define GENERATE_SIMD_EXTRACT_LANE_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_EXTRACT_LANE_OP(GENERATE_SIMD_EXTRACT_LANE_CODE_CASE) +#undef GENERATE_SIMD_EXTRACT_LANE_CODE_CASE + { + // offsets.push_back(reinterpret_cast(const_cast(this))->index()); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } +#define GENERATE_SIMD_REPLACE_LANE_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_REPLACE_LANE_OP(GENERATE_SIMD_REPLACE_LANE_CODE_CASE) +#undef GENERATE_SIMD_REPLACE_LANE_CODE_CASE + { + offsets.push_back(reinterpret_cast(const_cast(this))->index()); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + // Special cases that require manual handling. This list needs to be extended if new byte codes are introduced. 
+ case Walrus::ByteCode::GlobalGet32Opcode: + case Walrus::ByteCode::GlobalGet64Opcode: + case Walrus::ByteCode::GlobalGet128Opcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::GlobalSet32Opcode: + case Walrus::ByteCode::GlobalSet64Opcode: + case Walrus::ByteCode::GlobalSet128Opcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + break; + } + case Walrus::ByteCode::SelectOpcode: { + Walrus::Select *sel = reinterpret_cast(const_cast(this)); + offsets.push_back(sel->src0Offset()); + offsets.push_back(sel->src1Offset()); + offsets.push_back(sel->condOffset()); + offsets.push_back(sel->dstOffset()); + break; + } + case Walrus::ByteCode::Const32Opcode: + case Walrus::ByteCode::Const64Opcode: + case Walrus::ByteCode::Const128Opcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::MemorySizeOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::MemoryInitOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[2]); + break; + } + case Walrus::ByteCode::MemoryFillOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[2]); + break; + } + case Walrus::ByteCode::TableSizeOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::TableGrowOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } 
+ case Walrus::ByteCode::TableGetOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::TableSetOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + break; + } + case Walrus::ByteCode::TableInitOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[2]); + break; + } + case Walrus::ByteCode::TableCopyOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[2]); + break; + } + case Walrus::ByteCode::TableFillOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[2]); + break; + } + case Walrus::ByteCode::I8X16ShuffleOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::V128BitSelectOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[0]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[1]); + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffsets()[2]); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::JumpIfTrueOpcode: + case Walrus::ByteCode::JumpIfFalseOpcode: { + 
offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + break; + } +#define GENERATE_ATOMIC_RMW_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_ATOMIC_RMW_OP(GENERATE_ATOMIC_RMW_CODE_CASE) +#undef GENERATE_ATOMIC_RMW_CODE_CASE + { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } +#define GENERATE_ATOMIC_RMW_CMPXCHG_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_ATOMIC_RMW_CMPXCHG_OP(GENERATE_ATOMIC_RMW_CMPXCHG_CODE_CASE) +#undef GENERATE_ATOMIC_RMW_CMPXCHG_CODE_CASE + { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src2Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::MemoryAtomicNotifyOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::MemoryAtomicNotifyMemIdxOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::MemoryAtomicWait32Opcode: + case Walrus::ByteCode::MemoryAtomicWait32MemIdxOpcode: + case Walrus::ByteCode::MemoryAtomicWait64Opcode: + case Walrus::ByteCode::MemoryAtomicWait64MemIdxOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + 
offsets.push_back(reinterpret_cast(const_cast(this))->src2Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + // WebAssembly3 opcodes + case Walrus::ByteCode::JumpIfNullOpcode: + case Walrus::ByteCode::JumpIfNonNullOpcode: + case Walrus::ByteCode::JumpIfCastGenericOpcode: + case Walrus::ByteCode::JumpIfCastDefinedOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->stackOffset()); + break; + } + case Walrus::ByteCode::RefFuncOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::RefAsNonNullOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->stackOffset()); + break; + } + case Walrus::ByteCode::RefCastGenericOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + break; + } + case Walrus::ByteCode::RefCastDefinedOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + break; + } + case Walrus::ByteCode::RefTestGenericOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::RefTestDefinedOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::I31GetSOpcode: + case Walrus::ByteCode::I31GetUOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::ArrayNewOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::ArrayLenOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + 
offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + + case Walrus::ByteCode::ArrayNewDefaultOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::ArrayNewFixedOpcode: { + Walrus::ArrayNewFixed *fixedArr = reinterpret_cast(const_cast(this)); + for (uint32_t i = 0; i < fixedArr->length(); i++) { + offsets.push_back(fixedArr->dataOffsets()[i]); + } + offsets.push_back(fixedArr->dstOffset()); + break; + } + case Walrus::ByteCode::ArrayNewDataOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::ArrayNewElemOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::ArrayFillOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src2Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src3Offset()); + break; + } + case Walrus::ByteCode::ArrayCopyOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src2Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src3Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src4Offset()); + break; + } + case Walrus::ByteCode::ArrayInitDataOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + 
offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src2Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src3Offset()); + break; + } + case Walrus::ByteCode::ArrayInitElemOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src2Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src3Offset()); + break; + } + case Walrus::ByteCode::ArrayGetOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::ArraySetOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src2Offset()); + break; + } + case Walrus::ByteCode::StructNewOpcode: { + Walrus::StructNew *structNew = reinterpret_cast(const_cast(this)); + for (uint32_t i = 0; i < structNew->offsetsSize(); i++) { + offsets.push_back(structNew->dataOffsets()[i]); + } + offsets.push_back(structNew->dstOffset()); + break; + } + case Walrus::ByteCode::StructNewDefaultOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::StructGetOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->srcOffset()); + offsets.push_back(reinterpret_cast(const_cast(this))->dstOffset()); + break; + } + case Walrus::ByteCode::StructSetOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->src0Offset()); + offsets.push_back(reinterpret_cast(const_cast(this))->src1Offset()); + break; + } + case Walrus::ByteCode::EndOpcode: { + Walrus::End *end = 
reinterpret_cast(const_cast(this)); + for (uint32_t i = 0; i < end->offsetsSize(); i++) { + offsets.push_back(end->resultOffsets()[i]); + } + break; + } + case Walrus::ByteCode::CallOpcode: { + Walrus::Call *call = reinterpret_cast(const_cast(this)); + for (uint32_t i = 0; i < call->parameterOffsetsSize() + call->resultOffsetsSize(); i++) { + offsets.push_back(call->stackOffsets()[i]); + } + break; + } + case Walrus::ByteCode::CallIndirectOpcode: { + Walrus::CallIndirect *call = reinterpret_cast(const_cast(this)); + offsets.push_back(call->calleeOffset()); + + size_t offsetCounter = 0; +#if defined(WALRUS_64) + for (uint32_t i = 0; i < call->functionType()->param().size(); i++) { + offsets.push_back(call->stackOffsets()[offsetCounter]); + + if (call->functionType()->param().types()[i] == Walrus::Value::Type::V128) { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + } + offsetCounter++; + } + + for (uint32_t i = 0; i < call->functionType()->result().size(); i++) { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + + if (call->functionType()->result().types()[i] == Walrus::Value::Type::V128) { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + } + } +#elif defined(WALRUS_32) + for (uint32_t i = 0; i < call->functionType()->param().size(); i++) { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + + switch (call->functionType()->param().types()[i]) { + case Walrus::Value::Type::I64: + case Walrus::Value::Type::F64: { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + break; + } + case Walrus::Value::Type::V128: { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + break; + } + default: { + break; + } + } + } + for (uint32_t i = 0; i < 
call->functionType()->result().size(); i++) { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + + switch (call->functionType()->result().types()[i]) { + case Walrus::Value::Type::I64: + case Walrus::Value::Type::F64: { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + break; + } + case Walrus::Value::Type::V128: { + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + offsets.push_back(call->stackOffsets()[offsetCounter]); + offsetCounter++; + break; + } + default: { + break; + } + } + } +#endif + break; + } + case Walrus::ByteCode::CallRefOpcode: { + Walrus::CallRef *callRef = reinterpret_cast(const_cast(this)); + offsets.push_back(callRef->calleeOffset()); + for (uint32_t i = 0; i < callRef->parameterOffsetsSize() + callRef->resultOffsetsSize(); i++) { + offsets.push_back(callRef->stackOffsets()[i]); + } + break; + } + case Walrus::ByteCode::BrTableOpcode: { + offsets.push_back(reinterpret_cast(const_cast(this))->condOffset()); + break; + } + case Walrus::ByteCode::ThrowOpcode: { + Walrus::Throw *thr = reinterpret_cast(const_cast(this)); + for (uint32_t i = 0; i < thr->offsetsSize(); i++) { + offsets.push_back(thr->dataOffsets()[i]); + } + break; + } + case Walrus::ByteCode::ElemDropOpcode: + case Walrus::ByteCode::DataDropOpcode: + case Walrus::ByteCode::JumpOpcode: +#if !defined(NDEBUG) + case Walrus::ByteCode::NopOpcode: +#endif + case Walrus::ByteCode::UnreachableOpcode: + case Walrus::ByteCode::AtomicFenceOpcode: { + break; + } + default: { + RELEASE_ASSERT_NOT_REACHED(); + } + } + + return offsets; +} + +void ByteCode::setByteCodeOffset(size_t index, Walrus::ByteCodeStackOffset offset, Walrus::ByteCodeStackOffset original) +{ + switch (this->opcode()) { +#define GENERATE_BINARY_CODE_CASE(name, ...) 
\ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_BINARY_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_BINARY_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_BINARY_SHIFT_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_BINARY_OTHER(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_BINARY_OP(GENERATE_BINARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_BINARY_OTHER(GENERATE_BINARY_CODE_CASE) +#undef GENERATE_BINARY_CODE_CASE + case Walrus::ByteCode::MemoryCopyOpcode: + case Walrus::ByteCode::MemoryFillOpcode: { + ByteCodeOffset3 *code = reinterpret_cast(this); + code->setStackOffset(index, offset); + break; + } +#define GENERATE_UNARY_CODE_CASE(name, ...) case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_UNARY_OP(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_UNARY_OP_2(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_UNARY_OP(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_UNARY_CONVERT_OP(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_UNARY_OTHER(GENERATE_UNARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_UNARY_OTHER(GENERATE_UNARY_CODE_CASE) +#undef GENERATE_UNARY_CODE_CASE + case Walrus::ByteCode::MoveI32Opcode: + case Walrus::ByteCode::MoveI64Opcode: + case Walrus::ByteCode::MoveV128Opcode: + case Walrus::ByteCode::MemoryGrowOpcode: + case Walrus::ByteCode::Load32Opcode: + case Walrus::ByteCode::Load64Opcode: + case Walrus::ByteCode::Store32Opcode: + case Walrus::ByteCode::Store64Opcode: + case Walrus::ByteCode::RefI31Opcode: { + ByteCodeOffset2 *code = reinterpret_cast(this); + if (index == 0) { + code->setStackOffset1(offset); + } else { + code->setStackOffset2(offset); + } + + break; + } + case Walrus::ByteCode::MoveF32Opcode: + case Walrus::ByteCode::MoveF64Opcode: { + MoveFloat *code = reinterpret_cast(this); + if (index == 0) { + code->setSrcOffset(offset); + } else { + code->setDstOffset(offset); + } + break; + } +#define GENERATE_TERNARY_CODE_CASE(name, ...) 
case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_RELAXED_SIMD_TERNARY_OP(GENERATE_TERNARY_CODE_CASE) + FOR_EACH_BYTECODE_RELAXED_SIMD_TERNARY_OTHER(GENERATE_TERNARY_CODE_CASE) +#undef GENERATE_TERNARY_CODE_CASE + { + ByteCodeOffset4 *ternary = reinterpret_cast(const_cast(this)); + ternary->setStackOffset(index, offset); + break; + } + case Walrus::ByteCode::F32StoreOpcode: + case Walrus::ByteCode::F64StoreOpcode: { + MemoryStoreFloat *store = reinterpret_cast(const_cast(this)); + if (index == 0) { + store->setStackOffset1(offset); + } else { + store->setStackOffset2(offset); + } + break; + } + case Walrus::ByteCode::F32LoadOpcode: + case Walrus::ByteCode::F64LoadOpcode: { + MemoryLoadFloat *store = reinterpret_cast(this); + if (index == 0) { + store->setStackOffset2(offset); + } else { + store->setStackOffset1(offset); + } + break; + } +#define GENERATE_MEMORY_LOAD_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_LOAD_INT_OP(GENERATE_MEMORY_LOAD_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_LOAD_EXTEND_OP(GENERATE_MEMORY_LOAD_CODE_CASE) + FOR_EACH_BYTECODE_SIMD_LOAD_SPLAT_OP(GENERATE_MEMORY_LOAD_CODE_CASE) + FOR_EACH_BYTECODE_ATOMIC_LOAD_OP(GENERATE_MEMORY_LOAD_CODE_CASE) +#undef GENERATE_MEMORY_LOAD_CODE_CASE + { + MemoryLoad *load = reinterpret_cast(this); + if (index == 0) { + load->setStackOffset1(offset); + } else { + load->setStackOffset2(offset); + } + break; + } +#define GENERATE_MEMORY_STORE_CODE_CASE(name, ...) 
\ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_STORE_INT_OP(GENERATE_MEMORY_STORE_CODE_CASE) + FOR_EACH_BYTECODE_ATOMIC_STORE_OP(GENERATE_MEMORY_STORE_CODE_CASE) +#undef GENERATE_MEMORY_STORE_CODE_CASE + { + MemoryStore *store = reinterpret_cast(this); + if (index == 0) { + store->setStackOffset2(offset); + } else { + store->setStackOffset1(offset); + } + break; + } + case Walrus::ByteCode::TableGetOpcode: + case Walrus::ByteCode::TableSetOpcode: + case Walrus::ByteCode::I31GetSOpcode: + case Walrus::ByteCode::I31GetUOpcode: + case Walrus::ByteCode::ArrayLenOpcode: + case Walrus::ByteCode::V128Load32ZeroOpcode: + case Walrus::ByteCode::V128Load64ZeroOpcode: { + ByteCodeOffset2Value *code = reinterpret_cast(this); + if (index == 0) { + code->setStackOffset1(offset); + } else { + code->setStackOffset2(offset); + } + break; + } +#define GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_STORE_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) + FOR_EACH_BYTECODE_LOAD_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) + FOR_EACH_BYTECODE_ATOMIC_STORE_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) + FOR_EACH_BYTECODE_ATOMIC_LOAD_MEMIDX_OP(GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE) +#undef GENERATE_BYTECODE_OFFSET2VALUE_MEMIDX_CASE + { + ByteCodeOffset2ValueMemIdx *code = reinterpret_cast(this); + if (index == 0) { + code->setStackOffset1(offset); + } else { + code->setStackOffset2(offset); + } + break; + } +#define GENERATE_SIMD_MEMORY_LOAD_CASE(name, ...) 
\ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_LOAD_LANE_OP(GENERATE_SIMD_MEMORY_LOAD_CASE) +#undef GENERATE_SIMD_MEMORY_LOAD_CASE + { + SIMDMemoryLoad *memoryLoad = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + memoryLoad->setIndex(offset); + break; + } + case 1: { + memoryLoad->setSrc0Offset(offset); + break; + } + case 2: { + memoryLoad->setSrc1Offset(offset); + break; + } + case 3: { + memoryLoad->setDstOffset(offset); + break; + } + } + break; + } +#define GENERATE_SIMD_MEMORY_LOAD_LANE_MEMIDX_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_LOAD_LANE_MEMIDX_OP(GENERATE_SIMD_MEMORY_LOAD_LANE_MEMIDX_CASE) +#undef GENERATE_SIMD_MEMORY_LOAD_LANE_MEMIDX_CASE + { + SIMDMemoryLoadMemIdx *memoryLoad = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + memoryLoad->setIndex(offset); + break; + } + case 1: { + memoryLoad->setSrc0Offset(offset); + break; + } + case 2: { + memoryLoad->setSrc1Offset(offset); + break; + } + case 3: { + memoryLoad->setDstOffset(offset); + break; + } + } + break; + } +#define GENERATE_SIMD_MEMORY_STORE_LANE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_STORE_LANE_OP(GENERATE_SIMD_MEMORY_STORE_LANE_CASE) +#undef GENERATE_SIMD_MEMORY_STORE_LANE_CASE + { + SIMDMemoryStore *memoryStore = reinterpret_cast(const_cast(this)); + if (index == 0) { + memoryStore->setIndex(offset); + } else if (index == 1) { + memoryStore->setSrc0Offset(offset); + } else { + memoryStore->setSrc1Offset(offset); + } + break; + } +#define GENERATE_SIMD_MEMORY_STORE_LANE_MEMIDX_CASE(name, ...) 
\ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_STORE_LANE_MEMIDX_OP(GENERATE_SIMD_MEMORY_STORE_LANE_MEMIDX_CASE) +#undef GENERATE_SIMD_MEMORY_STORE_LANE_MEMIDX_CASE + { + SIMDMemoryStoreMemIdx *memoryStore = reinterpret_cast(const_cast(this)); + if (index == 0) { + memoryStore->setIndex(offset); + } else if (index == 1) { + memoryStore->setSrc0Offset(offset); + } else { + memoryStore->setSrc1Offset(offset); + } + break; + } +#define GENERATE_SIMD_EXTRACT_LANE_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_EXTRACT_LANE_OP(GENERATE_SIMD_EXTRACT_LANE_CODE_CASE) +#undef GENERATE_SIMD_EXTRACT_LANE_CODE_CASE + { + SIMDExtractLane *extractLane = reinterpret_cast(const_cast(this)); + // if (index == 0) { + // extractLane->setIndex(offset); + // } else + if (index == 0) { + extractLane->setSrcOffset(offset); + } else { + extractLane->setDstOffset(offset); + } + break; + } +#define GENERATE_SIMD_REPLACE_LANE_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_SIMD_REPLACE_LANE_OP(GENERATE_SIMD_REPLACE_LANE_CODE_CASE) +#undef GENERATE_SIMD_REPLACE_LANE_CODE_CASE + { + SIMDReplaceLane *replaceLane = reinterpret_cast(const_cast(this)); + + switch (index) { + case 0: { + replaceLane->setIndex(offset); + break; + } + case 1: { + replaceLane->setSrc0Offset(offset); + break; + } + case 2: { + replaceLane->setSrc1Offset(offset); + break; + } + case 3: { + replaceLane->setDstOffset(offset); + break; + } + } + break; + } + case Walrus::ByteCode::SelectOpcode: { + Walrus::Select *sel = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + sel->setSrc0Offset(offset); + break; + } + case 1: { + sel->setSrc1Offset(offset); + break; + } + case 2: { + sel->setCondOffset(offset); + break; + } + case 3: { + sel->setDstOffset(offset); + break; + } + default: { + RELEASE_ASSERT_NOT_REACHED(); + } + } + break; + } + case Walrus::ByteCode::Const32Opcode: + case Walrus::ByteCode::Const64Opcode: 
+ case Walrus::ByteCode::Const128Opcode: { + Const32 *code = reinterpret_cast(const_cast(this)); + code->setDstOffset(offset); + break; + } + case Walrus::ByteCode::MemorySizeOpcode: { + MemorySize *memorySize = reinterpret_cast(const_cast(this)); + memorySize->setDstOffset(offset); + break; + } + case Walrus::ByteCode::MemoryInitOpcode: { + MemoryInit *memoryInit = reinterpret_cast(const_cast(this)); + memoryInit->setStackOffset(index, offset); + break; + } + case Walrus::ByteCode::TableSizeOpcode: { + ByteCodeOffsetValue *tableSize = reinterpret_cast(const_cast(this)); + tableSize->setStackOffset(offset); + break; + } + case Walrus::ByteCode::TableGrowOpcode: { + TableGrow *tableGrow = reinterpret_cast(const_cast(this)); + if (index == 0) { + tableGrow->setSrc0Offset(offset); + } else if (index == 1) { + tableGrow->setSrc1Offset(offset); + } else { + tableGrow->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::TableInitOpcode: { + TableInit *tableInit = reinterpret_cast(const_cast(this)); + tableInit->setStackOffset(index, offset); + break; + } + case Walrus::ByteCode::TableCopyOpcode: { + TableCopy *tableCopy = reinterpret_cast(const_cast(this)); + tableCopy->setStackOffset(index, offset); + break; + } + case Walrus::ByteCode::TableFillOpcode: { + TableFill *tableFill = reinterpret_cast(const_cast(this)); + tableFill->setSrcOffset(index, offset); + break; + } + case Walrus::ByteCode::I8X16ShuffleOpcode: { + I8X16Shuffle *shuffle = reinterpret_cast(const_cast(this)); + if (index < 2) { + shuffle->setSrcOffset(index, offset); + } else { + shuffle->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::V128BitSelectOpcode: { + V128BitSelect *select = reinterpret_cast(const_cast(this)); + select->setStackOffset(index, offset); + break; + } + case Walrus::ByteCode::JumpIfTrueOpcode: + case Walrus::ByteCode::JumpIfFalseOpcode: + case Walrus::ByteCode::GlobalGet32Opcode: + case Walrus::ByteCode::GlobalGet64Opcode: + case 
Walrus::ByteCode::GlobalGet128Opcode: + case Walrus::ByteCode::GlobalSet32Opcode: + case Walrus::ByteCode::GlobalSet64Opcode: + case Walrus::ByteCode::GlobalSet128Opcode: + case Walrus::ByteCode::RefFuncOpcode: + case Walrus::ByteCode::JumpIfNullOpcode: + case Walrus::ByteCode::JumpIfNonNullOpcode: + case Walrus::ByteCode::JumpIfCastGenericOpcode: + case Walrus::ByteCode::JumpIfCastDefinedOpcode: { + Walrus::ByteCodeOffsetValue *code = reinterpret_cast(const_cast(this)); + code->setStackOffset(offset); + break; + } + case Walrus::ByteCode::EndOpcode: { + Walrus::End *end = reinterpret_cast(const_cast(this)); + end->setResultOffset(index, offset); + break; + } + case Walrus::ByteCode::CallOpcode: { + Walrus::Call *call = reinterpret_cast(const_cast(this)); + call->setStackOffset(index, offset); + break; + } + case Walrus::ByteCode::CallIndirectOpcode: { + Walrus::CallIndirect *call = reinterpret_cast(const_cast(this)); + if (index == 0) { + call->setCalleeOffset(offset); + } else { + call->setStackOffset(index - 1, offset); + } + break; + } + case Walrus::ByteCode::CallRefOpcode: { + Walrus::CallRef *callRef = reinterpret_cast(const_cast(this)); + if (index == 0) { + callRef->setCalleeOffset(offset); + } else { + callRef->setStackOffset(index - 1, offset); + } + break; + } + case Walrus::ByteCode::BrTableOpcode: { + Walrus::BrTable *brTable = reinterpret_cast(const_cast(this)); + brTable->setCondOffset(offset); + break; + } + case Walrus::ByteCode::ThrowOpcode: { + Walrus::Throw *thr = reinterpret_cast(const_cast(this)); + thr->setDataOffset(index, offset); + break; + } +#define GENERATE_ATOMIC_RMW_CODE_CASE(name, ...) 
\ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_ATOMIC_RMW_OP(GENERATE_ATOMIC_RMW_CODE_CASE) +#undef GENERATE_ATOMIC_RMW_CODE_CASE + { + Walrus::AtomicRmw *atomic = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + atomic->setSrc0Offset(offset); + break; + } + case 1: { + atomic->setSrc1Offset(offset); + break; + } + case 2: { + atomic->setDstOffset(offset); + break; + } + } + break; + } + +#define GENERATE_ATOMIC_RMW_CMPXCHG_CODE_CASE(name, ...) \ + case Walrus::ByteCode::name##Opcode: + FOR_EACH_BYTECODE_ATOMIC_RMW_CMPXCHG_OP(GENERATE_ATOMIC_RMW_CMPXCHG_CODE_CASE) +#undef GENERATE_ATOMIC_RMW_CMPXCHG_CODE_CASE + { + Walrus::ByteCodeOffset4Value *code = reinterpret_cast(const_cast(this)); + + switch (index) { + case 0: { + code->setSrc0Offset(offset); + break; + } + case 1: { + code->setSrc1Offset(offset); + break; + } + case 2: { + code->setSrc2Offset(offset); + break; + } + case 3: { + code->setDstOffset(offset); + break; + } + } + break; + } + case Walrus::ByteCode::MemoryAtomicNotifyOpcode: { + Walrus::MemoryAtomicNotify *atomic = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + atomic->setSrc0Offset(offset); + break; + } + case 1: { + atomic->setSrc1Offset(offset); + break; + } + case 2: { + atomic->setDstOffset(offset); + break; + } + } + break; + } + case Walrus::ByteCode::MemoryAtomicNotifyMemIdxOpcode: { + Walrus::MemoryAtomicNotifyMemIdx *atomic = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + atomic->setSrc0Offset(offset); + break; + } + case 1: { + atomic->setSrc1Offset(offset); + break; + } + case 2: { + atomic->setDstOffset(offset); + break; + } + } + break; + } + case Walrus::ByteCode::MemoryAtomicWait32Opcode: + case Walrus::ByteCode::MemoryAtomicWait32MemIdxOpcode: + case Walrus::ByteCode::MemoryAtomicWait64Opcode: + case Walrus::ByteCode::MemoryAtomicWait64MemIdxOpcode: { + Walrus::ByteCodeOffset4Value *atomic = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + 
atomic->setSrc0Offset(offset); + break; + } + case 1: { + atomic->setSrc1Offset(offset); + break; + } + case 2: { + atomic->setSrc2Offset(offset); + break; + } + case 3: { + atomic->setDstOffset(offset); + break; + } + } + break; + } + case Walrus::ByteCode::RefAsNonNullOpcode: { + Walrus::RefAsNonNull *refAs = reinterpret_cast(const_cast(this)); + refAs->setStackOffset(offset); + break; + } + case Walrus::ByteCode::RefCastGenericOpcode: { + Walrus::RefCastGeneric *generic = reinterpret_cast(const_cast(this)); + generic->setSrcOffset(offset); + break; + } + case Walrus::ByteCode::RefCastDefinedOpcode: { + Walrus::RefCastDefined *generic = reinterpret_cast(const_cast(this)); + generic->setSrcOffset(offset); + break; + } + case Walrus::ByteCode::RefTestGenericOpcode: { + Walrus::RefTestGeneric *generic = reinterpret_cast(const_cast(this)); + if (index == 0) { + generic->setSrcOffset(offset); + } else { + generic->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::RefTestDefinedOpcode: { + Walrus::RefTestDefined *generic = reinterpret_cast(const_cast(this)); + if (index == 0) { + generic->setSrcOffset(offset); + } else { + generic->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::ArrayNewOpcode: { + Walrus::ArrayNew *arrNew = reinterpret_cast(const_cast(this)); + if (index == 0) { + arrNew->setSrc0Offset(offset); + } else if (index == 1) { + arrNew->setSrc1Offset(offset); + } else { + arrNew->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::ArrayNewDefaultOpcode: { + Walrus::ArrayNewDefault *arrNew = reinterpret_cast(const_cast(this)); + if (index == 0) { + arrNew->setSrcOffset(offset); + } else { + arrNew->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::ArrayNewFixedOpcode: { + Walrus::ArrayNewFixed *fixedArr = reinterpret_cast(const_cast(this)); + if (index < fixedArr->length()) { + fixedArr->dataOffsets()[index] = offset; + } else { + fixedArr->setDstOffset(offset); + } + break; + } + case 
Walrus::ByteCode::ArrayNewDataOpcode: { + Walrus::ArrayNewData *arr = reinterpret_cast(const_cast(this)); + if (index == 0) { + arr->setSrc0Offset(offset); + } else if (index == 1) { + arr->setSrc1Offset(offset); + } else { + arr->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::ArrayNewElemOpcode: { + Walrus::ArrayNewElem *arr = reinterpret_cast(const_cast(this)); + if (index == 0) { + arr->setSrc0Offset(offset); + } else if (index == 1) { + arr->setSrc1Offset(offset); + } else { + arr->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::ArrayFillOpcode: { + Walrus::ArrayFill *arr = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + arr->setSrc0Offset(offset); + break; + } + case 1: { + arr->setSrc1Offset(offset); + break; + } + case 2: { + arr->setSrc2Offset(offset); + break; + } + case 3: { + arr->setSrc3Offset(offset); + break; + } + } + break; + } + case Walrus::ByteCode::ArrayCopyOpcode: { + Walrus::ArrayCopy *arr = reinterpret_cast(const_cast(this)); + switch (index) { + case 0: { + arr->setSrc0Offset(offset); + break; + } + case 1: { + arr->setSrc1Offset(offset); + break; + } + case 2: { + arr->setSrc2Offset(offset); + break; + } + case 3: { + arr->setSrc3Offset(offset); + break; + } + case 4: { + arr->setSrc4Offset(offset); + break; + } + } + break; + } + case Walrus::ByteCode::ArrayInitDataOpcode: { + Walrus::ArrayInitData *arr = reinterpret_cast(const_cast(this)); + if (index == 0) { + arr->setSrc0Offset(offset); + } else if (index == 1) { + arr->setSrc1Offset(offset); + } else if (index == 2) { + arr->setSrc2Offset(offset); + } else { + arr->setSrc3Offset(offset); + } + break; + } + case Walrus::ByteCode::ArrayInitElemOpcode: { + Walrus::ArrayInitElem *arr = reinterpret_cast(const_cast(this)); + if (index == 0) { + arr->setSrc0Offset(offset); + } else if (index == 1) { + arr->setSrc1Offset(offset); + } else if (index == 2) { + arr->setSrc2Offset(offset); + } else { + arr->setSrc3Offset(offset); + } + break; + 
} + case Walrus::ByteCode::ArrayGetOpcode: { + Walrus::ArrayGet *arr = reinterpret_cast(const_cast(this)); + if (index == 0) { + arr->setSrc0Offset(offset); + } else if (index == 1) { + arr->setSrc1Offset(offset); + } else { + arr->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::ArraySetOpcode: { + Walrus::ArraySet *arr = reinterpret_cast(const_cast(this)); + if (index == 0) { + arr->setSrc0Offset(offset); + } else if (index == 1) { + arr->setSrc1Offset(offset); + } else { + arr->setSrc2Offset(offset); + } + break; + } + case Walrus::ByteCode::StructNewOpcode: { + Walrus::StructNew *structNew = reinterpret_cast(const_cast(this)); + if (index < structNew->offsetsSize()) { + structNew->setDataOffset(offset, index); + } else { + structNew->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::StructNewDefaultOpcode: { + Walrus::StructNewDefault *structNewDefault = reinterpret_cast(const_cast(this)); + structNewDefault->setDstOffset(offset); + break; + } + case Walrus::ByteCode::StructGetOpcode: { + Walrus::StructGet *structGet = reinterpret_cast(const_cast(this)); + if (index == 0) { + structGet->setSrcOffset(offset); + } else { + structGet->setDstOffset(offset); + } + break; + } + case Walrus::ByteCode::StructSetOpcode: { + Walrus::StructSet *structSet = reinterpret_cast(const_cast(this)); + if (index == 0) { + structSet->setSrc0Offset(offset); + } else { + structSet->setSrc1Offset(offset); + } + break; + } + case Walrus::ByteCode::ElemDropOpcode: + case Walrus::ByteCode::DataDropOpcode: + case Walrus::ByteCode::JumpOpcode: +#if !defined(NDEBUG) + case Walrus::ByteCode::NopOpcode: +#endif + case Walrus::ByteCode::UnreachableOpcode: + case Walrus::ByteCode::AtomicFenceOpcode: { + break; + } + default: { + RELEASE_ASSERT_NOT_REACHED(); + } + } +} } // namespace Walrus diff --git a/src/interpreter/ByteCode.h b/src/interpreter/ByteCode.h index fa6d2e6a2..3acde16d3 100644 --- a/src/interpreter/ByteCode.h +++ b/src/interpreter/ByteCode.h @@ -18,6 
+18,8 @@ #define __WalrusByteCode__ #include "runtime/Module.h" +#include "Walrus.h" +#include #if !defined(NDEBUG) #include @@ -966,6 +968,12 @@ class ByteCode { Opcode opcode() const; size_t getSize() const; + std::vector getByteCodeStackOffsets(FunctionType* funcType) const; + void setByteCodeOffset(size_t index, Walrus::ByteCodeStackOffset offset, Walrus::ByteCodeStackOffset original); + void dump() const + { + return; + } protected: friend class Interpreter; @@ -998,6 +1006,8 @@ class ByteCodeOffset2 : public ByteCode { ByteCodeStackOffset stackOffset1() const { return m_stackOffset1; } ByteCodeStackOffset stackOffset2() const { return m_stackOffset2; } + void setStackOffset1(Walrus::ByteCodeStackOffset o) { m_stackOffset1 = o; } + void setStackOffset2(Walrus::ByteCodeStackOffset o) { m_stackOffset2 = o; } protected: ByteCodeStackOffset m_stackOffset1; @@ -1016,6 +1026,7 @@ class ByteCodeOffset3 : public ByteCode { ByteCodeStackOffset stackOffset1() const { return m_stackOffsets[0]; } ByteCodeStackOffset stackOffset2() const { return m_stackOffsets[1]; } ByteCodeStackOffset stackOffset3() const { return m_stackOffsets[2]; } + void setStackOffset(uint8_t index, Walrus::ByteCodeStackOffset o) { m_stackOffsets[index] = o; } protected: ByteCodeStackOffset m_stackOffsets[3]; @@ -1031,8 +1042,10 @@ class ByteCodeOffsetValue : public ByteCode { } ByteCodeStackOffset stackOffset() const { return m_stackOffset; } + void setStackOffset(ByteCodeStackOffset o) { m_stackOffset = o; } uint32_t uint32Value() const { return m_value; } int32_t int32Value() const { return static_cast(m_value); } + void addValue(uint32_t add) { m_value += add; } protected: ByteCodeStackOffset m_stackOffset; @@ -1050,7 +1063,9 @@ class ByteCodeOffset2Value : public ByteCode { } ByteCodeStackOffset stackOffset1() const { return m_stackOffset1; } + void setStackOffset1(ByteCodeStackOffset o) { m_stackOffset1 = o; } ByteCodeStackOffset stackOffset2() const { return m_stackOffset2; } + void 
setStackOffset2(ByteCodeStackOffset o) { m_stackOffset2 = o; } uint32_t uint32Value() const { return m_value; } int32_t int32Value() const { return static_cast(m_value); } @@ -1075,7 +1090,9 @@ class ByteCodeOffset2ValueMemIdx : public ByteCode { uint16_t memIndex() const { return m_memIndex; } uint16_t alignment() const { return m_alignment; } ByteCodeStackOffset stackOffset1() const { return m_stackOffset1; } + void setStackOffset1(ByteCodeStackOffset o) { m_stackOffset1 = o; } ByteCodeStackOffset stackOffset2() const { return m_stackOffset2; } + void setStackOffset2(ByteCodeStackOffset o) { m_stackOffset2 = o; } uint32_t uint32Value() const { return m_value; } int32_t int32Value() const { return static_cast(m_value); } @@ -1100,6 +1117,7 @@ class ByteCodeOffset4 : public ByteCode { ByteCodeStackOffset src1Offset() const { return m_stackOffsets[1]; } ByteCodeStackOffset src2Offset() const { return m_stackOffsets[2]; } ByteCodeStackOffset dstOffset() const { return m_stackOffsets[3]; } + void setStackOffset(size_t index, ByteCodeStackOffset o) { m_stackOffsets[index] = o; } protected: ByteCodeStackOffset m_stackOffsets[4]; @@ -1118,9 +1136,13 @@ class ByteCodeOffset4Value : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_stackOffset1; } + void setSrc0Offset(ByteCodeStackOffset o) { m_stackOffset1 = o; } ByteCodeStackOffset src1Offset() const { return m_stackOffset2; } + void setSrc1Offset(ByteCodeStackOffset o) { m_stackOffset2 = o; } ByteCodeStackOffset src2Offset() const { return m_stackOffset3; } + void setSrc2Offset(ByteCodeStackOffset o) { m_stackOffset3 = o; } ByteCodeStackOffset dstOffset() const { return m_stackOffset4; } + void setDstOffset(ByteCodeStackOffset o) { m_stackOffset4 = o; } uint32_t offset() const { return m_value; } protected: @@ -1269,6 +1291,7 @@ class BinaryOperation : public ByteCodeOffset3 { const ByteCodeStackOffset* srcOffset() const { return stackOffsets(); } ByteCodeStackOffset dstOffset() const { return 
stackOffset3(); } void setDstOffset(ByteCodeStackOffset o) { m_stackOffsets[2] = o; } + void setSrcOffsset(ByteCodeStackOffset o, size_t index) { m_stackOffsets[index] = o; } #if !defined(NDEBUG) void dump(size_t pos) { @@ -1413,7 +1436,9 @@ class MoveFloat : public ByteCode { } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset o) { m_srcOffset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } protected: // The field list is intentionally reserved, to avoid @@ -1466,6 +1491,11 @@ class Call : public ByteCode { return reinterpret_cast(reinterpret_cast(this) + sizeof(Call)); } + void setStackOffset(size_t index, ByteCodeStackOffset o) + { + reinterpret_cast(reinterpret_cast(this) + sizeof(Call))[index] = o; + } + uint16_t parameterOffsetsSize() const { return m_parameterOffsetsSize; @@ -1516,6 +1546,7 @@ class CallIndirect : public ByteCode { } ByteCodeStackOffset calleeOffset() const { return m_calleeOffset; } + void setCalleeOffset(ByteCodeStackOffset o) { m_calleeOffset = o; } uint32_t tableIndex() const { return m_tableIndex; } FunctionType* functionType() const { return m_functionType; } ByteCodeStackOffset* stackOffsets() const @@ -1523,6 +1554,11 @@ class CallIndirect : public ByteCode { return reinterpret_cast(reinterpret_cast(this) + sizeof(CallIndirect)); } + void setStackOffset(size_t index, ByteCodeStackOffset o) + { + (reinterpret_cast(reinterpret_cast(this) + sizeof(CallIndirect)))[index] = o; + } + uint16_t parameterOffsetsSize() const { return m_parameterOffsetsSize; @@ -1576,11 +1612,16 @@ class CallRef : public ByteCode { } ByteCodeStackOffset calleeOffset() const { return m_calleeOffset; } + void setCalleeOffset(ByteCodeStackOffset o) { m_calleeOffset = o; } FunctionType* functionType() const { return m_functionType; } ByteCodeStackOffset* stackOffsets() const { return reinterpret_cast(reinterpret_cast(this) + 
sizeof(CallRef)); } + void setStackOffset(size_t index, ByteCodeStackOffset o) + { + (reinterpret_cast(reinterpret_cast(this) + sizeof(CallRef)))[index] = o; + } uint16_t parameterOffsetsSize() const { @@ -1707,6 +1748,7 @@ class Jump : public ByteCode { } int32_t offset() const { return m_offset; } + void addOffset(uint32_t add) { m_offset += add; } void setOffset(int32_t offset) { m_offset = offset; @@ -1922,11 +1964,15 @@ class Select : public ByteCode { } ByteCodeStackOffset condOffset() const { return m_condOffset; } + void setCondOffset(ByteCodeStackOffset o) { m_condOffset = o; } uint16_t valueSize() const { return m_valueSize; } bool isFloat() const { return m_isFloat != 0; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -1959,6 +2005,7 @@ class BrTable : public ByteCode { } ByteCodeStackOffset condOffset() const { return m_condOffset; } + void setCondOffset(ByteCodeStackOffset o) { m_condOffset = o; } int32_t defaultOffset() const { return m_defaultOffset; } static inline size_t offsetOfDefault() { return offsetof(BrTable, m_defaultOffset); } @@ -1996,6 +2043,11 @@ class MemorySize : public ByteCode { { } + void setDstOffset(Walrus::ByteCodeStackOffset o) + { + m_dstOffset = o; + } + ByteCodeStackOffset dstOffset() const { return m_dstOffset; } uint16_t memIndex() const { return m_memIndex; } @@ -2032,6 +2084,10 @@ class MemoryInit : public ByteCode { { return m_srcOffsets; } + void setStackOffset(size_t index, ByteCodeStackOffset o) + { + m_srcOffsets[index] = o; + } uint16_t memIndex() const { return m_memIndex; } @@ -2239,9 +2295,13 @@ class SIMDMemoryLoad : public ByteCode 
{ uint32_t offset() const { return m_offset; } ByteCodeStackOffset index() const { return m_index; } + void setIndex(ByteCodeStackOffset o) { m_index = o; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -2273,9 +2333,13 @@ class SIMDMemoryLoadMemIdx : public ByteCode { uint32_t offset() const { return m_offset; } ByteCodeStackOffset index() const { return m_index; } + void setIndex(ByteCodeStackOffset o) { m_index = o; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } uint16_t memIndex() const { return m_memIndex; } uint16_t alignment() const { return m_alignment; } @@ -2456,8 +2520,11 @@ class SIMDMemoryStore : public ByteCode { uint32_t offset() const { return m_offset; } ByteCodeStackOffset index() const { return m_index; } + void setIndex(Walrus::ByteCodeStackOffset o) { m_index = o; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -2487,8 +2554,11 @@ class SIMDMemoryStoreMemIdx : public ByteCode { uint32_t offset() const { return m_offset; } ByteCodeStackOffset index() const { return m_index; } + 
void setIndex(Walrus::ByteCodeStackOffset o) { m_index = o; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(Walrus::ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(Walrus::ByteCodeStackOffset o) { m_src1Offset = o; } uint16_t memIndex() const { return m_memIndex; } uint16_t alignment() const { return m_alignment; } @@ -2518,8 +2588,11 @@ class SIMDExtractLane : public ByteCode { } ByteCodeStackOffset index() const { return m_index; } + void setIndex(ByteCodeStackOffset o) { m_index = o; } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset o) { m_srcOffset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -2544,8 +2617,12 @@ class SIMDReplaceLane : public ByteCode { } uint32_t index() const { return m_index; } + void setIndex(ByteCodeStackOffset o) { m_index = o; } const ByteCodeStackOffset* srcOffsets() const { return m_srcOffsets; } + void setSrc0Offset(ByteCodeStackOffset o) { m_srcOffsets[0] = o; } + void setSrc1Offset(ByteCodeStackOffset o) { m_srcOffsets[1] = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -2706,8 +2783,11 @@ class AtomicRmw : public ByteCode { uint32_t offset() const { return m_offset; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -2892,8 
+2972,11 @@ class MemoryAtomicNotify : public ByteCode { uint32_t offset() const { return m_offset; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -2923,8 +3006,11 @@ class MemoryAtomicNotifyMemIdx : public ByteCode { uint32_t offset() const { return m_offset; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } uint16_t memIndex() const { return m_memIndex; } uint16_t alignment() const { return m_alignment; } @@ -3200,7 +3286,9 @@ class I8X16Shuffle : public ByteCode { } const ByteCodeStackOffset* srcOffsets() const { return m_srcOffsets; } + void setSrcOffset(uint32_t index, ByteCodeStackOffset o) { m_srcOffsets[index] = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } const uint8_t* value() const { return m_value; } #if !defined(NDEBUG) @@ -3272,8 +3360,11 @@ class TableGrow : public ByteCode { uint32_t tableIndex() const { return m_tableIndex; } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void
setDstOffset(ByteCodeStackOffset o) { m_dstOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -3329,6 +3420,10 @@ class TableCopy : public ByteCode { { return m_srcOffsets; } + void setStackOffset(size_t index, ByteCodeStackOffset o) + { + m_srcOffsets[index] = o; + } #if !defined(NDEBUG) void dump(size_t pos) @@ -3361,6 +3456,8 @@ class TableFill : public ByteCode { { return m_srcOffsets; } + void setSrcOffset(uint32_t index, ByteCodeStackOffset o) { m_srcOffsets[index] = o; } + #if !defined(NDEBUG) void dump(size_t pos) { @@ -3393,6 +3490,11 @@ class TableInit : public ByteCode { { return m_srcOffsets; } + void setStackOffset(size_t index, ByteCodeStackOffset o) + { + m_srcOffsets[index] = o; + } + #if !defined(NDEBUG) void dump(size_t pos) { @@ -3463,6 +3565,7 @@ class RefAsNonNull : public ByteCode { } ByteCodeStackOffset stackOffset() const { return m_stackOffset; } + void setStackOffset(ByteCodeStackOffset o) { m_stackOffset = o; } #if !defined(NDEBUG) void dump(size_t pos) @@ -3487,6 +3590,7 @@ class RefCastGeneric : public ByteCode { } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset offset) { m_srcOffset = offset; } Value::Type typeInfo() const { return m_typeInfo; } uint8_t srcInfo() const { return m_srcInfo; } @@ -3518,6 +3622,7 @@ class RefCastDefined : public ByteCode { } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset offset) { m_srcOffset = offset; } const CompositeType** typeInfo() const { return m_typeInfo; } uint8_t srcInfo() const { return m_srcInfo; } @@ -3548,7 +3653,9 @@ class RefTestGeneric : public ByteCode { } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset offset) { m_srcOffset = offset; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } Value::Type typeInfo() const { return m_typeInfo; 
} uint8_t srcInfo() const { return m_srcInfo; } @@ -3581,7 +3688,9 @@ class RefTestDefined : public ByteCode { } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset offset) { m_srcOffset = offset; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } const CompositeType** typeInfo() const { return m_typeInfo; } uint8_t srcInfo() const { return m_srcInfo; } @@ -3653,8 +3762,11 @@ class ArrayNew : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset offset) { m_src0Offset = offset; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset offset) { m_src1Offset = offset; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } const ArrayType* typeInfo() const { return m_typeInfo; } #if !defined(NDEBUG) @@ -3685,7 +3797,9 @@ class ArrayNewDefault : public ByteCode { } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset offset) { m_srcOffset = offset; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } const ArrayType* typeInfo() const { return m_typeInfo; } #if !defined(NDEBUG) @@ -3763,8 +3877,11 @@ class ArrayNewFrom : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset offset) { m_src0Offset = offset; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset offset) { m_src1Offset = offset; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } const ArrayType* typeInfo() const { return m_typeInfo; } 
uint32_t index() { return m_index; } @@ -3820,9 +3937,13 @@ class ArrayFill : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset src2Offset() const { return m_src2Offset; } + void setSrc2Offset(ByteCodeStackOffset o) { m_src2Offset = o; } ByteCodeStackOffset src3Offset() const { return m_src3Offset; } + void setSrc3Offset(ByteCodeStackOffset o) { m_src3Offset = o; } Value::Type type() const { return m_type; } bool isNullable() const { return m_isNullable != 0; } @@ -3864,10 +3985,15 @@ class ArrayCopy : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset o) { m_src0Offset = o; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset o) { m_src1Offset = o; } ByteCodeStackOffset src2Offset() const { return m_src2Offset; } + void setSrc2Offset(ByteCodeStackOffset o) { m_src2Offset = o; } ByteCodeStackOffset src3Offset() const { return m_src3Offset; } + void setSrc3Offset(ByteCodeStackOffset o) { m_src3Offset = o; } ByteCodeStackOffset src4Offset() const { return m_src4Offset; } + void setSrc4Offset(ByteCodeStackOffset o) { m_src4Offset = o; } uint8_t log2Size() const { return m_log2Size; } bool dstIsNullable() const { return (m_isNullable & DstIsNullable) != 0; } bool srcIsNullable() const { return (m_isNullable & SrcIsNullable) != 0; } @@ -3915,9 +4041,13 @@ class ArrayInitFrom : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset offset) { m_src0Offset = offset; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset offset) { m_src1Offset = offset; } ByteCodeStackOffset 
src2Offset() const { return m_src2Offset; } + void setSrc2Offset(ByteCodeStackOffset offset) { m_src2Offset = offset; } ByteCodeStackOffset src3Offset() const { return m_src3Offset; } + void setSrc3Offset(ByteCodeStackOffset offset) { m_src3Offset = offset; } uint8_t log2Size() const { return m_log2Size; } bool isNullable() const { return m_isNullable; } uint32_t index() { return m_index; } @@ -3981,8 +4111,11 @@ class ArrayGet : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset offset) { m_src0Offset = offset; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset offset) { m_src1Offset = offset; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } Value::Type type() const { return m_type; } bool isSigned() const { return (m_info & IsSigned) != 0; } bool isNullable() const { return (m_info & IsNullable) != 0; } @@ -4019,8 +4152,11 @@ class ArraySet : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset offset) { m_src0Offset = offset; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset offset) { m_src1Offset = offset; } ByteCodeStackOffset src2Offset() const { return m_src2Offset; } + void setSrc2Offset(ByteCodeStackOffset offset) { m_src2Offset = offset; } Value::Type type() const { return m_type; } bool isNullable() const { return (m_info & ArrayGet::IsNullable) != 0; } @@ -4081,6 +4217,10 @@ class StructNew : public ByteCode { { return reinterpret_cast(reinterpret_cast(this) + sizeof(StructNew)); } + void setDataOffset(ByteCodeStackOffset o, uint32_t index) + { + reinterpret_cast(reinterpret_cast(this) + sizeof(StructNew))[index] = o; + } uint32_t offsetsSize() const { @@ -4116,6 +4256,7 @@ class StructNewDefault : public 
ByteCode { } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } const StructType* typeInfo() const { return m_typeInfo; } #if !defined(NDEBUG) @@ -4148,7 +4289,9 @@ class StructGet : public ByteCode { } ByteCodeStackOffset srcOffset() const { return m_srcOffset; } + void setSrcOffset(ByteCodeStackOffset offset) { m_srcOffset = offset; } ByteCodeStackOffset dstOffset() const { return m_dstOffset; } + void setDstOffset(ByteCodeStackOffset offset) { m_dstOffset = offset; } uint32_t memberOffset() const { return m_memberOffset; } Value::Type type() const { return m_type; } bool isSigned() const { return (m_info & IsSigned) != 0; } @@ -4185,7 +4328,9 @@ class StructSet : public ByteCode { } ByteCodeStackOffset src0Offset() const { return m_src0Offset; } + void setSrc0Offset(ByteCodeStackOffset offset) { m_src0Offset = offset; } ByteCodeStackOffset src1Offset() const { return m_src1Offset; } + void setSrc1Offset(ByteCodeStackOffset offset) { m_src1Offset = offset; } uint32_t memberOffset() const { return m_memberOffset; } Value::Type type() const { return m_type; } bool isNullable() const { return (m_info & StructGet::IsNullable) != 0; } @@ -4341,6 +4486,10 @@ class Throw : public ByteCode { { return reinterpret_cast(reinterpret_cast(this) + sizeof(Throw)); } + void setDataOffset(size_t index, ByteCodeStackOffset o) + { + reinterpret_cast(reinterpret_cast(this) + sizeof(Throw))[index] = o; + } uint32_t offsetsSize() const { @@ -4416,6 +4565,11 @@ class End : public ByteCode { return reinterpret_cast(reinterpret_cast(this) + sizeof(End)); } + void setResultOffset(size_t index, ByteCodeStackOffset o) + { + reinterpret_cast(reinterpret_cast(this) + sizeof(End))[index] = o; + } + uint32_t offsetsSize() const { return m_offsetsSize; diff --git a/src/interpreter/Interpreter.cpp b/src/interpreter/Interpreter.cpp index 43240c884..ee9addf30 100644 --- a/src/interpreter/Interpreter.cpp +++ 
b/src/interpreter/Interpreter.cpp @@ -2248,6 +2248,7 @@ ByteCodeStackOffset* Interpreter::interpret(ExecutionState& state, auto& param = tag->functionType()->param().types(); for (size_t i = 0; i < param.size(); i++) { auto sz = valueStackAllocatedSize(param[i]); + // auto sz = valueSize(param[i]); memcpy(ptr, bp + code->dataOffsets()[i], sz); ptr += sz; } diff --git a/src/jit/Backend.cpp b/src/jit/Backend.cpp index c84da17bf..83e4dd6e1 100644 --- a/src/jit/Backend.cpp +++ b/src/jit/Backend.cpp @@ -100,7 +100,7 @@ void JITArg::set(Operand* operand) Instruction* instr = VARIABLE_GET_IMM(*operand); #if (defined SLJIT_32BIT_ARCHITECTURE && SLJIT_32BIT_ARCHITECTURE) - ASSERT(instr->opcode() == ByteCode::Const32Opcode); + // ASSERT(instr->opcode() == ByteCode::Const32Opcode); this->argw = static_cast(reinterpret_cast(instr->byteCode())->value()); #else /* !SLJIT_32BIT_ARCHITECTURE */ diff --git a/src/jit/ByteCodeParser.cpp b/src/jit/ByteCodeParser.cpp index ec407c127..7ad4c6c2a 100644 --- a/src/jit/ByteCodeParser.cpp +++ b/src/jit/ByteCodeParser.cpp @@ -24,7 +24,7 @@ #include -#if defined(COMPILER_MSVC) +#if defined(COMPILER_MSVC) && !defined(ssize_t) #include typedef SSIZE_T ssize_t; #endif @@ -651,7 +651,7 @@ static void compileFunction(JITCompiler* compiler) paramType = ParamTypes::ParamSrc2Dst; info = Instruction::kIs32Bit; if (opcode == ByteCode::I32AndOpcode) { - info |= Instruction::kIsMergeCompare; + // info |= Instruction::kIsMergeCompare; } requiredInit = OTOp2I32; break; @@ -2926,6 +2926,8 @@ const uint8_t* VariableList::getOperandDescriptor(Instruction* instr) return instr->getOperandDescriptor(); } +size_t counter = 0; + void Module::jitCompile(ModuleFunction** functions, size_t functionsLength, uint32_t JITFlags) { JITCompiler compiler(this, JITFlags); @@ -2934,6 +2936,7 @@ void Module::jitCompile(ModuleFunction** functions, size_t functionsLength, uint size_t functionCount = m_functions.size(); for (size_t i = 0; i < functionCount; i++) { + counter++; if 
(m_functions[i]->jitFunction() == nullptr) { if (JITFlags & JITFlagValue::JITVerbose) { printf("[[[[[[[ Function %3d ]]]]]]]\n", static_cast(i)); diff --git a/src/parser/LiveAnalysis.cpp b/src/parser/LiveAnalysis.cpp new file mode 100644 index 000000000..0b170fbbd --- /dev/null +++ b/src/parser/LiveAnalysis.cpp @@ -0,0 +1,769 @@ +/* + * Copyright (c) 2022-present Samsung Electronics Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Walrus.h" +#include "interpreter/ByteCode.h" +#include "runtime/Module.h" +#include "runtime/Value.h" +#include "parser/LiveAnalysis.h" +#include "util/Vector.h" + +namespace wabt { +void LiveAnalysis::pushByteCodeToFront(const Walrus::ByteCode& code) +{ + m_byteCode.reserve(m_byteCode.size() + code.getSize()); + char* first = (char*)&code; + for (size_t i = 0; i < code.getSize(); i++) { + m_byteCode.insert(i, *first); + first++; + } +} + +void LiveAnalysis::assignBasicBlocks(Walrus::ByteCode* code, std::vector& basicBlocks, uint64_t byteCodeOffset) +{ + switch (code->opcode()) { + case Walrus::ByteCode::JumpOpcode: { + Walrus::Jump* jump = reinterpret_cast(code); + basicBlocks.push_back(new LiveAnalysis::BasicBlock(byteCodeOffset, byteCodeOffset + jump->offset())); + break; + } + case Walrus::ByteCode::JumpIfTrueOpcode: + case Walrus::ByteCode::JumpIfFalseOpcode: + case Walrus::ByteCode::JumpIfNullOpcode: + case Walrus::ByteCode::JumpIfNonNullOpcode: + case Walrus::ByteCode::JumpIfCastGenericOpcode: + 
case Walrus::ByteCode::JumpIfCastDefinedOpcode: { + Walrus::ByteCodeOffsetValue* jumpIf = reinterpret_cast(code); + basicBlocks.push_back(new LiveAnalysis::BasicBlock(byteCodeOffset, byteCodeOffset + jumpIf->int32Value())); + break; + } + case Walrus::ByteCode::BrTableOpcode: { + Walrus::BrTable* table = reinterpret_cast(code); + basicBlocks.push_back(new LiveAnalysis::BasicBlock(byteCodeOffset, byteCodeOffset + table->defaultOffset())); + for (size_t i = 0; i < table->tableSize(); i++) { + if (byteCodeOffset + table->jumpOffsets()[i]) { + basicBlocks.push_back(new LiveAnalysis::BasicBlock(byteCodeOffset, byteCodeOffset + table->jumpOffsets()[i])); + } + } + break; + } + default: { + return; + } + } +} + +void LiveAnalysis::orderInsAndOuts(std::vector& basicBlocks, VariableRange* ranges, uint64_t rangesSize, uint64_t end, uint64_t position) +{ + size_t currentBlockIdx = 0; + while (position < end && currentBlockIdx < basicBlocks.size()) { + uint16_t blockStart = basicBlocks[currentBlockIdx]->from < basicBlocks[currentBlockIdx]->to ? basicBlocks[currentBlockIdx]->from : basicBlocks[currentBlockIdx]->to; + uint16_t blockEnd = basicBlocks[currentBlockIdx]->from < basicBlocks[currentBlockIdx]->to ? 
basicBlocks[currentBlockIdx]->to : basicBlocks[currentBlockIdx]->from; + + for (uint64_t i = 0; i < rangesSize; i++) { + if ((blockStart < ranges[i].start && ranges[i].end < blockEnd)) { + // || (ranges[i].start < blockStart && blockStart < ranges[i].end) + // || (ranges[i].start < blockEnd && blockEnd < ranges[i].end)) { + basicBlocks[currentBlockIdx]->containedVariables.push_back(&ranges[i]); + continue; + } + + // Forward jump case + if (basicBlocks[currentBlockIdx]->from < basicBlocks[currentBlockIdx]->to) { + if (ranges[i].start < basicBlocks[currentBlockIdx]->from) { + if (std::find(basicBlocks[currentBlockIdx]->in.begin(), + basicBlocks[currentBlockIdx]->in.end(), &ranges[i]) + == basicBlocks[currentBlockIdx]->in.end()) { + basicBlocks[currentBlockIdx]->in.push_back(&ranges[i]); + } + } + if (ranges[i].end > basicBlocks[currentBlockIdx]->to) { + if (std::find(basicBlocks[currentBlockIdx]->out.begin(), + basicBlocks[currentBlockIdx]->out.end(), &ranges[i]) + == basicBlocks[currentBlockIdx]->out.end()) { + basicBlocks[currentBlockIdx]->out.push_back(&ranges[i]); + } + } + // Backward jump case. 
+ } else { + if (ranges[i].start < basicBlocks[currentBlockIdx]->to) { + if (std::find(basicBlocks[currentBlockIdx]->in.begin(), + basicBlocks[currentBlockIdx]->in.end(), &ranges[i]) + == basicBlocks[currentBlockIdx]->in.end()) { + basicBlocks[currentBlockIdx]->in.push_back(&ranges[i]); + } + } + if (ranges[i].end > basicBlocks[currentBlockIdx]->from) { + if (std::find(basicBlocks[currentBlockIdx]->out.begin(), + basicBlocks[currentBlockIdx]->out.end(), &ranges[i]) + == basicBlocks[currentBlockIdx]->out.end()) { + basicBlocks[currentBlockIdx]->out.push_back(&ranges[i]); + } + } + } + } + + currentBlockIdx++; + position = blockEnd; + } +} + +void LiveAnalysis::extendNaiveRange(std::vector& basicBlocks, VariableRange* ranges, uint64_t rangesSize) +{ + for (BasicBlock* block : basicBlocks) { + if (block->to < block->from) { + for (VariableRange* in : block->in) { + if (in->end < block->from) { + in->end = block->from; + } + } + + for (VariableRange*& out : block->out) { + if (block->to < out->start) { + out->start = block->to; + } + } + } else { // block->from < block->to + for (VariableRange* in : block->in) { + if (in->end < block->to) { + in->end = block->to; + } + } + + for (VariableRange* out : block->out) { + if (block->from < out->start) { + out->start = block->from; + } + } + } + + for (VariableRange* range : block->containedVariables) { + if (range->isConstant || range->isParam) { + continue; + } + + if (block->from < block->to) { + if (block->from < range->start) { + range->start = block->from; + } + if (range->end < block->to) { + range->end = block->to; + } + } else { + if (block->to < range->start) { + range->start = block->to; + } + if (range->end < block->from) { + range->end = block->from; + } + } + } + } + + for (uint64_t i = 0; i < rangesSize; i++) { + if (ranges[i].isConstant) { + ranges[i].start = 0; + ranges[i].needsInit = true; + } + + if (ranges[i].reads.empty() && ranges[i].sets.empty()) { + continue; + } + + if (ranges[i].sets.empty() && 
!ranges[i].reads.empty()) { + ranges[i].start = 0; + ranges[i].needsInit = true; + continue; + } + + uint64_t setsMin = 0; + uint64_t readsMin = 0; + if (!ranges[i].sets.empty()) { + setsMin = *std::min_element(ranges[i].sets.begin(), ranges[i].sets.end()); + } + + if (!ranges[i].reads.empty()) { + readsMin = *std::min_element(ranges[i].reads.begin(), ranges[i].reads.end()); + } + + if ((!ranges[i].sets.empty() && !ranges[i].reads.empty()) && (readsMin <= setsMin)) { + ranges[i].needsInit = true; + ranges[i].start = 0; + } + + if (!ranges[i].reads.empty() && !basicBlocks.empty()) { + bool insertedValue = false; + std::vector setInBlock; + for (Walrus::ByteCodeStackOffset set : ranges[i].sets) { + if (set <= readsMin) { + for (BasicBlock* block : basicBlocks) { + if ((block->from <= set && set <= block->to) || (block->to <= set && set <= block->from)) { + setInBlock.push_back(true); + insertedValue = true; + break; + } + } + + if (!insertedValue) { + setInBlock.push_back(false); + } + } + } + + if (std::find(setInBlock.begin(), setInBlock.end(), false) == setInBlock.end()) { + ranges[i].start = 0; + ranges[i].needsInit = true; + } + } + } +} + +void LiveAnalysis::pushVariableInits(VariableRange* ranges, uint64_t rangesSize, Walrus::ModuleFunction* func) +{ + uint32_t constSize = 0; + if (!UnusedReads.elements.empty()) { + if (UnusedReads.valueSize == 4) { + pushByteCodeToFront(Walrus::Const32(UnusedReads.pos, 0)); + constSize += sizeof(Walrus::Const32); + } else if (UnusedReads.valueSize == 8) { + pushByteCodeToFront(Walrus::Const64(UnusedReads.pos, 0)); + constSize += sizeof(Walrus::Const64); + } else if (UnusedReads.valueSize == 16) { + uint8_t empty[16] = { 0 }; + pushByteCodeToFront(Walrus::Const128(UnusedReads.pos, empty)); + constSize += sizeof(Walrus::Const128); + } else { + RELEASE_ASSERT_NOT_REACHED(); + } + } + + for (uint64_t i = 0; i < rangesSize; i++) { + if (ranges[i].isParam || !ranges[i].needsInit || (ranges[i].sets.empty() && 
ranges[i].reads.empty())) { + continue; + } + + if (ranges[i].newOffset != UINT16_MAX && (ranges[i].newOffset == UnusedReads.pos || ranges[i].newOffset == UnusedWrites.pos)) { + continue; + } + + switch (ranges[i].value.type()) { + case Walrus::Value::I32: { + pushByteCodeToFront(Walrus::Const32(ranges[i].newOffset, ranges[i].value.asI32())); + constSize += sizeof(Walrus::Const32); + break; + } + case Walrus::Value::F32: { + uint8_t constantBuffer[4]; + ranges[i].value.writeToMemory(constantBuffer); + pushByteCodeToFront(Walrus::Const32(ranges[i].newOffset, *reinterpret_cast(constantBuffer))); + constSize += sizeof(Walrus::Const32); + break; + } + case Walrus::Value::I64: { + pushByteCodeToFront(Walrus::Const64(ranges[i].newOffset, ranges[i].value.asI64())); + constSize += sizeof(Walrus::Const64); + break; + } + case Walrus::Value::F64: { + uint8_t constantBuffer[8]; + ranges[i].value.writeToMemory(constantBuffer); + pushByteCodeToFront(Walrus::Const64(ranges[i].newOffset, *reinterpret_cast(constantBuffer))); + constSize += sizeof(Walrus::Const64); + break; + } + case Walrus::Value::V128: { + uint8_t constantBuffer[16]; + ranges[i].value.writeToMemory(constantBuffer); + pushByteCodeToFront(Walrus::Const128(ranges[i].newOffset, constantBuffer)); + constSize += sizeof(Walrus::Const128); + break; + } + case Walrus::Value::ExternRef: + case Walrus::Value::FuncRef: { + pushByteCodeToFront(Walrus::RefFunc(0, ranges[i].newOffset)); + constSize += sizeof(Walrus::Const64); + break; + } + default: { + break; + } + } + } + + for (auto& tryCatch : func->catchInfo()) { + tryCatch.m_tryStart += constSize; + tryCatch.m_tryEnd += constSize; + tryCatch.m_catchStartPosition += constSize; + } +} + +void LiveAnalysis::orderStack(Walrus::ModuleFunction* func, VariableRange* ranges, uint64_t rangesSize, uint16_t stackStart) +{ + std::vector> freeSpaces = { std::make_pair(stackStart, UINT16_MAX) }; + + for (uint64_t i = 0; i < rangesSize; i++) { + if ((ranges[i].reads.empty() && 
ranges[i].sets.empty()) || ranges[i].isParam || ranges[i].isConstant) { + continue; + } + + if (ranges[i].sets.empty() && !ranges[i].reads.empty() && !ranges[i].isResult) { + UnusedReads.elements.push_back(&ranges[i]); + + if (UnusedReads.valueSize < Walrus::valueSize(ranges[i].value.type())) { + // if (UnusedReads.valueSize < Walrus::valueStackAllocatedSize(ranges[i].value.type())) { + UnusedReads.valueSize = Walrus::valueSize(ranges[i].value.type()); + // UnusedReads.valueSize = Walrus::valueStackAllocatedSize(ranges[i].value.type()); + } + } else if (!ranges[i].sets.empty() && ranges[i].reads.empty()) { + UnusedWrites.elements.push_back(&ranges[i]); + + if (UnusedWrites.valueSize < Walrus::valueSize(ranges[i].value.type())) { + // if (UnusedWrites.valueSize < Walrus::valueStackAllocatedSize(ranges[i].value.type())) { + UnusedWrites.valueSize = Walrus::valueSize(ranges[i].value.type()); + // UnusedWrites.valueSize = Walrus::valueStackAllocatedSize(ranges[i].value.type()); + } + } + } + + if (!UnusedWrites.elements.empty()) { + UnusedWrites.pos = freeSpaces.front().first; + freeSpaces.front().first += UnusedWrites.valueSize; + + for (VariableRange* range : UnusedWrites.elements) { + range->newOffset = UnusedWrites.pos; + range->end = UINT64_MAX; + } + } + + if (!UnusedReads.elements.empty()) { + UnusedReads.pos = freeSpaces.front().first; + freeSpaces.front().first += UnusedReads.valueSize; + + for (VariableRange* range : UnusedReads.elements) { + range->newOffset = UnusedReads.pos; + range->end = UINT64_MAX; + } + } + + uint64_t byteCodeOffset = 0; + // while (byteCodeOffset < func->currentByteCodeSize()) { + while (byteCodeOffset < m_byteCode.size()) { + Walrus::ByteCode* code = reinterpret_cast(const_cast(m_byteCode.data() + byteCodeOffset)); + + std::vector offsets = code->getByteCodeStackOffsets(func->functionType()); + std::vector writtenOffsets(offsets.size(), false); + for (uint64_t i = 0; i < rangesSize; i++) { + if (ranges[i].start == UINT64_MAX && 
ranges[i].end == 0) { + continue; + } + + ASSERT(!freeSpaces.empty()); + bool isUnusedRead = std::find(UnusedReads.elements.begin(), UnusedReads.elements.end(), &ranges[i]) != UnusedReads.elements.end(); + bool isUnusedWrite = std::find(UnusedWrites.elements.begin(), UnusedWrites.elements.end(), &ranges[i]) != UnusedWrites.elements.end(); + + if (ranges[i].start == byteCodeOffset && !ranges[i].isParam && !(isUnusedRead || isUnusedWrite)) { + for (size_t j = freeSpaces.size() - 1; 0 <= j; j--) { + if ((freeSpaces[j].second - freeSpaces[j].first) >= (Walrus::ByteCodeStackOffset)Walrus::valueSize(ranges[i].value.type())) { + // if ((freeSpaces[j].second - freeSpaces[j].first) >= (Walrus::ByteCodeStackOffset)Walrus::valueStackAllocatedSize(ranges[i].value.type())) { + ranges[i].newOffset = freeSpaces[j].first; + + if (freeSpaces[j].second - freeSpaces[j].first == 0) { + freeSpaces.erase(freeSpaces.begin() + i); + } else { + freeSpaces[j].first += Walrus::valueSize(ranges[i].value.type()); + // freeSpaces[j].first += Walrus::valueStackAllocatedSize(ranges[i].value.type()); + } + break; + } + } + } + + if (ranges[i].end == byteCodeOffset && ranges[i].newOffset != UINT16_MAX && !(isUnusedRead || isUnusedWrite) && !ranges[i].isParam && !ranges[i].isConstant) { + bool insertedSpace = false; + for (auto& space : freeSpaces) { + if (space.first - Walrus::valueSize(ranges[i].value.type()) == ranges[i].newOffset) { + // if (space.first - Walrus::valueStackAllocatedSize(ranges[i].value.type()) == ranges[i].newOffset) { + space.first -= Walrus::valueSize(ranges[i].value.type()); + // space.first -= Walrus::valueStackAllocatedSize(ranges[i].value.type()); + insertedSpace = true; + break; + } else if (space.second == ranges[i].newOffset) { + space.second += Walrus::valueSize(ranges[i].value.type()); + // space.second += Walrus::valueStackAllocatedSize(ranges[i].value.type()); + insertedSpace = true; + break; + } + } + + if (!insertedSpace) { + 
freeSpaces.push_back(std::make_pair(ranges[i].newOffset, ranges[i].newOffset + Walrus::valueSize(ranges[i].value.type()))); + // freeSpaces.push_back(std::make_pair(ranges[i].newOffset, ranges[i].newOffset + Walrus::valueStackAllocatedSize(ranges[i].value.type()))); + } + } + + if (ranges[i].start <= byteCodeOffset && ranges[i].end >= byteCodeOffset) { + for (uint8_t j = 0; j < offsets.size(); j++) { + if (ranges[i].originalOffset == offsets[j] && !writtenOffsets[j]) { + code->setByteCodeOffset(j, ranges[i].newOffset, ranges[i].originalOffset); + writtenOffsets[j] = true; + + switch (code->opcode()) { + case Walrus::ByteCode::EndOpcode: + case Walrus::ByteCode::CallOpcode: + case Walrus::ByteCode::CallIndirectOpcode: + case Walrus::ByteCode::CallRefOpcode: +#if defined(WALRUS_64) + if (ranges[i].value.type() == Walrus::Value::V128) { + code->setByteCodeOffset(j + 1, ranges[i].newOffset + 8, ranges[i].originalOffset); + writtenOffsets[j + 1] = true; + j++; + } +#elif defined(WALRUS_32) + switch (ranges[i].value.type()) { + case Walrus::Value::Type::I64: + case Walrus::Value::Type::F64: { + code->setByteCodeOffset(j + 1, ranges[i].newOffset + 4, ranges[i].originalOffset); + writtenOffsets[j + 1] = true; + j++; + break; + } + case Walrus::Value::Type::V128: { + code->setByteCodeOffset(j + 1, ranges[i].newOffset + 4, ranges[i].originalOffset); + writtenOffsets[j + 1] = true; + j++; + + code->setByteCodeOffset(j + 1, ranges[i].newOffset + 8, ranges[i].originalOffset); + writtenOffsets[j + 1] = true; + j++; + + code->setByteCodeOffset(j + 1, ranges[i].newOffset + 12, ranges[i].originalOffset); + writtenOffsets[j + 1] = true; + j++; + break; + } + default: { + break; + } + } +#endif + default: { + break; + } + } + } + } + } + } + + byteCodeOffset += code->getSize(); + } + + Walrus::ByteCodeStackOffset highestNewOffset = 0; + Walrus::ByteCodeStackOffset highestOldOffset = 0; + for (uint64_t i = 0; i < rangesSize; i++) { + if (ranges[i].newOffset != UINT16_MAX && 
highestNewOffset <= ranges[i].newOffset) { + highestNewOffset = ranges[i].newOffset + Walrus::valueSize(ranges[i].value.type()); + } + if (highestOldOffset <= ranges[i].originalOffset) { + highestOldOffset = ranges[i].originalOffset + Walrus::valueSize(ranges[i].value.type()); + } + } + + if (UnusedReads.pos != UINT16_MAX && highestNewOffset <= UnusedReads.pos) { + highestNewOffset = UnusedReads.pos + UnusedReads.valueSize; + } + + if (UnusedWrites.pos != UINT16_MAX && highestNewOffset <= UnusedWrites.pos) { + highestNewOffset = UnusedWrites.pos + UnusedWrites.valueSize; + } + + Walrus::ByteCodeStackOffset offsetDiff = highestOldOffset - highestNewOffset; + if (0 < offsetDiff && !func->hasTryCatch()) { + uint64_t byteCodeOffset = 0; + // while (byteCodeOffset < func->currentByteCodeSize()) { + while (byteCodeOffset < m_byteCode.size()) { + Walrus::ByteCode* code = reinterpret_cast(const_cast(m_byteCode.data() + byteCodeOffset)); + std::vector offsets = code->getByteCodeStackOffsets(func->functionType()); + + for (uint8_t i = 0; i < offsets.size(); i++) { + bool local = false; + for (uint64_t j = 0; j < rangesSize; j++) { + if (offsets[i] == ranges[j].newOffset || (Walrus::valueSize(ranges[j].value.type()) == 8 && offsets[i] == ranges[j].newOffset + 4)) { + local = true; + + switch (code->opcode()) { + case Walrus::ByteCode::CallOpcode: + case Walrus::ByteCode::EndOpcode: { + if (ranges[i].value.type() == Walrus::Value::V128) { + i++; + } + } + default: { + break; + } + } + + break; + } + } + + if (!local && 0 <= reinterpret_cast(offsets[i] - offsetDiff)) { + code->setByteCodeOffset(i, offsets[i] - offsetDiff, offsets[i]); + } + } + + byteCodeOffset += code->getSize(); + } + + func->setStackSize(func->requiredStackSize() - offsetDiff); + } + +#if !defined(NDEBUG) + for (uint64_t i = 0; i < rangesSize; i++) { + if (ranges[i].isConstant) { + func->pushConstDebugData(ranges[i].value, ranges[i].newOffset); + } else if (!ranges[i].isParam) { + 
func->pushLocalDebugData(ranges[i].newOffset); + } + } +#endif + + pushVariableInits(ranges, rangesSize, func); +} + +void LiveAnalysis::orderNaiveRange(Walrus::ByteCode* code, Walrus::ModuleFunction* func, VariableRange* ranges, uint64_t rangesSize, uint64_t byteCodeOffset) +{ + std::vector offsets = code->getByteCodeStackOffsets(func->functionType()); + for (uint8_t i = 0; i < offsets.size(); i++) { + VariableRange* elem = nullptr; + + for (uint64_t j = 0; j < rangesSize; j++) { + if (ranges[j].originalOffset == offsets[i]) { + elem = &ranges[j]; + } + } + + if (elem != nullptr) { + if (elem->end < byteCodeOffset) { + elem->end = byteCodeOffset; + } + if (elem->start > byteCodeOffset) { + elem->start = byteCodeOffset; + } + + // Calls and End opcode need special cases. + switch (code->opcode()) { + case Walrus::ByteCode::EndOpcode: { + elem->isResult = true; + FALLTHROUGH; + } + case Walrus::ByteCode::I32StoreOpcode: + case Walrus::ByteCode::I32Store16Opcode: + case Walrus::ByteCode::I32Store8Opcode: + case Walrus::ByteCode::I64StoreOpcode: + case Walrus::ByteCode::I64Store32Opcode: + case Walrus::ByteCode::I64Store16Opcode: + case Walrus::ByteCode::I64Store8Opcode: + case Walrus::ByteCode::F32StoreOpcode: + case Walrus::ByteCode::F64StoreOpcode: + case Walrus::ByteCode::V128StoreOpcode: + case Walrus::ByteCode::I32StoreMemIdxOpcode: + case Walrus::ByteCode::I32Store16MemIdxOpcode: + case Walrus::ByteCode::I32Store8MemIdxOpcode: + case Walrus::ByteCode::I64StoreMemIdxOpcode: + case Walrus::ByteCode::I64Store32MemIdxOpcode: + case Walrus::ByteCode::I64Store16MemIdxOpcode: + case Walrus::ByteCode::I64Store8MemIdxOpcode: + case Walrus::ByteCode::F32StoreMemIdxOpcode: + case Walrus::ByteCode::F64StoreMemIdxOpcode: + case Walrus::ByteCode::V128StoreMemIdxOpcode: + case Walrus::ByteCode::Store32Opcode: + case Walrus::ByteCode::Store64Opcode: + case Walrus::ByteCode::JumpIfFalseOpcode: + case Walrus::ByteCode::JumpIfTrueOpcode: + case Walrus::ByteCode::JumpIfNullOpcode: 
+ case Walrus::ByteCode::JumpIfNonNullOpcode: + case Walrus::ByteCode::JumpIfCastGenericOpcode: + case Walrus::ByteCode::JumpIfCastDefinedOpcode: + case Walrus::ByteCode::TableInitOpcode: + case Walrus::ByteCode::TableCopyOpcode: + case Walrus::ByteCode::TableSetOpcode: + case Walrus::ByteCode::TableFillOpcode: + case Walrus::ByteCode::MemoryFillOpcode: + case Walrus::ByteCode::MemoryInitOpcode: + case Walrus::ByteCode::MemoryCopyOpcode: + case Walrus::ByteCode::ThrowOpcode: + case Walrus::ByteCode::BrTableOpcode: + case Walrus::ByteCode::GlobalSet32Opcode: + case Walrus::ByteCode::GlobalSet64Opcode: + case Walrus::ByteCode::GlobalSet128Opcode: + // WebAsm3 + case Walrus::ByteCode::ArrayFillOpcode: + case Walrus::ByteCode::ArrayCopyOpcode: + // SIMD ByteCodes + case Walrus::ByteCode::V128Store8LaneOpcode: + case Walrus::ByteCode::V128Store8LaneMemIdxOpcode: + case Walrus::ByteCode::V128Store16LaneOpcode: + case Walrus::ByteCode::V128Store16LaneMemIdxOpcode: + case Walrus::ByteCode::V128Store32LaneOpcode: + case Walrus::ByteCode::V128Store32LaneMemIdxOpcode: + case Walrus::ByteCode::V128Store64LaneOpcode: + case Walrus::ByteCode::V128Store64LaneMemIdxOpcode: { + elem->reads.push_back(byteCodeOffset); + break; + } + case Walrus::ByteCode::GlobalGet32Opcode: + case Walrus::ByteCode::GlobalGet64Opcode: + case Walrus::ByteCode::GlobalGet128Opcode: { + elem->sets.push_back(byteCodeOffset); + break; + } + case Walrus::ByteCode::CallOpcode: { + Walrus::Call* call = reinterpret_cast(const_cast(code)); + if (i < call->parameterOffsetsSize()) { + elem->reads.push_back(byteCodeOffset); + } else { + elem->sets.push_back(byteCodeOffset); + } + break; + } + case Walrus::ByteCode::CallIndirectOpcode: { + Walrus::CallIndirect* call = reinterpret_cast(const_cast(code)); + if (offsets[i] == call->calleeOffset()) { + elem->reads.push_back(byteCodeOffset); + break; + } + + size_t resultStart = 0; + const Walrus::TypeVector& types = call->functionType()->param(); + // for (auto& type : 
call->functionType()->param()) { + for (uint32_t j = 0; j < types.size(); j++) { + resultStart++; + +#if defined(WALRUS_64) + if (types.types()[j] == Walrus::Value::Type::V128) { + resultStart++; + } +#elif defined(WALRUS_32) + switch (types.types()[j]) { + case Walrus::Value::Type::I64: + case Walrus::Value::Type::F64: { + resultStart++; + break; + } + case Walrus::Value::Type::V128: { + resultStart += 3; + break; + } + default: { + break; + } + } +#endif + } + + if (i <= resultStart) { + elem->reads.push_back(byteCodeOffset); + } else { + elem->sets.push_back(byteCodeOffset); + } + + break; + } + case Walrus::ByteCode::CallRefOpcode: { + Walrus::CallRef* callRef = reinterpret_cast(const_cast(code)); + + if (i < callRef->parameterOffsetsSize()) { + elem->reads.push_back(byteCodeOffset); + } else { + elem->sets.push_back(byteCodeOffset); + } + + break; + } + default: { + if (&offsets[i] == &offsets.back()) { + elem->sets.push_back(byteCodeOffset); + } else { + elem->reads.push_back(byteCodeOffset); + } + break; + } + } + + offsets[i] = UINT16_MAX; + } + } +} + +void LiveAnalysis::optimizeLocals(Walrus::ModuleFunction* func, std::vector>& locals, uint64_t constantStart) +{ + uint64_t rangesSize = locals.size(); + VariableRange* ranges = new VariableRange[rangesSize]; + std::vector offsets; + + for (uint32_t i = 0; i < locals.size(); i++) { + ranges[i] = VariableRange(locals[i].first, locals[i].second); + + if (i < func->functionType()->param().size()) { + ranges[i].isParam = true; + ranges[i].newOffset = ranges[i].originalOffset; + } + + if (i >= constantStart) { + ranges[i].start = 0; + ranges[i].end = UINT64_MAX; + ranges[i].isConstant = true; + } + } + + uint64_t byteCodeOffset = 0; + std::vector basicBlocks; + while (byteCodeOffset < m_byteCode.size()) { + Walrus::ByteCode* code = reinterpret_cast(const_cast(m_byteCode.data() + byteCodeOffset)); + + orderNaiveRange(code, func, ranges, rangesSize, byteCodeOffset); + assignBasicBlocks(code, basicBlocks, 
byteCodeOffset); + + byteCodeOffset += code->getSize(); + } + + uint16_t stackStart = UINT16_MAX; + if (!basicBlocks.empty()) { + orderInsAndOuts(basicBlocks, ranges, rangesSize, m_byteCode.size()); + } + extendNaiveRange(basicBlocks, ranges, rangesSize); + + for (uint64_t i = 0; i < rangesSize; i++) { + if (ranges[i].isParam) { + continue; + } + + if (ranges[i].originalOffset < stackStart) { + stackStart = ranges[i].originalOffset; + } + } + + orderStack(func, ranges, rangesSize, stackStart); + + for (uint32_t i = 0; i < basicBlocks.size(); i++) { + basicBlocks[i]->containedVariables.clear(); + delete basicBlocks[i]; + } + delete[] ranges; +} + +} // namespace wabt diff --git a/src/parser/LiveAnalysis.h b/src/parser/LiveAnalysis.h new file mode 100644 index 000000000..17beefa8b --- /dev/null +++ b/src/parser/LiveAnalysis.h @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2022-present Samsung Electronics Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include "Walrus.h" + +#include "interpreter/ByteCode.h" +#include "runtime/Module.h" +#include "runtime/ObjectType.h" +#include "runtime/Value.h" +#include +#include + +namespace wabt { + +class LiveAnalysis { +public: + LiveAnalysis(Walrus::Vector>& byteCode) + : m_byteCode(byteCode) + { + } + + struct VariableRange { + Walrus::Value value; + uint64_t start; + uint64_t end; + Walrus::ByteCodeStackOffset originalOffset; + Walrus::ByteCodeStackOffset newOffset; + bool isParam; + bool needsInit; + std::vector sets; + std::vector reads; + // #if !defined(NDEBUG) + bool isConstant; + bool isResult; + // #endif + + VariableRange() + : value(0) + , start(UINT64_MAX) + , end(0) + , originalOffset(0) + , newOffset(UINT16_MAX) + , isParam(false) + , needsInit(false) + // #if !defined(NDEBUG) + , isConstant(false) + // #endif + { + } + + VariableRange(Walrus::ByteCodeStackOffset o, Walrus::Value value) + : value(value) + , start(UINT16_MAX) + , end(0) + , originalOffset(o) + , newOffset(UINT16_MAX) + , isParam(false) + , needsInit(false) + // #if !defined(NDEBUG) + , isConstant(false) + , isResult(false) + // #endif + { + } + }; + + struct BasicBlock { + uint64_t from; + uint64_t to; + std::vector in; + std::vector out; + std::vector containedVariables; + + BasicBlock(size_t from, size_t to) + : from(from) + , to(to) + { + } + }; + + struct StackElement { + Walrus::ByteCodeStackOffset pos; + uint8_t valueSize; + std::vector elements; + + StackElement() + : pos(UINT16_MAX) + , valueSize(0) + { + } + }; + + void orderStack(Walrus::ModuleFunction* func, VariableRange* ranges, uint64_t rangesSize, uint16_t stackStart); + void extendNaiveRange(std::vector& basicBlocks, VariableRange* ranges, uint64_t rangesSize); + void orderInsAndOuts(std::vector& basicBlocks, VariableRange* ranges, uint64_t rangesSize, uint64_t end, uint64_t position = 0); + void assignBasicBlocks(Walrus::ByteCode* code, std::vector& basicBlocks, uint64_t byteCodeOffset); + void 
optimizeLocals(Walrus::ModuleFunction* func, std::vector>& locals, uint64_t constantStart); + void orderNaiveRange(Walrus::ByteCode* code, Walrus::ModuleFunction* func, VariableRange* ranges, uint64_t rangesSize, uint64_t byteCodeOffset); + void pushVariableInits(LiveAnalysis::VariableRange* ranges, uint64_t rangesSize, Walrus::ModuleFunction* func); + void pushByteCodeToFront(const Walrus::ByteCode& code); + + StackElement UnusedReads; + StackElement UnusedWrites; + Walrus::Vector>& m_byteCode; +}; +} // namespace wabt diff --git a/src/parser/WASMParser.cpp b/src/parser/WASMParser.cpp index e61f1720c..76f896689 100644 --- a/src/parser/WASMParser.cpp +++ b/src/parser/WASMParser.cpp @@ -16,6 +16,7 @@ #include "Walrus.h" #include "parser/WASMParser.h" +#include "parser/LiveAnalysis.h" #include "interpreter/ByteCode.h" #include "runtime/GCArray.h" #include "runtime/Module.h" @@ -23,7 +24,10 @@ #include "runtime/TypeStore.h" #include "wabt/binary-reader.h" +#include "runtime/Value.h" #include "wabt/walrus/binary-reader-walrus.h" +#include +#include namespace wabt { @@ -1394,48 +1398,55 @@ class WASMBinaryReader : public wabt::WASMBinaryReaderDelegate { m_functionStackSizeSoFar = m_initialFunctionStackSize; m_currentFunction->m_requiredStackSize = std::max(m_currentFunction->m_requiredStackSize, m_functionStackSizeSoFar); - // Explicit init local variable if needs - for (size_t i = m_currentFunctionType->param().size(); i < m_localInfo.size(); i++) { - if (m_preprocessData.m_localVariableInfo[i].m_needsExplicitInitOnStartup) { - auto localPos = m_localInfo[i].m_position; - auto size = Walrus::valueSize(m_localInfo[i].m_valueType); - if (size == 4) { - pushByteCode(Walrus::Const32(localPos, 0), WASMOpcode::I32ConstOpcode); - } else if (size == 8) { - pushByteCode(Walrus::Const64(localPos, 0), WASMOpcode::I64ConstOpcode); - } else { - ASSERT(size == 16); - uint8_t empty[16] = { - 0, - }; - pushByteCode(Walrus::Const128(localPos, empty), WASMOpcode::V128ConstOpcode); - } - 
} -#if !defined(NDEBUG) - m_currentFunction->m_localDebugData.push_back(m_localInfo[i].m_position); + if (false) { + // Explicit init local variable if needs + for (size_t i = m_currentFunctionType->param().size(); i < m_localInfo.size(); i++) { + if (m_preprocessData.m_localVariableInfo[i].m_needsExplicitInitOnStartup) { +#if false + auto localPos = m_localInfo[i].m_position; + auto size = Walrus::valueSize(m_localInfo[i].m_valueType); + if (size == 4) { + pushByteCode(Walrus::Const32(localPos, 0), WASMOpcode::I32ConstOpcode); + } else if (size == 8) { + pushByteCode(Walrus::Const64(localPos, 0), WASMOpcode::I64ConstOpcode); + } else { + ASSERT(size == 16); + uint8_t empty[16] = { + 0, + }; + pushByteCode(Walrus::Const128(localPos, empty), WASMOpcode::V128ConstOpcode); + } + } #endif - } + // #if !defined(NDEBUG) + // m_currentFunction->m_localDebugData.push_back(m_localInfo[i].m_position); + // #endif + } - // init constant space - for (size_t i = 0; i < m_preprocessData.m_constantData.size(); i++) { - const auto& constValue = m_preprocessData.m_constantData[i].first; - auto constType = m_preprocessData.m_constantData[i].first.type(); - auto constPos = m_preprocessData.m_constantData[i].second; - size_t constSize = Walrus::valueSize(constType); - - uint8_t constantBuffer[16]; - constValue.writeToMemory(constantBuffer); - if (constSize == 4) { - pushByteCode(Walrus::Const32(constPos, *reinterpret_cast(constantBuffer)), WASMOpcode::I32ConstOpcode); - } else if (constSize == 8) { - pushByteCode(Walrus::Const64(constPos, *reinterpret_cast(constantBuffer)), WASMOpcode::I64ConstOpcode); - } else { - ASSERT(constSize == 16); - pushByteCode(Walrus::Const128(constPos, constantBuffer), WASMOpcode::V128ConstOpcode); - } -#if !defined(NDEBUG) - m_currentFunction->m_constantDebugData.pushBack(m_preprocessData.m_constantData[i]); + // init constant space + for (size_t i = 0; i < m_preprocessData.m_constantData.size(); i++) { +#if false + const auto& constValue = 
m_preprocessData.m_constantData[i].first; + auto constType = m_preprocessData.m_constantData[i].first.type(); + auto constPos = m_preprocessData.m_constantData[i].second; + size_t constSize = Walrus::valueSize(constType); + + uint8_t constantBuffer[16]; + constValue.writeToMemory(constantBuffer); + if (constSize == 4) { + pushByteCode(Walrus::Const32(constPos, *reinterpret_cast(constantBuffer)), WASMOpcode::I32ConstOpcode); + } else if (constSize == 8) { + pushByteCode(Walrus::Const64(constPos, *reinterpret_cast(constantBuffer)), WASMOpcode::I64ConstOpcode); + } else { + ASSERT(constSize == 16); + pushByteCode(Walrus::Const128(constPos, constantBuffer), WASMOpcode::V128ConstOpcode); + } #endif + // #if !defined(NDEBUG) + // m_currentFunction->m_constantDebugData.pushBack(m_preprocessData.m_constantData[i]); + // #endif + } + } } } @@ -3194,6 +3205,26 @@ class WASMBinaryReader : public wabt::WASMBinaryReaderDelegate { } m_lastI32EqzPos = s_noI32Eqz; + + ASSERT(m_currentFunction == m_result.m_functions[index]); + std::vector> locals; + locals.reserve(m_localInfo.size() + m_preprocessData.m_constantData.size()); + for (uint32_t i = 0; i < m_localInfo.size(); i++) { + locals.push_back(std::make_pair(m_localInfo[i].m_position, Walrus::Value(m_localInfo[i].m_valueType))); + } + + for (auto constant : m_preprocessData.m_constantData) { + locals.push_back(std::make_pair(constant.second, constant.first)); + } + +#if !defined(NDEBUG) + m_currentFunction->m_localDebugData.clear(); + m_currentFunction->m_constantDebugData.clear(); +#endif + + LiveAnalysis analysis(m_currentByteCode); + analysis.optimizeLocals(m_currentFunction, locals, m_localInfo.size()); + #if !defined(NDEBUG) if (getenv("DUMP_BYTECODE") && strlen(getenv("DUMP_BYTECODE"))) { m_currentFunction->dumpByteCode(m_currentByteCode); @@ -3206,7 +3237,6 @@ class WASMBinaryReader : public wabt::WASMBinaryReaderDelegate { } #endif - ASSERT(m_currentFunction == m_result.m_functions[index]); endFunction(); } diff --git 
a/src/runtime/Function.cpp b/src/runtime/Function.cpp index 92980c724..89b8e453a 100644 --- a/src/runtime/Function.cpp +++ b/src/runtime/Function.cpp @@ -74,7 +74,9 @@ void DefinedFunction::call(ExecutionState& state, Value* argv, Value* result) ASSERT(Value::isRefType(paramTypeInfo[i]) ? argv[i].isRef() : argv[i].type() == paramTypeInfo[i]); argv[i].writeToMemory(paramBuffer); size_t stackAllocatedSize = valueStackAllocatedSize(paramTypeInfo[i]); + // size_t stackAllocatedSize = valueSize(paramTypeInfo[i]); for (size_t j = 0; j < stackAllocatedSize; j += sizeof(size_t)) { + // for (size_t j = 0; j < stackAllocatedSize; j += stackAllocatedSize) { offsetBuffer[offsetIndex++] = reinterpret_cast(paramBuffer) - reinterpret_cast(valueBuffer) + j; } paramBuffer += stackAllocatedSize; @@ -84,7 +86,9 @@ void DefinedFunction::call(ExecutionState& state, Value* argv, Value* result) size_t resultOffset = 0; for (size_t i = 0; i < resultTypeInfo.size(); i++) { size_t stackAllocatedSize = valueStackAllocatedSize(resultTypeInfo[i]); + // size_t stackAllocatedSize = valueSize(resultTypeInfo[i]); for (size_t j = 0; j < stackAllocatedSize; j += sizeof(size_t)) { + // for (size_t j = 0; j < stackAllocatedSize; j += stackAllocatedSize) { offsetBuffer[offsetIndex++] = resultOffset + j; } resultOffset += stackAllocatedSize; @@ -96,7 +100,12 @@ void DefinedFunction::call(ExecutionState& state, Value* argv, Value* result) for (size_t i = 0; i < resultTypeInfo.size(); i++) { result[i] = Value(resultTypeInfo[i], valueBuffer + offsetBuffer[resultOffsetIndex + parameterOffsetSize]); size_t stackAllocatedSize = valueStackAllocatedSize(resultTypeInfo[i]); + // size_t stackAllocatedSize = valueSize(resultTypeInfo[i]); resultOffsetIndex += stackAllocatedSize / sizeof(size_t); + // if (stackAllocatedSize > sizeof(uint64_t)) { + // resultOffsetIndex++; + // } + // resultOffsetIndex++; } } diff --git a/src/runtime/Module.cpp b/src/runtime/Module.cpp index f700af8ba..7b0dbaf6a 100644 --- 
a/src/runtime/Module.cpp +++ b/src/runtime/Module.cpp @@ -540,8 +540,13 @@ void ModuleFunction::dumpByteCode(Walrus::Vector namespace wabt { class WASMBinaryReader; @@ -213,6 +216,7 @@ class ModuleFunction { FunctionType* functionType() const { return m_functionType; } const uint8_t* byteCode() const { return m_byteCode.data(); } + VectorWithFixedSize> byteCodeVector() { return m_byteCode; } size_t byteCodeSize() const { @@ -246,6 +250,22 @@ class ModuleFunction { return m_jitFunction; } #endif + void setStackSize(uint16_t size) + { + m_requiredStackSize = size; + } + +#if !defined(NDEBUG) + void pushLocalDebugData(Walrus::ByteCodeStackOffset o) + { + m_localDebugData.push_back(o); + } + + void pushConstDebugData(Walrus::Value value, Walrus::ByteCodeStackOffset o) + { + m_constantDebugData.push_back(std::pair(value, o)); + } +#endif private: bool m_hasTryCatch; diff --git a/src/runtime/Value.h b/src/runtime/Value.h index 34aa9d2b7..8ba658079 100644 --- a/src/runtime/Value.h +++ b/src/runtime/Value.h @@ -21,6 +21,8 @@ #include "util/BitOperation.h" #include "runtime/ExecutionState.h" #include "runtime/Exception.h" +#include +#include namespace Walrus { @@ -208,7 +210,7 @@ class Value { } Value(Type type) - : m_i64(0) + : m_v128() , m_type(type) { } @@ -472,6 +474,31 @@ class Value { return isNull(m_ref); } + bool isZeroValue() + { + switch (m_type) { + case I32: + return m_i32 == 0; + case F32: + return m_f32 == 0.0f && !std::signbit(m_f32); + case I64: + return m_i64 == 0; + case F64: + return m_f64 == +0.0 && !std::signbit(m_f64); + case V128: { + for (uint8_t i = 0; i < 16; i++) { + if (m_v128.m_data[i] != 0) { + return false; + } + } + return true; + } + default: + ASSERT(isRef()); + return m_ref == nullptr; + } + } + static bool isNull(void* ptr) { return ptr == reinterpret_cast(NullBits); diff --git a/src/wasi/WASI.cpp b/src/wasi/WASI.cpp index d935be7a2..5b9fa96ef 100644 --- a/src/wasi/WASI.cpp +++ b/src/wasi/WASI.cpp @@ -27,6 +27,7 @@ namespace Walrus { 
uvwasi_t* WASI::g_uvwasi; WASI::WasiFuncInfo WASI::g_wasiFunctions[WasiFuncIndex::FuncEnd]; +std::vector> WASI::preopens; static void* get_memory_pointer(Instance* instance, Value& value, size_t size) { diff --git a/src/wasi/WASI.h b/src/wasi/WASI.h index 00e0a73a2..a87059a00 100644 --- a/src/wasi/WASI.h +++ b/src/wasi/WASI.h @@ -169,6 +169,20 @@ class WASI { static void initialize(uvwasi_t* uvwasi); static WasiFuncInfo* find(const std::string& funcName); + static uvwasi_t* getUvwasi() + { + return g_uvwasi; + } + static std::vector>& getPreopen() + { + return preopens; + } + + static void setPreopen(std::vector> pre) + { + WASI::preopens = pre; + } + private: // wasi functions @@ -178,6 +192,7 @@ class WASI { static uvwasi_t* g_uvwasi; static WasiFuncInfo g_wasiFunctions[FuncEnd]; + static std::vector> preopens; }; } // namespace Walrus diff --git a/test/basic/br.wast b/test/basic/br.wast index 070004802..31e3a2749 100644 --- a/test/basic/br.wast +++ b/test/basic/br.wast @@ -70,14 +70,6 @@ f64.eq end ) - (func (export "br_block_return") (result i32) - (local i32) - block (result i32) - i32.const 20 - br 1 - local.set 0 ;; dead code - end - ) ) (assert_return (invoke "br0") (i32.const 1)(i32.const 2)(i32.const 3)) @@ -89,4 +81,3 @@ (assert_return (invoke "br0_1"(i32.const 0))(i32.const 200)) (assert_return (invoke "br_if_cmp"(i32.const 0)(i32.const 100)(i64.const 100)(f32.const 100.0)(f64.const 100.0)) (i32.const 1)(i32.const 1)(i32.const 1)(i32.const 1)) -(assert_return (invoke "br_block_return") (i32.const 20)) diff --git a/test/basic/initialization.wast b/test/basic/initialization.wast new file mode 100644 index 000000000..7a802d224 --- /dev/null +++ b/test/basic/initialization.wast @@ -0,0 +1,116 @@ +(module + +(func (export "f1") (result i32) + (local i32) + + i32.const 0 + (if + (then + i32.const 1 + local.set 0 + ) + ) + + local.get 0 +) + +(func (export "f2") (result i32) + (local i32) + + (loop $loop + + i32.const 1 + local.set 0 + + i32.const 0 + br_if 
$loop + ) + + local.get 0 +) + +(func (export "f3") (result i32) + (local i32) + + local.get 0 +) + +(func (export "f4") (result i32) + (local i32) + + local.get 0 + i32.const 1 + i32.add + local.tee 0 +) + + +(func (export "f5") (result i32) + (local i32 i32 i32) + (block $while + (loop $loop + i32.const 1 + br_if $while + + i32.const 1 + local.set 0 + + br $loop + ) + ) + + i32.const 1 + local.set 2 + (block $while + (loop $loop + local.get 2 + br_if $while + + local.get 0 + local.set 1 + + i32.const 1 + local.get 2 + i32.sub + local.set 2 + + br $loop + ) + ) + + local.get 1 +) + +(func (export "f6") (param i32 ) (result i32) + (local i32) + + (block + (block + (block + local.get 0 + (br_table 0 1 2) + ) + + i32.const 1 + local.tee 1 + + return + ) + i32.const 2 + local.set 1 + ) + + local.get 1 +) + +) + +(assert_return (invoke "f1") (i32.const 0)) +(assert_return (invoke "f2") (i32.const 1)) +(assert_return (invoke "f3") (i32.const 0)) +(assert_return (invoke "f4") (i32.const 1)) +(assert_return (invoke "f5") (i32.const 0)) +(assert_return (invoke "f6" (i32.const 0)) (i32.const 1)) +(assert_return (invoke "f6" (i32.const 1)) (i32.const 2)) +(assert_return (invoke "f6" (i32.const 2)) (i32.const 0)) + diff --git a/test/basic/local_livelyness.wast b/test/basic/local_livelyness.wast new file mode 100644 index 000000000..9704bd9aa --- /dev/null +++ b/test/basic/local_livelyness.wast @@ -0,0 +1,105 @@ +(module + (func $local_zero (export "local_zero")(result i32) + (local i32) + local.get 0 + ) + + (func $local_loop1 (export "local_loop1")(result i32) + (local i32 i32 i32) + i32.const 10 + local.set 0 ;;start of 0 + + ;;start of 1 + (loop $loop + i32.const 1 + local.set 1 ;;start of 1, but inside loop + + local.get 0 + i32.const 1 + i32.sub + local.tee 0 + i32.eqz + br_if $loop + ) + + local.get 1 ;;end of 1 + ) + + (func $local_blocks (export "local_block1")(result i32) + (local i32 i32 i32 i32 i64) + + ;;end of 2 + + local.get 4 ;; start of 4 + local.get 3 ;; 
start of 3 + drop + drop + + i32.const 0 + local.set 0 ;; start of 0 + + + (block $block1 + i32.const 1 + local.get 0 + i32.add + local.set 0 + + (loop $block2 + local.get 1 ;; start of 1 + i32.const 3 + i32.eq + br_if $block2 + ) + + i32.const 0 + local.get 1 + i32.add + drop + + i32.const 0 + br_if $block1 + ) ;; end of 1 + + ;; end of 3, 4 + i32.const 0 + ) + + (func $local_blocks2 (export "local_block2")(param i32)(result i32) + (local i32) + + i32.const 1 + i32.const 1 + i32.sub + drop + + local.get 0 + local.tee 1 + ) + + (func $params (export "params")(param i32 i64 i32 v128)(result i32) + (local i32 i64 v128) + i32.const 0 + ) + + (func $params2 (export "params2")(param v128 i32 v128)(result i32) + i32.const 0 + ) + + + (func $params3 (export "params3")(param v128 i32 v128) + i32.const 0 + local.set 1 + v128.const i64x2 0 0 + local.set 0 + ) + +) + +(assert_return (invoke "local_zero") (i32.const 0)) +(assert_return (invoke "local_loop1") (i32.const 1)) +(assert_return (invoke "local_block1") (i32.const 0)) +(assert_return (invoke "local_block2" (i32.const 42)) (i32.const 42)) +(assert_return (invoke "params" (i32.const 1) (i64.const 2) (i32.const 3) (v128.const i64x2 4 5)) (i32.const 0)) +(assert_return (invoke "params2" (v128.const i64x2 1 2) (i32.const 3) (v128.const i64x2 4 5)) (i32.const 0)) +(assert_return (invoke "params3" (v128.const i64x2 1 2) (i32.const 3) (v128.const i64x2 4 5))) diff --git a/test/basic/local_sets.wast b/test/basic/local_sets.wast new file mode 100644 index 000000000..2cf41d3af --- /dev/null +++ b/test/basic/local_sets.wast @@ -0,0 +1,109 @@ +(module + (func $test (export "test")(param i32)(result i32) + (local i32 i32 i32 i64) + + local.get 0 ;; start of 0 + local.get 1 ;; start of 1 + drop + drop + + i32.const 32 + local.set 0 + + i32.const 33 + local.set 1 + + local.get 0 + local.get 1 ;; end of 1 + drop + drop + + i32.const 34 + local.set 0 ;; end of 0 + + + i32.const 1 + local.set 2 ;; start of 2 + local.get 2 ;; end of 2 + 
drop + + i64.const 23 + local.set 4 + local.get 4 + drop + + i32.const 0 + ) + + + (func $test2 (export "test2")(result i32) + (local i32 i32 i32 i32 i32) + + i32.const 10 + local.set 0 + (loop $outer ;; runs 10 times + + i32.const 5 + local.set 1 + (loop $inner1 ;; runs 5 times + i32.const 42 + local.set 2 + local.get 2 + drop + + local.get 1 + i32.const 1 + i32.sub + local.tee 1 + + i32.const 0 + i32.eq + br_if $inner1 + ) + + i32.const 8 + local.set 3 + (loop $inner2 ;; runs 8 times + local.get 3 + i32.const 1 + i32.sub + local.tee 3 + + i32.const 0 + i32.eq + br_if $inner2 + ) + + local.get 0 + i32.const 1 + i32.sub + local.tee 0 + + i32.const 0 + i32.eq + br_if $outer + ) + + (block $block + i32.const 99999 + local.set 4 + + i32.const 0 + i32.eqz + br_if $block + local.get 4 + + ;;junk + i32.const 0 + i32.add + i32.eqz + i32.clz + drop + ) + + i32.const 0 + ) +) + +(assert_return (invoke "test" (i32.const 12))(i32.const 0)) +(assert_return (invoke "test2")(i32.const 0)) diff --git a/test/basic/test.wast b/test/basic/test.wast new file mode 100644 index 000000000..d22785eaf --- /dev/null +++ b/test/basic/test.wast @@ -0,0 +1,43 @@ +(module + (func $some_func) + (table 514 514 funcref) + (elem (i32.const 1) func + $0 $1 $2 $3 $4 $5 $6 $7 $8 $9 $10 $11 $12 $13 $14 $15 $16 $17 $18 $19 $20 $21 $22 $23 $24 $25 $26 $27 $28 $29 $30 $31 $32 $33 + ) + + (func $0) + (func $1) + (func $2) + (func $3) + (func $4) + (func $5) + (func $6) + (func $7) + (func $8) + (func $9) + (func $10) + (func $11) + (func $12) + (func $13) + (func $14) + (func $15) + (func $16) + (func $17) + (func $18) + (func $19) + (func $20) + (func $21) + (func $22) + (func $23) + (func $24) + (func $25) + (func $26) + (func $27) + (func $28) + (func $29) + (func $30) + (func $31) + (func $32) + (func $33) +) + diff --git a/test/basic/useless_locals.wast b/test/basic/useless_locals.wast new file mode 100644 index 000000000..a96b64083 --- /dev/null +++ b/test/basic/useless_locals.wast @@ -0,0 +1,8 @@ 
+(module + (func $useless_locals (export "useless_locals")(param i32)(result i32) + (local i32 i32) + i32.const 42 + ) +) + +(assert_return (invoke "useless_locals" (i32.const 222)) (i32.const 42)) diff --git a/tools/run-tests.py b/tools/run-tests.py index 94cfe0748..7cc77d629 100755 --- a/tools/run-tests.py +++ b/tools/run-tests.py @@ -137,6 +137,25 @@ def run_core_tests(engine): raise Exception("wasm-test-core failed") +@runner('wasm-test-core-simd', default=True) +def run_core_simd_tests(engine): + TEST_DIR = join(PROJECT_SOURCE_DIR, 'test', 'wasm-spec', 'core', 'simd') + + print('Running wasm-test-core tests:') + xpass = glob(join(TEST_DIR, '*.wast')) + xpass_result = _run_wast_tests(engine, xpass, False) + + tests_total = len(xpass) + fail_total = xpass_result + print('TOTAL: %d' % (tests_total)) + print('%sPASS : %d%s' % (COLOR_GREEN, tests_total - fail_total, COLOR_RESET)) + print('%sFAIL : %d%s' % (COLOR_RED, fail_total, COLOR_RESET)) + + if fail_total > 0: + raise Exception("wasm-test-core failed") + + + @runner('wasi', default=True) def run_wasi_tests(engine): TEST_DIR = join(PROJECT_SOURCE_DIR, 'test', 'wasi')