lower.go

   1package frontend
   2
   3import (
   4	"encoding/binary"
   5	"fmt"
   6	"math"
   7	"runtime"
   8	"strings"
   9
  10	"github.com/tetratelabs/wazero/api"
  11	"github.com/tetratelabs/wazero/internal/engine/wazevo/ssa"
  12	"github.com/tetratelabs/wazero/internal/engine/wazevo/wazevoapi"
  13	"github.com/tetratelabs/wazero/internal/leb128"
  14	"github.com/tetratelabs/wazero/internal/wasm"
  15)
  16
type (
	// loweringState is used to keep the state of lowering.
	loweringState struct {
		// values holds the values on the Wasm stack.
		values           []ssa.Value
		// controlFrames is the stack of currently-open structured control
		// constructs (function, block, loop, if/else); the innermost frame is last.
		controlFrames    []controlFrame
		// unreachable is true while lowering Wasm code that is statically unreachable
		// (e.g. after an unconditional branch), in which case opcodes are decoded but
		// no SSA instructions are emitted.
		unreachable      bool
		// unreachableDepth counts the nesting of blocks entered while unreachable,
		// so that we know which "end" terminates the unreachable region.
		unreachableDepth int
		// tmpForBrTable is a scratch buffer reused when lowering br_table targets.
		tmpForBrTable    []uint32
		// pc is the current byte offset into the Wasm function body being lowered.
		pc               int
	}
	controlFrame struct {
		kind controlFrameKind
		// originalStackLen holds the number of values on the Wasm stack
		// when start executing this control frame minus params for the block.
		originalStackLenWithoutParam int
		// blk is the loop header if this is loop, and is the else-block if this is an if frame.
		blk,
		// followingBlock is the basic block we enter if we reach "end" of block.
		followingBlock ssa.BasicBlock
		// blockType is the Wasm function type describing the block's params/results.
		blockType *wasm.FunctionType
		// clonedArgs hold the arguments to Else block.
		clonedArgs ssa.Values
	}

	// controlFrameKind distinguishes what Wasm construct a controlFrame represents.
	controlFrameKind byte
)
  44
  45// String implements fmt.Stringer for debugging.
  46func (l *loweringState) String() string {
  47	var str []string
  48	for _, v := range l.values {
  49		str = append(str, fmt.Sprintf("v%v", v.ID()))
  50	}
  51	var frames []string
  52	for i := range l.controlFrames {
  53		frames = append(frames, l.controlFrames[i].kind.String())
  54	}
  55	return fmt.Sprintf("\n\tunreachable=%v(depth=%d)\n\tstack: %s\n\tcontrol frames: %s",
  56		l.unreachable, l.unreachableDepth,
  57		strings.Join(str, ", "),
  58		strings.Join(frames, ", "),
  59	)
  60}
  61
const (
	// controlFrameKindFunction is the frame for the function body itself;
	// reaching its "end" corresponds to the function return.
	controlFrameKindFunction = iota + 1
	// controlFrameKindLoop is the frame for a Wasm `loop` construct.
	controlFrameKindLoop
	// controlFrameKindIfWithElse is the frame for an `if` that has an `else` arm.
	controlFrameKindIfWithElse
	// controlFrameKindIfWithoutElse is the frame for an `if` without an `else` arm.
	controlFrameKindIfWithoutElse
	// controlFrameKindBlock is the frame for a plain Wasm `block` construct.
	controlFrameKindBlock
)
  69
  70// String implements fmt.Stringer for debugging.
  71func (k controlFrameKind) String() string {
  72	switch k {
  73	case controlFrameKindFunction:
  74		return "function"
  75	case controlFrameKindLoop:
  76		return "loop"
  77	case controlFrameKindIfWithElse:
  78		return "if_with_else"
  79	case controlFrameKindIfWithoutElse:
  80		return "if_without_else"
  81	case controlFrameKindBlock:
  82		return "block"
  83	default:
  84		panic(k)
  85	}
  86}
  87
  88// isLoop returns true if this is a loop frame.
  89func (ctrl *controlFrame) isLoop() bool {
  90	return ctrl.kind == controlFrameKindLoop
  91}
  92
  93// reset resets the state of loweringState for reuse.
  94func (l *loweringState) reset() {
  95	l.values = l.values[:0]
  96	l.controlFrames = l.controlFrames[:0]
  97	l.pc = 0
  98	l.unreachable = false
  99	l.unreachableDepth = 0
 100}
 101
 102func (l *loweringState) peek() (ret ssa.Value) {
 103	tail := len(l.values) - 1
 104	return l.values[tail]
 105}
 106
 107func (l *loweringState) pop() (ret ssa.Value) {
 108	tail := len(l.values) - 1
 109	ret = l.values[tail]
 110	l.values = l.values[:tail]
 111	return
 112}
 113
 114func (l *loweringState) push(ret ssa.Value) {
 115	l.values = append(l.values, ret)
 116}
 117
 118func (c *Compiler) nPeekDup(n int) ssa.Values {
 119	if n == 0 {
 120		return ssa.ValuesNil
 121	}
 122
 123	l := c.state()
 124	tail := len(l.values)
 125
 126	args := c.allocateVarLengthValues(n)
 127	args = args.Append(c.ssaBuilder.VarLengthPool(), l.values[tail-n:tail]...)
 128	return args
 129}
 130
 131func (l *loweringState) ctrlPop() (ret controlFrame) {
 132	tail := len(l.controlFrames) - 1
 133	ret = l.controlFrames[tail]
 134	l.controlFrames = l.controlFrames[:tail]
 135	return
 136}
 137
 138func (l *loweringState) ctrlPush(ret controlFrame) {
 139	l.controlFrames = append(l.controlFrames, ret)
 140}
 141
 142func (l *loweringState) ctrlPeekAt(n int) (ret *controlFrame) {
 143	tail := len(l.controlFrames) - 1
 144	return &l.controlFrames[tail-n]
 145}
 146
// lowerBody lowers the body of the Wasm function to the SSA form.
func (c *Compiler) lowerBody(entryBlk ssa.BasicBlock) {
	// The entry block's predecessors are complete, so it can be sealed up front.
	c.ssaBuilder.Seal(entryBlk)

	if c.needListener {
		c.callListenerBefore()
	}

	// Pushes the empty control frame which corresponds to the function return.
	c.loweringState.ctrlPush(controlFrame{
		kind:           controlFrameKindFunction,
		blockType:      c.wasmFunctionTyp,
		followingBlock: c.ssaBuilder.ReturnBlock(),
	})

	// Lower one opcode at a time; lowerCurrentOpcode advances loweringState.pc.
	for c.loweringState.pc < len(c.wasmFunctionBody) {
		blkBeforeLowering := c.ssaBuilder.CurrentBlock()
		c.lowerCurrentOpcode()
		blkAfterLowering := c.ssaBuilder.CurrentBlock()
		if blkBeforeLowering != blkAfterLowering {
			// In Wasm, once a block exits, that means we've done compiling the block.
			// Therefore, we finalize the known bounds at the end of the block for the exiting block.
			c.finalizeKnownSafeBoundsAtTheEndOfBlock(blkBeforeLowering.ID())
			// After that, we initialize the known bounds for the new compilation target block.
			c.initializeCurrentBlockKnownBounds()
		}
	}
}
 175
// state returns the mutable lowering state for the function currently being compiled.
func (c *Compiler) state() *loweringState {
	return &c.loweringState
}
 179
 180func (c *Compiler) lowerCurrentOpcode() {
 181	op := c.wasmFunctionBody[c.loweringState.pc]
 182
 183	if c.needSourceOffsetInfo {
 184		c.ssaBuilder.SetCurrentSourceOffset(
 185			ssa.SourceOffset(c.loweringState.pc) + ssa.SourceOffset(c.wasmFunctionBodyOffsetInCodeSection),
 186		)
 187	}
 188
 189	builder := c.ssaBuilder
 190	state := c.state()
 191	switch op {
 192	case wasm.OpcodeI32Const:
 193		c := c.readI32s()
 194		if state.unreachable {
 195			break
 196		}
 197
 198		iconst := builder.AllocateInstruction().AsIconst32(uint32(c)).Insert(builder)
 199		value := iconst.Return()
 200		state.push(value)
 201	case wasm.OpcodeI64Const:
 202		c := c.readI64s()
 203		if state.unreachable {
 204			break
 205		}
 206		iconst := builder.AllocateInstruction().AsIconst64(uint64(c)).Insert(builder)
 207		value := iconst.Return()
 208		state.push(value)
 209	case wasm.OpcodeF32Const:
 210		f32 := c.readF32()
 211		if state.unreachable {
 212			break
 213		}
 214		f32const := builder.AllocateInstruction().
 215			AsF32const(f32).
 216			Insert(builder).
 217			Return()
 218		state.push(f32const)
 219	case wasm.OpcodeF64Const:
 220		f64 := c.readF64()
 221		if state.unreachable {
 222			break
 223		}
 224		f64const := builder.AllocateInstruction().
 225			AsF64const(f64).
 226			Insert(builder).
 227			Return()
 228		state.push(f64const)
 229	case wasm.OpcodeI32Add, wasm.OpcodeI64Add:
 230		if state.unreachable {
 231			break
 232		}
 233		y, x := state.pop(), state.pop()
 234		iadd := builder.AllocateInstruction()
 235		iadd.AsIadd(x, y)
 236		builder.InsertInstruction(iadd)
 237		value := iadd.Return()
 238		state.push(value)
 239	case wasm.OpcodeI32Sub, wasm.OpcodeI64Sub:
 240		if state.unreachable {
 241			break
 242		}
 243		y, x := state.pop(), state.pop()
 244		isub := builder.AllocateInstruction()
 245		isub.AsIsub(x, y)
 246		builder.InsertInstruction(isub)
 247		value := isub.Return()
 248		state.push(value)
 249	case wasm.OpcodeF32Add, wasm.OpcodeF64Add:
 250		if state.unreachable {
 251			break
 252		}
 253		y, x := state.pop(), state.pop()
 254		iadd := builder.AllocateInstruction()
 255		iadd.AsFadd(x, y)
 256		builder.InsertInstruction(iadd)
 257		value := iadd.Return()
 258		state.push(value)
 259	case wasm.OpcodeI32Mul, wasm.OpcodeI64Mul:
 260		if state.unreachable {
 261			break
 262		}
 263		y, x := state.pop(), state.pop()
 264		imul := builder.AllocateInstruction()
 265		imul.AsImul(x, y)
 266		builder.InsertInstruction(imul)
 267		value := imul.Return()
 268		state.push(value)
 269	case wasm.OpcodeF32Sub, wasm.OpcodeF64Sub:
 270		if state.unreachable {
 271			break
 272		}
 273		y, x := state.pop(), state.pop()
 274		isub := builder.AllocateInstruction()
 275		isub.AsFsub(x, y)
 276		builder.InsertInstruction(isub)
 277		value := isub.Return()
 278		state.push(value)
 279	case wasm.OpcodeF32Mul, wasm.OpcodeF64Mul:
 280		if state.unreachable {
 281			break
 282		}
 283		y, x := state.pop(), state.pop()
 284		isub := builder.AllocateInstruction()
 285		isub.AsFmul(x, y)
 286		builder.InsertInstruction(isub)
 287		value := isub.Return()
 288		state.push(value)
 289	case wasm.OpcodeF32Div, wasm.OpcodeF64Div:
 290		if state.unreachable {
 291			break
 292		}
 293		y, x := state.pop(), state.pop()
 294		isub := builder.AllocateInstruction()
 295		isub.AsFdiv(x, y)
 296		builder.InsertInstruction(isub)
 297		value := isub.Return()
 298		state.push(value)
 299	case wasm.OpcodeF32Max, wasm.OpcodeF64Max:
 300		if state.unreachable {
 301			break
 302		}
 303		y, x := state.pop(), state.pop()
 304		isub := builder.AllocateInstruction()
 305		isub.AsFmax(x, y)
 306		builder.InsertInstruction(isub)
 307		value := isub.Return()
 308		state.push(value)
 309	case wasm.OpcodeF32Min, wasm.OpcodeF64Min:
 310		if state.unreachable {
 311			break
 312		}
 313		y, x := state.pop(), state.pop()
 314		isub := builder.AllocateInstruction()
 315		isub.AsFmin(x, y)
 316		builder.InsertInstruction(isub)
 317		value := isub.Return()
 318		state.push(value)
 319	case wasm.OpcodeI64Extend8S:
 320		if state.unreachable {
 321			break
 322		}
 323		c.insertIntegerExtend(true, 8, 64)
 324	case wasm.OpcodeI64Extend16S:
 325		if state.unreachable {
 326			break
 327		}
 328		c.insertIntegerExtend(true, 16, 64)
 329	case wasm.OpcodeI64Extend32S, wasm.OpcodeI64ExtendI32S:
 330		if state.unreachable {
 331			break
 332		}
 333		c.insertIntegerExtend(true, 32, 64)
 334	case wasm.OpcodeI64ExtendI32U:
 335		if state.unreachable {
 336			break
 337		}
 338		c.insertIntegerExtend(false, 32, 64)
 339	case wasm.OpcodeI32Extend8S:
 340		if state.unreachable {
 341			break
 342		}
 343		c.insertIntegerExtend(true, 8, 32)
 344	case wasm.OpcodeI32Extend16S:
 345		if state.unreachable {
 346			break
 347		}
 348		c.insertIntegerExtend(true, 16, 32)
 349	case wasm.OpcodeI32Eqz, wasm.OpcodeI64Eqz:
 350		if state.unreachable {
 351			break
 352		}
 353		x := state.pop()
 354		zero := builder.AllocateInstruction()
 355		if op == wasm.OpcodeI32Eqz {
 356			zero.AsIconst32(0)
 357		} else {
 358			zero.AsIconst64(0)
 359		}
 360		builder.InsertInstruction(zero)
 361		icmp := builder.AllocateInstruction().
 362			AsIcmp(x, zero.Return(), ssa.IntegerCmpCondEqual).
 363			Insert(builder).
 364			Return()
 365		state.push(icmp)
 366	case wasm.OpcodeI32Eq, wasm.OpcodeI64Eq:
 367		if state.unreachable {
 368			break
 369		}
 370		c.insertIcmp(ssa.IntegerCmpCondEqual)
 371	case wasm.OpcodeI32Ne, wasm.OpcodeI64Ne:
 372		if state.unreachable {
 373			break
 374		}
 375		c.insertIcmp(ssa.IntegerCmpCondNotEqual)
 376	case wasm.OpcodeI32LtS, wasm.OpcodeI64LtS:
 377		if state.unreachable {
 378			break
 379		}
 380		c.insertIcmp(ssa.IntegerCmpCondSignedLessThan)
 381	case wasm.OpcodeI32LtU, wasm.OpcodeI64LtU:
 382		if state.unreachable {
 383			break
 384		}
 385		c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThan)
 386	case wasm.OpcodeI32GtS, wasm.OpcodeI64GtS:
 387		if state.unreachable {
 388			break
 389		}
 390		c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThan)
 391	case wasm.OpcodeI32GtU, wasm.OpcodeI64GtU:
 392		if state.unreachable {
 393			break
 394		}
 395		c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThan)
 396	case wasm.OpcodeI32LeS, wasm.OpcodeI64LeS:
 397		if state.unreachable {
 398			break
 399		}
 400		c.insertIcmp(ssa.IntegerCmpCondSignedLessThanOrEqual)
 401	case wasm.OpcodeI32LeU, wasm.OpcodeI64LeU:
 402		if state.unreachable {
 403			break
 404		}
 405		c.insertIcmp(ssa.IntegerCmpCondUnsignedLessThanOrEqual)
 406	case wasm.OpcodeI32GeS, wasm.OpcodeI64GeS:
 407		if state.unreachable {
 408			break
 409		}
 410		c.insertIcmp(ssa.IntegerCmpCondSignedGreaterThanOrEqual)
 411	case wasm.OpcodeI32GeU, wasm.OpcodeI64GeU:
 412		if state.unreachable {
 413			break
 414		}
 415		c.insertIcmp(ssa.IntegerCmpCondUnsignedGreaterThanOrEqual)
 416
 417	case wasm.OpcodeF32Eq, wasm.OpcodeF64Eq:
 418		if state.unreachable {
 419			break
 420		}
 421		c.insertFcmp(ssa.FloatCmpCondEqual)
 422	case wasm.OpcodeF32Ne, wasm.OpcodeF64Ne:
 423		if state.unreachable {
 424			break
 425		}
 426		c.insertFcmp(ssa.FloatCmpCondNotEqual)
 427	case wasm.OpcodeF32Lt, wasm.OpcodeF64Lt:
 428		if state.unreachable {
 429			break
 430		}
 431		c.insertFcmp(ssa.FloatCmpCondLessThan)
 432	case wasm.OpcodeF32Gt, wasm.OpcodeF64Gt:
 433		if state.unreachable {
 434			break
 435		}
 436		c.insertFcmp(ssa.FloatCmpCondGreaterThan)
 437	case wasm.OpcodeF32Le, wasm.OpcodeF64Le:
 438		if state.unreachable {
 439			break
 440		}
 441		c.insertFcmp(ssa.FloatCmpCondLessThanOrEqual)
 442	case wasm.OpcodeF32Ge, wasm.OpcodeF64Ge:
 443		if state.unreachable {
 444			break
 445		}
 446		c.insertFcmp(ssa.FloatCmpCondGreaterThanOrEqual)
 447	case wasm.OpcodeF32Neg, wasm.OpcodeF64Neg:
 448		if state.unreachable {
 449			break
 450		}
 451		x := state.pop()
 452		v := builder.AllocateInstruction().AsFneg(x).Insert(builder).Return()
 453		state.push(v)
 454	case wasm.OpcodeF32Sqrt, wasm.OpcodeF64Sqrt:
 455		if state.unreachable {
 456			break
 457		}
 458		x := state.pop()
 459		v := builder.AllocateInstruction().AsSqrt(x).Insert(builder).Return()
 460		state.push(v)
 461	case wasm.OpcodeF32Abs, wasm.OpcodeF64Abs:
 462		if state.unreachable {
 463			break
 464		}
 465		x := state.pop()
 466		v := builder.AllocateInstruction().AsFabs(x).Insert(builder).Return()
 467		state.push(v)
 468	case wasm.OpcodeF32Copysign, wasm.OpcodeF64Copysign:
 469		if state.unreachable {
 470			break
 471		}
 472		y, x := state.pop(), state.pop()
 473		v := builder.AllocateInstruction().AsFcopysign(x, y).Insert(builder).Return()
 474		state.push(v)
 475
 476	case wasm.OpcodeF32Ceil, wasm.OpcodeF64Ceil:
 477		if state.unreachable {
 478			break
 479		}
 480		x := state.pop()
 481		v := builder.AllocateInstruction().AsCeil(x).Insert(builder).Return()
 482		state.push(v)
 483	case wasm.OpcodeF32Floor, wasm.OpcodeF64Floor:
 484		if state.unreachable {
 485			break
 486		}
 487		x := state.pop()
 488		v := builder.AllocateInstruction().AsFloor(x).Insert(builder).Return()
 489		state.push(v)
 490	case wasm.OpcodeF32Trunc, wasm.OpcodeF64Trunc:
 491		if state.unreachable {
 492			break
 493		}
 494		x := state.pop()
 495		v := builder.AllocateInstruction().AsTrunc(x).Insert(builder).Return()
 496		state.push(v)
 497	case wasm.OpcodeF32Nearest, wasm.OpcodeF64Nearest:
 498		if state.unreachable {
 499			break
 500		}
 501		x := state.pop()
 502		v := builder.AllocateInstruction().AsNearest(x).Insert(builder).Return()
 503		state.push(v)
 504	case wasm.OpcodeI64TruncF64S, wasm.OpcodeI64TruncF32S,
 505		wasm.OpcodeI32TruncF64S, wasm.OpcodeI32TruncF32S,
 506		wasm.OpcodeI64TruncF64U, wasm.OpcodeI64TruncF32U,
 507		wasm.OpcodeI32TruncF64U, wasm.OpcodeI32TruncF32U:
 508		if state.unreachable {
 509			break
 510		}
 511		ret := builder.AllocateInstruction().AsFcvtToInt(
 512			state.pop(),
 513			c.execCtxPtrValue,
 514			op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI32TruncF32S || op == wasm.OpcodeI32TruncF64S,
 515			op == wasm.OpcodeI64TruncF64S || op == wasm.OpcodeI64TruncF32S || op == wasm.OpcodeI64TruncF64U || op == wasm.OpcodeI64TruncF32U,
 516			false,
 517		).Insert(builder).Return()
 518		state.push(ret)
 519	case wasm.OpcodeMiscPrefix:
 520		state.pc++
 521		// A misc opcode is encoded as an unsigned variable 32-bit integer.
 522		miscOpUint, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc:])
 523		if err != nil {
 524			// In normal conditions this should never happen because the function has passed validation.
 525			panic(fmt.Sprintf("failed to read misc opcode: %v", err))
 526		}
 527		state.pc += int(num - 1)
 528		miscOp := wasm.OpcodeMisc(miscOpUint)
 529		switch miscOp {
 530		case wasm.OpcodeMiscI64TruncSatF64S, wasm.OpcodeMiscI64TruncSatF32S,
 531			wasm.OpcodeMiscI32TruncSatF64S, wasm.OpcodeMiscI32TruncSatF32S,
 532			wasm.OpcodeMiscI64TruncSatF64U, wasm.OpcodeMiscI64TruncSatF32U,
 533			wasm.OpcodeMiscI32TruncSatF64U, wasm.OpcodeMiscI32TruncSatF32U:
 534			if state.unreachable {
 535				break
 536			}
 537			ret := builder.AllocateInstruction().AsFcvtToInt(
 538				state.pop(),
 539				c.execCtxPtrValue,
 540				miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF32S || miscOp == wasm.OpcodeMiscI32TruncSatF64S,
 541				miscOp == wasm.OpcodeMiscI64TruncSatF64S || miscOp == wasm.OpcodeMiscI64TruncSatF32S || miscOp == wasm.OpcodeMiscI64TruncSatF64U || miscOp == wasm.OpcodeMiscI64TruncSatF32U,
 542				true,
 543			).Insert(builder).Return()
 544			state.push(ret)
 545
 546		case wasm.OpcodeMiscTableSize:
 547			tableIndex := c.readI32u()
 548			if state.unreachable {
 549				break
 550			}
 551
 552			// Load the table.
 553			loadTableInstancePtr := builder.AllocateInstruction()
 554			loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64)
 555			builder.InsertInstruction(loadTableInstancePtr)
 556			tableInstancePtr := loadTableInstancePtr.Return()
 557
 558			// Load the table's length.
 559			loadTableLen := builder.AllocateInstruction().
 560				AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32).
 561				Insert(builder)
 562			state.push(loadTableLen.Return())
 563
 564		case wasm.OpcodeMiscTableGrow:
 565			tableIndex := c.readI32u()
 566			if state.unreachable {
 567				break
 568			}
 569
 570			c.storeCallerModuleContext()
 571
 572			tableIndexVal := builder.AllocateInstruction().AsIconst32(tableIndex).Insert(builder).Return()
 573
 574			num := state.pop()
 575			r := state.pop()
 576
 577			tableGrowPtr := builder.AllocateInstruction().
 578				AsLoad(c.execCtxPtrValue,
 579					wazevoapi.ExecutionContextOffsetTableGrowTrampolineAddress.U32(),
 580					ssa.TypeI64,
 581				).Insert(builder).Return()
 582
 583			args := c.allocateVarLengthValues(4, c.execCtxPtrValue, tableIndexVal, num, r)
 584			callGrowRet := builder.
 585				AllocateInstruction().
 586				AsCallIndirect(tableGrowPtr, &c.tableGrowSig, args).
 587				Insert(builder).Return()
 588			state.push(callGrowRet)
 589
 590		case wasm.OpcodeMiscTableCopy:
 591			dstTableIndex := c.readI32u()
 592			srcTableIndex := c.readI32u()
 593			if state.unreachable {
 594				break
 595			}
 596
 597			copySize := builder.
 598				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 599			srcOffset := builder.
 600				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 601			dstOffset := builder.
 602				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 603
 604			// Out of bounds check.
 605			dstTableInstancePtr := c.boundsCheckInTable(dstTableIndex, dstOffset, copySize)
 606			srcTableInstancePtr := c.boundsCheckInTable(srcTableIndex, srcOffset, copySize)
 607
 608			dstTableBaseAddr := c.loadTableBaseAddr(dstTableInstancePtr)
 609			srcTableBaseAddr := c.loadTableBaseAddr(srcTableInstancePtr)
 610
 611			three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return()
 612
 613			dstOffsetInBytes := builder.AllocateInstruction().AsIshl(dstOffset, three).Insert(builder).Return()
 614			dstAddr := builder.AllocateInstruction().AsIadd(dstTableBaseAddr, dstOffsetInBytes).Insert(builder).Return()
 615			srcOffsetInBytes := builder.AllocateInstruction().AsIshl(srcOffset, three).Insert(builder).Return()
 616			srcAddr := builder.AllocateInstruction().AsIadd(srcTableBaseAddr, srcOffsetInBytes).Insert(builder).Return()
 617
 618			copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return()
 619			c.callMemmove(dstAddr, srcAddr, copySizeInBytes)
 620
 621		case wasm.OpcodeMiscMemoryCopy:
 622			state.pc += 2 // +2 to skip two memory indexes which are fixed to zero.
 623			if state.unreachable {
 624				break
 625			}
 626
 627			copySize := builder.
 628				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 629			srcOffset := builder.
 630				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 631			dstOffset := builder.
 632				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 633
 634			// Out of bounds check.
 635			memLen := c.getMemoryLenValue(false)
 636			c.boundsCheckInMemory(memLen, dstOffset, copySize)
 637			c.boundsCheckInMemory(memLen, srcOffset, copySize)
 638
 639			memBase := c.getMemoryBaseValue(false)
 640			dstAddr := builder.AllocateInstruction().AsIadd(memBase, dstOffset).Insert(builder).Return()
 641			srcAddr := builder.AllocateInstruction().AsIadd(memBase, srcOffset).Insert(builder).Return()
 642
 643			c.callMemmove(dstAddr, srcAddr, copySize)
 644
 645		case wasm.OpcodeMiscTableFill:
 646			tableIndex := c.readI32u()
 647			if state.unreachable {
 648				break
 649			}
 650			fillSize := state.pop()
 651			value := state.pop()
 652			offset := state.pop()
 653
 654			fillSizeExt := builder.
 655				AllocateInstruction().AsUExtend(fillSize, 32, 64).Insert(builder).Return()
 656			offsetExt := builder.
 657				AllocateInstruction().AsUExtend(offset, 32, 64).Insert(builder).Return()
 658			tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetExt, fillSizeExt)
 659
 660			three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return()
 661			offsetInBytes := builder.AllocateInstruction().AsIshl(offsetExt, three).Insert(builder).Return()
 662			fillSizeInBytes := builder.AllocateInstruction().AsIshl(fillSizeExt, three).Insert(builder).Return()
 663
 664			// Calculate the base address of the table.
 665			tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr)
 666			addr := builder.AllocateInstruction().AsIadd(tableBaseAddr, offsetInBytes).Insert(builder).Return()
 667
 668			// Prepare the loop and following block.
 669			beforeLoop := builder.AllocateBasicBlock()
 670			loopBlk := builder.AllocateBasicBlock()
 671			loopVar := loopBlk.AddParam(builder, ssa.TypeI64)
 672			followingBlk := builder.AllocateBasicBlock()
 673
 674			// Uses the copy trick for faster filling buffer like memory.fill, but in this case we copy 8 bytes at a time.
 675			// 	buf := memoryInst.Buffer[offset : offset+fillSize]
 676			// 	buf[0:8] = value
 677			// 	for i := 8; i < fillSize; i *= 2 { Begin with 8 bytes.
 678			// 		copy(buf[i:], buf[:i])
 679			// 	}
 680
 681			// Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics.
 682			zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()
 683			ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSizeExt, zero, ssa.IntegerCmpCondEqual).
 684				Insert(builder).Return()
 685			builder.AllocateInstruction().AsBrnz(ifFillSizeZero, ssa.ValuesNil, followingBlk).Insert(builder)
 686			c.insertJumpToBlock(ssa.ValuesNil, beforeLoop)
 687
 688			// buf[0:8] = value
 689			builder.SetCurrentBlock(beforeLoop)
 690			builder.AllocateInstruction().AsStore(ssa.OpcodeStore, value, addr, 0).Insert(builder)
 691			initValue := builder.AllocateInstruction().AsIconst64(8).Insert(builder).Return()
 692			c.insertJumpToBlock(c.allocateVarLengthValues(1, initValue), loopBlk)
 693
 694			builder.SetCurrentBlock(loopBlk)
 695			dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return()
 696
 697			// If loopVar*2 > fillSizeInBytes, then count must be fillSizeInBytes-loopVar.
 698			var count ssa.Value
 699			{
 700				loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return()
 701				loopVarDoubledLargerThanFillSize := builder.
 702					AllocateInstruction().AsIcmp(loopVarDoubled, fillSizeInBytes, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual).
 703					Insert(builder).Return()
 704				diff := builder.AllocateInstruction().AsIsub(fillSizeInBytes, loopVar).Insert(builder).Return()
 705				count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return()
 706			}
 707
 708			c.callMemmove(dstAddr, addr, count)
 709
 710			shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return()
 711			newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return()
 712			loopVarLessThanFillSize := builder.AllocateInstruction().
 713				AsIcmp(newLoopVar, fillSizeInBytes, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return()
 714
 715			builder.AllocateInstruction().
 716				AsBrnz(loopVarLessThanFillSize, c.allocateVarLengthValues(1, newLoopVar), loopBlk).
 717				Insert(builder)
 718
 719			c.insertJumpToBlock(ssa.ValuesNil, followingBlk)
 720			builder.SetCurrentBlock(followingBlk)
 721
 722			builder.Seal(beforeLoop)
 723			builder.Seal(loopBlk)
 724			builder.Seal(followingBlk)
 725
 726		case wasm.OpcodeMiscMemoryFill:
 727			state.pc++ // Skip the memory index which is fixed to zero.
 728			if state.unreachable {
 729				break
 730			}
 731
 732			fillSize := builder.
 733				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 734			value := state.pop()
 735			offset := builder.
 736				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 737
 738			// Out of bounds check.
 739			c.boundsCheckInMemory(c.getMemoryLenValue(false), offset, fillSize)
 740
 741			// Calculate the base address:
 742			addr := builder.AllocateInstruction().AsIadd(c.getMemoryBaseValue(false), offset).Insert(builder).Return()
 743
 744			// Uses the copy trick for faster filling buffer: https://gist.github.com/taylorza/df2f89d5f9ab3ffd06865062a4cf015d
 745			// 	buf := memoryInst.Buffer[offset : offset+fillSize]
 746			// 	buf[0] = value
 747			// 	for i := 1; i < fillSize; i *= 2 {
 748			// 		copy(buf[i:], buf[:i])
 749			// 	}
 750
 751			// Prepare the loop and following block.
 752			beforeLoop := builder.AllocateBasicBlock()
 753			loopBlk := builder.AllocateBasicBlock()
 754			loopVar := loopBlk.AddParam(builder, ssa.TypeI64)
 755			followingBlk := builder.AllocateBasicBlock()
 756
 757			// Insert the jump to the beforeLoop block; If the fillSize is zero, then jump to the following block to skip entire logics.
 758			zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()
 759			ifFillSizeZero := builder.AllocateInstruction().AsIcmp(fillSize, zero, ssa.IntegerCmpCondEqual).
 760				Insert(builder).Return()
 761			builder.AllocateInstruction().AsBrnz(ifFillSizeZero, ssa.ValuesNil, followingBlk).Insert(builder)
 762			c.insertJumpToBlock(ssa.ValuesNil, beforeLoop)
 763
 764			// buf[0] = value
 765			builder.SetCurrentBlock(beforeLoop)
 766			builder.AllocateInstruction().AsStore(ssa.OpcodeIstore8, value, addr, 0).Insert(builder)
 767			initValue := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return()
 768			c.insertJumpToBlock(c.allocateVarLengthValues(1, initValue), loopBlk)
 769
 770			builder.SetCurrentBlock(loopBlk)
 771			dstAddr := builder.AllocateInstruction().AsIadd(addr, loopVar).Insert(builder).Return()
 772
 773			// If loopVar*2 > fillSizeExt, then count must be fillSizeExt-loopVar.
 774			var count ssa.Value
 775			{
 776				loopVarDoubled := builder.AllocateInstruction().AsIadd(loopVar, loopVar).Insert(builder).Return()
 777				loopVarDoubledLargerThanFillSize := builder.
 778					AllocateInstruction().AsIcmp(loopVarDoubled, fillSize, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual).
 779					Insert(builder).Return()
 780				diff := builder.AllocateInstruction().AsIsub(fillSize, loopVar).Insert(builder).Return()
 781				count = builder.AllocateInstruction().AsSelect(loopVarDoubledLargerThanFillSize, diff, loopVar).Insert(builder).Return()
 782			}
 783
 784			c.callMemmove(dstAddr, addr, count)
 785
 786			shiftAmount := builder.AllocateInstruction().AsIconst64(1).Insert(builder).Return()
 787			newLoopVar := builder.AllocateInstruction().AsIshl(loopVar, shiftAmount).Insert(builder).Return()
 788			loopVarLessThanFillSize := builder.AllocateInstruction().
 789				AsIcmp(newLoopVar, fillSize, ssa.IntegerCmpCondUnsignedLessThan).Insert(builder).Return()
 790
 791			builder.AllocateInstruction().
 792				AsBrnz(loopVarLessThanFillSize, c.allocateVarLengthValues(1, newLoopVar), loopBlk).
 793				Insert(builder)
 794
 795			c.insertJumpToBlock(ssa.ValuesNil, followingBlk)
 796			builder.SetCurrentBlock(followingBlk)
 797
 798			builder.Seal(beforeLoop)
 799			builder.Seal(loopBlk)
 800			builder.Seal(followingBlk)
 801
 802		case wasm.OpcodeMiscMemoryInit:
 803			index := c.readI32u()
 804			state.pc++ // Skip the memory index which is fixed to zero.
 805			if state.unreachable {
 806				break
 807			}
 808
 809			copySize := builder.
 810				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 811			offsetInDataInstance := builder.
 812				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 813			offsetInMemory := builder.
 814				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
 815
 816			dataInstPtr := c.dataOrElementInstanceAddr(index, c.offset.DataInstances1stElement)
 817
 818			// Bounds check.
 819			c.boundsCheckInMemory(c.getMemoryLenValue(false), offsetInMemory, copySize)
 820			c.boundsCheckInDataOrElementInstance(dataInstPtr, offsetInDataInstance, copySize, wazevoapi.ExitCodeMemoryOutOfBounds)
 821
 822			dataInstBaseAddr := builder.AllocateInstruction().AsLoad(dataInstPtr, 0, ssa.TypeI64).Insert(builder).Return()
 823			srcAddr := builder.AllocateInstruction().AsIadd(dataInstBaseAddr, offsetInDataInstance).Insert(builder).Return()
 824
 825			memBase := c.getMemoryBaseValue(false)
 826			dstAddr := builder.AllocateInstruction().AsIadd(memBase, offsetInMemory).Insert(builder).Return()
 827
 828			c.callMemmove(dstAddr, srcAddr, copySize)
 829
		case wasm.OpcodeMiscTableInit:
			// Immediates must be consumed even when unreachable so that decoding
			// stays in sync with the byte stream.
			elemIndex := c.readI32u()
			tableIndex := c.readI32u()
			if state.unreachable {
				break
			}

			// table.init pops: number of elements, source offset in the element
			// instance, and destination offset in the table (top-of-stack first).
			// All three are zero-extended to 64-bit for address arithmetic.
			copySize := builder.
				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
			offsetInElementInstance := builder.
				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()
			offsetInTable := builder.
				AllocateInstruction().AsUExtend(state.pop(), 32, 64).Insert(builder).Return()

			elemInstPtr := c.dataOrElementInstanceAddr(elemIndex, c.offset.ElementInstances1stElement)

			// Bounds check.
			tableInstancePtr := c.boundsCheckInTable(tableIndex, offsetInTable, copySize)
			c.boundsCheckInDataOrElementInstance(elemInstPtr, offsetInElementInstance, copySize, wazevoapi.ExitCodeTableOutOfBounds)

			// Shift by 3 scales element indices to byte offsets (8 bytes per entry).
			three := builder.AllocateInstruction().AsIconst64(3).Insert(builder).Return()
			// Calculates the destination address in the table.
			tableOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInTable, three).Insert(builder).Return()
			tableBaseAddr := c.loadTableBaseAddr(tableInstancePtr)
			dstAddr := builder.AllocateInstruction().AsIadd(tableBaseAddr, tableOffsetInBytes).Insert(builder).Return()

			// Calculates the source address in the element instance.
			srcOffsetInBytes := builder.AllocateInstruction().AsIshl(offsetInElementInstance, three).Insert(builder).Return()
			elemInstBaseAddr := builder.AllocateInstruction().AsLoad(elemInstPtr, 0, ssa.TypeI64).Insert(builder).Return()
			srcAddr := builder.AllocateInstruction().AsIadd(elemInstBaseAddr, srcOffsetInBytes).Insert(builder).Return()

			// The copy itself is delegated to memmove with the byte-scaled size.
			copySizeInBytes := builder.AllocateInstruction().AsIshl(copySize, three).Insert(builder).Return()
			c.callMemmove(dstAddr, srcAddr, copySizeInBytes)

		case wasm.OpcodeMiscElemDrop:
			// elem.drop: invalidates the element instance so later table.init traps.
			index := c.readI32u()
			if state.unreachable {
				break
			}

			c.dropDataOrElementInstance(index, c.offset.ElementInstances1stElement)

		case wasm.OpcodeMiscDataDrop:
			// data.drop: same mechanism as elem.drop, but for data instances.
			index := c.readI32u()
			if state.unreachable {
				break
			}
			c.dropDataOrElementInstance(index, c.offset.DataInstances1stElement)

		default:
			panic("Unknown MiscOp " + wasm.MiscInstructionName(miscOp))
		}
 882
	case wasm.OpcodeI32ReinterpretF32:
		// Reinterpret ops are pure bit moves between register classes,
		// lowered to an SSA bitcast with the target type.
		if state.unreachable {
			break
		}
		reinterpret := builder.AllocateInstruction().
			AsBitcast(state.pop(), ssa.TypeI32).
			Insert(builder).Return()
		state.push(reinterpret)

	case wasm.OpcodeI64ReinterpretF64:
		if state.unreachable {
			break
		}
		reinterpret := builder.AllocateInstruction().
			AsBitcast(state.pop(), ssa.TypeI64).
			Insert(builder).Return()
		state.push(reinterpret)

	case wasm.OpcodeF32ReinterpretI32:
		if state.unreachable {
			break
		}
		reinterpret := builder.AllocateInstruction().
			AsBitcast(state.pop(), ssa.TypeF32).
			Insert(builder).Return()
		state.push(reinterpret)

	case wasm.OpcodeF64ReinterpretI64:
		if state.unreachable {
			break
		}
		reinterpret := builder.AllocateInstruction().
			AsBitcast(state.pop(), ssa.TypeF64).
			Insert(builder).Return()
		state.push(reinterpret)
 918
	case wasm.OpcodeI32DivS, wasm.OpcodeI64DivS:
		// Binary ops pop the right operand first (y), then the left (x).
		// Unlike the pure bitwise ops below, the div/rem SSA ops also take the
		// execution context pointer — presumably so the backend can raise a
		// trap (divide-by-zero/overflow); confirm against the ssa package.
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		result := builder.AllocateInstruction().AsSDiv(x, y, c.execCtxPtrValue).Insert(builder).Return()
		state.push(result)

	case wasm.OpcodeI32DivU, wasm.OpcodeI64DivU:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		result := builder.AllocateInstruction().AsUDiv(x, y, c.execCtxPtrValue).Insert(builder).Return()
		state.push(result)

	case wasm.OpcodeI32RemS, wasm.OpcodeI64RemS:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		result := builder.AllocateInstruction().AsSRem(x, y, c.execCtxPtrValue).Insert(builder).Return()
		state.push(result)

	case wasm.OpcodeI32RemU, wasm.OpcodeI64RemU:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		result := builder.AllocateInstruction().AsURem(x, y, c.execCtxPtrValue).Insert(builder).Return()
		state.push(result)
 950
	case wasm.OpcodeI32And, wasm.OpcodeI64And:
		// The remaining integer ops map 1:1 onto SSA instructions; each pops
		// y (top) then x and pushes a single result.
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		and := builder.AllocateInstruction()
		and.AsBand(x, y)
		builder.InsertInstruction(and)
		value := and.Return()
		state.push(value)
	case wasm.OpcodeI32Or, wasm.OpcodeI64Or:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		or := builder.AllocateInstruction()
		or.AsBor(x, y)
		builder.InsertInstruction(or)
		value := or.Return()
		state.push(value)
	case wasm.OpcodeI32Xor, wasm.OpcodeI64Xor:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		xor := builder.AllocateInstruction()
		xor.AsBxor(x, y)
		builder.InsertInstruction(xor)
		value := xor.Return()
		state.push(value)
	case wasm.OpcodeI32Shl, wasm.OpcodeI64Shl:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		ishl := builder.AllocateInstruction()
		ishl.AsIshl(x, y)
		builder.InsertInstruction(ishl)
		value := ishl.Return()
		state.push(value)
	case wasm.OpcodeI32ShrU, wasm.OpcodeI64ShrU:
		if state.unreachable {
			break
		}
		// NOTE(review): the local is named ishl but holds a ushr — copy-paste
		// naming from the Shl case; harmless, but worth renaming someday.
		y, x := state.pop(), state.pop()
		ishl := builder.AllocateInstruction()
		ishl.AsUshr(x, y)
		builder.InsertInstruction(ishl)
		value := ishl.Return()
		state.push(value)
	case wasm.OpcodeI32ShrS, wasm.OpcodeI64ShrS:
		if state.unreachable {
			break
		}
		// NOTE(review): same ishl-for-sshr naming as the ShrU case above.
		y, x := state.pop(), state.pop()
		ishl := builder.AllocateInstruction()
		ishl.AsSshr(x, y)
		builder.InsertInstruction(ishl)
		value := ishl.Return()
		state.push(value)
	case wasm.OpcodeI32Rotl, wasm.OpcodeI64Rotl:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		rotl := builder.AllocateInstruction()
		rotl.AsRotl(x, y)
		builder.InsertInstruction(rotl)
		value := rotl.Return()
		state.push(value)
	case wasm.OpcodeI32Rotr, wasm.OpcodeI64Rotr:
		if state.unreachable {
			break
		}
		y, x := state.pop(), state.pop()
		rotr := builder.AllocateInstruction()
		rotr.AsRotr(x, y)
		builder.InsertInstruction(rotr)
		value := rotr.Return()
		state.push(value)
	case wasm.OpcodeI32Clz, wasm.OpcodeI64Clz:
		// Unary bit-count ops: pop one operand, push one result.
		if state.unreachable {
			break
		}
		x := state.pop()
		clz := builder.AllocateInstruction()
		clz.AsClz(x)
		builder.InsertInstruction(clz)
		value := clz.Return()
		state.push(value)
	case wasm.OpcodeI32Ctz, wasm.OpcodeI64Ctz:
		if state.unreachable {
			break
		}
		x := state.pop()
		ctz := builder.AllocateInstruction()
		ctz.AsCtz(x)
		builder.InsertInstruction(ctz)
		value := ctz.Return()
		state.push(value)
	case wasm.OpcodeI32Popcnt, wasm.OpcodeI64Popcnt:
		if state.unreachable {
			break
		}
		x := state.pop()
		popcnt := builder.AllocateInstruction()
		popcnt.AsPopcnt(x)
		builder.InsertInstruction(popcnt)
		value := popcnt.Return()
		state.push(value)
1061
	case wasm.OpcodeI32WrapI64:
		// i32.wrap_i64 truncates to the low 32 bits via an SSA ireduce.
		if state.unreachable {
			break
		}
		x := state.pop()
		wrap := builder.AllocateInstruction().AsIreduce(x, ssa.TypeI32).Insert(builder).Return()
		state.push(wrap)
	case wasm.OpcodeGlobalGet:
		index := c.readI32u()
		if state.unreachable {
			break
		}
		v := c.getWasmGlobalValue(index, false)
		state.push(v)
	case wasm.OpcodeGlobalSet:
		index := c.readI32u()
		if state.unreachable {
			break
		}
		v := state.pop()
		c.setWasmGlobalValue(index, v)
	case wasm.OpcodeLocalGet:
		// Locals are modeled as SSA variables; MustFindValue resolves the
		// current reaching definition for the variable.
		index := c.readI32u()
		if state.unreachable {
			break
		}
		variable := c.localVariable(index)
		state.push(builder.MustFindValue(variable))

	case wasm.OpcodeLocalSet:
		index := c.readI32u()
		if state.unreachable {
			break
		}
		variable := c.localVariable(index)
		newValue := state.pop()
		builder.DefineVariableInCurrentBB(variable, newValue)

	case wasm.OpcodeLocalTee:
		// local.tee is local.set that leaves the value on the stack, hence
		// peek instead of pop.
		index := c.readI32u()
		if state.unreachable {
			break
		}
		variable := c.localVariable(index)
		newValue := state.peek()
		builder.DefineVariableInCurrentBB(variable, newValue)
1108
	case wasm.OpcodeSelect, wasm.OpcodeTypedSelect:
		if op == wasm.OpcodeTypedSelect {
			state.pc += 2 // ignores the type which is only needed during validation.
		}

		if state.unreachable {
			break
		}

		// Stack (top first): condition, v2 (chosen when cond==0), v1.
		cond := state.pop()
		v2 := state.pop()
		v1 := state.pop()

		sl := builder.AllocateInstruction().
			AsSelect(cond, v1, v2).
			Insert(builder).
			Return()
		state.push(sl)
1127
	case wasm.OpcodeMemorySize:
		state.pc++ // skips the memory index.
		if state.unreachable {
			break
		}

		// Load the memory length in bytes: via the imported memory instance
		// when LocalMemoryBegin < 0, otherwise directly from the module context.
		var memSizeInBytes ssa.Value
		if c.offset.LocalMemoryBegin < 0 {
			memInstPtr := builder.AllocateInstruction().
				AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64).
				Insert(builder).
				Return()

			memSizeInBytes = builder.AllocateInstruction().
				AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI32).
				Insert(builder).
				Return()
		} else {
			memSizeInBytes = builder.AllocateInstruction().
				AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), ssa.TypeI32).
				Insert(builder).
				Return()
		}

		// Convert bytes to pages by shifting right by the page-size bit width.
		amount := builder.AllocateInstruction()
		amount.AsIconst32(uint32(wasm.MemoryPageSizeInBits))
		builder.InsertInstruction(amount)
		memSize := builder.AllocateInstruction().
			AsUshr(memSizeInBytes, amount.Return()).
			Insert(builder).
			Return()
		state.push(memSize)
1160
	case wasm.OpcodeMemoryGrow:
		state.pc++ // skips the memory index.
		if state.unreachable {
			break
		}

		// memory.grow is implemented out of line: store the caller module
		// context, then call the grow trampoline indirectly through the
		// execution context.
		c.storeCallerModuleContext()

		pages := state.pop()
		memoryGrowPtr := builder.AllocateInstruction().
			AsLoad(c.execCtxPtrValue,
				wazevoapi.ExecutionContextOffsetMemoryGrowTrampolineAddress.U32(),
				ssa.TypeI64,
			).Insert(builder).Return()

		args := c.allocateVarLengthValues(1, c.execCtxPtrValue, pages)
		callGrowRet := builder.
			AllocateInstruction().
			AsCallIndirect(memoryGrowPtr, &c.memoryGrowSig, args).
			Insert(builder).Return()
		state.push(callGrowRet)

		// After the memory grow, reload the cached memory base and len.
		c.reloadMemoryBaseLen()
1185
	case wasm.OpcodeI32Store,
		wasm.OpcodeI64Store,
		wasm.OpcodeF32Store,
		wasm.OpcodeF64Store,
		wasm.OpcodeI32Store8,
		wasm.OpcodeI32Store16,
		wasm.OpcodeI64Store8,
		wasm.OpcodeI64Store16,
		wasm.OpcodeI64Store32:

		// All store variants share one path: decode the memarg, pick the SSA
		// store opcode and access width, then bounds-check via memOpSetup.
		_, offset := c.readMemArg()
		if state.unreachable {
			break
		}
		var opSize uint64
		var opcode ssa.Opcode
		switch op {
		case wasm.OpcodeI32Store, wasm.OpcodeF32Store:
			opcode = ssa.OpcodeStore
			opSize = 4
		case wasm.OpcodeI64Store, wasm.OpcodeF64Store:
			opcode = ssa.OpcodeStore
			opSize = 8
		case wasm.OpcodeI32Store8, wasm.OpcodeI64Store8:
			opcode = ssa.OpcodeIstore8
			opSize = 1
		case wasm.OpcodeI32Store16, wasm.OpcodeI64Store16:
			opcode = ssa.OpcodeIstore16
			opSize = 2
		case wasm.OpcodeI64Store32:
			opcode = ssa.OpcodeIstore32
			opSize = 4
		default:
			panic("BUG")
		}

		// Stack (top first): value to store, then the base address operand.
		value := state.pop()
		baseAddr := state.pop()
		addr := c.memOpSetup(baseAddr, uint64(offset), opSize)
		builder.AllocateInstruction().
			AsStore(opcode, value, addr, offset).
			Insert(builder)
1228
	case wasm.OpcodeI32Load,
		wasm.OpcodeI64Load,
		wasm.OpcodeF32Load,
		wasm.OpcodeF64Load,
		wasm.OpcodeI32Load8S,
		wasm.OpcodeI32Load8U,
		wasm.OpcodeI32Load16S,
		wasm.OpcodeI32Load16U,
		wasm.OpcodeI64Load8S,
		wasm.OpcodeI64Load8U,
		wasm.OpcodeI64Load16S,
		wasm.OpcodeI64Load16U,
		wasm.OpcodeI64Load32S,
		wasm.OpcodeI64Load32U:
		_, offset := c.readMemArg()
		if state.unreachable {
			break
		}

		// First switch: access width in bytes, used for the bounds check.
		var opSize uint64
		switch op {
		case wasm.OpcodeI32Load, wasm.OpcodeF32Load:
			opSize = 4
		case wasm.OpcodeI64Load, wasm.OpcodeF64Load:
			opSize = 8
		case wasm.OpcodeI32Load8S, wasm.OpcodeI32Load8U:
			opSize = 1
		case wasm.OpcodeI32Load16S, wasm.OpcodeI32Load16U:
			opSize = 2
		case wasm.OpcodeI64Load8S, wasm.OpcodeI64Load8U:
			opSize = 1
		case wasm.OpcodeI64Load16S, wasm.OpcodeI64Load16U:
			opSize = 2
		case wasm.OpcodeI64Load32S, wasm.OpcodeI64Load32U:
			opSize = 4
		default:
			panic("BUG")
		}

		baseAddr := state.pop()
		addr := c.memOpSetup(baseAddr, uint64(offset), opSize)
		// Second switch: concrete SSA load. Plain loads carry the result type;
		// sub-word loads use the sign/zero-extending variants, with the bool
		// selecting a 64-bit (true) vs 32-bit (false) result.
		load := builder.AllocateInstruction()
		switch op {
		case wasm.OpcodeI32Load:
			load.AsLoad(addr, offset, ssa.TypeI32)
		case wasm.OpcodeI64Load:
			load.AsLoad(addr, offset, ssa.TypeI64)
		case wasm.OpcodeF32Load:
			load.AsLoad(addr, offset, ssa.TypeF32)
		case wasm.OpcodeF64Load:
			load.AsLoad(addr, offset, ssa.TypeF64)
		case wasm.OpcodeI32Load8S:
			load.AsExtLoad(ssa.OpcodeSload8, addr, offset, false)
		case wasm.OpcodeI32Load8U:
			load.AsExtLoad(ssa.OpcodeUload8, addr, offset, false)
		case wasm.OpcodeI32Load16S:
			load.AsExtLoad(ssa.OpcodeSload16, addr, offset, false)
		case wasm.OpcodeI32Load16U:
			load.AsExtLoad(ssa.OpcodeUload16, addr, offset, false)
		case wasm.OpcodeI64Load8S:
			load.AsExtLoad(ssa.OpcodeSload8, addr, offset, true)
		case wasm.OpcodeI64Load8U:
			load.AsExtLoad(ssa.OpcodeUload8, addr, offset, true)
		case wasm.OpcodeI64Load16S:
			load.AsExtLoad(ssa.OpcodeSload16, addr, offset, true)
		case wasm.OpcodeI64Load16U:
			load.AsExtLoad(ssa.OpcodeUload16, addr, offset, true)
		case wasm.OpcodeI64Load32S:
			load.AsExtLoad(ssa.OpcodeSload32, addr, offset, true)
		case wasm.OpcodeI64Load32U:
			load.AsExtLoad(ssa.OpcodeUload32, addr, offset, true)
		default:
			panic("BUG")
		}
		builder.InsertInstruction(load)
		state.push(load.Return())
	case wasm.OpcodeBlock:
		// Note: we do not need to create a BB for this as that would always have only one predecessor
		// which is the current BB, and therefore it's always ok to merge them in any way.

		bt := c.readBlockType()

		// Inside dead code, nested structured instructions only bump the
		// depth counter so the matching `end` can be paired up.
		if state.unreachable {
			state.unreachableDepth++
			break
		}

		// Only the merge point (the block's `end`) needs a new BB; its params
		// correspond to the block's result types.
		followingBlk := builder.AllocateBasicBlock()
		c.addBlockParamsFromWasmTypes(bt.Results, followingBlk)

		state.ctrlPush(controlFrame{
			kind:                         controlFrameKindBlock,
			originalStackLenWithoutParam: len(state.values) - len(bt.Params),
			followingBlock:               followingBlk,
			blockType:                    bt,
		})
	case wasm.OpcodeLoop:
		bt := c.readBlockType()

		if state.unreachable {
			state.unreachableDepth++
			break
		}

		// A loop needs a real header BB (branch target of `br` back-edges,
		// params = loop params) plus the BB after the loop (params = results).
		loopHeader, afterLoopBlock := builder.AllocateBasicBlock(), builder.AllocateBasicBlock()
		c.addBlockParamsFromWasmTypes(bt.Params, loopHeader)
		c.addBlockParamsFromWasmTypes(bt.Results, afterLoopBlock)

		originalLen := len(state.values) - len(bt.Params)
		state.ctrlPush(controlFrame{
			originalStackLenWithoutParam: originalLen,
			kind:                         controlFrameKindLoop,
			blk:                          loopHeader,
			followingBlock:               afterLoopBlock,
			blockType:                    bt,
		})

		// The loop params currently on the Wasm stack become the jump args.
		args := c.allocateVarLengthValues(originalLen)
		args = args.Append(builder.VarLengthPool(), state.values[originalLen:]...)

		// Insert the jump to the header of loop.
		br := builder.AllocateInstruction()
		br.AsJump(args, loopHeader)
		builder.InsertInstruction(br)

		c.switchTo(originalLen, loopHeader)

		// With ensureTermination, each loop iteration calls a trampoline that
		// checks the module exit code so a runaway loop can be interrupted.
		if c.ensureTermination {
			checkModuleExitCodePtr := builder.AllocateInstruction().
				AsLoad(c.execCtxPtrValue,
					wazevoapi.ExecutionContextOffsetCheckModuleExitCodeTrampolineAddress.U32(),
					ssa.TypeI64,
				).Insert(builder).Return()

			args := c.allocateVarLengthValues(1, c.execCtxPtrValue)
			builder.AllocateInstruction().
				AsCallIndirect(checkModuleExitCodePtr, &c.checkModuleExitCodeSig, args).
				Insert(builder)
		}
	case wasm.OpcodeIf:
		bt := c.readBlockType()

		if state.unreachable {
			state.unreachableDepth++
			break
		}

		v := state.pop()
		thenBlk, elseBlk, followingBlk := builder.AllocateBasicBlock(), builder.AllocateBasicBlock(), builder.AllocateBasicBlock()

		// We do not make the Wasm-level block parameters as SSA-level block params for if-else blocks
		// since they won't be PHI and the definition is unique.

		// On the other hand, the following block after if-else-end will likely have
		// multiple definitions (one in Then and another in Else blocks).
		c.addBlockParamsFromWasmTypes(bt.Results, followingBlk)

		// Snapshot the if-params so the Else arm can restore the same inputs
		// when translation switches over to it (see OpcodeElse).
		args := c.allocateVarLengthValues(len(bt.Params))
		args = args.Append(builder.VarLengthPool(), state.values[len(state.values)-len(bt.Params):]...)

		// Insert the conditional jump to the Else block.
		brz := builder.AllocateInstruction()
		brz.AsBrz(v, ssa.ValuesNil, elseBlk)
		builder.InsertInstruction(brz)

		// Then, insert the jump to the Then block.
		br := builder.AllocateInstruction()
		br.AsJump(ssa.ValuesNil, thenBlk)
		builder.InsertInstruction(br)

		// Pushed as "without else"; OpcodeElse rewrites the kind if an else
		// arm actually appears.
		state.ctrlPush(controlFrame{
			kind:                         controlFrameKindIfWithoutElse,
			originalStackLenWithoutParam: len(state.values) - len(bt.Params),
			blk:                          elseBlk,
			followingBlock:               followingBlk,
			blockType:                    bt,
			clonedArgs:                   args,
		})

		builder.SetCurrentBlock(thenBlk)

		// Then and Else (if exists) have only one predecessor.
		builder.Seal(thenBlk)
		builder.Seal(elseBlk)
	case wasm.OpcodeElse:
		ifctrl := state.ctrlPeekAt(0)
		if unreachable := state.unreachable; unreachable && state.unreachableDepth > 0 {
			// If it is currently in unreachable and is a nested if,
			// we just remove the entire else block.
			break
		}

		ifctrl.kind = controlFrameKindIfWithElse
		if !state.unreachable {
			// If this Then block is currently reachable, we have to insert the branching to the following BB.
			followingBlk := ifctrl.followingBlock // == the BB after if-then-else.
			args := c.nPeekDup(len(ifctrl.blockType.Results))
			c.insertJumpToBlock(args, followingBlk)
		} else {
			// The Then arm ended unreachable; the Else arm starts reachable again.
			state.unreachable = false
		}

		// Reset the stack so that we can correctly handle the else block.
		state.values = state.values[:ifctrl.originalStackLenWithoutParam]
		elseBlk := ifctrl.blk
		// Re-push the if-params captured at OpcodeIf as the Else arm's inputs.
		for _, arg := range ifctrl.clonedArgs.View() {
			state.push(arg)
		}

		builder.SetCurrentBlock(elseBlk)
1439
	case wasm.OpcodeEnd:
		// An `end` inside dead code just closes a counted nested frame.
		if state.unreachableDepth > 0 {
			state.unreachableDepth--
			break
		}

		ctrl := state.ctrlPop()
		followingBlk := ctrl.followingBlock

		unreachable := state.unreachable
		if !unreachable {
			// Top n-th args will be used as a result of the current control frame.
			args := c.nPeekDup(len(ctrl.blockType.Results))

			// Insert the unconditional branch to the target.
			c.insertJumpToBlock(args, followingBlk)
		} else { // recover from the unreachable state.
			state.unreachable = false
		}

		switch ctrl.kind {
		case controlFrameKindFunction:
			break // This is the very end of function.
		case controlFrameKindLoop:
			// Loop header block can be reached from any br/br_table contained in the loop,
			// so now that we've reached End of it, we can seal it.
			builder.Seal(ctrl.blk)
		case controlFrameKindIfWithoutElse:
			// If this is the end of Then block, we have to emit the empty Else block.
			elseBlk := ctrl.blk
			builder.SetCurrentBlock(elseBlk)
			c.insertJumpToBlock(ctrl.clonedArgs, followingBlk)
		}

		// All branches into the merge point are now known, so it can be sealed.
		builder.Seal(followingBlk)

		// Ready to start translating the following block.
		c.switchTo(ctrl.originalStackLenWithoutParam, followingBlk)
1478
	case wasm.OpcodeBr:
		labelIndex := c.readI32u()
		if state.unreachable {
			break
		}

		// Duplicate the target frame's branch args off the stack and jump.
		targetBlk, argNum := state.brTargetArgNumFor(labelIndex)
		args := c.nPeekDup(argNum)
		c.insertJumpToBlock(args, targetBlk)

		// br is unconditional, so everything after it is dead code.
		state.unreachable = true
1490
	case wasm.OpcodeBrIf:
		labelIndex := c.readI32u()
		if state.unreachable {
			break
		}

		v := state.pop()

		targetBlk, argNum := state.brTargetArgNumFor(labelIndex)
		args := c.nPeekDup(argNum)
		var sealTargetBlk bool
		if c.needListener && targetBlk.ReturnBlock() { // In this case, we have to call the listener before returning.
			// Save the currently active block.
			current := builder.CurrentBlock()

			// Allocate the trampoline block to the return where we call the listener.
			targetBlk = builder.AllocateBasicBlock()
			builder.SetCurrentBlock(targetBlk)
			sealTargetBlk = true

			c.callListenerAfter()

			instr := builder.AllocateInstruction()
			instr.AsReturn(args)
			builder.InsertInstruction(instr)

			// The trampoline consumed the args; the brnz itself carries none.
			args = ssa.ValuesNil

			// Revert the current block.
			builder.SetCurrentBlock(current)
		}

		// Insert the conditional jump to the target block.
		brnz := builder.AllocateInstruction()
		brnz.AsBrnz(v, args, targetBlk)
		builder.InsertInstruction(brnz)

		if sealTargetBlk {
			builder.Seal(targetBlk)
		}

		// Insert the unconditional jump to the Else block which corresponds to after br_if.
		elseBlk := builder.AllocateBasicBlock()
		c.insertJumpToBlock(ssa.ValuesNil, elseBlk)

		// Now start translating the instructions after br_if.
		builder.Seal(elseBlk) // Else of br_if has the current block as the only one successor.
		builder.SetCurrentBlock(elseBlk)
1539
	case wasm.OpcodeBrTable:
		// All label immediates (plus the default) must be decoded even when
		// unreachable, to keep the byte stream in sync.
		labels := state.tmpForBrTable[:0]
		labelCount := c.readI32u()
		for i := 0; i < int(labelCount); i++ {
			labels = append(labels, c.readI32u())
		}
		labels = append(labels, c.readI32u()) // default label.
		if state.unreachable {
			break
		}

		index := state.pop()
		if labelCount == 0 { // If this br_table is empty, we can just emit the unconditional jump.
			targetBlk, argNum := state.brTargetArgNumFor(labels[0])
			args := c.nPeekDup(argNum)
			c.insertJumpToBlock(args, targetBlk)
		} else {
			// NOTE(review): labels aliases state.tmpForBrTable here — confirm
			// lowerBrTable does not itself reuse tmpForBrTable concurrently.
			c.lowerBrTable(labels, index)
		}
		state.tmpForBrTable = labels // reuse the temporary slice for next use.
		state.unreachable = true
1561
	case wasm.OpcodeNop:
	case wasm.OpcodeReturn:
		if state.unreachable {
			break
		}
		// The after-function listener must be invoked before returning.
		if c.needListener {
			c.callListenerAfter()
		}

		// The function results are the top c.results() values on the stack.
		results := c.nPeekDup(c.results())
		instr := builder.AllocateInstruction()

		instr.AsReturn(results)
		builder.InsertInstruction(instr)
		state.unreachable = true

	case wasm.OpcodeUnreachable:
		if state.unreachable {
			break
		}
		// Lowered to an exit with a dedicated unreachable exit code.
		exit := builder.AllocateInstruction()
		exit.AsExitWithCode(c.execCtxPtrValue, wazevoapi.ExitCodeUnreachable)
		builder.InsertInstruction(exit)
		state.unreachable = true
1586
	case wasm.OpcodeCallIndirect:
		typeIndex := c.readI32u()
		tableIndex := c.readI32u()
		if state.unreachable {
			break
		}
		c.lowerCallIndirect(typeIndex, tableIndex)

	case wasm.OpcodeCall:
		fnIndex := c.readI32u()
		if state.unreachable {
			break
		}

		// Resolve the callee's type index: for imported functions, scan the
		// import section counting only function imports until the fnIndex-th
		// one is found; otherwise index directly into the function section.
		var typIndex wasm.Index
		if fnIndex < c.m.ImportFunctionCount {
			// Before transfer the control to the callee, we have to store the current module's moduleContextPtr
			// into execContext.callerModuleContextPtr in case when the callee is a Go function.
			c.storeCallerModuleContext()
			var fi int
			for i := range c.m.ImportSection {
				imp := &c.m.ImportSection[i]
				if imp.Type == wasm.ExternTypeFunc {
					if fi == int(fnIndex) {
						typIndex = imp.DescFunc
						break
					}
					fi++
				}
			}
		} else {
			typIndex = c.m.FunctionSection[fnIndex-c.m.ImportFunctionCount]
		}
		typ := &c.m.TypeSection[typIndex]

		// Pop the Wasm-level arguments; the ABI prepends the execution context
		// and the callee's module context before them (hence 2+len(vs)).
		argN := len(typ.Params)
		tail := len(state.values) - argN
		vs := state.values[tail:]
		state.values = state.values[:tail]
		args := c.allocateVarLengthValues(2+len(vs), c.execCtxPtrValue)

		sig := c.signatures[typ]
		call := builder.AllocateInstruction()
		if fnIndex >= c.m.ImportFunctionCount {
			args = args.Append(builder.VarLengthPool(), c.moduleCtxPtrValue) // This case the callee module is itself.
			args = args.Append(builder.VarLengthPool(), vs...)
			call.AsCall(FunctionIndexToFuncRef(fnIndex), sig, args)
			builder.InsertInstruction(call)
		} else {
			// This case we have to read the address of the imported function from the module context.
			moduleCtx := c.moduleCtxPtrValue
			loadFuncPtr, loadModuleCtxPtr := builder.AllocateInstruction(), builder.AllocateInstruction()
			funcPtrOffset, moduleCtxPtrOffset, _ := c.offset.ImportedFunctionOffset(fnIndex)
			loadFuncPtr.AsLoad(moduleCtx, funcPtrOffset.U32(), ssa.TypeI64)
			loadModuleCtxPtr.AsLoad(moduleCtx, moduleCtxPtrOffset.U32(), ssa.TypeI64)
			builder.InsertInstruction(loadFuncPtr)
			builder.InsertInstruction(loadModuleCtxPtr)

			args = args.Append(builder.VarLengthPool(), loadModuleCtxPtr.Return())
			args = args.Append(builder.VarLengthPool(), vs...)
			call.AsCallIndirect(loadFuncPtr.Return(), sig, args)
			builder.InsertInstruction(call)
		}

		// Push the call results back onto the Wasm stack.
		first, rest := call.Returns()
		if first.Valid() {
			state.push(first)
		}
		for _, v := range rest {
			state.push(v)
		}

		// The callee may have grown memory or changed cached state; reload.
		c.reloadAfterCall()
1660
	case wasm.OpcodeDrop:
		if state.unreachable {
			break
		}
		_ = state.pop()
	case wasm.OpcodeF64ConvertI32S, wasm.OpcodeF64ConvertI64S, wasm.OpcodeF64ConvertI32U, wasm.OpcodeF64ConvertI64U:
		if state.unreachable {
			break
		}
		// AsFcvtFromInt flags: (signed source?, 64-bit float destination?).
		result := builder.AllocateInstruction().AsFcvtFromInt(
			state.pop(),
			op == wasm.OpcodeF64ConvertI32S || op == wasm.OpcodeF64ConvertI64S,
			true,
		).Insert(builder).Return()
		state.push(result)
	case wasm.OpcodeF32ConvertI32S, wasm.OpcodeF32ConvertI64S, wasm.OpcodeF32ConvertI32U, wasm.OpcodeF32ConvertI64U:
		if state.unreachable {
			break
		}
		result := builder.AllocateInstruction().AsFcvtFromInt(
			state.pop(),
			op == wasm.OpcodeF32ConvertI32S || op == wasm.OpcodeF32ConvertI64S,
			false,
		).Insert(builder).Return()
		state.push(result)
	case wasm.OpcodeF32DemoteF64:
		if state.unreachable {
			break
		}
		cvt := builder.AllocateInstruction()
		cvt.AsFdemote(state.pop())
		builder.InsertInstruction(cvt)
		state.push(cvt.Return())
	case wasm.OpcodeF64PromoteF32:
		if state.unreachable {
			break
		}
		cvt := builder.AllocateInstruction()
		cvt.AsFpromote(state.pop())
		builder.InsertInstruction(cvt)
		state.push(cvt.Return())
1702
	case wasm.OpcodeVecPrefix:
		// SIMD opcodes are encoded behind a one-byte prefix; dispatch on the
		// following byte.
		state.pc++
		vecOp := c.wasmFunctionBody[state.pc]
		switch vecOp {
		case wasm.OpcodeVecV128Const:
			// 16-byte immediate, read as two little-endian u64s. The second
			// advance is 7 (not 8) because the outer loop increments pc once
			// more per opcode iteration.
			state.pc++
			lo := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:])
			state.pc += 8
			hi := binary.LittleEndian.Uint64(c.wasmFunctionBody[state.pc:])
			state.pc += 7
			if state.unreachable {
				break
			}
			ret := builder.AllocateInstruction().AsVconst(lo, hi).Insert(builder).Return()
			state.push(ret)
		case wasm.OpcodeVecV128Load:
			_, offset := c.readMemArg()
			if state.unreachable {
				break
			}
			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), 16)
			load := builder.AllocateInstruction()
			load.AsLoad(addr, offset, ssa.TypeV128)
			builder.InsertInstruction(load)
			state.push(load.Return())
		case wasm.OpcodeVecV128Load8Lane, wasm.OpcodeVecV128Load16Lane, wasm.OpcodeVecV128Load32Lane:
			// memarg followed by a one-byte lane index immediate.
			_, offset := c.readMemArg()
			state.pc++
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			var loadOp ssa.Opcode
			var opSize uint64
			switch vecOp {
			case wasm.OpcodeVecV128Load8Lane:
				loadOp, lane, opSize = ssa.OpcodeUload8, ssa.VecLaneI8x16, 1
			case wasm.OpcodeVecV128Load16Lane:
				loadOp, lane, opSize = ssa.OpcodeUload16, ssa.VecLaneI16x8, 2
			case wasm.OpcodeVecV128Load32Lane:
				loadOp, lane, opSize = ssa.OpcodeUload32, ssa.VecLaneI32x4, 4
			}
			laneIndex := c.wasmFunctionBody[state.pc]
			// Stack (top first): the vector to update, then the base address.
			vector := state.pop()
			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), opSize)
			load := builder.AllocateInstruction().
				AsExtLoad(loadOp, addr, offset, false).
				Insert(builder).Return()
			ret := builder.AllocateInstruction().
				AsInsertlane(vector, load, laneIndex, lane).
				Insert(builder).Return()
			state.push(ret)
		case wasm.OpcodeVecV128Load64Lane:
			// 64-bit lane: a plain i64 load inserted into the chosen lane.
			_, offset := c.readMemArg()
			state.pc++
			if state.unreachable {
				break
			}
			laneIndex := c.wasmFunctionBody[state.pc]
			vector := state.pop()
			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), 8)
			load := builder.AllocateInstruction().
				AsLoad(addr, offset, ssa.TypeI64).
				Insert(builder).Return()
			ret := builder.AllocateInstruction().
				AsInsertlane(vector, load, laneIndex, ssa.VecLaneI64x2).
				Insert(builder).Return()
			state.push(ret)
1774
		case wasm.OpcodeVecV128Load32zero, wasm.OpcodeVecV128Load64zero:
			// Loads a scalar into lane 0 and zeroes the rest of the vector.
			_, offset := c.readMemArg()
			if state.unreachable {
				break
			}

			var scalarType ssa.Type
			switch vecOp {
			case wasm.OpcodeVecV128Load32zero:
				scalarType = ssa.TypeF32
			case wasm.OpcodeVecV128Load64zero:
				scalarType = ssa.TypeF64
			}

			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), uint64(scalarType.Size()))

			ret := builder.AllocateInstruction().
				AsVZeroExtLoad(addr, offset, scalarType).
				Insert(builder).Return()
			state.push(ret)

		case wasm.OpcodeVecV128Load8x8u, wasm.OpcodeVecV128Load8x8s,
			wasm.OpcodeVecV128Load16x4u, wasm.OpcodeVecV128Load16x4s,
			wasm.OpcodeVecV128Load32x2u, wasm.OpcodeVecV128Load32x2s:
			// Load-extend: read 8 bytes and widen each element to twice its
			// width; the fallthroughs set `signed` for the signed variants.
			_, offset := c.readMemArg()
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			var signed bool
			switch vecOp {
			case wasm.OpcodeVecV128Load8x8s:
				signed = true
				fallthrough
			case wasm.OpcodeVecV128Load8x8u:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecV128Load16x4s:
				signed = true
				fallthrough
			case wasm.OpcodeVecV128Load16x4u:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecV128Load32x2s:
				signed = true
				fallthrough
			case wasm.OpcodeVecV128Load32x2u:
				lane = ssa.VecLaneI32x4
			}
			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), 8)
			// The 8 raw bytes are materialized as an F64 load, then widened.
			load := builder.AllocateInstruction().
				AsLoad(addr, offset, ssa.TypeF64).
				Insert(builder).Return()
			ret := builder.AllocateInstruction().
				AsWiden(load, lane, signed, true).
				Insert(builder).Return()
			state.push(ret)
		// v128.loadN_splat: loads a single N-bit scalar and replicates it
		// across all lanes of the resulting vector.
		case wasm.OpcodeVecV128Load8Splat, wasm.OpcodeVecV128Load16Splat,
			wasm.OpcodeVecV128Load32Splat, wasm.OpcodeVecV128Load64Splat:
			_, offset := c.readMemArg()
			if state.unreachable {
				break
			}
			// opSize is the number of bytes actually read, used for the
			// memory bounds check in memOpSetup.
			var lane ssa.VecLane
			var opSize uint64
			switch vecOp {
			case wasm.OpcodeVecV128Load8Splat:
				lane, opSize = ssa.VecLaneI8x16, 1
			case wasm.OpcodeVecV128Load16Splat:
				lane, opSize = ssa.VecLaneI16x8, 2
			case wasm.OpcodeVecV128Load32Splat:
				lane, opSize = ssa.VecLaneI32x4, 4
			case wasm.OpcodeVecV128Load64Splat:
				lane, opSize = ssa.VecLaneI64x2, 8
			}
			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), opSize)
			ret := builder.AllocateInstruction().
				AsLoadSplat(addr, offset, lane).
				Insert(builder).Return()
			state.push(ret)
		// v128.store: stores the entire 16-byte vector at the effective address.
		case wasm.OpcodeVecV128Store:
			_, offset := c.readMemArg()
			if state.unreachable {
				break
			}
			value := state.pop()
			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), 16)
			builder.AllocateInstruction().
				AsStore(ssa.OpcodeStore, value, addr, offset).
				Insert(builder)
		// v128.storeN_lane: extracts one lane of the vector and stores it as
		// an N-bit scalar at the effective address.
		case wasm.OpcodeVecV128Store8Lane, wasm.OpcodeVecV128Store16Lane,
			wasm.OpcodeVecV128Store32Lane, wasm.OpcodeVecV128Store64Lane:
			_, offset := c.readMemArg()
			// Advance pc past the lane-index immediate byte *before* the
			// unreachable check: the immediate must be consumed even when this
			// code is unreachable, so later opcodes are decoded correctly.
			state.pc++
			if state.unreachable {
				break
			}
			laneIndex := c.wasmFunctionBody[state.pc]
			var storeOp ssa.Opcode
			var lane ssa.VecLane
			var opSize uint64
			switch vecOp {
			case wasm.OpcodeVecV128Store8Lane:
				storeOp, lane, opSize = ssa.OpcodeIstore8, ssa.VecLaneI8x16, 1
			case wasm.OpcodeVecV128Store16Lane:
				storeOp, lane, opSize = ssa.OpcodeIstore16, ssa.VecLaneI16x8, 2
			case wasm.OpcodeVecV128Store32Lane:
				storeOp, lane, opSize = ssa.OpcodeIstore32, ssa.VecLaneI32x4, 4
			case wasm.OpcodeVecV128Store64Lane:
				// 64-bit lanes use the full-width store opcode.
				storeOp, lane, opSize = ssa.OpcodeStore, ssa.VecLaneI64x2, 8
			}
			vector := state.pop()
			baseAddr := state.pop()
			addr := c.memOpSetup(baseAddr, uint64(offset), opSize)
			value := builder.AllocateInstruction().
				AsExtractlane(vector, laneIndex, lane, false).
				Insert(builder).Return()
			builder.AllocateInstruction().
				AsStore(storeOp, value, addr, offset).
				Insert(builder)
		// v128.not: bitwise complement.
		case wasm.OpcodeVecV128Not:
			if state.unreachable {
				break
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVbnot(v1).Insert(builder).Return()
			state.push(ret)
		// v128.and: bitwise AND.
		case wasm.OpcodeVecV128And:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVband(v1, v2).Insert(builder).Return()
			state.push(ret)
		// v128.andnot: bitwise AND-NOT (v1 &^ v2 in Wasm semantics).
		case wasm.OpcodeVecV128AndNot:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVbandnot(v1, v2).Insert(builder).Return()
			state.push(ret)
		// v128.or: bitwise OR.
		case wasm.OpcodeVecV128Or:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVbor(v1, v2).Insert(builder).Return()
			state.push(ret)
		// v128.xor: bitwise XOR.
		case wasm.OpcodeVecV128Xor:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVbxor(v1, v2).Insert(builder).Return()
			state.push(ret)
		// v128.bitselect: per-bit select between v1 and v2 controlled by mask c.
		case wasm.OpcodeVecV128Bitselect:
			if state.unreachable {
				break
			}
			c := state.pop()
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVbitselect(c, v1, v2).Insert(builder).Return()
			state.push(ret)
		// v128.any_true: reduces the vector to a boolean, true if any bit is set.
		case wasm.OpcodeVecV128AnyTrue:
			if state.unreachable {
				break
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVanyTrue(v1).Insert(builder).Return()
			state.push(ret)
		// iNxM.all_true: reduces the vector to a boolean per the lane shape.
		case wasm.OpcodeVecI8x16AllTrue, wasm.OpcodeVecI16x8AllTrue, wasm.OpcodeVecI32x4AllTrue, wasm.OpcodeVecI64x2AllTrue:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16AllTrue:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8AllTrue:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4AllTrue:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2AllTrue:
				lane = ssa.VecLaneI64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVallTrue(v1, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.bitmask: gathers the high bit of each lane into a scalar mask
		// (lowered to VhighBits).
		case wasm.OpcodeVecI8x16BitMask, wasm.OpcodeVecI16x8BitMask, wasm.OpcodeVecI32x4BitMask, wasm.OpcodeVecI64x2BitMask:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16BitMask:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8BitMask:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4BitMask:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2BitMask:
				lane = ssa.VecLaneI64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVhighBits(v1, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.abs: per-lane integer absolute value.
		case wasm.OpcodeVecI8x16Abs, wasm.OpcodeVecI16x8Abs, wasm.OpcodeVecI32x4Abs, wasm.OpcodeVecI64x2Abs:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16Abs:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8Abs:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Abs:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Abs:
				lane = ssa.VecLaneI64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVIabs(v1, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.neg: per-lane integer negation.
		case wasm.OpcodeVecI8x16Neg, wasm.OpcodeVecI16x8Neg, wasm.OpcodeVecI32x4Neg, wasm.OpcodeVecI64x2Neg:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16Neg:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8Neg:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Neg:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Neg:
				lane = ssa.VecLaneI64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVIneg(v1, lane).Insert(builder).Return()
			state.push(ret)
		// i8x16.popcnt: per-lane population count (defined only for 8-bit lanes).
		case wasm.OpcodeVecI8x16Popcnt:
			if state.unreachable {
				break
			}
			lane := ssa.VecLaneI8x16
			v1 := state.pop()

			ret := builder.AllocateInstruction().AsVIpopcnt(v1, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.add: per-lane wrapping integer addition.
		case wasm.OpcodeVecI8x16Add, wasm.OpcodeVecI16x8Add, wasm.OpcodeVecI32x4Add, wasm.OpcodeVecI64x2Add:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16Add:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8Add:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Add:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Add:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVIadd(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.add_sat_s: per-lane signed saturating addition.
		case wasm.OpcodeVecI8x16AddSatS, wasm.OpcodeVecI16x8AddSatS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16AddSatS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8AddSatS:
				lane = ssa.VecLaneI16x8
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVSaddSat(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.add_sat_u: per-lane unsigned saturating addition.
		case wasm.OpcodeVecI8x16AddSatU, wasm.OpcodeVecI16x8AddSatU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16AddSatU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8AddSatU:
				lane = ssa.VecLaneI16x8
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVUaddSat(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.sub_sat_s: per-lane signed saturating subtraction.
		case wasm.OpcodeVecI8x16SubSatS, wasm.OpcodeVecI16x8SubSatS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16SubSatS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8SubSatS:
				lane = ssa.VecLaneI16x8
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVSsubSat(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.sub_sat_u: per-lane unsigned saturating subtraction.
		case wasm.OpcodeVecI8x16SubSatU, wasm.OpcodeVecI16x8SubSatU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16SubSatU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8SubSatU:
				lane = ssa.VecLaneI16x8
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVUsubSat(v1, v2, lane).Insert(builder).Return()
			state.push(ret)

		// iNxM.sub: per-lane wrapping integer subtraction.
		case wasm.OpcodeVecI8x16Sub, wasm.OpcodeVecI16x8Sub, wasm.OpcodeVecI32x4Sub, wasm.OpcodeVecI64x2Sub:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16Sub:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8Sub:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Sub:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Sub:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVIsub(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.min_s: per-lane signed minimum.
		case wasm.OpcodeVecI8x16MinS, wasm.OpcodeVecI16x8MinS, wasm.OpcodeVecI32x4MinS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16MinS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8MinS:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4MinS:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVImin(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.min_u: per-lane unsigned minimum.
		case wasm.OpcodeVecI8x16MinU, wasm.OpcodeVecI16x8MinU, wasm.OpcodeVecI32x4MinU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16MinU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8MinU:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4MinU:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVUmin(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.max_s: per-lane signed maximum.
		case wasm.OpcodeVecI8x16MaxS, wasm.OpcodeVecI16x8MaxS, wasm.OpcodeVecI32x4MaxS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16MaxS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8MaxS:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4MaxS:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVImax(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.max_u: per-lane unsigned maximum.
		case wasm.OpcodeVecI8x16MaxU, wasm.OpcodeVecI16x8MaxU, wasm.OpcodeVecI32x4MaxU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16MaxU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8MaxU:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4MaxU:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVUmax(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.avgr_u: per-lane unsigned rounding average.
		case wasm.OpcodeVecI8x16AvgrU, wasm.OpcodeVecI16x8AvgrU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16AvgrU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8AvgrU:
				lane = ssa.VecLaneI16x8
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVAvgRound(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.mul: per-lane wrapping integer multiplication (no i8x16 variant
		// exists in the Wasm SIMD spec).
		case wasm.OpcodeVecI16x8Mul, wasm.OpcodeVecI32x4Mul, wasm.OpcodeVecI64x2Mul:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI16x8Mul:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Mul:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Mul:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVImul(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// i16x8.q15mulr_sat_s: saturating rounding Q15 multiplication.
		case wasm.OpcodeVecI16x8Q15mulrSatS:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsSqmulRoundSat(v1, v2, ssa.VecLaneI16x8).Insert(builder).Return()
			state.push(ret)
		// iNxM.eq: per-lane equality comparison.
		case wasm.OpcodeVecI8x16Eq, wasm.OpcodeVecI16x8Eq, wasm.OpcodeVecI32x4Eq, wasm.OpcodeVecI64x2Eq:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16Eq:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8Eq:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Eq:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Eq:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondEqual, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.ne: per-lane inequality comparison.
		case wasm.OpcodeVecI8x16Ne, wasm.OpcodeVecI16x8Ne, wasm.OpcodeVecI32x4Ne, wasm.OpcodeVecI64x2Ne:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16Ne:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8Ne:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4Ne:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2Ne:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondNotEqual, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.lt_s: per-lane signed less-than (64-bit lanes exist only signed).
		case wasm.OpcodeVecI8x16LtS, wasm.OpcodeVecI16x8LtS, wasm.OpcodeVecI32x4LtS, wasm.OpcodeVecI64x2LtS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16LtS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8LtS:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4LtS:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2LtS:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThan, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.lt_u: per-lane unsigned less-than.
		case wasm.OpcodeVecI8x16LtU, wasm.OpcodeVecI16x8LtU, wasm.OpcodeVecI32x4LtU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16LtU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8LtU:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4LtU:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThan, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.le_s: per-lane signed less-than-or-equal.
		case wasm.OpcodeVecI8x16LeS, wasm.OpcodeVecI16x8LeS, wasm.OpcodeVecI32x4LeS, wasm.OpcodeVecI64x2LeS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16LeS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8LeS:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4LeS:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2LeS:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedLessThanOrEqual, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.le_u: per-lane unsigned less-than-or-equal.
		case wasm.OpcodeVecI8x16LeU, wasm.OpcodeVecI16x8LeU, wasm.OpcodeVecI32x4LeU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16LeU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8LeU:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4LeU:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedLessThanOrEqual, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.gt_s: per-lane signed greater-than.
		case wasm.OpcodeVecI8x16GtS, wasm.OpcodeVecI16x8GtS, wasm.OpcodeVecI32x4GtS, wasm.OpcodeVecI64x2GtS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16GtS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8GtS:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4GtS:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2GtS:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThan, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.gt_u: per-lane unsigned greater-than.
		case wasm.OpcodeVecI8x16GtU, wasm.OpcodeVecI16x8GtU, wasm.OpcodeVecI32x4GtU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16GtU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8GtU:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4GtU:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThan, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.ge_s: per-lane signed greater-than-or-equal.
		case wasm.OpcodeVecI8x16GeS, wasm.OpcodeVecI16x8GeS, wasm.OpcodeVecI32x4GeS, wasm.OpcodeVecI64x2GeS:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16GeS:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8GeS:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4GeS:
				lane = ssa.VecLaneI32x4
			case wasm.OpcodeVecI64x2GeS:
				lane = ssa.VecLaneI64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondSignedGreaterThanOrEqual, lane).Insert(builder).Return()
			state.push(ret)
		// iNxM.ge_u: per-lane unsigned greater-than-or-equal.
		case wasm.OpcodeVecI8x16GeU, wasm.OpcodeVecI16x8GeU, wasm.OpcodeVecI32x4GeU:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecI8x16GeU:
				lane = ssa.VecLaneI8x16
			case wasm.OpcodeVecI16x8GeU:
				lane = ssa.VecLaneI16x8
			case wasm.OpcodeVecI32x4GeU:
				lane = ssa.VecLaneI32x4
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVIcmp(v1, v2, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.max: per-lane float maximum (IEEE/Wasm semantics via VFmax).
		case wasm.OpcodeVecF32x4Max, wasm.OpcodeVecF64x2Max:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Max:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Max:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFmax(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.abs: per-lane float absolute value.
		case wasm.OpcodeVecF32x4Abs, wasm.OpcodeVecF64x2Abs:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Abs:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Abs:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFabs(v1, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.min: per-lane float minimum.
		case wasm.OpcodeVecF32x4Min, wasm.OpcodeVecF64x2Min:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Min:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Min:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFmin(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.neg: per-lane float negation.
		case wasm.OpcodeVecF32x4Neg, wasm.OpcodeVecF64x2Neg:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Neg:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Neg:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFneg(v1, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.sqrt: per-lane float square root.
		case wasm.OpcodeVecF32x4Sqrt, wasm.OpcodeVecF64x2Sqrt:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Sqrt:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Sqrt:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVSqrt(v1, lane).Insert(builder).Return()
			state.push(ret)

		// fNxM.add: per-lane float addition.
		case wasm.OpcodeVecF32x4Add, wasm.OpcodeVecF64x2Add:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Add:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Add:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFadd(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.sub: per-lane float subtraction.
		case wasm.OpcodeVecF32x4Sub, wasm.OpcodeVecF64x2Sub:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Sub:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Sub:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFsub(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.mul: per-lane float multiplication.
		case wasm.OpcodeVecF32x4Mul, wasm.OpcodeVecF64x2Mul:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Mul:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Mul:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFmul(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.div: per-lane float division.
		case wasm.OpcodeVecF32x4Div, wasm.OpcodeVecF64x2Div:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Div:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Div:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFdiv(v1, v2, lane).Insert(builder).Return()
			state.push(ret)

		// i16x8.extadd_pairwise_i8x16_s/u: sums adjacent pairs of 8-bit lanes
		// into 16-bit lanes, signed or unsigned.
		case wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S, wasm.OpcodeVecI16x8ExtaddPairwiseI8x16U:
			if state.unreachable {
				break
			}
			v := state.pop()
			signed := vecOp == wasm.OpcodeVecI16x8ExtaddPairwiseI8x16S
			ret := builder.AllocateInstruction().AsExtIaddPairwise(v, ssa.VecLaneI8x16, signed).Insert(builder).Return()
			state.push(ret)

		// i32x4.extadd_pairwise_i16x8_s/u: sums adjacent pairs of 16-bit lanes
		// into 32-bit lanes, signed or unsigned.
		case wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S, wasm.OpcodeVecI32x4ExtaddPairwiseI16x8U:
			if state.unreachable {
				break
			}
			v := state.pop()
			signed := vecOp == wasm.OpcodeVecI32x4ExtaddPairwiseI16x8S
			ret := builder.AllocateInstruction().AsExtIaddPairwise(v, ssa.VecLaneI16x8, signed).Insert(builder).Return()
			state.push(ret)

		// Extended multiplies: lowerExtMul(src lane, dst lane, signed, low half).
		// i16x8.extmul_low_i8x16_s/u.
		case wasm.OpcodeVecI16x8ExtMulLowI8x16S, wasm.OpcodeVecI16x8ExtMulLowI8x16U:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := c.lowerExtMul(
				v1, v2,
				ssa.VecLaneI8x16, ssa.VecLaneI16x8,
				vecOp == wasm.OpcodeVecI16x8ExtMulLowI8x16S, true)
			state.push(ret)

		// i16x8.extmul_high_i8x16_s/u.
		case wasm.OpcodeVecI16x8ExtMulHighI8x16S, wasm.OpcodeVecI16x8ExtMulHighI8x16U:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := c.lowerExtMul(
				v1, v2,
				ssa.VecLaneI8x16, ssa.VecLaneI16x8,
				vecOp == wasm.OpcodeVecI16x8ExtMulHighI8x16S, false)
			state.push(ret)

		// i32x4.extmul_low_i16x8_s/u.
		case wasm.OpcodeVecI32x4ExtMulLowI16x8S, wasm.OpcodeVecI32x4ExtMulLowI16x8U:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := c.lowerExtMul(
				v1, v2,
				ssa.VecLaneI16x8, ssa.VecLaneI32x4,
				vecOp == wasm.OpcodeVecI32x4ExtMulLowI16x8S, true)
			state.push(ret)

		// i32x4.extmul_high_i16x8_s/u.
		case wasm.OpcodeVecI32x4ExtMulHighI16x8S, wasm.OpcodeVecI32x4ExtMulHighI16x8U:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := c.lowerExtMul(
				v1, v2,
				ssa.VecLaneI16x8, ssa.VecLaneI32x4,
				vecOp == wasm.OpcodeVecI32x4ExtMulHighI16x8S, false)
			state.push(ret)
		// i64x2.extmul_low_i32x4_s/u.
		case wasm.OpcodeVecI64x2ExtMulLowI32x4S, wasm.OpcodeVecI64x2ExtMulLowI32x4U:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := c.lowerExtMul(
				v1, v2,
				ssa.VecLaneI32x4, ssa.VecLaneI64x2,
				vecOp == wasm.OpcodeVecI64x2ExtMulLowI32x4S, true)
			state.push(ret)

		// i64x2.extmul_high_i32x4_s/u.
		case wasm.OpcodeVecI64x2ExtMulHighI32x4S, wasm.OpcodeVecI64x2ExtMulHighI32x4U:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := c.lowerExtMul(
				v1, v2,
				ssa.VecLaneI32x4, ssa.VecLaneI64x2,
				vecOp == wasm.OpcodeVecI64x2ExtMulHighI32x4S, false)
			state.push(ret)

		// i32x4.dot_i16x8_s: signed widening pairwise dot product.
		case wasm.OpcodeVecI32x4DotI16x8S:
			if state.unreachable {
				break
			}
			v2 := state.pop()
			v1 := state.pop()

			ret := builder.AllocateInstruction().AsWideningPairwiseDotProductS(v1, v2).Insert(builder).Return()
			state.push(ret)

		// fNxM.eq: per-lane float equality comparison.
		case wasm.OpcodeVecF32x4Eq, wasm.OpcodeVecF64x2Eq:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Eq:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Eq:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcmp(v1, v2, ssa.FloatCmpCondEqual, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.ne: per-lane float inequality comparison.
		case wasm.OpcodeVecF32x4Ne, wasm.OpcodeVecF64x2Ne:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Ne:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Ne:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcmp(v1, v2, ssa.FloatCmpCondNotEqual, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.lt: per-lane float less-than comparison.
		case wasm.OpcodeVecF32x4Lt, wasm.OpcodeVecF64x2Lt:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Lt:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Lt:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcmp(v1, v2, ssa.FloatCmpCondLessThan, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.le: per-lane float less-than-or-equal comparison.
		case wasm.OpcodeVecF32x4Le, wasm.OpcodeVecF64x2Le:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Le:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Le:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcmp(v1, v2, ssa.FloatCmpCondLessThanOrEqual, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.gt: per-lane float greater-than comparison.
		case wasm.OpcodeVecF32x4Gt, wasm.OpcodeVecF64x2Gt:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Gt:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Gt:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThan, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.ge: per-lane float greater-than-or-equal comparison.
		case wasm.OpcodeVecF32x4Ge, wasm.OpcodeVecF64x2Ge:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Ge:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Ge:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcmp(v1, v2, ssa.FloatCmpCondGreaterThanOrEqual, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.ceil: per-lane round toward positive infinity.
		case wasm.OpcodeVecF32x4Ceil, wasm.OpcodeVecF64x2Ceil:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Ceil:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Ceil:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVCeil(v1, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.floor: per-lane round toward negative infinity.
		case wasm.OpcodeVecF32x4Floor, wasm.OpcodeVecF64x2Floor:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Floor:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Floor:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVFloor(v1, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.trunc: per-lane round toward zero.
		case wasm.OpcodeVecF32x4Trunc, wasm.OpcodeVecF64x2Trunc:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Trunc:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Trunc:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVTrunc(v1, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.nearest: per-lane round to nearest, ties to even.
		case wasm.OpcodeVecF32x4Nearest, wasm.OpcodeVecF64x2Nearest:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Nearest:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Nearest:
				lane = ssa.VecLaneF64x2
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVNearest(v1, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.pmin: pseudo-minimum (b < a ? b : a) per the Wasm SIMD spec,
		// lowered to VMinPseudo.
		case wasm.OpcodeVecF32x4Pmin, wasm.OpcodeVecF64x2Pmin:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Pmin:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Pmin:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVMinPseudo(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// fNxM.pmax: pseudo-maximum (a < b ? b : a) per the Wasm SIMD spec,
		// lowered to VMaxPseudo.
		case wasm.OpcodeVecF32x4Pmax, wasm.OpcodeVecF64x2Pmax:
			if state.unreachable {
				break
			}
			var lane ssa.VecLane
			switch vecOp {
			case wasm.OpcodeVecF32x4Pmax:
				lane = ssa.VecLaneF32x4
			case wasm.OpcodeVecF64x2Pmax:
				lane = ssa.VecLaneF64x2
			}
			v2 := state.pop()
			v1 := state.pop()
			ret := builder.AllocateInstruction().AsVMaxPseudo(v1, v2, lane).Insert(builder).Return()
			state.push(ret)
		// i32x4.trunc_sat_f32x4_s/u: per-lane saturating float-to-int conversion.
		case wasm.OpcodeVecI32x4TruncSatF32x4S, wasm.OpcodeVecI32x4TruncSatF32x4U:
			if state.unreachable {
				break
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcvtToIntSat(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecI32x4TruncSatF32x4S).Insert(builder).Return()
			state.push(ret)
		// i32x4.trunc_sat_f64x2_s/u_zero: converts the two f64 lanes to i32
		// with saturation; the upper two result lanes are zero per the spec.
		case wasm.OpcodeVecI32x4TruncSatF64x2SZero, wasm.OpcodeVecI32x4TruncSatF64x2UZero:
			if state.unreachable {
				break
			}
			v1 := state.pop()
			ret := builder.AllocateInstruction().
				AsVFcvtToIntSat(v1, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecI32x4TruncSatF64x2SZero).Insert(builder).Return()
			state.push(ret)
2863		case wasm.OpcodeVecF32x4ConvertI32x4S, wasm.OpcodeVecF32x4ConvertI32x4U:
2864			if state.unreachable {
2865				break
2866			}
2867			v1 := state.pop()
2868			ret := builder.AllocateInstruction().
2869				AsVFcvtFromInt(v1, ssa.VecLaneF32x4, vecOp == wasm.OpcodeVecF32x4ConvertI32x4S).Insert(builder).Return()
2870			state.push(ret)
2871		case wasm.OpcodeVecF64x2ConvertLowI32x4S, wasm.OpcodeVecF64x2ConvertLowI32x4U:
2872			if state.unreachable {
2873				break
2874			}
2875			v1 := state.pop()
2876			if runtime.GOARCH == "arm64" {
2877				// TODO: this is weird. fix.
2878				v1 = builder.AllocateInstruction().
2879					AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S, true).Insert(builder).Return()
2880			}
2881			ret := builder.AllocateInstruction().
2882				AsVFcvtFromInt(v1, ssa.VecLaneF64x2, vecOp == wasm.OpcodeVecF64x2ConvertLowI32x4S).
2883				Insert(builder).Return()
2884			state.push(ret)
2885		case wasm.OpcodeVecI8x16NarrowI16x8S, wasm.OpcodeVecI8x16NarrowI16x8U:
2886			if state.unreachable {
2887				break
2888			}
2889			v2 := state.pop()
2890			v1 := state.pop()
2891			ret := builder.AllocateInstruction().
2892				AsNarrow(v1, v2, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI8x16NarrowI16x8S).
2893				Insert(builder).Return()
2894			state.push(ret)
2895		case wasm.OpcodeVecI16x8NarrowI32x4S, wasm.OpcodeVecI16x8NarrowI32x4U:
2896			if state.unreachable {
2897				break
2898			}
2899			v2 := state.pop()
2900			v1 := state.pop()
2901			ret := builder.AllocateInstruction().
2902				AsNarrow(v1, v2, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI16x8NarrowI32x4S).
2903				Insert(builder).Return()
2904			state.push(ret)
2905		case wasm.OpcodeVecI16x8ExtendLowI8x16S, wasm.OpcodeVecI16x8ExtendLowI8x16U:
2906			if state.unreachable {
2907				break
2908			}
2909			v1 := state.pop()
2910			ret := builder.AllocateInstruction().
2911				AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendLowI8x16S, true).
2912				Insert(builder).Return()
2913			state.push(ret)
2914		case wasm.OpcodeVecI16x8ExtendHighI8x16S, wasm.OpcodeVecI16x8ExtendHighI8x16U:
2915			if state.unreachable {
2916				break
2917			}
2918			v1 := state.pop()
2919			ret := builder.AllocateInstruction().
2920				AsWiden(v1, ssa.VecLaneI8x16, vecOp == wasm.OpcodeVecI16x8ExtendHighI8x16S, false).
2921				Insert(builder).Return()
2922			state.push(ret)
2923		case wasm.OpcodeVecI32x4ExtendLowI16x8S, wasm.OpcodeVecI32x4ExtendLowI16x8U:
2924			if state.unreachable {
2925				break
2926			}
2927			v1 := state.pop()
2928			ret := builder.AllocateInstruction().
2929				AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendLowI16x8S, true).
2930				Insert(builder).Return()
2931			state.push(ret)
2932		case wasm.OpcodeVecI32x4ExtendHighI16x8S, wasm.OpcodeVecI32x4ExtendHighI16x8U:
2933			if state.unreachable {
2934				break
2935			}
2936			v1 := state.pop()
2937			ret := builder.AllocateInstruction().
2938				AsWiden(v1, ssa.VecLaneI16x8, vecOp == wasm.OpcodeVecI32x4ExtendHighI16x8S, false).
2939				Insert(builder).Return()
2940			state.push(ret)
2941		case wasm.OpcodeVecI64x2ExtendLowI32x4S, wasm.OpcodeVecI64x2ExtendLowI32x4U:
2942			if state.unreachable {
2943				break
2944			}
2945			v1 := state.pop()
2946			ret := builder.AllocateInstruction().
2947				AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendLowI32x4S, true).
2948				Insert(builder).Return()
2949			state.push(ret)
2950		case wasm.OpcodeVecI64x2ExtendHighI32x4S, wasm.OpcodeVecI64x2ExtendHighI32x4U:
2951			if state.unreachable {
2952				break
2953			}
2954			v1 := state.pop()
2955			ret := builder.AllocateInstruction().
2956				AsWiden(v1, ssa.VecLaneI32x4, vecOp == wasm.OpcodeVecI64x2ExtendHighI32x4S, false).
2957				Insert(builder).Return()
2958			state.push(ret)
2959
2960		case wasm.OpcodeVecF64x2PromoteLowF32x4Zero:
2961			if state.unreachable {
2962				break
2963			}
2964			v1 := state.pop()
2965			ret := builder.AllocateInstruction().
2966				AsFvpromoteLow(v1, ssa.VecLaneF32x4).
2967				Insert(builder).Return()
2968			state.push(ret)
2969		case wasm.OpcodeVecF32x4DemoteF64x2Zero:
2970			if state.unreachable {
2971				break
2972			}
2973			v1 := state.pop()
2974			ret := builder.AllocateInstruction().
2975				AsFvdemote(v1, ssa.VecLaneF64x2).
2976				Insert(builder).Return()
2977			state.push(ret)
2978		case wasm.OpcodeVecI8x16Shl, wasm.OpcodeVecI16x8Shl, wasm.OpcodeVecI32x4Shl, wasm.OpcodeVecI64x2Shl:
2979			if state.unreachable {
2980				break
2981			}
2982			var lane ssa.VecLane
2983			switch vecOp {
2984			case wasm.OpcodeVecI8x16Shl:
2985				lane = ssa.VecLaneI8x16
2986			case wasm.OpcodeVecI16x8Shl:
2987				lane = ssa.VecLaneI16x8
2988			case wasm.OpcodeVecI32x4Shl:
2989				lane = ssa.VecLaneI32x4
2990			case wasm.OpcodeVecI64x2Shl:
2991				lane = ssa.VecLaneI64x2
2992			}
2993			v2 := state.pop()
2994			v1 := state.pop()
2995			ret := builder.AllocateInstruction().AsVIshl(v1, v2, lane).Insert(builder).Return()
2996			state.push(ret)
2997		case wasm.OpcodeVecI8x16ShrS, wasm.OpcodeVecI16x8ShrS, wasm.OpcodeVecI32x4ShrS, wasm.OpcodeVecI64x2ShrS:
2998			if state.unreachable {
2999				break
3000			}
3001			var lane ssa.VecLane
3002			switch vecOp {
3003			case wasm.OpcodeVecI8x16ShrS:
3004				lane = ssa.VecLaneI8x16
3005			case wasm.OpcodeVecI16x8ShrS:
3006				lane = ssa.VecLaneI16x8
3007			case wasm.OpcodeVecI32x4ShrS:
3008				lane = ssa.VecLaneI32x4
3009			case wasm.OpcodeVecI64x2ShrS:
3010				lane = ssa.VecLaneI64x2
3011			}
3012			v2 := state.pop()
3013			v1 := state.pop()
3014			ret := builder.AllocateInstruction().AsVSshr(v1, v2, lane).Insert(builder).Return()
3015			state.push(ret)
3016		case wasm.OpcodeVecI8x16ShrU, wasm.OpcodeVecI16x8ShrU, wasm.OpcodeVecI32x4ShrU, wasm.OpcodeVecI64x2ShrU:
3017			if state.unreachable {
3018				break
3019			}
3020			var lane ssa.VecLane
3021			switch vecOp {
3022			case wasm.OpcodeVecI8x16ShrU:
3023				lane = ssa.VecLaneI8x16
3024			case wasm.OpcodeVecI16x8ShrU:
3025				lane = ssa.VecLaneI16x8
3026			case wasm.OpcodeVecI32x4ShrU:
3027				lane = ssa.VecLaneI32x4
3028			case wasm.OpcodeVecI64x2ShrU:
3029				lane = ssa.VecLaneI64x2
3030			}
3031			v2 := state.pop()
3032			v1 := state.pop()
3033			ret := builder.AllocateInstruction().AsVUshr(v1, v2, lane).Insert(builder).Return()
3034			state.push(ret)
3035		case wasm.OpcodeVecI8x16ExtractLaneS, wasm.OpcodeVecI16x8ExtractLaneS:
3036			state.pc++
3037			if state.unreachable {
3038				break
3039			}
3040			var lane ssa.VecLane
3041			switch vecOp {
3042			case wasm.OpcodeVecI8x16ExtractLaneS:
3043				lane = ssa.VecLaneI8x16
3044			case wasm.OpcodeVecI16x8ExtractLaneS:
3045				lane = ssa.VecLaneI16x8
3046			}
3047			v1 := state.pop()
3048			index := c.wasmFunctionBody[state.pc]
3049			ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, true).Insert(builder).Return()
3050			state.push(ext)
3051		case wasm.OpcodeVecI8x16ExtractLaneU, wasm.OpcodeVecI16x8ExtractLaneU,
3052			wasm.OpcodeVecI32x4ExtractLane, wasm.OpcodeVecI64x2ExtractLane,
3053			wasm.OpcodeVecF32x4ExtractLane, wasm.OpcodeVecF64x2ExtractLane:
3054			state.pc++ // Skip the immediate value.
3055			if state.unreachable {
3056				break
3057			}
3058			var lane ssa.VecLane
3059			switch vecOp {
3060			case wasm.OpcodeVecI8x16ExtractLaneU:
3061				lane = ssa.VecLaneI8x16
3062			case wasm.OpcodeVecI16x8ExtractLaneU:
3063				lane = ssa.VecLaneI16x8
3064			case wasm.OpcodeVecI32x4ExtractLane:
3065				lane = ssa.VecLaneI32x4
3066			case wasm.OpcodeVecI64x2ExtractLane:
3067				lane = ssa.VecLaneI64x2
3068			case wasm.OpcodeVecF32x4ExtractLane:
3069				lane = ssa.VecLaneF32x4
3070			case wasm.OpcodeVecF64x2ExtractLane:
3071				lane = ssa.VecLaneF64x2
3072			}
3073			v1 := state.pop()
3074			index := c.wasmFunctionBody[state.pc]
3075			ext := builder.AllocateInstruction().AsExtractlane(v1, index, lane, false).Insert(builder).Return()
3076			state.push(ext)
3077		case wasm.OpcodeVecI8x16ReplaceLane, wasm.OpcodeVecI16x8ReplaceLane,
3078			wasm.OpcodeVecI32x4ReplaceLane, wasm.OpcodeVecI64x2ReplaceLane,
3079			wasm.OpcodeVecF32x4ReplaceLane, wasm.OpcodeVecF64x2ReplaceLane:
3080			state.pc++
3081			if state.unreachable {
3082				break
3083			}
3084			var lane ssa.VecLane
3085			switch vecOp {
3086			case wasm.OpcodeVecI8x16ReplaceLane:
3087				lane = ssa.VecLaneI8x16
3088			case wasm.OpcodeVecI16x8ReplaceLane:
3089				lane = ssa.VecLaneI16x8
3090			case wasm.OpcodeVecI32x4ReplaceLane:
3091				lane = ssa.VecLaneI32x4
3092			case wasm.OpcodeVecI64x2ReplaceLane:
3093				lane = ssa.VecLaneI64x2
3094			case wasm.OpcodeVecF32x4ReplaceLane:
3095				lane = ssa.VecLaneF32x4
3096			case wasm.OpcodeVecF64x2ReplaceLane:
3097				lane = ssa.VecLaneF64x2
3098			}
3099			v2 := state.pop()
3100			v1 := state.pop()
3101			index := c.wasmFunctionBody[state.pc]
3102			ret := builder.AllocateInstruction().AsInsertlane(v1, v2, index, lane).Insert(builder).Return()
3103			state.push(ret)
3104		case wasm.OpcodeVecV128i8x16Shuffle:
3105			state.pc++
3106			laneIndexes := c.wasmFunctionBody[state.pc : state.pc+16]
3107			state.pc += 15
3108			if state.unreachable {
3109				break
3110			}
3111			v2 := state.pop()
3112			v1 := state.pop()
3113			ret := builder.AllocateInstruction().AsShuffle(v1, v2, laneIndexes).Insert(builder).Return()
3114			state.push(ret)
3115
3116		case wasm.OpcodeVecI8x16Swizzle:
3117			if state.unreachable {
3118				break
3119			}
3120			v2 := state.pop()
3121			v1 := state.pop()
3122			ret := builder.AllocateInstruction().AsSwizzle(v1, v2, ssa.VecLaneI8x16).Insert(builder).Return()
3123			state.push(ret)
3124
3125		case wasm.OpcodeVecI8x16Splat,
3126			wasm.OpcodeVecI16x8Splat,
3127			wasm.OpcodeVecI32x4Splat,
3128			wasm.OpcodeVecI64x2Splat,
3129			wasm.OpcodeVecF32x4Splat,
3130			wasm.OpcodeVecF64x2Splat:
3131			if state.unreachable {
3132				break
3133			}
3134			var lane ssa.VecLane
3135			switch vecOp {
3136			case wasm.OpcodeVecI8x16Splat:
3137				lane = ssa.VecLaneI8x16
3138			case wasm.OpcodeVecI16x8Splat:
3139				lane = ssa.VecLaneI16x8
3140			case wasm.OpcodeVecI32x4Splat:
3141				lane = ssa.VecLaneI32x4
3142			case wasm.OpcodeVecI64x2Splat:
3143				lane = ssa.VecLaneI64x2
3144			case wasm.OpcodeVecF32x4Splat:
3145				lane = ssa.VecLaneF32x4
3146			case wasm.OpcodeVecF64x2Splat:
3147				lane = ssa.VecLaneF64x2
3148			}
3149			v1 := state.pop()
3150			ret := builder.AllocateInstruction().AsSplat(v1, lane).Insert(builder).Return()
3151			state.push(ret)
3152
3153		default:
3154			panic("TODO: unsupported vector instruction: " + wasm.VectorInstructionName(vecOp))
3155		}
3156	case wasm.OpcodeAtomicPrefix:
3157		state.pc++
3158		atomicOp := c.wasmFunctionBody[state.pc]
3159		switch atomicOp {
3160		case wasm.OpcodeAtomicMemoryWait32, wasm.OpcodeAtomicMemoryWait64:
3161			_, offset := c.readMemArg()
3162			if state.unreachable {
3163				break
3164			}
3165
3166			c.storeCallerModuleContext()
3167
3168			var opSize uint64
3169			var trampoline wazevoapi.Offset
3170			var sig *ssa.Signature
3171			switch atomicOp {
3172			case wasm.OpcodeAtomicMemoryWait32:
3173				opSize = 4
3174				trampoline = wazevoapi.ExecutionContextOffsetMemoryWait32TrampolineAddress
3175				sig = &c.memoryWait32Sig
3176			case wasm.OpcodeAtomicMemoryWait64:
3177				opSize = 8
3178				trampoline = wazevoapi.ExecutionContextOffsetMemoryWait64TrampolineAddress
3179				sig = &c.memoryWait64Sig
3180			}
3181
3182			timeout := state.pop()
3183			exp := state.pop()
3184			baseAddr := state.pop()
3185			addr := c.atomicMemOpSetup(baseAddr, uint64(offset), opSize)
3186
3187			memoryWaitPtr := builder.AllocateInstruction().
3188				AsLoad(c.execCtxPtrValue,
3189					trampoline.U32(),
3190					ssa.TypeI64,
3191				).Insert(builder).Return()
3192
3193			args := c.allocateVarLengthValues(3, c.execCtxPtrValue, timeout, exp, addr)
3194			memoryWaitRet := builder.AllocateInstruction().
3195				AsCallIndirect(memoryWaitPtr, sig, args).
3196				Insert(builder).Return()
3197			state.push(memoryWaitRet)
3198		case wasm.OpcodeAtomicMemoryNotify:
3199			_, offset := c.readMemArg()
3200			if state.unreachable {
3201				break
3202			}
3203
3204			c.storeCallerModuleContext()
3205			count := state.pop()
3206			baseAddr := state.pop()
3207			addr := c.atomicMemOpSetup(baseAddr, uint64(offset), 4)
3208
3209			memoryNotifyPtr := builder.AllocateInstruction().
3210				AsLoad(c.execCtxPtrValue,
3211					wazevoapi.ExecutionContextOffsetMemoryNotifyTrampolineAddress.U32(),
3212					ssa.TypeI64,
3213				).Insert(builder).Return()
3214			args := c.allocateVarLengthValues(2, c.execCtxPtrValue, count, addr)
3215			memoryNotifyRet := builder.AllocateInstruction().
3216				AsCallIndirect(memoryNotifyPtr, &c.memoryNotifySig, args).
3217				Insert(builder).Return()
3218			state.push(memoryNotifyRet)
3219		case wasm.OpcodeAtomicI32Load, wasm.OpcodeAtomicI64Load, wasm.OpcodeAtomicI32Load8U, wasm.OpcodeAtomicI32Load16U, wasm.OpcodeAtomicI64Load8U, wasm.OpcodeAtomicI64Load16U, wasm.OpcodeAtomicI64Load32U:
3220			_, offset := c.readMemArg()
3221			if state.unreachable {
3222				break
3223			}
3224
3225			baseAddr := state.pop()
3226
3227			var size uint64
3228			switch atomicOp {
3229			case wasm.OpcodeAtomicI64Load:
3230				size = 8
3231			case wasm.OpcodeAtomicI32Load, wasm.OpcodeAtomicI64Load32U:
3232				size = 4
3233			case wasm.OpcodeAtomicI32Load16U, wasm.OpcodeAtomicI64Load16U:
3234				size = 2
3235			case wasm.OpcodeAtomicI32Load8U, wasm.OpcodeAtomicI64Load8U:
3236				size = 1
3237			}
3238
3239			var typ ssa.Type
3240			switch atomicOp {
3241			case wasm.OpcodeAtomicI64Load, wasm.OpcodeAtomicI64Load32U, wasm.OpcodeAtomicI64Load16U, wasm.OpcodeAtomicI64Load8U:
3242				typ = ssa.TypeI64
3243			case wasm.OpcodeAtomicI32Load, wasm.OpcodeAtomicI32Load16U, wasm.OpcodeAtomicI32Load8U:
3244				typ = ssa.TypeI32
3245			}
3246
3247			addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size)
3248			res := builder.AllocateInstruction().AsAtomicLoad(addr, size, typ).Insert(builder).Return()
3249			state.push(res)
3250		case wasm.OpcodeAtomicI32Store, wasm.OpcodeAtomicI64Store, wasm.OpcodeAtomicI32Store8, wasm.OpcodeAtomicI32Store16, wasm.OpcodeAtomicI64Store8, wasm.OpcodeAtomicI64Store16, wasm.OpcodeAtomicI64Store32:
3251			_, offset := c.readMemArg()
3252			if state.unreachable {
3253				break
3254			}
3255
3256			val := state.pop()
3257			baseAddr := state.pop()
3258
3259			var size uint64
3260			switch atomicOp {
3261			case wasm.OpcodeAtomicI64Store:
3262				size = 8
3263			case wasm.OpcodeAtomicI32Store, wasm.OpcodeAtomicI64Store32:
3264				size = 4
3265			case wasm.OpcodeAtomicI32Store16, wasm.OpcodeAtomicI64Store16:
3266				size = 2
3267			case wasm.OpcodeAtomicI32Store8, wasm.OpcodeAtomicI64Store8:
3268				size = 1
3269			}
3270
3271			addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size)
3272			builder.AllocateInstruction().AsAtomicStore(addr, val, size).Insert(builder)
3273		case wasm.OpcodeAtomicI32RmwAdd, wasm.OpcodeAtomicI64RmwAdd, wasm.OpcodeAtomicI32Rmw8AddU, wasm.OpcodeAtomicI32Rmw16AddU, wasm.OpcodeAtomicI64Rmw8AddU, wasm.OpcodeAtomicI64Rmw16AddU, wasm.OpcodeAtomicI64Rmw32AddU,
3274			wasm.OpcodeAtomicI32RmwSub, wasm.OpcodeAtomicI64RmwSub, wasm.OpcodeAtomicI32Rmw8SubU, wasm.OpcodeAtomicI32Rmw16SubU, wasm.OpcodeAtomicI64Rmw8SubU, wasm.OpcodeAtomicI64Rmw16SubU, wasm.OpcodeAtomicI64Rmw32SubU,
3275			wasm.OpcodeAtomicI32RmwAnd, wasm.OpcodeAtomicI64RmwAnd, wasm.OpcodeAtomicI32Rmw8AndU, wasm.OpcodeAtomicI32Rmw16AndU, wasm.OpcodeAtomicI64Rmw8AndU, wasm.OpcodeAtomicI64Rmw16AndU, wasm.OpcodeAtomicI64Rmw32AndU,
3276			wasm.OpcodeAtomicI32RmwOr, wasm.OpcodeAtomicI64RmwOr, wasm.OpcodeAtomicI32Rmw8OrU, wasm.OpcodeAtomicI32Rmw16OrU, wasm.OpcodeAtomicI64Rmw8OrU, wasm.OpcodeAtomicI64Rmw16OrU, wasm.OpcodeAtomicI64Rmw32OrU,
3277			wasm.OpcodeAtomicI32RmwXor, wasm.OpcodeAtomicI64RmwXor, wasm.OpcodeAtomicI32Rmw8XorU, wasm.OpcodeAtomicI32Rmw16XorU, wasm.OpcodeAtomicI64Rmw8XorU, wasm.OpcodeAtomicI64Rmw16XorU, wasm.OpcodeAtomicI64Rmw32XorU,
3278			wasm.OpcodeAtomicI32RmwXchg, wasm.OpcodeAtomicI64RmwXchg, wasm.OpcodeAtomicI32Rmw8XchgU, wasm.OpcodeAtomicI32Rmw16XchgU, wasm.OpcodeAtomicI64Rmw8XchgU, wasm.OpcodeAtomicI64Rmw16XchgU, wasm.OpcodeAtomicI64Rmw32XchgU:
3279			_, offset := c.readMemArg()
3280			if state.unreachable {
3281				break
3282			}
3283
3284			val := state.pop()
3285			baseAddr := state.pop()
3286
3287			var rmwOp ssa.AtomicRmwOp
3288			var size uint64
3289			switch atomicOp {
3290			case wasm.OpcodeAtomicI32RmwAdd, wasm.OpcodeAtomicI64RmwAdd, wasm.OpcodeAtomicI32Rmw8AddU, wasm.OpcodeAtomicI32Rmw16AddU, wasm.OpcodeAtomicI64Rmw8AddU, wasm.OpcodeAtomicI64Rmw16AddU, wasm.OpcodeAtomicI64Rmw32AddU:
3291				rmwOp = ssa.AtomicRmwOpAdd
3292				switch atomicOp {
3293				case wasm.OpcodeAtomicI64RmwAdd:
3294					size = 8
3295				case wasm.OpcodeAtomicI32RmwAdd, wasm.OpcodeAtomicI64Rmw32AddU:
3296					size = 4
3297				case wasm.OpcodeAtomicI32Rmw16AddU, wasm.OpcodeAtomicI64Rmw16AddU:
3298					size = 2
3299				case wasm.OpcodeAtomicI32Rmw8AddU, wasm.OpcodeAtomicI64Rmw8AddU:
3300					size = 1
3301				}
3302			case wasm.OpcodeAtomicI32RmwSub, wasm.OpcodeAtomicI64RmwSub, wasm.OpcodeAtomicI32Rmw8SubU, wasm.OpcodeAtomicI32Rmw16SubU, wasm.OpcodeAtomicI64Rmw8SubU, wasm.OpcodeAtomicI64Rmw16SubU, wasm.OpcodeAtomicI64Rmw32SubU:
3303				rmwOp = ssa.AtomicRmwOpSub
3304				switch atomicOp {
3305				case wasm.OpcodeAtomicI64RmwSub:
3306					size = 8
3307				case wasm.OpcodeAtomicI32RmwSub, wasm.OpcodeAtomicI64Rmw32SubU:
3308					size = 4
3309				case wasm.OpcodeAtomicI32Rmw16SubU, wasm.OpcodeAtomicI64Rmw16SubU:
3310					size = 2
3311				case wasm.OpcodeAtomicI32Rmw8SubU, wasm.OpcodeAtomicI64Rmw8SubU:
3312					size = 1
3313				}
3314			case wasm.OpcodeAtomicI32RmwAnd, wasm.OpcodeAtomicI64RmwAnd, wasm.OpcodeAtomicI32Rmw8AndU, wasm.OpcodeAtomicI32Rmw16AndU, wasm.OpcodeAtomicI64Rmw8AndU, wasm.OpcodeAtomicI64Rmw16AndU, wasm.OpcodeAtomicI64Rmw32AndU:
3315				rmwOp = ssa.AtomicRmwOpAnd
3316				switch atomicOp {
3317				case wasm.OpcodeAtomicI64RmwAnd:
3318					size = 8
3319				case wasm.OpcodeAtomicI32RmwAnd, wasm.OpcodeAtomicI64Rmw32AndU:
3320					size = 4
3321				case wasm.OpcodeAtomicI32Rmw16AndU, wasm.OpcodeAtomicI64Rmw16AndU:
3322					size = 2
3323				case wasm.OpcodeAtomicI32Rmw8AndU, wasm.OpcodeAtomicI64Rmw8AndU:
3324					size = 1
3325				}
3326			case wasm.OpcodeAtomicI32RmwOr, wasm.OpcodeAtomicI64RmwOr, wasm.OpcodeAtomicI32Rmw8OrU, wasm.OpcodeAtomicI32Rmw16OrU, wasm.OpcodeAtomicI64Rmw8OrU, wasm.OpcodeAtomicI64Rmw16OrU, wasm.OpcodeAtomicI64Rmw32OrU:
3327				rmwOp = ssa.AtomicRmwOpOr
3328				switch atomicOp {
3329				case wasm.OpcodeAtomicI64RmwOr:
3330					size = 8
3331				case wasm.OpcodeAtomicI32RmwOr, wasm.OpcodeAtomicI64Rmw32OrU:
3332					size = 4
3333				case wasm.OpcodeAtomicI32Rmw16OrU, wasm.OpcodeAtomicI64Rmw16OrU:
3334					size = 2
3335				case wasm.OpcodeAtomicI32Rmw8OrU, wasm.OpcodeAtomicI64Rmw8OrU:
3336					size = 1
3337				}
3338			case wasm.OpcodeAtomicI32RmwXor, wasm.OpcodeAtomicI64RmwXor, wasm.OpcodeAtomicI32Rmw8XorU, wasm.OpcodeAtomicI32Rmw16XorU, wasm.OpcodeAtomicI64Rmw8XorU, wasm.OpcodeAtomicI64Rmw16XorU, wasm.OpcodeAtomicI64Rmw32XorU:
3339				rmwOp = ssa.AtomicRmwOpXor
3340				switch atomicOp {
3341				case wasm.OpcodeAtomicI64RmwXor:
3342					size = 8
3343				case wasm.OpcodeAtomicI32RmwXor, wasm.OpcodeAtomicI64Rmw32XorU:
3344					size = 4
3345				case wasm.OpcodeAtomicI32Rmw16XorU, wasm.OpcodeAtomicI64Rmw16XorU:
3346					size = 2
3347				case wasm.OpcodeAtomicI32Rmw8XorU, wasm.OpcodeAtomicI64Rmw8XorU:
3348					size = 1
3349				}
3350			case wasm.OpcodeAtomicI32RmwXchg, wasm.OpcodeAtomicI64RmwXchg, wasm.OpcodeAtomicI32Rmw8XchgU, wasm.OpcodeAtomicI32Rmw16XchgU, wasm.OpcodeAtomicI64Rmw8XchgU, wasm.OpcodeAtomicI64Rmw16XchgU, wasm.OpcodeAtomicI64Rmw32XchgU:
3351				rmwOp = ssa.AtomicRmwOpXchg
3352				switch atomicOp {
3353				case wasm.OpcodeAtomicI64RmwXchg:
3354					size = 8
3355				case wasm.OpcodeAtomicI32RmwXchg, wasm.OpcodeAtomicI64Rmw32XchgU:
3356					size = 4
3357				case wasm.OpcodeAtomicI32Rmw16XchgU, wasm.OpcodeAtomicI64Rmw16XchgU:
3358					size = 2
3359				case wasm.OpcodeAtomicI32Rmw8XchgU, wasm.OpcodeAtomicI64Rmw8XchgU:
3360					size = 1
3361				}
3362			}
3363
3364			addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size)
3365			res := builder.AllocateInstruction().AsAtomicRmw(rmwOp, addr, val, size).Insert(builder).Return()
3366			state.push(res)
3367		case wasm.OpcodeAtomicI32RmwCmpxchg, wasm.OpcodeAtomicI64RmwCmpxchg, wasm.OpcodeAtomicI32Rmw8CmpxchgU, wasm.OpcodeAtomicI32Rmw16CmpxchgU, wasm.OpcodeAtomicI64Rmw8CmpxchgU, wasm.OpcodeAtomicI64Rmw16CmpxchgU, wasm.OpcodeAtomicI64Rmw32CmpxchgU:
3368			_, offset := c.readMemArg()
3369			if state.unreachable {
3370				break
3371			}
3372
3373			repl := state.pop()
3374			exp := state.pop()
3375			baseAddr := state.pop()
3376
3377			var size uint64
3378			switch atomicOp {
3379			case wasm.OpcodeAtomicI64RmwCmpxchg:
3380				size = 8
3381			case wasm.OpcodeAtomicI32RmwCmpxchg, wasm.OpcodeAtomicI64Rmw32CmpxchgU:
3382				size = 4
3383			case wasm.OpcodeAtomicI32Rmw16CmpxchgU, wasm.OpcodeAtomicI64Rmw16CmpxchgU:
3384				size = 2
3385			case wasm.OpcodeAtomicI32Rmw8CmpxchgU, wasm.OpcodeAtomicI64Rmw8CmpxchgU:
3386				size = 1
3387			}
3388			addr := c.atomicMemOpSetup(baseAddr, uint64(offset), size)
3389			res := builder.AllocateInstruction().AsAtomicCas(addr, exp, repl, size).Insert(builder).Return()
3390			state.push(res)
3391		case wasm.OpcodeAtomicFence:
3392			order := c.readByte()
3393			if state.unreachable {
3394				break
3395			}
3396			if c.needMemory {
3397				builder.AllocateInstruction().AsFence(order).Insert(builder)
3398			}
3399		default:
3400			panic("TODO: unsupported atomic instruction: " + wasm.AtomicInstructionName(atomicOp))
3401		}
3402	case wasm.OpcodeRefFunc:
3403		funcIndex := c.readI32u()
3404		if state.unreachable {
3405			break
3406		}
3407
3408		c.storeCallerModuleContext()
3409
3410		funcIndexVal := builder.AllocateInstruction().AsIconst32(funcIndex).Insert(builder).Return()
3411
3412		refFuncPtr := builder.AllocateInstruction().
3413			AsLoad(c.execCtxPtrValue,
3414				wazevoapi.ExecutionContextOffsetRefFuncTrampolineAddress.U32(),
3415				ssa.TypeI64,
3416			).Insert(builder).Return()
3417
3418		args := c.allocateVarLengthValues(2, c.execCtxPtrValue, funcIndexVal)
3419		refFuncRet := builder.
3420			AllocateInstruction().
3421			AsCallIndirect(refFuncPtr, &c.refFuncSig, args).
3422			Insert(builder).Return()
3423		state.push(refFuncRet)
3424
3425	case wasm.OpcodeRefNull:
3426		c.loweringState.pc++ // skips the reference type as we treat both of them as i64(0).
3427		if state.unreachable {
3428			break
3429		}
3430		ret := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()
3431		state.push(ret)
3432	case wasm.OpcodeRefIsNull:
3433		if state.unreachable {
3434			break
3435		}
3436		r := state.pop()
3437		zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder)
3438		icmp := builder.AllocateInstruction().
3439			AsIcmp(r, zero.Return(), ssa.IntegerCmpCondEqual).
3440			Insert(builder).
3441			Return()
3442		state.push(icmp)
3443	case wasm.OpcodeTableSet:
3444		tableIndex := c.readI32u()
3445		if state.unreachable {
3446			break
3447		}
3448		r := state.pop()
3449		targetOffsetInTable := state.pop()
3450
3451		elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable)
3452		builder.AllocateInstruction().AsStore(ssa.OpcodeStore, r, elementAddr, 0).Insert(builder)
3453
3454	case wasm.OpcodeTableGet:
3455		tableIndex := c.readI32u()
3456		if state.unreachable {
3457			break
3458		}
3459		targetOffsetInTable := state.pop()
3460		elementAddr := c.lowerAccessTableWithBoundsCheck(tableIndex, targetOffsetInTable)
3461		loaded := builder.AllocateInstruction().AsLoad(elementAddr, 0, ssa.TypeI64).Insert(builder).Return()
3462		state.push(loaded)
3463	default:
3464		panic("TODO: unsupported in wazevo yet: " + wasm.InstructionName(op))
3465	}
3466
3467	if wazevoapi.FrontEndLoggingEnabled {
3468		fmt.Println("--------- Translated " + wasm.InstructionName(op) + " --------")
3469		fmt.Println("state: " + c.loweringState.String())
3470		fmt.Println(c.formatBuilder())
3471		fmt.Println("--------------------------")
3472	}
3473	c.loweringState.pc++
3474}
3475
3476func (c *Compiler) lowerExtMul(v1, v2 ssa.Value, from, to ssa.VecLane, signed, low bool) ssa.Value {
3477	// TODO: The sequence `Widen; Widen; VIMul` can be substituted for a single instruction on some ISAs.
3478	builder := c.ssaBuilder
3479
3480	v1lo := builder.AllocateInstruction().AsWiden(v1, from, signed, low).Insert(builder).Return()
3481	v2lo := builder.AllocateInstruction().AsWiden(v2, from, signed, low).Insert(builder).Return()
3482
3483	return builder.AllocateInstruction().AsVImul(v1lo, v2lo, to).Insert(builder).Return()
3484}
3485
// Byte offsets into the runtime layout of a table instance, used by
// lowerAccessTableWithBoundsCheck: the base address (an 8-byte pointer,
// loaded as i64) comes first, immediately followed by the length field
// (loaded as i32 for the bounds check). Presumably this mirrors the field
// order of wasm.TableInstance — confirm against that struct if it changes.
const (
	tableInstanceBaseAddressOffset = 0
	tableInstanceLenOffset         = tableInstanceBaseAddressOffset + 8
)
3490
3491func (c *Compiler) lowerAccessTableWithBoundsCheck(tableIndex uint32, elementOffsetInTable ssa.Value) (elementAddress ssa.Value) {
3492	builder := c.ssaBuilder
3493
3494	// Load the table.
3495	loadTableInstancePtr := builder.AllocateInstruction()
3496	loadTableInstancePtr.AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64)
3497	builder.InsertInstruction(loadTableInstancePtr)
3498	tableInstancePtr := loadTableInstancePtr.Return()
3499
3500	// Load the table's length.
3501	loadTableLen := builder.AllocateInstruction()
3502	loadTableLen.AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32)
3503	builder.InsertInstruction(loadTableLen)
3504	tableLen := loadTableLen.Return()
3505
3506	// Compare the length and the target, and trap if out of bounds.
3507	checkOOB := builder.AllocateInstruction()
3508	checkOOB.AsIcmp(elementOffsetInTable, tableLen, ssa.IntegerCmpCondUnsignedGreaterThanOrEqual)
3509	builder.InsertInstruction(checkOOB)
3510	exitIfOOB := builder.AllocateInstruction()
3511	exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds)
3512	builder.InsertInstruction(exitIfOOB)
3513
3514	// Get the base address of wasm.TableInstance.References.
3515	loadTableBaseAddress := builder.AllocateInstruction()
3516	loadTableBaseAddress.AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64)
3517	builder.InsertInstruction(loadTableBaseAddress)
3518	tableBase := loadTableBaseAddress.Return()
3519
3520	// Calculate the address of the target function. First we need to multiply targetOffsetInTable by 8 (pointer size).
3521	multiplyBy8 := builder.AllocateInstruction()
3522	three := builder.AllocateInstruction()
3523	three.AsIconst64(3)
3524	builder.InsertInstruction(three)
3525	multiplyBy8.AsIshl(elementOffsetInTable, three.Return())
3526	builder.InsertInstruction(multiplyBy8)
3527	targetOffsetInTableMultipliedBy8 := multiplyBy8.Return()
3528
3529	// Then add the multiplied value to the base which results in the address of the target function (*wazevo.functionInstance)
3530	calcElementAddressInTable := builder.AllocateInstruction()
3531	calcElementAddressInTable.AsIadd(tableBase, targetOffsetInTableMultipliedBy8)
3532	builder.InsertInstruction(calcElementAddressInTable)
3533	return calcElementAddressInTable.Return()
3534}
3535
3536func (c *Compiler) lowerCallIndirect(typeIndex, tableIndex uint32) {
3537	builder := c.ssaBuilder
3538	state := c.state()
3539
3540	elementOffsetInTable := state.pop()
3541	functionInstancePtrAddress := c.lowerAccessTableWithBoundsCheck(tableIndex, elementOffsetInTable)
3542	loadFunctionInstancePtr := builder.AllocateInstruction()
3543	loadFunctionInstancePtr.AsLoad(functionInstancePtrAddress, 0, ssa.TypeI64)
3544	builder.InsertInstruction(loadFunctionInstancePtr)
3545	functionInstancePtr := loadFunctionInstancePtr.Return()
3546
3547	// Check if it is not the null pointer.
3548	zero := builder.AllocateInstruction()
3549	zero.AsIconst64(0)
3550	builder.InsertInstruction(zero)
3551	checkNull := builder.AllocateInstruction()
3552	checkNull.AsIcmp(functionInstancePtr, zero.Return(), ssa.IntegerCmpCondEqual)
3553	builder.InsertInstruction(checkNull)
3554	exitIfNull := builder.AllocateInstruction()
3555	exitIfNull.AsExitIfTrueWithCode(c.execCtxPtrValue, checkNull.Return(), wazevoapi.ExitCodeIndirectCallNullPointer)
3556	builder.InsertInstruction(exitIfNull)
3557
3558	// We need to do the type check. First, load the target function instance's typeID.
3559	loadTypeID := builder.AllocateInstruction()
3560	loadTypeID.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceTypeIDOffset, ssa.TypeI32)
3561	builder.InsertInstruction(loadTypeID)
3562	actualTypeID := loadTypeID.Return()
3563
3564	// Next, we load the expected TypeID:
3565	loadTypeIDsBegin := builder.AllocateInstruction()
3566	loadTypeIDsBegin.AsLoad(c.moduleCtxPtrValue, c.offset.TypeIDs1stElement.U32(), ssa.TypeI64)
3567	builder.InsertInstruction(loadTypeIDsBegin)
3568	typeIDsBegin := loadTypeIDsBegin.Return()
3569
3570	loadExpectedTypeID := builder.AllocateInstruction()
3571	loadExpectedTypeID.AsLoad(typeIDsBegin, uint32(typeIndex)*4 /* size of wasm.FunctionTypeID */, ssa.TypeI32)
3572	builder.InsertInstruction(loadExpectedTypeID)
3573	expectedTypeID := loadExpectedTypeID.Return()
3574
3575	// Check if the type ID matches.
3576	checkTypeID := builder.AllocateInstruction()
3577	checkTypeID.AsIcmp(actualTypeID, expectedTypeID, ssa.IntegerCmpCondNotEqual)
3578	builder.InsertInstruction(checkTypeID)
3579	exitIfNotMatch := builder.AllocateInstruction()
3580	exitIfNotMatch.AsExitIfTrueWithCode(c.execCtxPtrValue, checkTypeID.Return(), wazevoapi.ExitCodeIndirectCallTypeMismatch)
3581	builder.InsertInstruction(exitIfNotMatch)
3582
3583	// Now ready to call the function. Load the executable and moduleContextOpaquePtr from the function instance.
3584	loadExecutablePtr := builder.AllocateInstruction()
3585	loadExecutablePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceExecutableOffset, ssa.TypeI64)
3586	builder.InsertInstruction(loadExecutablePtr)
3587	executablePtr := loadExecutablePtr.Return()
3588	loadModuleContextOpaquePtr := builder.AllocateInstruction()
3589	loadModuleContextOpaquePtr.AsLoad(functionInstancePtr, wazevoapi.FunctionInstanceModuleContextOpaquePtrOffset, ssa.TypeI64)
3590	builder.InsertInstruction(loadModuleContextOpaquePtr)
3591	moduleContextOpaquePtr := loadModuleContextOpaquePtr.Return()
3592
3593	typ := &c.m.TypeSection[typeIndex]
3594	tail := len(state.values) - len(typ.Params)
3595	vs := state.values[tail:]
3596	state.values = state.values[:tail]
3597	args := c.allocateVarLengthValues(2+len(vs), c.execCtxPtrValue, moduleContextOpaquePtr)
3598	args = args.Append(builder.VarLengthPool(), vs...)
3599
3600	// Before transfer the control to the callee, we have to store the current module's moduleContextPtr
3601	// into execContext.callerModuleContextPtr in case when the callee is a Go function.
3602	c.storeCallerModuleContext()
3603
3604	call := builder.AllocateInstruction()
3605	call.AsCallIndirect(executablePtr, c.signatures[typ], args)
3606	builder.InsertInstruction(call)
3607
3608	first, rest := call.Returns()
3609	if first.Valid() {
3610		state.push(first)
3611	}
3612	for _, v := range rest {
3613		state.push(v)
3614	}
3615
3616	c.reloadAfterCall()
3617}
3618
// memOpSetup inserts the bounds check and calculates the address of the memory operation (loads/stores).
//
// The checked access range is [baseAddr+constOffset, baseAddr+constOffset+operationSizeInBytes).
// The returned address is the absolute address corresponding to baseAddr (memory base plus the
// zero-extended baseAddr); the constant offset is NOT folded into it, so callers apply constOffset
// separately. Successful checks are cached per baseAddr (see recordKnownSafeBound below) so that a
// later access with an equal or smaller ceil in the same block can skip the bounds check entirely.
func (c *Compiler) memOpSetup(baseAddr ssa.Value, constOffset, operationSizeInBytes uint64) (address ssa.Value) {
	address = ssa.ValueInvalid
	builder := c.ssaBuilder

	baseAddrID := baseAddr.ID()
	// ceil is the exclusive upper bound of the access, relative to baseAddr.
	ceil := constOffset + operationSizeInBytes
	if known := c.getKnownSafeBound(baseAddrID); known.valid() {
		// We reuse the calculated absolute address even if the bound is not known to be safe.
		address = known.absoluteAddr
		if ceil <= known.bound {
			// A previous check in this block already proved this range in-bounds: no runtime check needed.
			if !address.Valid() {
				// This means that, the bound is known to be safe, but the memory base might have changed.
				// So, we re-calculate the address.
				memBase := c.getMemoryBaseValue(false)
				extBaseAddr := builder.AllocateInstruction().
					AsUExtend(baseAddr, 32, 64).
					Insert(builder).
					Return()
				address = builder.AllocateInstruction().
					AsIadd(memBase, extBaseAddr).Insert(builder).Return()
				known.absoluteAddr = address // Update the absolute address for the subsequent memory access.
			}
			return
		}
	}

	ceilConst := builder.AllocateInstruction()
	ceilConst.AsIconst64(ceil)
	builder.InsertInstruction(ceilConst)

	// We calculate the offset in 64-bit space.
	extBaseAddr := builder.AllocateInstruction().
		AsUExtend(baseAddr, 32, 64).
		Insert(builder).
		Return()

	// Note: memLen is already zero extended to 64-bit space at the load time.
	memLen := c.getMemoryLenValue(false)

	// baseAddrPlusCeil = baseAddr + ceil
	baseAddrPlusCeil := builder.AllocateInstruction()
	baseAddrPlusCeil.AsIadd(extBaseAddr, ceilConst.Return())
	builder.InsertInstruction(baseAddrPlusCeil)

	// Check for out of bounds memory access: `memLen >= baseAddrPlusCeil`.
	cmp := builder.AllocateInstruction()
	cmp.AsIcmp(memLen, baseAddrPlusCeil.Return(), ssa.IntegerCmpCondUnsignedLessThan)
	builder.InsertInstruction(cmp)
	exitIfNZ := builder.AllocateInstruction()
	exitIfNZ.AsExitIfTrueWithCode(c.execCtxPtrValue, cmp.Return(), wazevoapi.ExitCodeMemoryOutOfBounds)
	builder.InsertInstruction(exitIfNZ)

	// Load the value from memBase + extBaseAddr.
	if address == ssa.ValueInvalid { // Reuse the value if the memBase is already calculated at this point.
		memBase := c.getMemoryBaseValue(false)
		address = builder.AllocateInstruction().
			AsIadd(memBase, extBaseAddr).Insert(builder).Return()
	}

	// Record the bound ceil for this baseAddr is known to be safe for the subsequent memory access in the same block.
	c.recordKnownSafeBound(baseAddrID, ceil, address)
	return
}
3683
3684// atomicMemOpSetup inserts the bounds check and calculates the address of the memory operation (loads/stores), including
3685// the constant offset and performs an alignment check on the final address.
3686func (c *Compiler) atomicMemOpSetup(baseAddr ssa.Value, constOffset, operationSizeInBytes uint64) (address ssa.Value) {
3687	builder := c.ssaBuilder
3688
3689	addrWithoutOffset := c.memOpSetup(baseAddr, constOffset, operationSizeInBytes)
3690	var addr ssa.Value
3691	if constOffset == 0 {
3692		addr = addrWithoutOffset
3693	} else {
3694		offset := builder.AllocateInstruction().AsIconst64(constOffset).Insert(builder).Return()
3695		addr = builder.AllocateInstruction().AsIadd(addrWithoutOffset, offset).Insert(builder).Return()
3696	}
3697
3698	c.memAlignmentCheck(addr, operationSizeInBytes)
3699
3700	return addr
3701}
3702
3703func (c *Compiler) memAlignmentCheck(addr ssa.Value, operationSizeInBytes uint64) {
3704	if operationSizeInBytes == 1 {
3705		return // No alignment restrictions when accessing a byte
3706	}
3707	var checkBits uint64
3708	switch operationSizeInBytes {
3709	case 2:
3710		checkBits = 0b1
3711	case 4:
3712		checkBits = 0b11
3713	case 8:
3714		checkBits = 0b111
3715	}
3716
3717	builder := c.ssaBuilder
3718
3719	mask := builder.AllocateInstruction().AsIconst64(checkBits).Insert(builder).Return()
3720	masked := builder.AllocateInstruction().AsBand(addr, mask).Insert(builder).Return()
3721	zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()
3722	cmp := builder.AllocateInstruction().AsIcmp(masked, zero, ssa.IntegerCmpCondNotEqual).Insert(builder).Return()
3723	builder.AllocateInstruction().AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, wazevoapi.ExitCodeUnalignedAtomic).Insert(builder)
3724}
3725
3726func (c *Compiler) callMemmove(dst, src, size ssa.Value) {
3727	args := c.allocateVarLengthValues(3, dst, src, size)
3728	if size.Type() != ssa.TypeI64 {
3729		panic("TODO: memmove size must be i64")
3730	}
3731
3732	builder := c.ssaBuilder
3733	memmovePtr := builder.AllocateInstruction().
3734		AsLoad(c.execCtxPtrValue,
3735			wazevoapi.ExecutionContextOffsetMemmoveAddress.U32(),
3736			ssa.TypeI64,
3737		).Insert(builder).Return()
3738	builder.AllocateInstruction().AsCallGoRuntimeMemmove(memmovePtr, &c.memmoveSig, args).Insert(builder)
3739}
3740
3741func (c *Compiler) reloadAfterCall() {
3742	// Note that when these are not used in the following instructions, they will be optimized out.
3743	// So in any ways, we define them!
3744
3745	// After calling any function, memory buffer might have changed. So we need to re-define the variable.
3746	// However, if the memory is shared, we don't need to reload the memory base and length as the base will never change.
3747	if c.needMemory && !c.memoryShared {
3748		c.reloadMemoryBaseLen()
3749	}
3750
3751	// Also, any mutable Global can change.
3752	for _, index := range c.mutableGlobalVariablesIndexes {
3753		_ = c.getWasmGlobalValue(index, true)
3754	}
3755}
3756
3757func (c *Compiler) reloadMemoryBaseLen() {
3758	_ = c.getMemoryBaseValue(true)
3759	_ = c.getMemoryLenValue(true)
3760
3761	// This function being called means that the memory base might have changed.
3762	// Therefore, we need to clear the absolute addresses recorded in the known safe bounds
3763	// because we cache the absolute address of the memory access per each base offset.
3764	c.resetAbsoluteAddressInSafeBounds()
3765}
3766
3767func (c *Compiler) setWasmGlobalValue(index wasm.Index, v ssa.Value) {
3768	variable := c.globalVariables[index]
3769	opaqueOffset := c.offset.GlobalInstanceOffset(index)
3770
3771	builder := c.ssaBuilder
3772	if index < c.m.ImportGlobalCount {
3773		loadGlobalInstPtr := builder.AllocateInstruction()
3774		loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), ssa.TypeI64)
3775		builder.InsertInstruction(loadGlobalInstPtr)
3776
3777		store := builder.AllocateInstruction()
3778		store.AsStore(ssa.OpcodeStore, v, loadGlobalInstPtr.Return(), uint32(0))
3779		builder.InsertInstruction(store)
3780
3781	} else {
3782		store := builder.AllocateInstruction()
3783		store.AsStore(ssa.OpcodeStore, v, c.moduleCtxPtrValue, uint32(opaqueOffset))
3784		builder.InsertInstruction(store)
3785	}
3786
3787	// The value has changed to `v`, so we record it.
3788	builder.DefineVariableInCurrentBB(variable, v)
3789}
3790
3791func (c *Compiler) getWasmGlobalValue(index wasm.Index, forceLoad bool) ssa.Value {
3792	variable := c.globalVariables[index]
3793	typ := c.globalVariablesTypes[index]
3794	opaqueOffset := c.offset.GlobalInstanceOffset(index)
3795
3796	builder := c.ssaBuilder
3797	if !forceLoad {
3798		if v := builder.FindValueInLinearPath(variable); v.Valid() {
3799			return v
3800		}
3801	}
3802
3803	var load *ssa.Instruction
3804	if index < c.m.ImportGlobalCount {
3805		loadGlobalInstPtr := builder.AllocateInstruction()
3806		loadGlobalInstPtr.AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), ssa.TypeI64)
3807		builder.InsertInstruction(loadGlobalInstPtr)
3808		load = builder.AllocateInstruction().
3809			AsLoad(loadGlobalInstPtr.Return(), uint32(0), typ)
3810	} else {
3811		load = builder.AllocateInstruction().
3812			AsLoad(c.moduleCtxPtrValue, uint32(opaqueOffset), typ)
3813	}
3814
3815	v := load.Insert(builder).Return()
3816	builder.DefineVariableInCurrentBB(variable, v)
3817	return v
3818}
3819
// Offsets into the runtime memory instance as accessed by the generated code:
// the buffer's base pointer sits at offset 0 and its length 8 bytes after it
// (presumably the Go slice header layout, data pointer then length — TODO confirm).
const (
	memoryInstanceBufOffset     = 0
	memoryInstanceBufSizeOffset = memoryInstanceBufOffset + 8
)
3824
3825func (c *Compiler) getMemoryBaseValue(forceReload bool) ssa.Value {
3826	builder := c.ssaBuilder
3827	variable := c.memoryBaseVariable
3828	if !forceReload {
3829		if v := builder.FindValueInLinearPath(variable); v.Valid() {
3830			return v
3831		}
3832	}
3833
3834	var ret ssa.Value
3835	if c.offset.LocalMemoryBegin < 0 {
3836		loadMemInstPtr := builder.AllocateInstruction()
3837		loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64)
3838		builder.InsertInstruction(loadMemInstPtr)
3839		memInstPtr := loadMemInstPtr.Return()
3840
3841		loadBufPtr := builder.AllocateInstruction()
3842		loadBufPtr.AsLoad(memInstPtr, memoryInstanceBufOffset, ssa.TypeI64)
3843		builder.InsertInstruction(loadBufPtr)
3844		ret = loadBufPtr.Return()
3845	} else {
3846		load := builder.AllocateInstruction()
3847		load.AsLoad(c.moduleCtxPtrValue, c.offset.LocalMemoryBase().U32(), ssa.TypeI64)
3848		builder.InsertInstruction(load)
3849		ret = load.Return()
3850	}
3851
3852	builder.DefineVariableInCurrentBB(variable, ret)
3853	return ret
3854}
3855
// getMemoryLenValue returns the current byte length of the memory, reusing the cached
// SSA definition unless forceReload is true. For shared memories the cache is never
// used and the length is loaded atomically, since it may be updated concurrently.
func (c *Compiler) getMemoryLenValue(forceReload bool) ssa.Value {
	variable := c.memoryLenVariable
	builder := c.ssaBuilder
	// Never reuse the cached value for shared memories: the length may change underneath us.
	if !forceReload && !c.memoryShared {
		if v := builder.FindValueInLinearPath(variable); v.Valid() {
			return v
		}
	}

	var ret ssa.Value
	if c.offset.LocalMemoryBegin < 0 {
		// Imported memory: dereference the memory instance pointer stored in the module context.
		loadMemInstPtr := builder.AllocateInstruction()
		loadMemInstPtr.AsLoad(c.moduleCtxPtrValue, c.offset.ImportedMemoryBegin.U32(), ssa.TypeI64)
		builder.InsertInstruction(loadMemInstPtr)
		memInstPtr := loadMemInstPtr.Return()

		loadBufSizePtr := builder.AllocateInstruction()
		if c.memoryShared {
			// Shared memory: compute memInstPtr+memoryInstanceBufSizeOffset and load the length atomically.
			sizeOffset := builder.AllocateInstruction().AsIconst64(memoryInstanceBufSizeOffset).Insert(builder).Return()
			addr := builder.AllocateInstruction().AsIadd(memInstPtr, sizeOffset).Insert(builder).Return()
			loadBufSizePtr.AsAtomicLoad(addr, 8, ssa.TypeI64)
		} else {
			loadBufSizePtr.AsLoad(memInstPtr, memoryInstanceBufSizeOffset, ssa.TypeI64)
		}
		builder.InsertInstruction(loadBufSizePtr)

		ret = loadBufSizePtr.Return()
	} else {
		// Local memory: the length lives directly in the module context.
		load := builder.AllocateInstruction()
		if c.memoryShared {
			// Shared memory: compute the address explicitly and load the length atomically.
			lenOffset := builder.AllocateInstruction().AsIconst64(c.offset.LocalMemoryLen().U64()).Insert(builder).Return()
			addr := builder.AllocateInstruction().AsIadd(c.moduleCtxPtrValue, lenOffset).Insert(builder).Return()
			load.AsAtomicLoad(addr, 8, ssa.TypeI64)
		} else {
			// Non-shared: 32-bit load zero-extended into 64-bit space.
			load.AsExtLoad(ssa.OpcodeUload32, c.moduleCtxPtrValue, c.offset.LocalMemoryLen().U32(), true)
		}
		builder.InsertInstruction(load)
		ret = load.Return()
	}

	builder.DefineVariableInCurrentBB(variable, ret)
	return ret
}
3899
3900func (c *Compiler) insertIcmp(cond ssa.IntegerCmpCond) {
3901	state, builder := c.state(), c.ssaBuilder
3902	y, x := state.pop(), state.pop()
3903	cmp := builder.AllocateInstruction()
3904	cmp.AsIcmp(x, y, cond)
3905	builder.InsertInstruction(cmp)
3906	value := cmp.Return()
3907	state.push(value)
3908}
3909
3910func (c *Compiler) insertFcmp(cond ssa.FloatCmpCond) {
3911	state, builder := c.state(), c.ssaBuilder
3912	y, x := state.pop(), state.pop()
3913	cmp := builder.AllocateInstruction()
3914	cmp.AsFcmp(x, y, cond)
3915	builder.InsertInstruction(cmp)
3916	value := cmp.Return()
3917	state.push(value)
3918}
3919
3920// storeCallerModuleContext stores the current module's moduleContextPtr into execContext.callerModuleContextPtr.
3921func (c *Compiler) storeCallerModuleContext() {
3922	builder := c.ssaBuilder
3923	execCtx := c.execCtxPtrValue
3924	store := builder.AllocateInstruction()
3925	store.AsStore(ssa.OpcodeStore,
3926		c.moduleCtxPtrValue, execCtx, wazevoapi.ExecutionContextOffsetCallerModuleContextPtr.U32())
3927	builder.InsertInstruction(store)
3928}
3929
3930func (c *Compiler) readByte() byte {
3931	v := c.wasmFunctionBody[c.loweringState.pc+1]
3932	c.loweringState.pc++
3933	return v
3934}
3935
3936func (c *Compiler) readI32u() uint32 {
3937	v, n, err := leb128.LoadUint32(c.wasmFunctionBody[c.loweringState.pc+1:])
3938	if err != nil {
3939		panic(err) // shouldn't be reached since compilation comes after validation.
3940	}
3941	c.loweringState.pc += int(n)
3942	return v
3943}
3944
3945func (c *Compiler) readI32s() int32 {
3946	v, n, err := leb128.LoadInt32(c.wasmFunctionBody[c.loweringState.pc+1:])
3947	if err != nil {
3948		panic(err) // shouldn't be reached since compilation comes after validation.
3949	}
3950	c.loweringState.pc += int(n)
3951	return v
3952}
3953
3954func (c *Compiler) readI64s() int64 {
3955	v, n, err := leb128.LoadInt64(c.wasmFunctionBody[c.loweringState.pc+1:])
3956	if err != nil {
3957		panic(err) // shouldn't be reached since compilation comes after validation.
3958	}
3959	c.loweringState.pc += int(n)
3960	return v
3961}
3962
3963func (c *Compiler) readF32() float32 {
3964	v := math.Float32frombits(binary.LittleEndian.Uint32(c.wasmFunctionBody[c.loweringState.pc+1:]))
3965	c.loweringState.pc += 4
3966	return v
3967}
3968
3969func (c *Compiler) readF64() float64 {
3970	v := math.Float64frombits(binary.LittleEndian.Uint64(c.wasmFunctionBody[c.loweringState.pc+1:]))
3971	c.loweringState.pc += 8
3972	return v
3973}
3974
3975// readBlockType reads the block type from the current position of the bytecode reader.
3976func (c *Compiler) readBlockType() *wasm.FunctionType {
3977	state := c.state()
3978
3979	c.br.Reset(c.wasmFunctionBody[state.pc+1:])
3980	bt, num, err := wasm.DecodeBlockType(c.m.TypeSection, c.br, api.CoreFeaturesV2)
3981	if err != nil {
3982		panic(err) // shouldn't be reached since compilation comes after validation.
3983	}
3984	state.pc += int(num)
3985
3986	return bt
3987}
3988
3989func (c *Compiler) readMemArg() (align, offset uint32) {
3990	state := c.state()
3991
3992	align, num, err := leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:])
3993	if err != nil {
3994		panic(fmt.Errorf("read memory align: %v", err))
3995	}
3996
3997	state.pc += int(num)
3998	offset, num, err = leb128.LoadUint32(c.wasmFunctionBody[state.pc+1:])
3999	if err != nil {
4000		panic(fmt.Errorf("read memory offset: %v", err))
4001	}
4002
4003	state.pc += int(num)
4004	return align, offset
4005}
4006
4007// insertJumpToBlock inserts a jump instruction to the given block in the current block.
4008func (c *Compiler) insertJumpToBlock(args ssa.Values, targetBlk ssa.BasicBlock) {
4009	if targetBlk.ReturnBlock() {
4010		if c.needListener {
4011			c.callListenerAfter()
4012		}
4013	}
4014
4015	builder := c.ssaBuilder
4016	jmp := builder.AllocateInstruction()
4017	jmp.AsJump(args, targetBlk)
4018	builder.InsertInstruction(jmp)
4019}
4020
4021func (c *Compiler) insertIntegerExtend(signed bool, from, to byte) {
4022	state := c.state()
4023	builder := c.ssaBuilder
4024	v := state.pop()
4025	extend := builder.AllocateInstruction()
4026	if signed {
4027		extend.AsSExtend(v, from, to)
4028	} else {
4029		extend.AsUExtend(v, from, to)
4030	}
4031	builder.InsertInstruction(extend)
4032	value := extend.Return()
4033	state.push(value)
4034}
4035
4036func (c *Compiler) switchTo(originalStackLen int, targetBlk ssa.BasicBlock) {
4037	if targetBlk.Preds() == 0 {
4038		c.loweringState.unreachable = true
4039	}
4040
4041	// Now we should adjust the stack and start translating the continuation block.
4042	c.loweringState.values = c.loweringState.values[:originalStackLen]
4043
4044	c.ssaBuilder.SetCurrentBlock(targetBlk)
4045
4046	// At this point, blocks params consist only of the Wasm-level parameters,
4047	// (since it's added only when we are trying to resolve variable *inside* this block).
4048	for i := 0; i < targetBlk.Params(); i++ {
4049		value := targetBlk.Param(i)
4050		c.loweringState.push(value)
4051	}
4052}
4053
// results returns the number of results of the function currently being compiled.
func (c *Compiler) results() int {
	return len(c.wasmFunctionTyp.Results)
}
4058
// lowerBrTable lowers a br_table: index selects which entry of labels to branch to.
// Each target is reached through a freshly-allocated trampoline block so that per-target
// argument moves can be inserted (br_table itself cannot carry per-target moves).
func (c *Compiler) lowerBrTable(labels []uint32, index ssa.Value) {
	state := c.state()
	builder := c.ssaBuilder

	// The number of branch args is taken from the first label; Wasm validation
	// requires all br_table targets to agree on arity.
	f := state.ctrlPeekAt(int(labels[0]))
	var numArgs int
	if f.isLoop() {
		// Branching to a loop re-enters its header, which takes the block's params.
		numArgs = len(f.blockType.Params)
	} else {
		// Branching to any other frame jumps to its continuation, which takes the results.
		numArgs = len(f.blockType.Results)
	}

	varPool := builder.VarLengthPool()
	trampolineBlockIDs := varPool.Allocate(len(labels))

	// We need trampoline blocks since depending on the target block structure, we might end up inserting moves before jumps,
	// which cannot be done with br_table. Instead, we can do such per-block moves in the trampoline blocks.
	// At the linking phase (very end of the backend), we can remove the unnecessary jumps, and therefore no runtime overhead.
	currentBlk := builder.CurrentBlock()
	for _, l := range labels {
		// Args are always on the top of the stack. Note that we should not share the args slice
		// among the jump instructions since the args are modified during passes (e.g. redundant phi elimination).
		args := c.nPeekDup(numArgs)
		targetBlk, _ := state.brTargetArgNumFor(l)
		trampoline := builder.AllocateBasicBlock()
		builder.SetCurrentBlock(trampoline)
		c.insertJumpToBlock(args, targetBlk)
		// Block IDs are stashed in the ssa.Values buffer; converted back to BasicBlockID below.
		trampolineBlockIDs = trampolineBlockIDs.Append(builder.VarLengthPool(), ssa.Value(trampoline.ID()))
	}
	builder.SetCurrentBlock(currentBlk)

	// If the target block has no arguments, we can just jump to the target block.
	brTable := builder.AllocateInstruction()
	brTable.AsBrTable(index, trampolineBlockIDs)
	builder.InsertInstruction(brTable)

	// Each trampoline's only predecessor (this block) is now known, so seal them.
	for _, trampolineID := range trampolineBlockIDs.View() {
		builder.Seal(builder.BasicBlock(ssa.BasicBlockID(trampolineID)))
	}
}
4099
4100func (l *loweringState) brTargetArgNumFor(labelIndex uint32) (targetBlk ssa.BasicBlock, argNum int) {
4101	targetFrame := l.ctrlPeekAt(int(labelIndex))
4102	if targetFrame.isLoop() {
4103		targetBlk, argNum = targetFrame.blk, len(targetFrame.blockType.Params)
4104	} else {
4105		targetBlk, argNum = targetFrame.followingBlock, len(targetFrame.blockType.Results)
4106	}
4107	return
4108}
4109
// callListenerBefore emits an indirect call to this function's "before" listener trampoline.
// The trampoline pointer is looked up by the function's type index, and the call receives
// (execCtx, wasmLocalFunctionIndex, <the function's Wasm-level parameters...>).
func (c *Compiler) callListenerBefore() {
	c.storeCallerModuleContext()

	builder := c.ssaBuilder
	// Load the first element of the before-listener trampoline array from the module context.
	beforeListeners1stElement := builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue,
			c.offset.BeforeListenerTrampolines1stElement.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()

	// Index the trampoline array by the function's type index.
	beforeListenerPtr := builder.AllocateInstruction().
		AsLoad(beforeListeners1stElement, uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64).Insert(builder).Return()

	entry := builder.EntryBlock()
	ps := entry.Params()

	// The loop starts at 2, skipping the first two entry-block params (presumably
	// execCtx and moduleCtx — confirm against the entry block construction), so the
	// listener gets execCtx, the function index, then the Wasm-level parameters.
	args := c.allocateVarLengthValues(ps, c.execCtxPtrValue,
		builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return())
	for i := 2; i < ps; i++ {
		args = args.Append(builder.VarLengthPool(), entry.Param(i))
	}

	// Index 0 holds the "before" signature for this function type.
	beforeSig := c.listenerSignatures[c.wasmFunctionTyp][0]
	builder.AllocateInstruction().
		AsCallIndirect(beforeListenerPtr, beforeSig, args).
		Insert(builder)
}
4137
// callListenerAfter emits an indirect call to this function's "after" listener trampoline.
// The trampoline pointer is looked up by the function's type index, and the call receives
// (execCtx, wasmLocalFunctionIndex, <the function's results...>), where the results are the
// top c.results() entries of the Wasm value stack (peeked, not popped).
func (c *Compiler) callListenerAfter() {
	c.storeCallerModuleContext()

	builder := c.ssaBuilder
	// Load the first element of the after-listener trampoline array from the module context.
	afterListeners1stElement := builder.AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue,
			c.offset.AfterListenerTrampolines1stElement.U32(),
			ssa.TypeI64,
		).Insert(builder).Return()

	// Index the trampoline array by the function's type index.
	afterListenerPtr := builder.AllocateInstruction().
		AsLoad(afterListeners1stElement,
			uint32(c.wasmFunctionTypeIndex)*8 /* 8 bytes per index */, ssa.TypeI64).
		Insert(builder).
		Return()

	// Index 1 holds the "after" signature for this function type.
	afterSig := c.listenerSignatures[c.wasmFunctionTyp][1]
	args := c.allocateVarLengthValues(
		c.results()+2,
		c.execCtxPtrValue,
		builder.AllocateInstruction().AsIconst32(c.wasmLocalFunctionIndex).Insert(builder).Return(),
	)

	// Append the function results, which sit on top of the value stack at return time.
	l := c.state()
	tail := len(l.values)
	args = args.Append(c.ssaBuilder.VarLengthPool(), l.values[tail-c.results():tail]...)
	builder.AllocateInstruction().
		AsCallIndirect(afterListenerPtr, afterSig, args).
		Insert(builder)
}
4168
// Layout constants for element/data instances as accessed by the generated code:
// each instance occupies elementOrDataInstanceSize bytes (presumably a Go slice
// header: pointer, length, capacity — TODO confirm), with the length at offset 8.
const (
	elementOrDataInstanceLenOffset = 8
	elementOrDataInstanceSize      = 24
)
4173
4174// dropInstance inserts instructions to drop the element/data instance specified by the given index.
4175func (c *Compiler) dropDataOrElementInstance(index uint32, firstItemOffset wazevoapi.Offset) {
4176	builder := c.ssaBuilder
4177	instPtr := c.dataOrElementInstanceAddr(index, firstItemOffset)
4178
4179	zero := builder.AllocateInstruction().AsIconst64(0).Insert(builder).Return()
4180
4181	// Clear the instance.
4182	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, 0).Insert(builder)
4183	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset).Insert(builder)
4184	builder.AllocateInstruction().AsStore(ssa.OpcodeStore, zero, instPtr, elementOrDataInstanceLenOffset+8).Insert(builder)
4185}
4186
// dataOrElementInstanceAddr returns the address of the index-th data/element instance:
// the array base loaded from the module context at firstItemOffset, plus
// index*elementOrDataInstanceSize.
func (c *Compiler) dataOrElementInstanceAddr(index uint32, firstItemOffset wazevoapi.Offset) ssa.Value {
	builder := c.ssaBuilder

	// Load the address of the first instance from the module context.
	_1stItemPtr := builder.
		AllocateInstruction().
		AsLoad(c.moduleCtxPtrValue, firstItemOffset.U32(), ssa.TypeI64).
		Insert(builder).Return()

	// Each data/element instance is a slice, so we need to multiply index by
	// elementOrDataInstanceSize (24 bytes) to get the byte offset of the target instance.
	index = index * elementOrDataInstanceSize
	indexExt := builder.AllocateInstruction().AsIconst64(uint64(index)).Insert(builder).Return()
	// Then, add the offset to the address of the instance.
	instPtr := builder.AllocateInstruction().AsIadd(_1stItemPtr, indexExt).Insert(builder).Return()
	return instPtr
}
4202
4203func (c *Compiler) boundsCheckInDataOrElementInstance(instPtr, offsetInInstance, copySize ssa.Value, exitCode wazevoapi.ExitCode) {
4204	builder := c.ssaBuilder
4205	dataInstLen := builder.AllocateInstruction().
4206		AsLoad(instPtr, elementOrDataInstanceLenOffset, ssa.TypeI64).
4207		Insert(builder).Return()
4208	ceil := builder.AllocateInstruction().AsIadd(offsetInInstance, copySize).Insert(builder).Return()
4209	cmp := builder.AllocateInstruction().
4210		AsIcmp(dataInstLen, ceil, ssa.IntegerCmpCondUnsignedLessThan).
4211		Insert(builder).
4212		Return()
4213	builder.AllocateInstruction().
4214		AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, exitCode).
4215		Insert(builder)
4216}
4217
4218func (c *Compiler) boundsCheckInTable(tableIndex uint32, offset, size ssa.Value) (tableInstancePtr ssa.Value) {
4219	builder := c.ssaBuilder
4220	dstCeil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return()
4221
4222	// Load the table.
4223	tableInstancePtr = builder.AllocateInstruction().
4224		AsLoad(c.moduleCtxPtrValue, c.offset.TableOffset(int(tableIndex)).U32(), ssa.TypeI64).
4225		Insert(builder).Return()
4226
4227	// Load the table's length.
4228	tableLen := builder.AllocateInstruction().
4229		AsLoad(tableInstancePtr, tableInstanceLenOffset, ssa.TypeI32).Insert(builder).Return()
4230	tableLenExt := builder.AllocateInstruction().AsUExtend(tableLen, 32, 64).Insert(builder).Return()
4231
4232	// Compare the length and the target, and trap if out of bounds.
4233	checkOOB := builder.AllocateInstruction()
4234	checkOOB.AsIcmp(tableLenExt, dstCeil, ssa.IntegerCmpCondUnsignedLessThan)
4235	builder.InsertInstruction(checkOOB)
4236	exitIfOOB := builder.AllocateInstruction()
4237	exitIfOOB.AsExitIfTrueWithCode(c.execCtxPtrValue, checkOOB.Return(), wazevoapi.ExitCodeTableOutOfBounds)
4238	builder.InsertInstruction(exitIfOOB)
4239	return
4240}
4241
4242func (c *Compiler) loadTableBaseAddr(tableInstancePtr ssa.Value) ssa.Value {
4243	builder := c.ssaBuilder
4244	loadTableBaseAddress := builder.
4245		AllocateInstruction().
4246		AsLoad(tableInstancePtr, tableInstanceBaseAddressOffset, ssa.TypeI64).
4247		Insert(builder)
4248	return loadTableBaseAddress.Return()
4249}
4250
4251func (c *Compiler) boundsCheckInMemory(memLen, offset, size ssa.Value) {
4252	builder := c.ssaBuilder
4253	ceil := builder.AllocateInstruction().AsIadd(offset, size).Insert(builder).Return()
4254	cmp := builder.AllocateInstruction().
4255		AsIcmp(memLen, ceil, ssa.IntegerCmpCondUnsignedLessThan).
4256		Insert(builder).
4257		Return()
4258	builder.AllocateInstruction().
4259		AsExitIfTrueWithCode(c.execCtxPtrValue, cmp, wazevoapi.ExitCodeMemoryOutOfBounds).
4260		Insert(builder)
4261}