From fea39c191b3f2c6dd4bd0b3cac8fef8cdd6bcc8a Mon Sep 17 00:00:00 2001 From: Malcolm Still Date: Sun, 29 Jan 2023 13:03:13 +0000 Subject: [PATCH] WIP --- src/instance.zig | 6 +- src/instance/vm.zig | 1014 ++++++++++++++++++++--------------------- src/module.zig | 30 +- src/module/parser.zig | 39 +- src/rr.zig | 922 ++++++++++++++++++++++++------------- 5 files changed, 1134 insertions(+), 877 deletions(-) diff --git a/src/instance.zig b/src/instance.zig index fb7ce418..bd8add86 100644 --- a/src/instance.zig +++ b/src/instance.zig @@ -11,6 +11,7 @@ const Global = @import("store/global.zig").Global; const Elem = @import("store/elem.zig").Elem; const Data = @import("store/data.zig").Data; const VirtualMachine = @import("instance/vm.zig").VirtualMachine; +const Instruction = VirtualMachine.Instruction; const VirtualMachineOptions = struct { frame_stack_size: comptime_int = 1024, @@ -322,7 +323,7 @@ pub const Instance = struct { try vm.pushLabel(VirtualMachine.Label{ .return_arity = function.results.len, .op_stack_len = locals_start, - .branch_target = 0, + .branch_target = @ptrCast([*]Instruction, &self.module.instructions.items[0]), }); // 8. 
Execute our function @@ -371,7 +372,7 @@ pub const Instance = struct { try vm.pushLabel(VirtualMachine.Label{ .return_arity = 0, .op_stack_len = locals_start, - .branch_target = 0, + .branch_target = @ptrCast([*]Instruction, &self.module.instructions.items[0]), }); try vm.invoke(f.start); @@ -402,6 +403,7 @@ pub const Instance = struct { try vm.pushLabel(VirtualMachine.Label{ .return_arity = 1, .op_stack_len = locals_start, + .branch_target = @ptrCast([*]Instruction, &self.module.instructions.items[0]), }); try vm.invoke(start); diff --git a/src/instance/vm.zig b/src/instance/vm.zig index 0bf6a969..f989337c 100644 --- a/src/instance/vm.zig +++ b/src/instance/vm.zig @@ -6,6 +6,8 @@ const Module = @import("../module.zig").Module; const ValType = @import("../module.zig").ValType; const Instance = @import("../instance.zig").Instance; const Rr = @import("../rr.zig").Rr; +const rr = @import("../rr.zig"); +const immediate = rr.immediate; // VirtualMachine: // @@ -34,7 +36,7 @@ pub const VirtualMachine = struct { pub const Frame = struct { locals: []u64 = undefined, // TODO: we're in trouble if we move our stacks in memory - return_arity: usize = 0, + return_arity: usize, op_stack_len: usize, label_stack_len: usize, inst: *Instance, @@ -44,8 +46,8 @@ pub const VirtualMachine = struct { // // - code: the code we should interpret after `end` pub const Label = struct { - return_arity: usize = 0, - branch_target: usize = 0, + branch_target: [*]Instruction, + return_arity: usize, op_stack_len: usize, // u32? 
}; @@ -59,14 +61,14 @@ pub const VirtualMachine = struct { } pub fn invoke(self: *VirtualMachine, ip: usize) !void { - const instr = self.inst.module.instructions.items[ip]; + const instr = &self.inst.module.instructions.items[ip]; - try @call(.{}, instr, .{ self, ip, self.inst.module.parsed_code.items, @ptrCast([]Instruction, self.inst.module.instructions.items) }); + try @call(.auto, instr.*, .{ self, @ptrCast([*]Instruction, instr) }); } // To avoid a recursive definition, define similar function pointer type we will cast to / from - pub const Instruction = *const fn (*VirtualMachine, usize, []Rr, []*void) WasmError!void; - pub const InstructionFunction = *const fn (*VirtualMachine, usize, []Rr, []Instruction) WasmError!void; + pub const Instruction = *const fn (usize, usize) WasmError!void; + pub const InstructionFunction = *const fn (*VirtualMachine, *Instruction) WasmError!void; pub const lookup = [256]InstructionFunction{ @"unreachable", nop, block, loop, @"if", @"else", if_no_else, impl_ni, impl_ni, impl_ni, impl_ni, end, br, br_if, br_table, @"return", @@ -87,28 +89,26 @@ pub const VirtualMachine = struct { impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, impl_ni, misc, impl_ni, impl_ni, impl_ni, }; - inline fn dispatch(self: *VirtualMachine, next_ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const next_fn = instructions[next_ip]; - - return try @call(.{ .modifier = .always_tail }, @ptrCast(InstructionFunction, next_fn), .{ self, next_ip, code, instructions }); + inline fn dispatch(self: *VirtualMachine, next_ip: [*]Instruction) WasmError!void { + return try @call(.always_tail, @ptrCast(InstructionFunction, next_ip.*), .{ self, next_ip }); } pub const REF_NULL: u64 = 0xFFFF_FFFF_FFFF_FFFF; - pub fn impl_ni(_: *VirtualMachine, _: usize, _: []Rr, _: []Instruction) WasmError!void { + pub fn impl_ni(_: *VirtualMachine, _: Instruction) WasmError!void { return error.NotImplemented; } - 
pub fn @"unreachable"(_: *VirtualMachine, _: usize, _: []Rr, _: []Instruction) WasmError!void { + pub fn @"unreachable"(_: *VirtualMachine, _: Instruction) WasmError!void { return error.TrapUnreachable; } - pub fn nop(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - return dispatch(self, ip + 1, code, instructions); + pub fn nop(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + return dispatch(self, ip + 1); } - pub fn block(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].block; + pub fn block(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.block); try self.pushLabel(Label{ .return_arity = meta.return_arity, @@ -116,11 +116,11 @@ pub const VirtualMachine = struct { .branch_target = meta.branch_target, }); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn loop(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].loop; + pub fn loop(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.loop); try self.pushLabel(Label{ // note that we use block_params rather than block_returns for return arity: @@ -129,11 +129,11 @@ pub const VirtualMachine = struct { .branch_target = meta.branch_target, }); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"if"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"if"; + pub fn @"if"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"if"); const condition = self.popOperand(u32); try self.pushLabel(Label{ @@ -142,21 +142,21 @@ pub const VirtualMachine = struct { .branch_target = meta.branch_target, }); - return dispatch(self, if (condition == 0) meta.else_ip else ip + 1, 
code, instructions); + return dispatch(self, if (condition == 0) meta.else_ip else ip + 1); } - pub fn @"else"(self: *VirtualMachine, _: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"else"(self: *VirtualMachine, _: Instruction) WasmError!void { const label = self.popLabel(); - return dispatch(self, label.branch_target, code, instructions); + return dispatch(self, label.branch_target); } - pub fn if_no_else(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].if_no_else; + pub fn if_no_else(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.if_no_else); const condition = self.popOperand(u32); if (condition == 0) { - return dispatch(self, meta.branch_target, code, instructions); + return dispatch(self, meta.branch_target); } else { // We are inside the if branch try self.pushLabel(Label{ @@ -165,42 +165,42 @@ pub const VirtualMachine = struct { .branch_target = meta.branch_target, }); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } } - pub fn end(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn end(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { _ = self.popLabel(); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn br(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const next_ip = self.branch(code[ip].br); + pub fn br(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const next_ip = self.branch(immediate(ip, rr.br)); - return dispatch(self, next_ip, code, instructions); + return dispatch(self, next_ip); } - pub fn br_if(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn br_if(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const condition = self.popOperand(u32); - const 
next_ip = if (condition == 0) ip + 1 else self.branch(code[ip].br_if); + const next_ip = if (condition == 0) ip + 1 else self.branch(immediate(ip, rr.br_if)); - return dispatch(self, next_ip, code, instructions); + return dispatch(self, next_ip); } - pub fn br_table(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].br_table; + pub fn br_table(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.br_table); const i = self.popOperand(u32); const ls = self.inst.module.br_table_indices.items[meta.ls.offset .. meta.ls.offset + meta.ls.count]; const next_ip = if (i >= ls.len) self.branch(meta.ln) else self.branch(ls[i]); - return dispatch(self, next_ip, code, instructions); + return dispatch(self, next_ip); } - pub fn @"return"(self: *VirtualMachine, _: usize, _: []Rr, _: []Instruction) WasmError!void { + pub fn @"return"(self: *VirtualMachine, _: Instruction) WasmError!void { const frame = self.peekFrame(); const n = frame.return_arity; @@ -223,11 +223,11 @@ pub const VirtualMachine = struct { const previous_frame = self.peekFrame(); self.inst = previous_frame.inst; - return dispatch(self, label.branch_target, previous_frame.inst.module.parsed_code.items, @ptrCast([]Instruction, previous_frame.inst.module.instructions.items)); + return dispatch(self, label.branch_target); } - pub fn call(self: *VirtualMachine, ip: usize, code: []Rr, _: []Instruction) WasmError!void { - const funcidx = code[ip].call; + pub fn call(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const funcidx = immediate(ip, rr.call); const function = try self.inst.getFunc(funcidx); var next_ip = ip; @@ -265,11 +265,11 @@ pub const VirtualMachine = struct { }, } - return dispatch(self, next_ip, self.inst.module.parsed_code.items, @ptrCast([]Instruction, self.inst.module.instructions.items)); + return dispatch(self, next_ip); } - pub fn call_indirect(self: *VirtualMachine, ip: usize, code: 
[]Rr, _: []Instruction) WasmError!void { - const call_indirect_instruction = code[ip].call_indirect; + pub fn call_indirect(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const call_indirect_instruction = immediate(ip, rr.call_indirect); var module = self.inst.module; const typeidx = call_indirect_instruction.typeidx; @@ -321,11 +321,11 @@ pub const VirtualMachine = struct { }, } - return dispatch(self, next_ip, self.inst.module.parsed_code.items, @ptrCast([]Instruction, self.inst.module.instructions.items)); + return dispatch(self, next_ip); } - pub fn fast_call(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const f = code[ip].fast_call; + pub fn fast_call(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const f = immediate(ip, rr.fast_call); // Check we have enough stack space try self.checkStackSpace(f.required_stack_space + f.locals); @@ -348,15 +348,15 @@ pub const VirtualMachine = struct { .branch_target = ip + 1, }); - return dispatch(self, f.start, code, instructions); + return dispatch(self, f.start); } - pub fn drop(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn drop(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { _ = self.popAnyOperand(); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn select(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn select(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const condition = self.popOperand(u32); const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); @@ -367,60 +367,60 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, c2); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"local.get"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const localidx 
= code[ip].@"local.get"; + pub fn @"local.get"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const localidx = immediate(ip, rr.@"local.get"); const frame = self.peekFrame(); self.pushOperandNoCheck(u64, frame.locals[localidx]); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"local.set"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const localidx = code[ip].@"local.set"; + pub fn @"local.set"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const localidx = immediate(ip, rr.@"local.set"); const frame = self.peekFrame(); frame.locals[localidx] = self.popOperand(u64); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"local.tee"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const localidx = code[ip].@"local.tee"; + pub fn @"local.tee"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const localidx = immediate(ip, rr.@"local.tee"); const frame = self.peekFrame(); frame.locals[localidx] = self.peekOperand(); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"global.get"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const globalidx = code[ip].@"global.get"; + pub fn @"global.get"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const globalidx = immediate(ip, rr.@"global.get"); const global = try self.inst.getGlobal(globalidx); self.pushOperandNoCheck(u64, global.value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"global.set"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const globalidx = code[ip].@"global.set"; + pub fn @"global.set"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const globalidx = immediate(ip, 
rr.@"global.set"); const value = self.popAnyOperand(); const global = try self.inst.getGlobal(globalidx); global.value = value; - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"table.get"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const tableidx = code[ip].@"table.get"; + pub fn @"table.get"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const tableidx = immediate(ip, rr.@"table.get"); const table = try self.inst.getTable(tableidx); const index = self.popOperand(u32); @@ -432,11 +432,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, REF_NULL); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"table.set"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const tableidx = code[ip].@"table.set"; + pub fn @"table.set"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const tableidx = immediate(ip, rr.@"table.set"); const table = try self.inst.getTable(tableidx); const ref = self.popOperand(u64); @@ -444,11 +444,11 @@ pub const VirtualMachine = struct { try table.set(index, ref); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.load"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i32.load"; + pub fn @"i32.load"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.load"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -456,11 +456,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u32, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.load"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = 
code[ip].@"i64.load"; + pub fn @"i64.load"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.load"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -468,11 +468,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.load"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"f32.load"; + pub fn @"f32.load"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"f32.load"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -480,11 +480,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f32, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.load"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"f64.load"; + pub fn @"f64.load"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"f64.load"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -492,11 +492,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.load8_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i32.load8_s"; + pub fn @"i32.load8_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.load8_s"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -504,11 +504,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, value); - return dispatch(self, ip + 1, code, 
instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.load8_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i32.load8_u"; + pub fn @"i32.load8_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.load8_u"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -516,11 +516,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u32, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.load16_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i32.load16_s"; + pub fn @"i32.load16_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.load16_s"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -528,11 +528,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.load16_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i32.load16_u"; + pub fn @"i32.load16_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.load16_u"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -540,11 +540,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u32, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.load8_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.load8_s"; + pub fn @"i64.load8_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, 
rr.@"i64.load8_s"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -552,11 +552,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.load8_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.load8_u"; + pub fn @"i64.load8_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.load8_u"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -564,11 +564,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.load16_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.load16_s"; + pub fn @"i64.load16_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.load16_s"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -576,11 +576,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.load16_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.load16_u"; + pub fn @"i64.load16_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.load16_u"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -588,11 +588,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.load32_s"(self: *VirtualMachine, 
ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.load32_s"; + pub fn @"i64.load32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.load32_s"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -600,11 +600,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.load32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.load32_u"; + pub fn @"i64.load32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.load32_u"); const memory = try self.inst.getMemory(0); const address = self.popOperand(u32); @@ -612,11 +612,11 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.store"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i32.store"; + pub fn @"i32.store"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.store"); const memory = try self.inst.getMemory(0); const value = self.popOperand(u32); @@ -624,11 +624,11 @@ pub const VirtualMachine = struct { try memory.write(u32, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.store"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.store"; + pub fn @"i64.store"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.store"); const memory = try self.inst.getMemory(0); const value = self.popOperand(u64); @@ -636,11 
+636,11 @@ pub const VirtualMachine = struct { try memory.write(u64, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.store"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"f32.store"; + pub fn @"f32.store"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"f32.store"); const memory = try self.inst.getMemory(0); const value = self.popOperand(f32); @@ -648,11 +648,11 @@ pub const VirtualMachine = struct { try memory.write(f32, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.store"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"f64.store"; + pub fn @"f64.store"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"f64.store"); const memory = try self.inst.getMemory(0); const value = self.popOperand(f64); @@ -660,11 +660,11 @@ pub const VirtualMachine = struct { try memory.write(f64, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.store8"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i32.store8"; + pub fn @"i32.store8"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.store8"); const memory = try self.inst.getMemory(0); const value = @truncate(u8, self.popOperand(u32)); @@ -672,11 +672,11 @@ pub const VirtualMachine = struct { try memory.write(u8, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.store16"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - 
const meta = code[ip].@"i32.store16"; + pub fn @"i32.store16"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i32.store16"); const memory = try self.inst.getMemory(0); const value = @truncate(u16, self.popOperand(u32)); @@ -684,11 +684,11 @@ pub const VirtualMachine = struct { try memory.write(u16, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.store8"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.store8"; + pub fn @"i64.store8"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.store8"); const memory = try self.inst.getMemory(0); const value = @truncate(u8, self.popOperand(u64)); @@ -696,11 +696,11 @@ pub const VirtualMachine = struct { try memory.write(u8, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.store16"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.store16"; + pub fn @"i64.store16"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.store16"); const memory = try self.inst.getMemory(0); const value = @truncate(u16, self.popOperand(u64)); @@ -708,11 +708,11 @@ pub const VirtualMachine = struct { try memory.write(u16, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.store32"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].@"i64.store32"; + pub fn @"i64.store32"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.@"i64.store32"); const memory = try self.inst.getMemory(0); const value = @truncate(u32, 
self.popOperand(u64)); @@ -720,18 +720,18 @@ pub const VirtualMachine = struct { try memory.write(u32, meta.offset, address, value); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"memory.size"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"memory.size"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const memory = try self.inst.getMemory(0); self.pushOperandNoCheck(u32, @intCast(u32, memory.data.items.len)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"memory.grow"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"memory.grow"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const memory = try self.inst.getMemory(0); const num_pages = self.popOperand(u32); @@ -741,394 +741,394 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, @as(i32, -1)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.const"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const instr = code[ip]; + pub fn @"i32.const"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const instr = immediate(ip, rr.@"i32.const"); self.pushOperandNoCheck(i32, instr.@"i32.const"); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.const"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const instr = code[ip]; + pub fn @"i64.const"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const instr = immediate(ip, rr.@"i64.const"); self.pushOperandNoCheck(i64, instr.@"i64.const"); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.const"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: 
[]Instruction) WasmError!void { - const instr = code[ip]; + pub fn @"f32.const"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const instr = immediate(ip, rr.@"f32.const"); self.pushOperandNoCheck(f32, instr.@"f32.const"); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.const"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const instr = code[ip]; + pub fn @"f64.const"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const instr = immediate(ip, rr.@"f64.const"); self.pushOperandNoCheck(f64, instr.@"f64.const"); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.eqz"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.eqz"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @as(u32, if (c1 == 0) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.eq"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.eq"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @as(u32, if (c1 == c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.ne"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.ne"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @as(u32, if (c1 != c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.lt_s"(self: *VirtualMachine, ip: usize, code: []Rr, 
instructions: []Instruction) WasmError!void { + pub fn @"i32.lt_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i32); const c1 = self.popOperand(i32); self.pushOperandNoCheck(u32, @as(u32, if (c1 < c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.lt_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.lt_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @as(u32, if (c1 < c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.gt_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.gt_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i32); const c1 = self.popOperand(i32); self.pushOperandNoCheck(u32, @as(u32, if (c1 > c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.gt_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.gt_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @as(u32, if (c1 > c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.le_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.le_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i32); const c1 = self.popOperand(i32); self.pushOperandNoCheck(u32, @as(u32, if (c1 <= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn 
@"i32.le_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.le_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @as(u32, if (c1 <= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.ge_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.ge_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i32); const c1 = self.popOperand(i32); self.pushOperandNoCheck(u32, @as(u32, if (c1 >= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.ge_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.ge_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @as(u32, if (c1 >= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.eqz"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.eqz"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @as(u64, if (c1 == 0) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.eq"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.eq"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @as(u64, if (c1 == c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, 
ip + 1); } - pub fn @"i64.ne"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.ne"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @as(u64, if (c1 != c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.lt_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.lt_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i64); const c1 = self.popOperand(i64); self.pushOperandNoCheck(u64, @as(u64, if (c1 < c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.lt_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.lt_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @as(u64, if (c1 < c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.gt_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.gt_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i64); const c1 = self.popOperand(i64); self.pushOperandNoCheck(u64, @as(u64, if (c1 > c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.gt_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.gt_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @as(u64, if (c1 > c2) 1 else 0)); - return dispatch(self, 
ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.le_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.le_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i64); const c1 = self.popOperand(i64); self.pushOperandNoCheck(u64, @as(u64, if (c1 <= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.le_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.le_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @as(u64, if (c1 <= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.ge_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.ge_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i64); const c1 = self.popOperand(i64); self.pushOperandNoCheck(u64, @as(u64, if (c1 >= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.ge_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.ge_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @as(u64, if (c1 >= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.eq"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.eq"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(u64, 
@as(u64, if (c1 == c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.ne"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.ne"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(u64, @as(u64, if (c1 != c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.lt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.lt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(u64, @as(u64, if (c1 < c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.gt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.gt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(u64, @as(u64, if (c1 > c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.le"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.le"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(u64, @as(u64, if (c1 <= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.ge"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.ge"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = 
self.popOperand(f32); self.pushOperandNoCheck(u64, @as(u64, if (c1 >= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.eq"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.eq"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(u64, @as(u64, if (c1 == c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.ne"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.ne"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(u64, @as(u64, if (c1 != c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.lt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.lt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(u64, @as(u64, if (c1 < c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.gt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.gt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(u64, @as(u64, if (c1 > c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.le"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.le"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const 
c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(u64, @as(u64, if (c1 <= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.ge"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.ge"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(u64, @as(u64, if (c1 >= c2) 1 else 0)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.clz"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.clz"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @clz(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.ctz"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.ctz"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @ctz(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.popcnt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.popcnt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, @popCount(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.add"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.add"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, c1 +% c2); - return 
dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.sub"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.sub"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, c1 -% c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.mul"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.mul"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, c1 *% c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.div_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.div_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i32); const c1 = self.popOperand(i32); @@ -1136,10 +1136,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, div); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.div_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.div_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); @@ -1147,10 +1147,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u32, div); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.rem_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.rem_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i32); const c1 = 
self.popOperand(i32); @@ -1159,10 +1159,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, rem); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.rem_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.rem_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); @@ -1170,46 +1170,46 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u32, rem); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.and"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.and"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, c1 & c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.or"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.or"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, c1 | c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.xor"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.xor"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, c1 ^ c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.shl"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.shl"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { 
const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, math.shl(u32, c1, c2 % 32)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.shr_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.shr_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i32); const c1 = self.popOperand(i32); @@ -1217,85 +1217,85 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, math.shr(i32, c1, mod)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.shr_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.shr_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, math.shr(u32, c1, c2 % 32)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.rotl"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.rotl"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, math.rotl(u32, c1, c2 % 32)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.rotr"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.rotr"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u32); const c1 = self.popOperand(u32); self.pushOperandNoCheck(u32, math.rotr(u32, c1, c2 % 32)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.clz"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) 
WasmError!void { + pub fn @"i64.clz"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @clz(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.ctz"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.ctz"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @ctz(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.popcnt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.popcnt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @popCount(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.add"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.add"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, c1 +% c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.sub"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.sub"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, c1 -% c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.mul"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.mul"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = 
self.popOperand(u64); self.pushOperandNoCheck(u64, c1 *% c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.div_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.div_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i64); const c1 = self.popOperand(i64); @@ -1303,10 +1303,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, div); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.div_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.div_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); @@ -1314,10 +1314,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, div); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.rem_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.rem_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i64); const c1 = self.popOperand(i64); @@ -1326,10 +1326,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, rem); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.rem_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.rem_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); @@ -1337,46 +1337,46 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, rem); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.and"(self: *VirtualMachine, ip: usize, code: []Rr, 
instructions: []Instruction) WasmError!void { + pub fn @"i64.and"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, c1 & c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.or"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.or"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, c1 | c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.xor"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.xor"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, c1 ^ c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.shl"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.shl"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, math.shl(u64, c1, c2 % 64)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.shr_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.shr_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(i64); const c1 = self.popOperand(i64); @@ -1384,77 +1384,77 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, math.shr(i64, c1, mod)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.shr_u"(self: *VirtualMachine, 
ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.shr_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, math.shr(u64, c1, c2 % 64)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.rotl"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.rotl"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, math.rotl(u64, c1, c2 % 64)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.rotr"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.rotr"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(u64); const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, math.rotr(u64, c1, c2 % 64)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.abs"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.abs"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, math.fabs(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.neg"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.neg"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, -c1); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.ceil"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + 
pub fn @"f32.ceil"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, @ceil(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.floor"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.floor"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, @floor(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.trunc"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.trunc"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, @trunc(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.nearest"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.nearest"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); const floor = @floor(c1); const ceil = @ceil(c1); @@ -1469,64 +1469,64 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f32, @round(c1)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.sqrt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.sqrt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, math.sqrt(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.add"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.add"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = 
self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, c1 + c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.sub"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.sub"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, c1 - c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.mul"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.mul"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, c1 * c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.div"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.div"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); self.pushOperandNoCheck(f32, c1 / c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.min"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.min"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); if (math.isNan(c1)) { self.pushOperandNoCheck(f32, math.nan_f32); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (math.isNan(c2)) { self.pushOperandNoCheck(f32, math.nan_f32); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (c1 == 0.0 and c2 == 0.0) { @@ -1539,20 +1539,20 @@ pub const VirtualMachine = struct { 
self.pushOperandNoCheck(f32, math.min(c1, c2)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.max"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.max"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); if (math.isNan(c1)) { self.pushOperandNoCheck(f32, math.nan_f32); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (math.isNan(c2)) { self.pushOperandNoCheck(f32, math.nan_f32); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (c1 == 0.0 and c2 == 0.0) { @@ -1565,10 +1565,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f32, math.max(c1, c2)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.copysign"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.copysign"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f32); const c1 = self.popOperand(f32); @@ -1578,50 +1578,50 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f32, math.fabs(c1)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.abs"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.abs"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, math.fabs(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.neg"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.neg"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); 
self.pushOperandNoCheck(f64, -c1); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.ceil"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.ceil"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, @ceil(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.floor"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.floor"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, @floor(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.trunc"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.trunc"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, @trunc(c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.nearest"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.nearest"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); const floor = @floor(c1); const ceil = @ceil(c1); @@ -1636,60 +1636,60 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f64, @round(c1)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.sqrt"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.sqrt"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, math.sqrt(c1)); - return dispatch(self, ip + 1, code, instructions); + 
return dispatch(self, ip + 1); } - pub fn @"f64.add"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.add"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, c1 + c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.sub"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.sub"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, c1 - c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.mul"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.mul"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, c1 * c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.div"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.div"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); self.pushOperandNoCheck(f64, c1 / c2); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.min"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.min"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); if (math.isNan(c1) or math.isNan(c2)) { self.pushOperandNoCheck(f64, math.nan_f64); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 
1); } if (c1 == 0.0 and c2 == 0.0) { @@ -1702,16 +1702,16 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f64, math.min(c1, c2)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.max"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.max"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); if (math.isNan(c1) or math.isNan(c2)) { self.pushOperandNoCheck(f64, math.nan_f64); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (c1 == 0.0 and c2 == 0.0) { @@ -1724,10 +1724,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f64, math.max(c1, c2)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.copysign"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.copysign"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c2 = self.popOperand(f64); const c1 = self.popOperand(f64); @@ -1737,18 +1737,18 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(f64, math.fabs(c1)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.wrap_i64"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.wrap_i64"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(i32, @truncate(i32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.trunc_f32_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_f32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); if (math.isNan(c1)) return 
error.InvalidConversion; @@ -1760,10 +1760,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, @floatToInt(i32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.trunc_f32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_f32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); if (math.isNan(c1)) return error.InvalidConversion; @@ -1775,10 +1775,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u32, @floatToInt(u32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.trunc_f64_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_f64_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); if (math.isNan(c1)) return error.InvalidConversion; @@ -1790,10 +1790,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, @floatToInt(i32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.trunc_f64_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_f64_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); if (math.isNan(c1)) return error.InvalidConversion; @@ -1805,26 +1805,26 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u32, @floatToInt(u32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.extend_i32_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.extend_i32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(i64, 
@truncate(i32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.extend_i32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.extend_i32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u64); self.pushOperandNoCheck(u64, @truncate(u32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.trunc_f32_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_f32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); if (math.isNan(c1)) return error.InvalidConversion; @@ -1836,10 +1836,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, @floatToInt(i64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.trunc_f32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_f32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); if (math.isNan(c1)) return error.InvalidConversion; @@ -1851,10 +1851,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, @floatToInt(u64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.trunc_f64_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_f64_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); if (math.isNan(c1)) return error.InvalidConversion; @@ -1866,10 +1866,10 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i64, @floatToInt(i64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn 
@"i64.trunc_f64_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_f64_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); if (math.isNan(c1)) return error.InvalidConversion; @@ -1881,168 +1881,168 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, @floatToInt(u64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.convert_i32_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.convert_i32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i32); self.pushOperandNoCheck(f32, @intToFloat(f32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.convert_i32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.convert_i32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u32); self.pushOperandNoCheck(f32, @intToFloat(f32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.convert_i64_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.convert_i64_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(f32, @intToFloat(f32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.convert_i64_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.convert_i64_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u64); self.pushOperandNoCheck(f32, @intToFloat(f32, c1)); - return dispatch(self, ip + 1, code, instructions); + 
return dispatch(self, ip + 1); } - pub fn @"f32.demote_f64"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.demote_f64"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); self.pushOperandNoCheck(f32, @floatCast(f32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.convert_i32_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.convert_i32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i32); self.pushOperandNoCheck(f64, @intToFloat(f64, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.convert_i32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.convert_i32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u32); self.pushOperandNoCheck(f64, @intToFloat(f64, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.convert_i64_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.convert_i64_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(f64, @intToFloat(f64, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.convert_i64_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.convert_i64_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(u64); self.pushOperandNoCheck(f64, @intToFloat(f64, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.promote_f32"(self: 
*VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.promote_f32"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(f64, @floatCast(f64, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.reinterpret_f32"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.reinterpret_f32"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); self.pushOperandNoCheck(i32, @bitCast(i32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.reinterpret_f64"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.reinterpret_f64"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); self.pushOperandNoCheck(i64, @bitCast(i64, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f32.reinterpret_i32"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f32.reinterpret_i32"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i32); self.pushOperandNoCheck(f32, @bitCast(f32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"f64.reinterpret_i64"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"f64.reinterpret_i64"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(f64, @bitCast(f64, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.extend8_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) 
WasmError!void { + pub fn @"i32.extend8_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i32); self.pushOperandNoCheck(i32, @truncate(i8, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.extend16_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.extend16_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i32); self.pushOperandNoCheck(i32, @truncate(i16, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.extend8_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.extend8_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(i64, @truncate(i8, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.extend16_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.extend16_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(i64, @truncate(i16, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.extend32_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.extend32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(i64); self.pushOperandNoCheck(i64, @truncate(i32, c1)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"ref.null"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"ref.null"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { 
self.pushOperandNoCheck(u64, REF_NULL); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"ref.is_null"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"ref.is_null"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const value = self.popOperand(u64); if (value == REF_NULL) { @@ -2051,21 +2051,21 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(u64, 0); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"ref.func"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const funcidx = code[ip].@"ref.func"; + pub fn @"ref.func"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const funcidx = immediate(ip, rr.@"ref.func"); const ref = self.inst.funcaddrs.items[funcidx]; // Not sure about this at all, this could still coincidentally be zero? self.pushOperandNoCheck(u64, ref); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn misc(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - return miscDispatch(self, ip, code, instructions); + pub fn misc(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + return miscDispatch(self, ip); } const misc_lookup = [18]InstructionFunction{ @@ -2073,190 +2073,190 @@ pub const VirtualMachine = struct { @"table.size", @"table.fill", }; - inline fn miscDispatch(self: *VirtualMachine, next_ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const next_instr = code[next_ip].misc; + inline fn miscDispatch(self: *VirtualMachine, next_ip: [*]Instruction) WasmError!void { + // const next_instr = code[next_ip].misc; THIS IS WRONG - return try @call(.{ .modifier = .always_tail }, misc_lookup[@enumToInt(next_instr)], .{ self, next_ip, code, instructions }); + return try @call(.{ .modifier = .always_tail }, 
next_ip, .{ self, next_ip }); } - pub fn @"i32.trunc_sat_f32_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_sat_f32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(i32, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f32, math.maxInt(i32))) { self.pushOperandNoCheck(i32, @bitCast(i32, @as(u32, 0x7fffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f32, math.minInt(i32))) { self.pushOperandNoCheck(i32, @bitCast(i32, @as(u32, 0x80000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(i32, @floatToInt(i32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.trunc_sat_f32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_sat_f32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(u32, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f32, math.maxInt(u32))) { self.pushOperandNoCheck(u32, @bitCast(u32, @as(u32, 0xffffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f32, math.minInt(u32))) { self.pushOperandNoCheck(u32, @bitCast(u32, @as(u32, 0x00000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(u32, @floatToInt(u32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn 
@"i32.trunc_sat_f64_s"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_sat_f64_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(i32, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f64, math.maxInt(i32))) { self.pushOperandNoCheck(i32, @bitCast(i32, @as(u32, 0x7fffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f64, math.minInt(i32))) { self.pushOperandNoCheck(i32, @bitCast(i32, @as(u32, 0x80000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(i32, @floatToInt(i32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i32.trunc_sat_f64_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i32.trunc_sat_f64_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(u32, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f64, math.maxInt(u32))) { self.pushOperandNoCheck(u32, @bitCast(u32, @as(u32, 0xffffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f64, math.minInt(u32))) { self.pushOperandNoCheck(u32, @bitCast(u32, @as(u32, 0x00000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(u32, @floatToInt(u32, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.trunc_sat_f32_s"(self: *VirtualMachine, ip: 
usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_sat_f32_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(i64, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f32, math.maxInt(i64))) { self.pushOperandNoCheck(i64, @bitCast(i64, @as(u64, 0x7fffffffffffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f32, math.minInt(i64))) { self.pushOperandNoCheck(i64, @bitCast(i64, @as(u64, 0x8000000000000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(i64, @floatToInt(i64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.trunc_sat_f32_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_sat_f32_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f32); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(u64, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f32, math.maxInt(u64))) { self.pushOperandNoCheck(u64, @bitCast(u64, @as(u64, 0xffffffffffffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f32, math.minInt(u64))) { self.pushOperandNoCheck(u64, @bitCast(u64, @as(u64, 0x0000000000000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(u64, @floatToInt(u64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.trunc_sat_f64_s"(self: *VirtualMachine, ip: usize, code: []Rr, 
instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_sat_f64_s"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(i64, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f64, math.maxInt(i64))) { self.pushOperandNoCheck(i64, @bitCast(i64, @as(u64, 0x7fffffffffffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f64, math.minInt(i64))) { self.pushOperandNoCheck(i64, @bitCast(i64, @as(u64, 0x8000000000000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(i64, @floatToInt(i64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"i64.trunc_sat_f64_u"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"i64.trunc_sat_f64_u"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const c1 = self.popOperand(f64); const trunc = @trunc(c1); if (math.isNan(c1)) { self.pushOperandNoCheck(u64, 0); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc >= @intToFloat(f64, math.maxInt(u64))) { self.pushOperandNoCheck(u64, @bitCast(u64, @as(u64, 0xffffffffffffffff))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (trunc < @intToFloat(f64, math.minInt(u64))) { self.pushOperandNoCheck(u64, @bitCast(u64, @as(u64, 0x0000000000000000))); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } self.pushOperandNoCheck(u64, @floatToInt(u64, trunc)); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"memory.init"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) 
WasmError!void { - const meta = code[ip].misc.@"memory.init"; + pub fn @"memory.init"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.misc.@"memory.init"); const n = self.popOperand(u32); const src = self.popOperand(u32); @@ -2269,7 +2269,7 @@ pub const VirtualMachine = struct { if (@as(u33, src) + @as(u33, n) > data.data.len) return error.OutOfBoundsMemoryAccess; if (@as(u33, dest) + @as(u33, n) > mem_size) return error.OutOfBoundsMemoryAccess; if (n == 0) { - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (data.dropped) return error.OutOfBoundsMemoryAccess; @@ -2279,18 +2279,18 @@ pub const VirtualMachine = struct { try memory.write(u8, 0, dest + i, data.data[src + i]); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"data.drop"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const dataidx = code[ip].misc.@"data.drop"; + pub fn @"data.drop"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const dataidx = immediate(ip, rr.misc.@"data.drop"); const data = try self.inst.getData(dataidx); data.dropped = true; - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"memory.copy"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"memory.copy"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const n = self.popOperand(u32); const src = self.popOperand(u32); const dest = self.popOperand(u32); @@ -2301,7 +2301,7 @@ pub const VirtualMachine = struct { if (@as(u33, src) + @as(u33, n) > mem_size) return error.OutOfBoundsMemoryAccess; if (@as(u33, dest) + @as(u33, n) > mem_size) return error.OutOfBoundsMemoryAccess; if (n == 0) { - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } if (dest <= src) { @@ -2320,10 +2320,10 @@ pub const 
VirtualMachine = struct { } } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"memory.fill"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { + pub fn @"memory.fill"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { const n = self.popOperand(u32); const value = self.popOperand(u32); const dest = self.popOperand(u32); @@ -2333,7 +2333,7 @@ pub const VirtualMachine = struct { if (@as(u33, dest) + @as(u33, n) > mem_size) return error.OutOfBoundsMemoryAccess; if (n == 0) { - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } var i: u32 = 0; @@ -2343,11 +2343,11 @@ pub const VirtualMachine = struct { try memory.write(u8, 0, dest + i, @truncate(u8, value)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"table.init"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].misc.@"table.init"; + pub fn @"table.init"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.misc.@"table.init"); const tableidx = meta.tableidx; const elemidx = meta.elemidx; @@ -2372,20 +2372,20 @@ pub const VirtualMachine = struct { try table.set(d + i, elem.elem[s + i]); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"elem.drop"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].misc.@"elem.drop"; + pub fn @"elem.drop"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.misc.@"elem.drop"); const elemidx = meta.elemidx; const elem = try self.inst.getElem(elemidx); elem.dropped = true; - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"table.copy"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: 
[]Instruction) WasmError!void { - const meta = code[ip].misc.@"table.copy"; + pub fn @"table.copy"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.misc.@"table.copy"); const dest_tableidx = meta.dest_tableidx; const src_tableidx = meta.src_tableidx; @@ -2415,11 +2415,11 @@ pub const VirtualMachine = struct { } } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"table.grow"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].misc.@"table.grow"; + pub fn @"table.grow"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.misc.@"table.grow"); const tableidx = meta.tableidx; const table = try self.inst.getTable(tableidx); @@ -2437,22 +2437,22 @@ pub const VirtualMachine = struct { self.pushOperandNoCheck(i32, @as(i32, -1)); } - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"table.size"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].misc.@"table.size"; + pub fn @"table.size"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.misc.@"table.size"); const tableidx = meta.tableidx; const table = try self.inst.getTable(tableidx); self.pushOperandNoCheck(u32, @intCast(u32, table.size())); - return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } - pub fn @"table.fill"(self: *VirtualMachine, ip: usize, code: []Rr, instructions: []Instruction) WasmError!void { - const meta = code[ip].misc.@"table.fill"; + pub fn @"table.fill"(self: *VirtualMachine, ip: [*]Instruction) WasmError!void { + const meta = immediate(ip, rr.misc.@"table.fill"); const tableidx = meta.tableidx; const table = try self.inst.getTable(tableidx); @@ -2471,7 +2471,7 @@ pub const VirtualMachine = struct { try table.set(d + i, ref); } 
- return dispatch(self, ip + 1, code, instructions); + return dispatch(self, ip + 1); } // https://webassembly.github.io/spec/core/exec/instructions.html#xref-syntax-instructions-syntax-instr-control-mathsf-br-l diff --git a/src/module.zig b/src/module.zig index 3b9cbdae..3ceab3b6 100644 --- a/src/module.zig +++ b/src/module.zig @@ -5,7 +5,9 @@ const math = std.math; const unicode = std.unicode; const ArrayList = std.ArrayList; const VirtualMachine = @import("instance/vm.zig").VirtualMachine; -const Rr = @import("rr.zig").Rr; +const Instruction = VirtualMachine.Instruction; +const InstructionFunction = VirtualMachine.InstructionFunction; +// const Rr = @import("rr.zig").Rr; const RrOpcode = @import("rr.zig").RrOpcode; const Instance = @import("instance.zig").Instance; const Parser = @import("module/parser.zig").Parser; @@ -35,7 +37,6 @@ pub const Module = struct { function_index_start: ?usize = null, data_count: ?u32 = null, element_init_offsets: ArrayList(usize), - parsed_code: ArrayList(Rr), instructions: ArrayList(VirtualMachine.InstructionFunction), local_types: ArrayList(LocalType), br_table_indices: ArrayList(u32), @@ -58,7 +59,6 @@ pub const Module = struct { .codes = Section(Code).init(alloc), .datas = Section(DataSegment).init(alloc), .element_init_offsets = ArrayList(usize).init(alloc), - .parsed_code = ArrayList(Rr).init(alloc), .instructions = ArrayList(VirtualMachine.InstructionFunction).init(alloc), .local_types = ArrayList(LocalType).init(alloc), .br_table_indices = ArrayList(u32).init(alloc), @@ -80,7 +80,6 @@ pub const Module = struct { self.datas.deinit(); self.element_init_offsets.deinit(); - self.parsed_code.deinit(); self.instructions.deinit(); self.local_types.deinit(); self.br_table_indices.deinit(); @@ -100,8 +99,7 @@ pub const Module = struct { // Push an initial return instruction so we don't have to // track the end of a function to use its return on invoke // See https://github.com/malcolmstill/zware/pull/133 - try 
self.parsed_code.append(.@"return"); - try self.instructions.append(VirtualMachine.@"return"); + try self.instructions.append(@ptrCast(InstructionFunction, &VirtualMachine.@"return")); var i: usize = 0; while (true) : (i += 1) { @@ -479,10 +477,8 @@ pub const Module = struct { try self.references.append(funcidx); - const init_offset = self.parsed_code.items.len; - try self.parsed_code.append(Rr{ .@"ref.func" = funcidx }); + const init_offset = self.instructions.items.len; try self.instructions.append(VirtualMachine.@"ref.func"); - try self.parsed_code.append(Rr.@"return"); try self.instructions.append(VirtualMachine.@"return"); try self.element_init_offsets.append(init_offset); } @@ -512,10 +508,8 @@ pub const Module = struct { try self.references.append(funcidx); - const init_offset = self.parsed_code.items.len; - try self.parsed_code.append(Rr{ .@"ref.func" = funcidx }); + const init_offset = self.instructions.items.len; try self.instructions.append(VirtualMachine.@"ref.func"); - try self.parsed_code.append(Rr.@"return"); try self.instructions.append(VirtualMachine.@"return"); try self.element_init_offsets.append(init_offset); } @@ -547,10 +541,8 @@ pub const Module = struct { try self.references.append(funcidx); - const init_offset = self.parsed_code.items.len; - try self.parsed_code.append(Rr{ .@"ref.func" = funcidx }); + const init_offset = self.instructions.items.len; try self.instructions.append(VirtualMachine.@"ref.func"); - try self.parsed_code.append(Rr.@"return"); try self.instructions.append(VirtualMachine.@"return"); try self.element_init_offsets.append(init_offset); } @@ -579,10 +571,8 @@ pub const Module = struct { try self.references.append(funcidx); - const init_offset = self.parsed_code.items.len; - try self.parsed_code.append(Rr{ .@"ref.func" = funcidx }); + const init_offset = self.instructions.items.len; try self.instructions.append(VirtualMachine.@"ref.func"); - try self.parsed_code.append(Rr.@"return"); try 
self.instructions.append(VirtualMachine.@"return"); try self.element_init_offsets.append(init_offset); } @@ -628,7 +618,7 @@ pub const Module = struct { var j: usize = 0; while (j < expr_count) : (j += 1) { - const init_offset = self.parsed_code.items.len; + const init_offset = self.instructions.items.len; _ = try self.readConstantExpression(.FuncRef); try self.element_init_offsets.append(init_offset); } @@ -675,7 +665,7 @@ pub const Module = struct { const count = try self.readULEB128(u32); self.codes.count = count; - try self.parsed_code.ensureTotalCapacity(count * 32); + try self.instructions.ensureTotalCapacity(count * 32); if (count == 0) return; diff --git a/src/module/parser.zig b/src/module/parser.zig index 020bb5e4..7adac769 100644 --- a/src/module/parser.zig +++ b/src/module/parser.zig @@ -14,6 +14,7 @@ const Range = @import("../rr.zig").Range; const Rr = @import("../rr.zig").Rr; const MiscRr = @import("../rr.zig").MiscRr; const VirtualMachine = @import("../instance/vm.zig").VirtualMachine; +const Instruction = VirtualMachine.Instruction; pub const Parsed = struct { start: usize, @@ -23,19 +24,19 @@ pub const Parsed = struct { pub const Parser = struct { function: []const u8 = undefined, code: []const u8 = undefined, - code_ptr: usize, + code_ptr: [*]Instruction, module: *Module, validator: Validator = undefined, params: ?[]const ValType, locals: ?[]LocalType, - continuation_stack: [1024]usize = [_]usize{0} ** 1024, + continuation_stack: [1024][*]Instruction = undefined, continuation_stack_ptr: usize, is_constant: bool = false, scope: usize, pub fn init(module: *Module) Parser { return Parser{ - .code_ptr = module.parsed_code.items.len, + .code_ptr = @ptrCast([*]Instruction, &module.instructions.items[module.instructions.items.len - 1]), .module = module, .params = null, .locals = null, @@ -54,12 +55,11 @@ pub const Parser = struct { self.function = code; self.code = code; - const code_start = self.module.parsed_code.items.len; + const code_start = 
self.module.instructions.items.len; try self.pushFunction(locals, funcidx); while (try self.next()) |instr| { - try self.module.parsed_code.append(instr); try self.module.instructions.append(VirtualMachine.lookup[@enumToInt(instr)]); } @@ -67,7 +67,6 @@ pub const Parser = struct { _ = try self.module.readSlice(bytes_read); // Patch last end so that it is return - self.module.parsed_code.items[self.module.parsed_code.items.len - 1] = .@"return"; self.module.instructions.items[self.module.instructions.items.len - 1] = VirtualMachine.@"return"; return Parsed{ .start = code_start, .max_depth = self.validator.max_depth }; @@ -79,7 +78,7 @@ pub const Parser = struct { self.function = code; self.code = code; - const code_start = self.module.parsed_code.items.len; + const code_start = self.module.instructions.items.len; const in: [0]ValType = [_]ValType{} ** 0; const out: [1]ValType = [_]ValType{valtype} ** 1; @@ -103,7 +102,6 @@ pub const Parser = struct { => |_| {}, else => return error.ValidatorConstantExpressionRequired, } - try self.module.parsed_code.append(instr); try self.module.instructions.append(VirtualMachine.lookup[@enumToInt(instr)]); } @@ -111,7 +109,6 @@ pub const Parser = struct { _ = try self.module.readSlice(bytes_read); // Patch last end so that it is return - self.module.parsed_code.items[self.module.parsed_code.items.len - 1] = .@"return"; self.module.instructions.items[self.module.instructions.items.len - 1] = VirtualMachine.@"return"; return Parsed{ .start = code_start, .max_depth = self.validator.max_depth }; @@ -132,19 +129,19 @@ pub const Parser = struct { ); } - fn pushContinuationStack(self: *Parser, offset: usize) !void { + fn pushContinuationStack(self: *Parser, ip: [*]Instruction) !void { defer self.continuation_stack_ptr += 1; if (self.continuation_stack_ptr >= self.continuation_stack.len) return error.ContinuationStackOverflow; - self.continuation_stack[self.continuation_stack_ptr] = offset; + 
self.continuation_stack[self.continuation_stack_ptr] = ip; } - fn peekContinuationStack(self: *Parser) !usize { + fn peekContinuationStack(self: *Parser) ![*]Instruction { if (self.continuation_stack_ptr <= 0) return error.ContinuationStackUnderflow; // No test covering this return self.continuation_stack[self.continuation_stack_ptr - 1]; } - fn popContinuationStack(self: *Parser) !usize { + fn popContinuationStack(self: *Parser) ![*]Instruction { if (self.continuation_stack_ptr <= 0) return error.ContinuationStackUnderflow; self.continuation_stack_ptr -= 1; @@ -207,7 +204,7 @@ pub const Parser = struct { .block = .{ .param_arity = block_params, .return_arity = block_returns, - .branch_target = 0, + .branch_target = @ptrCast([*]Instruction, &self.module.instructions.items[0]), }, }; }, @@ -245,7 +242,7 @@ pub const Parser = struct { .loop = .{ .param_arity = block_params, .return_arity = block_params, - .branch_target = math.cast(u32, self.code_ptr) orelse return error.FailedCast, + .branch_target = self.code_ptr, }, }; }, @@ -288,7 +285,7 @@ pub const Parser = struct { .if_no_else = .{ .param_arity = block_params, .return_arity = block_returns, - .branch_target = 0, + .branch_target = @ptrCast([*]Instruction, &self.module.instructions.items[0]), }, }; }, @@ -301,8 +298,8 @@ pub const Parser = struct { .@"if" = .{ .param_arity = b.param_arity, .return_arity = b.return_arity, - .branch_target = 0, - .else_ip = math.cast(u32, self.code_ptr + 1) orelse return error.FailedCast, + .branch_target = @ptrCast([*]Instruction, &self.module.instructions.items[0]), + .else_ip = self.code_ptr + 1, }, }; self.module.instructions.items[parsed_code_offset] = VirtualMachine.@"if"; @@ -319,16 +316,16 @@ pub const Parser = struct { const parsed_code_offset = try self.popContinuationStack(); switch (self.module.parsed_code.items[parsed_code_offset]) { - .block => |*b| b.branch_target = math.cast(u32, self.code_ptr + 1) orelse return error.FailedCast, + .block => |*b| b.branch_target = 
self.code_ptr + 1,
         .loop => {},
         .@"if" => |*b| {
-            b.branch_target = math.cast(u32, self.code_ptr + 1) orelse return error.FailedCast;
+            b.branch_target = self.code_ptr + 1;
         },
         .if_no_else => |*b| {
             // We have an if with no else, check that this works arity-wise and replace with fast if
             if (b.param_arity -% b.return_arity != 0) return error.ValidatorElseBranchExpected;
-            b.branch_target = math.cast(u32, self.code_ptr + 1) orelse return error.FailedCast;
+            b.branch_target = self.code_ptr + 1;
         },
         else => return error.UnexpectedInstruction,
     }
diff --git a/src/rr.zig b/src/rr.zig
index cf62a9fc..7e0149c1 100644
--- a/src/rr.zig
+++ b/src/rr.zig
@@ -1,5 +1,6 @@
 const MiscOpcode = @import("opcode.zig").MiscOpcode;
 const RefType = @import("valtype.zig").RefType;
+const Instruction = @import("instance/vm.zig").VirtualMachine.Instruction;

 pub const RrOpcode = enum(u8) {
     @"unreachable" = 0x0,
@@ -51,10 +52,10 @@ pub const RrOpcode = enum(u8) {
     @"i64.store32" = 0x3e,
     @"memory.size" = 0x3f,
     @"memory.grow" = 0x40,
-    @"i32.const" = 0x41,
-    @"i64.const" = 0x42,
-    @"f32.const" = 0x43,
-    @"f64.const" = 0x44,
+    @"i32.const" = 0x41,
+    @"i64.const" = 0x42,
+    @"f32.const" = 0x43,
+    @"f64.const" = 0x44,
     @"i32.eqz" = 0x45,
     @"i32.eq" = 0x46,
     @"i32.ne" = 0x47,
@@ -189,334 +190,601 @@ pub const RrOpcode = enum(u8) {
     misc = 0xfc,
 };

+pub fn nextIp(comptime Type: type) comptime_int {
+    const word_size_bits = 8 * @sizeOf(usize);
+    const bits = @bitSizeOf(Type);
+    const round_up_bits = if (bits % word_size_bits == 0) 0 else word_size_bits - (bits % word_size_bits);
+    return ((bits + round_up_bits) / word_size_bits) + 1;
+}
+
+// fn nextIp(comptime Type: type) comptime_int {
+//     const word_size_bits = 8 * @sizeOf(usize);
+
+//     comptime var bits = switch (@typeInfo(Type)) {
+//         .Struct => |info| blk: {
+//             comptime var i = 0;
+//             inline for (info.fields) |field| {
+//                 switch (@typeInfo(field.type)) {
+//                     .Int => |int| i += int.bits,
+//                     else => @compileError("Expected int"),
+// } +// } +// break :blk i; +// }, +// .Void => 0, +// .Int => |int| int.bits, +// else => @compileError("Unexpected type"), +// }; + +// pub const round_up_bits = if (bits % word_size_bits == 0) 0 else word_size_bits - (bits % word_size_bits); + +// return ((bits + round_up_bits) / word_size_bits) + 1; +// } + +pub fn meta(ip: [*]Instruction, comptime Type: type) type { + const start = ip + 1; + + return @bitCast(@TypeOf(Type), start); +} + +// fn getFieldType(comptime Type: type, field: []pub const u8) type { +// switch (@typeInfo(Type)) { +// .Struct => |info| blk: { +// comptime var i = 0; +// inline for (info.fields) |field| { +// switch (@typeInfo(field.type)) { +// .Int => |int| i += int.bits, +// else => @compileError("Expected int"), +// } +// } +// break :blk i; +// }, +// .Void => 0, +// .Int => |int| int.type, +// else => @compileError("Unexpected type"), +// }; +// } + +// fn immediate(comptime Struct: type, comptime field: []pub const u8) getFieldType(Struct, field) { +// pub const struct_info = switch (@typeInfo(Type)) { +// .Struct => |info| info, +// else => @compileError("Expected struct"), +// }; + +// comptime var i = 0; +// inline for (struct_info.fields) |field| { +// switch (@typeInfo(field.type)) { +// .Int => |int| i += int.bits, +// else => @compileError("Expected int"), +// } +// } +// } + +test { + const std = @import("std"); + const testing = std.testing; + + try testing.expectEqual(1, nextIp(@"unreachable")); + try testing.expectEqual(1, nextIp(nop)); + try testing.expectEqual(2, nextIp(block)); + try testing.expectEqual(2, nextIp(loop)); + try testing.expectEqual(3, nextIp(@"if")); + try testing.expectEqual(2, nextIp(if_no_else)); + try testing.expectEqual(3, nextIp(fast_call)); + try testing.expectEqual(2, nextIp(br)); +} + pub const Rr = union(RrOpcode) { - @"unreachable": void, - nop: void, - block: struct { - param_arity: u16, - return_arity: u16, - branch_target: u32, - }, - loop: struct { - param_arity: u16, - return_arity: 
u16, - branch_target: u32, - }, - @"if": struct { - param_arity: u16, - return_arity: u16, - branch_target: u32, - else_ip: u32, - }, - @"else": void, - if_no_else: struct { - param_arity: u16, - return_arity: u16, - branch_target: u32, - }, - end: void, - br: u32, - br_if: u32, - br_table: struct { - ls: Range, - ln: u32, - }, - @"return": void, - call: usize, // u32? - call_indirect: struct { - typeidx: u32, - tableidx: u32, - }, - fast_call: struct { - start: u32, - locals: u16, - params: u16, - results: u16, - required_stack_space: u16, - }, - drop: void, - select: void, - @"local.get": u32, - @"local.set": u32, - @"local.tee": u32, - @"global.get": u32, - @"global.set": u32, - @"table.get": u32, // tableidx - @"table.set": u32, // tableidx - @"i32.load": struct { - alignment: u32, - offset: u32, - }, - @"i64.load": struct { - alignment: u32, - offset: u32, - }, - @"f32.load": struct { - alignment: u32, - offset: u32, - }, - @"f64.load": struct { - alignment: u32, - offset: u32, - }, - @"i32.load8_s": struct { - alignment: u32, - offset: u32, - }, - @"i32.load8_u": struct { - alignment: u32, - offset: u32, - }, - @"i32.load16_s": struct { - alignment: u32, - offset: u32, - }, - @"i32.load16_u": struct { - alignment: u32, - offset: u32, - }, - @"i64.load8_s": struct { - alignment: u32, - offset: u32, - }, - @"i64.load8_u": struct { - alignment: u32, - offset: u32, - }, - @"i64.load16_s": struct { - alignment: u32, - offset: u32, - }, - @"i64.load16_u": struct { - alignment: u32, - offset: u32, - }, - @"i64.load32_s": struct { - alignment: u32, - offset: u32, - }, - @"i64.load32_u": struct { - alignment: u32, - offset: u32, - }, - @"i32.store": struct { - alignment: u32, - offset: u32, - }, - @"i64.store": struct { - alignment: u32, - offset: u32, - }, - @"f32.store": struct { - alignment: u32, - offset: u32, - }, - @"f64.store": struct { - alignment: u32, - offset: u32, - }, - @"i32.store8": struct { - alignment: u32, - offset: u32, - }, - @"i32.store16": struct 
{ - alignment: u32, - offset: u32, - }, - @"i64.store8": struct { - alignment: u32, - offset: u32, - }, - @"i64.store16": struct { - alignment: u32, - offset: u32, - }, - @"i64.store32": struct { - alignment: u32, - offset: u32, - }, - @"memory.size": u32, - @"memory.grow": u32, - @"i32.const": i32, - @"i64.const": i64, - @"f32.const": f32, - @"f64.const": f64, - @"i32.eqz": void, - @"i32.eq": void, - @"i32.ne": void, - @"i32.lt_s": void, - @"i32.lt_u": void, - @"i32.gt_s": void, - @"i32.gt_u": void, - @"i32.le_s": void, - @"i32.le_u": void, - @"i32.ge_s": void, - @"i32.ge_u": void, - @"i64.eqz": void, - @"i64.eq": void, - @"i64.ne": void, - @"i64.lt_s": void, - @"i64.lt_u": void, - @"i64.gt_s": void, - @"i64.gt_u": void, - @"i64.le_s": void, - @"i64.le_u": void, - @"i64.ge_s": void, - @"i64.ge_u": void, - @"f32.eq": void, - @"f32.ne": void, - @"f32.lt": void, - @"f32.gt": void, - @"f32.le": void, - @"f32.ge": void, - @"f64.eq": void, - @"f64.ne": void, - @"f64.lt": void, - @"f64.gt": void, - @"f64.le": void, - @"f64.ge": void, - @"i32.clz": void, - @"i32.ctz": void, - @"i32.popcnt": void, - @"i32.add": void, - @"i32.sub": void, - @"i32.mul": void, - @"i32.div_s": void, - @"i32.div_u": void, - @"i32.rem_s": void, - @"i32.rem_u": void, - @"i32.and": void, - @"i32.or": void, - @"i32.xor": void, - @"i32.shl": void, - @"i32.shr_s": void, - @"i32.shr_u": void, - @"i32.rotl": void, - @"i32.rotr": void, - @"i64.clz": void, - @"i64.ctz": void, - @"i64.popcnt": void, - @"i64.add": void, - @"i64.sub": void, - @"i64.mul": void, - @"i64.div_s": void, - @"i64.div_u": void, - @"i64.rem_s": void, - @"i64.rem_u": void, - @"i64.and": void, - @"i64.or": void, - @"i64.xor": void, - @"i64.shl": void, - @"i64.shr_s": void, - @"i64.shr_u": void, - @"i64.rotl": void, - @"i64.rotr": void, - @"f32.abs": void, - @"f32.neg": void, - @"f32.ceil": void, - @"f32.floor": void, - @"f32.trunc": void, - @"f32.nearest": void, - @"f32.sqrt": void, - @"f32.add": void, - @"f32.sub": void, - @"f32.mul": 
void, - @"f32.div": void, - @"f32.min": void, - @"f32.max": void, - @"f32.copysign": void, - @"f64.abs": void, - @"f64.neg": void, - @"f64.ceil": void, - @"f64.floor": void, - @"f64.trunc": void, - @"f64.nearest": void, - @"f64.sqrt": void, - @"f64.add": void, - @"f64.sub": void, - @"f64.mul": void, - @"f64.div": void, - @"f64.min": void, - @"f64.max": void, - @"f64.copysign": void, - @"i32.wrap_i64": void, - @"i32.trunc_f32_s": void, - @"i32.trunc_f32_u": void, - @"i32.trunc_f64_s": void, - @"i32.trunc_f64_u": void, - @"i64.extend_i32_s": void, - @"i64.extend_i32_u": void, - @"i64.trunc_f32_s": void, - @"i64.trunc_f32_u": void, - @"i64.trunc_f64_s": void, - @"i64.trunc_f64_u": void, - @"f32.convert_i32_s": void, - @"f32.convert_i32_u": void, - @"f32.convert_i64_s": void, - @"f32.convert_i64_u": void, - @"f32.demote_f64": void, - @"f64.convert_i32_s": void, - @"f64.convert_i32_u": void, - @"f64.convert_i64_s": void, - @"f64.convert_i64_u": void, - @"f64.promote_f32": void, - @"i32.reinterpret_f32": void, - @"i64.reinterpret_f64": void, - @"f32.reinterpret_i32": void, - @"f64.reinterpret_i64": void, - @"i32.extend8_s": void, - @"i32.extend16_s": void, - @"i64.extend8_s": void, - @"i64.extend16_s": void, - @"i64.extend32_s": void, - @"ref.null": RefType, - @"ref.is_null": void, - @"ref.func": u32, - misc: MiscRr, + @"unreachable": @"unreachable", + nop: nop, + block: block, + loop: loop, + @"if": @"if", + @"else": @"else", + if_no_else: if_no_else, + end: end, + br: br, + br_if: br_if, + br_table: br_table, + @"return": @"return", + call: call, + call_indirect: call_indirect, + fast_call: fast_call, + drop: drop, + select: select, + @"local.get": @"local.get", + @"local.set": @"local.set", + @"local.tee": @"local.tee", + @"global.get": @"global.get", + @"global.set": @"global.set", + @"table.get": @"table.get", + @"table.set": @"table.set", + @"i32.load": @"i32.load", + @"i64.load": @"i64.load", + @"f32.load": @"f32.load", + @"f64.load": @"f64.load", + 
@"i32.load8_s": @"i32.load8_s",
+    @"i32.load8_u": @"i32.load8_u",
+    @"i32.load16_s": @"i32.load16_s",
+    @"i32.load16_u": @"i32.load16_u",
+    @"i64.load8_s": @"i64.load8_s",
+    @"i64.load8_u": @"i64.load8_u",
+    @"i64.load16_s": @"i64.load16_s",
+    @"i64.load16_u": @"i64.load16_u",
+    @"i64.load32_s": @"i64.load32_s",
+    @"i64.load32_u": @"i64.load32_u",
+    @"i32.store": @"i32.store",
+    @"i64.store": @"i64.store",
+    @"f32.store": @"f32.store",
+    @"f64.store": @"f64.store",
+    @"i32.store8": @"i32.store8",
+    @"i32.store16": @"i32.store16",
+    @"i64.store8": @"i64.store8",
+    @"i64.store16": @"i64.store16",
+    @"i64.store32": @"i64.store32",
+    @"memory.size": @"memory.size",
+    @"memory.grow": @"memory.grow",
+    @"i32.const": @"i32.const",
+    @"i64.const": @"i64.const",
+    @"f32.const": @"f32.const",
+    @"f64.const": @"f64.const",
+    @"i32.eqz": @"i32.eqz",
+    @"i32.eq": @"i32.eq",
+    @"i32.ne": @"i32.ne",
+    @"i32.lt_s": @"i32.lt_s",
+    @"i32.lt_u": @"i32.lt_u",
+    @"i32.gt_s": @"i32.gt_s",
+    @"i32.gt_u": @"i32.gt_u",
+    @"i32.le_s": @"i32.le_s",
+    @"i32.le_u": @"i32.le_u",
+    @"i32.ge_s": @"i32.ge_s",
+    @"i32.ge_u": @"i32.ge_u",
+    @"i64.eqz": @"i64.eqz",
+    @"i64.eq": @"i64.eq",
+    @"i64.ne": @"i64.ne",
+    @"i64.lt_s": @"i64.lt_s",
+    @"i64.lt_u": @"i64.lt_u",
+    @"i64.gt_s": @"i64.gt_s",
+    @"i64.gt_u": @"i64.gt_u",
+    @"i64.le_s": @"i64.le_s",
+    @"i64.le_u": @"i64.le_u",
+    @"i64.ge_s": @"i64.ge_s",
+    @"i64.ge_u": @"i64.ge_u",
+    @"f32.eq": @"f32.eq",
+    @"f32.ne": @"f32.ne",
+    @"f32.lt": @"f32.lt",
+    @"f32.gt": @"f32.gt",
+    @"f32.le": @"f32.le",
+    @"f32.ge": @"f32.ge",
+    @"f64.eq": @"f64.eq",
+    @"f64.ne": @"f64.ne",
+    @"f64.lt": @"f64.lt",
+    @"f64.gt": @"f64.gt",
+    @"f64.le": @"f64.le",
+    @"f64.ge": @"f64.ge",
+    @"i32.clz": @"i32.clz",
+    @"i32.ctz": @"i32.ctz",
+    @"i32.popcnt": @"i32.popcnt",
+    @"i32.add": @"i32.add",
+    @"i32.sub": @"i32.sub",
+    @"i32.mul": @"i32.mul",
+    @"i32.div_s": @"i32.div_s",
+    @"i32.div_u": @"i32.div_u",
+    @"i32.rem_s": @"i32.rem_s",
+    
@"i32.rem_u": @"i32.rem_u", + @"i32.and": @"i32.and", + @"i32.or": @"i32.or", + @"i32.xor": @"i32.xor", + @"i32.shl": @"i32.shl", + @"i32.shr_s": @"i32.shr_s", + @"i32.shr_u": @"i32.shr_u", + @"i32.rotl": @"i32.rotl", + @"i32.rotr": @"i32.rotr", + @"i64.clz": @"i64.clz", + @"i64.ctz": @"i64.ctz", + @"i64.popcnt": @"i64.popcnt", + @"i64.add": @"i64.add", + @"i64.sub": @"i64.sub", + @"i64.mul": @"i64.mul", + @"i64.div_s": @"i64.div_s", + @"i64.div_u": @"i64.div_u", + @"i64.rem_s": @"i64.rem_s", + @"i64.rem_u": @"i64.rem_u", + @"i64.and": @"i64.and", + @"i64.or": @"i64.or", + @"i64.xor": @"i64.xor", + @"i64.shl": @"i64.shl", + @"i64.shr_s": @"i64.shr_s", + @"i64.shr_u": @"i64.shr_u", + @"i64.rotl": @"i64.rotl", + @"i64.rotr": @"i64.rotr", + @"f32.abs": @"f32.abs", + @"f32.neg": @"f32.neg", + @"f32.ceil": @"f32.ceil", + @"f32.floor": @"f32.floor", + @"f32.trunc": @"f32.trunc", + @"f32.nearest": @"f32.nearest", + @"f32.sqrt": @"f32.sqrt", + @"f32.add": @"f32.add", + @"f32.sub": @"f32.sub", + @"f32.mul": @"f32.mul", + @"f32.div": @"f32.div", + @"f32.min": @"f32.min", + @"f32.max": @"f32.max", + @"f32.copysign": @"f32.copysign", + @"f64.abs": @"f64.abs", + @"f64.neg": @"f64.neg", + @"f64.ceil": @"f64.ceil", + @"f64.floor": @"f64.floor", + @"f64.trunc": @"f64.trunc", + @"f64.nearest": @"f64.nearest", + @"f64.sqrt": @"f64.sqrt", + @"f64.add": @"f64.add", + @"f64.sub": @"f64.sub", + @"f64.mul": @"f64.mul", + @"f64.div": @"f64.div", + @"f64.min": @"f64.min", + @"f64.max": @"f64.max", + @"f64.copysign": @"f64.copysign", + @"i32.wrap_i64": @"i32.wrap_i64", + @"i32.trunc_f32_s": @"i32.trunc_f32_s", + @"i32.trunc_f32_u": @"i32.trunc_f32_u", + @"i32.trunc_f64_s": @"i32.trunc_f64_s", + @"i32.trunc_f64_u": @"i32.trunc_f64_u", + @"i64.extend_i32_s": @"i64.extend_i32_s", + @"i64.extend_i32_u": @"i64.extend_i32_u", + @"i64.trunc_f32_s": @"i64.trunc_f32_s", + @"i64.trunc_f32_u": @"i64.trunc_f32_u", + @"i64.trunc_f64_s": @"i64.trunc_f64_s", + @"i64.trunc_f64_u": @"i64.trunc_f64_u", + 
@"f32.convert_i32_s": @"f32.convert_i32_s", + @"f32.convert_i32_u": @"f32.convert_i32_u", + @"f32.convert_i64_s": @"f32.convert_i64_s", + @"f32.convert_i64_u": @"f32.convert_i64_u", + @"f32.demote_f64": @"f32.demote_f64", + @"f64.convert_i32_s": @"f64.convert_i32_s", + @"f64.convert_i32_u": @"f64.convert_i32_u", + @"f64.convert_i64_s": @"f64.convert_i64_s", + @"f64.convert_i64_u": @"f64.convert_i64_u", + @"f64.promote_f32": @"f64.promote_f32", + @"i32.reinterpret_f32": @"i32.reinterpret_f32", + @"i64.reinterpret_f64": @"i64.reinterpret_f64", + @"f32.reinterpret_i32": @"f32.reinterpret_i32", + @"f64.reinterpret_i64": @"f64.reinterpret_i64", + @"i32.extend8_s": @"i32.extend8_s", + @"i32.extend16_s": @"i32.extend16_s", + @"i64.extend8_s": @"i64.extend8_s", + @"i64.extend16_s": @"i64.extend16_s", + @"i64.extend32_s": @"i64.extend32_s", + @"ref.null": @"ref.null", + @"ref.is_null": @"ref.is_null", + @"ref.func": @"ref.func", + misc: void, }; -pub const MiscRr = union(MiscOpcode) { - @"i32.trunc_sat_f32_s": void, - @"i32.trunc_sat_f32_u": void, - @"i32.trunc_sat_f64_s": void, - @"i32.trunc_sat_f64_u": void, - @"i64.trunc_sat_f32_s": void, - @"i64.trunc_sat_f32_u": void, - @"i64.trunc_sat_f64_s": void, - @"i64.trunc_sat_f64_u": void, - @"memory.init": struct { - dataidx: u32, - memidx: u32, - }, - @"data.drop": u32, - @"memory.copy": struct { - src_memidx: u8, - dest_memidx: u8, - }, - @"memory.fill": u8, - @"table.init": struct { - tableidx: u32, - elemidx: u32, - }, - @"elem.drop": struct { - elemidx: u32, - }, - @"table.copy": struct { - dest_tableidx: u32, - src_tableidx: u32, - }, - @"table.grow": struct { - tableidx: u32, - }, - @"table.size": struct { - tableidx: u32, - }, - @"table.fill": struct { - tableidx: u32, - }, +pub const @"unreachable" = void; +pub const nop = void; +pub const block = packed struct { + branch_target: [*]Instruction, + param_arity: u16, + return_arity: u16, +}; +pub const loop = packed struct { + branch_target: [*]Instruction, + 
param_arity: u16, + return_arity: u16, +}; +pub const @"if" = packed struct { + branch_target: [*]Instruction, + else_ip: [*]Instruction, + param_arity: u16, + return_arity: u16, +}; +pub const @"else" = void; +pub const if_no_else = packed struct { + branch_target: [*]Instruction, + param_arity: u16, + return_arity: u16, +}; +pub const end = void; +pub const br = u32; +pub const br_if = u32; +pub const br_table = packed struct { + ls: Range, + ln: u32, +}; +pub const @"return" = void; +pub const call = *Instruction; +pub const call_indirect = packed struct { + typeidx: u32, + tableidx: u32, +}; +pub const fast_call = packed struct { + start: [*]Instruction, + locals: u16, + params: u16, + results: u16, + required_stack_space: u16, +}; +pub const drop = void; +pub const select = void; +pub const @"local.get" = u32; +pub const @"local.set" = u32; +pub const @"local.tee" = u32; +pub const @"global.get" = u32; +pub const @"global.set" = u32; +pub const @"table.get" = u32; // tableidx +pub const @"table.set" = u32; // tableidx +pub const @"i32.load" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i64.load" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"f32.load" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"f64.load" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i32.load8_s" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i32.load8_u" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i32.load16_s" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i32.load16_u" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i64.load8_s" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i64.load8_u" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i64.load16_s" = packed struct { + alignment: u32, + offset: u32, +}; +pub const @"i64.load16_u" = packed struct { + alignment: u32, + offset: u32, 
+};
+pub const @"i64.load32_s" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i64.load32_u" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i32.store" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i64.store" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"f32.store" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"f64.store" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i32.store8" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i32.store16" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i64.store8" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i64.store16" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"i64.store32" = packed struct {
+    alignment: u32,
+    offset: u32,
+};
+pub const @"memory.size" = u32;
+pub const @"memory.grow" = u32;
+pub const @"i32.const" = i32;
+pub const @"i64.const" = i64;
+pub const @"f32.const" = f32;
+pub const @"f64.const" = f64;
+pub const @"i32.eqz" = void;
+pub const @"i32.eq" = void;
+pub const @"i32.ne" = void;
+pub const @"i32.lt_s" = void;
+pub const @"i32.lt_u" = void;
+pub const @"i32.gt_s" = void;
+pub const @"i32.gt_u" = void;
+pub const @"i32.le_s" = void;
+pub const @"i32.le_u" = void;
+pub const @"i32.ge_s" = void;
+pub const @"i32.ge_u" = void;
+pub const @"i64.eqz" = void;
+pub const @"i64.eq" = void;
+pub const @"i64.ne" = void;
+pub const @"i64.lt_s" = void;
+pub const @"i64.lt_u" = void;
+pub const @"i64.gt_s" = void;
+pub const @"i64.gt_u" = void;
+pub const @"i64.le_s" = void;
+pub const @"i64.le_u" = void;
+pub const @"i64.ge_s" = void;
+pub const @"i64.ge_u" = void;
+pub const @"f32.eq" = void;
+pub const @"f32.ne" = void;
+pub const @"f32.lt" = void;
+pub const @"f32.gt" = void;
+pub const @"f32.le" = void;
+pub const @"f32.ge" = void;
+pub const @"f64.eq" = void;
+pub const @"f64.ne" = void;
+pub const @"f64.lt" = void; +pub const @"f64.gt" = void; +pub const @"f64.le" = void; +pub const @"f64.ge" = void; +pub const @"i32.clz" = void; +pub const @"i32.ctz" = void; +pub const @"i32.popcnt" = void; +pub const @"i32.add" = void; +pub const @"i32.sub" = void; +pub const @"i32.mul" = void; +pub const @"i32.div_s" = void; +pub const @"i32.div_u" = void; +pub const @"i32.rem_s" = void; +pub const @"i32.rem_u" = void; +pub const @"i32.and" = void; +pub const @"i32.or" = void; +pub const @"i32.xor" = void; +pub const @"i32.shl" = void; +pub const @"i32.shr_s" = void; +pub const @"i32.shr_u" = void; +pub const @"i32.rotl" = void; +pub const @"i32.rotr" = void; +pub const @"i64.clz" = void; +pub const @"i64.ctz" = void; +pub const @"i64.popcnt" = void; +pub const @"i64.add" = void; +pub const @"i64.sub" = void; +pub const @"i64.mul" = void; +pub const @"i64.div_s" = void; +pub const @"i64.div_u" = void; +pub const @"i64.rem_s" = void; +pub const @"i64.rem_u" = void; +pub const @"i64.and" = void; +pub const @"i64.or" = void; +pub const @"i64.xor" = void; +pub const @"i64.shl" = void; +pub const @"i64.shr_s" = void; +pub const @"i64.shr_u" = void; +pub const @"i64.rotl" = void; +pub const @"i64.rotr" = void; +pub const @"f32.abs" = void; +pub const @"f32.neg" = void; +pub const @"f32.ceil" = void; +pub const @"f32.floor" = void; +pub const @"f32.trunc" = void; +pub const @"f32.nearest" = void; +pub const @"f32.sqrt" = void; +pub const @"f32.add" = void; +pub const @"f32.sub" = void; +pub const @"f32.mul" = void; +pub const @"f32.div" = void; +pub const @"f32.min" = void; +pub const @"f32.max" = void; +pub const @"f32.copysign" = void; +pub const @"f64.abs" = void; +pub const @"f64.neg" = void; +pub const @"f64.ceil" = void; +pub const @"f64.floor" = void; +pub const @"f64.trunc" = void; +pub const @"f64.nearest" = void; +pub const @"f64.sqrt" = void; +pub const @"f64.add" = void; +pub const @"f64.sub" = void; +pub const @"f64.mul" = void; +pub const @"f64.div" = 
void; +pub const @"f64.min" = void; +pub const @"f64.max" = void; +pub const @"f64.copysign" = void; +pub const @"i32.wrap_i64" = void; +pub const @"i32.trunc_f32_s" = void; +pub const @"i32.trunc_f32_u" = void; +pub const @"i32.trunc_f64_s" = void; +pub const @"i32.trunc_f64_u" = void; +pub const @"i64.extend_i32_s" = void; +pub const @"i64.extend_i32_u" = void; +pub const @"i64.trunc_f32_s" = void; +pub const @"i64.trunc_f32_u" = void; +pub const @"i64.trunc_f64_s" = void; +pub const @"i64.trunc_f64_u" = void; +pub const @"f32.convert_i32_s" = void; +pub const @"f32.convert_i32_u" = void; +pub const @"f32.convert_i64_s" = void; +pub const @"f32.convert_i64_u" = void; +pub const @"f32.demote_f64" = void; +pub const @"f64.convert_i32_s" = void; +pub const @"f64.convert_i32_u" = void; +pub const @"f64.convert_i64_s" = void; +pub const @"f64.convert_i64_u" = void; +pub const @"f64.promote_f32" = void; +pub const @"i32.reinterpret_f32" = void; +pub const @"i64.reinterpret_f64" = void; +pub const @"f32.reinterpret_i32" = void; +pub const @"f64.reinterpret_i64" = void; +pub const @"i32.extend8_s" = void; +pub const @"i32.extend16_s" = void; +pub const @"i64.extend8_s" = void; +pub const @"i64.extend16_s" = void; +pub const @"i64.extend32_s" = void; +pub const @"ref.null" = RefType; +pub const @"ref.is_null" = void; +pub const @"ref.func" = u32; +pub const misc = void; +pub const @"i32.trunc_sat_f32_s" = void; +pub const @"i32.trunc_sat_f32_u" = void; +pub const @"i32.trunc_sat_f64_s" = void; +pub const @"i32.trunc_sat_f64_u" = void; +pub const @"i64.trunc_sat_f32_s" = void; +pub const @"i64.trunc_sat_f32_u" = void; +pub const @"i64.trunc_sat_f64_s" = void; +pub const @"i64.trunc_sat_f64_u" = void; +pub const @"memory.init" = packed struct { + dataidx: u32, + memidx: u32, +}; +pub const @"data.drop" = u32; +pub const @"memory.copy" = packed struct { + src_memidx: u8, + dest_memidx: u8, +}; +pub const @"memory.fill" = u8; +pub const @"table.init" = packed struct { + 
tableidx: u32, + elemidx: u32, +}; +pub const @"elem.drop" = packed struct { + elemidx: u32, +}; +pub const @"table.copy" = packed struct { + dest_tableidx: u32, + src_tableidx: u32, +}; +pub const @"table.grow" = packed struct { + tableidx: u32, +}; +pub const @"table.size" = packed struct { + tableidx: u32, +}; +pub const @"table.fill" = packed struct { + tableidx: u32, }; -pub const Range = struct { +pub const Range = packed struct { offset: usize = 0, count: usize = 0, };