Diffstat (limited to 'src/codegen/wasm.zig')
-rw-r--r--  src/codegen/wasm.zig  535
1 file changed, 293 insertions, 242 deletions
diff --git a/src/codegen/wasm.zig b/src/codegen/wasm.zig
index 3476ab2ce6..41397f55f4 100644
--- a/src/codegen/wasm.zig
+++ b/src/codegen/wasm.zig
@@ -9,14 +9,14 @@ const wasm = std.wasm;
const Module = @import("../Module.zig");
const Decl = Module.Decl;
-const ir = @import("../air.zig");
-const Inst = ir.Inst;
const Type = @import("../type.zig").Type;
const Value = @import("../value.zig").Value;
const Compilation = @import("../Compilation.zig");
const LazySrcLoc = Module.LazySrcLoc;
const link = @import("../link.zig");
const TypedValue = @import("../TypedValue.zig");
+const Air = @import("../Air.zig");
+const Liveness = @import("../Liveness.zig");
/// Wasm Value, created when generating an instruction
const WValue = union(enum) {
@@ -24,8 +24,8 @@ const WValue = union(enum) {
none: void,
/// Index of the local variable
local: u32,
- /// Instruction holding a constant `Value`
- constant: *Inst,
+ /// Holds a memoized typed value
+ constant: TypedValue,
/// Offset position in the list of bytecode instructions
code_offset: usize,
/// Used for variables that create multiple locals on the stack when allocated
@@ -483,8 +483,8 @@ pub const Result = union(enum) {
externally_managed: []const u8,
};
-/// Hashmap to store generated `WValue` for each `Inst`
-pub const ValueTable = std.AutoHashMapUnmanaged(*Inst, WValue);
+/// Hashmap to store generated `WValue` for each `Air.Inst.Ref`
+pub const ValueTable = std.AutoHashMapUnmanaged(Air.Inst.Index, WValue);
/// Code represents the `Code` section of wasm that
/// belongs to a function
@@ -492,11 +492,13 @@ pub const Context = struct {
/// Reference to the function declaration the code
/// section belongs to
decl: *Decl,
+ air: Air,
+ liveness: Liveness,
gpa: *mem.Allocator,
- /// Table to save `WValue`'s generated by an `Inst`
+ /// Table to save `WValue`'s generated by an `Air.Inst`
values: ValueTable,
- /// Mapping from *Inst.Block to block ids
- blocks: std.AutoArrayHashMapUnmanaged(*Inst.Block, u32) = .{},
+ /// Mapping from Air.Inst.Index to block ids
+ blocks: std.AutoArrayHashMapUnmanaged(Air.Inst.Index, u32) = .{},
/// `bytes` contains the wasm bytecode belonging to the 'code' section.
code: ArrayList(u8),
/// Contains the generated function type bytecode for the current function
@@ -536,7 +538,8 @@ pub const Context = struct {
}
/// Sets `err_msg` on `Context` and returns `error.CodegenFail` which is caught in link/Wasm.zig
- fn fail(self: *Context, src: LazySrcLoc, comptime fmt: []const u8, args: anytype) InnerError {
+ fn fail(self: *Context, comptime fmt: []const u8, args: anytype) InnerError {
+ const src: LazySrcLoc = .{ .node_offset = 0 };
const src_loc = src.toSrcLocWithDecl(self.decl);
self.err_msg = try Module.ErrorMsg.create(self.gpa, src_loc, fmt, args);
return error.CodegenFail;
@@ -544,59 +547,66 @@ pub const Context = struct {
/// Resolves the `WValue` for the given instruction `inst`
/// When the given instruction has a `Value`, it returns a constant instead
- fn resolveInst(self: Context, inst: *Inst) WValue {
- if (!inst.ty.hasCodeGenBits()) return .none;
+ fn resolveInst(self: Context, ref: Air.Inst.Ref) WValue {
+ const inst_index = Air.refToIndex(ref) orelse {
+ const tv = Air.Inst.Ref.typed_value_map[@enumToInt(ref)];
+ if (!tv.ty.hasCodeGenBits()) {
+ return WValue.none;
+ }
+ return WValue{ .constant = tv };
+ };
+
+ const inst_type = self.air.typeOfIndex(inst_index);
+ if (!inst_type.hasCodeGenBits()) return .none;
- if (inst.value()) |_| {
- return WValue{ .constant = inst };
+ if (self.air.instructions.items(.tag)[inst_index] == .constant) {
+ const ty_pl = self.air.instructions.items(.data)[inst_index].ty_pl;
+ return WValue{ .constant = .{ .ty = inst_type, .val = self.air.values[ty_pl.payload] } };
}
- return self.values.get(inst).?; // Instruction does not dominate all uses!
+ return self.values.get(inst_index).?; // Instruction does not dominate all uses!
}
/// Using a given `Type`, returns the corresponding wasm Valtype
- fn typeToValtype(self: *Context, src: LazySrcLoc, ty: Type) InnerError!wasm.Valtype {
+ fn typeToValtype(self: *Context, ty: Type) InnerError!wasm.Valtype {
return switch (ty.zigTypeTag()) {
.Float => blk: {
const bits = ty.floatBits(self.target);
if (bits == 16 or bits == 32) break :blk wasm.Valtype.f32;
if (bits == 64) break :blk wasm.Valtype.f64;
- return self.fail(src, "Float bit size not supported by wasm: '{d}'", .{bits});
+ return self.fail("Float bit size not supported by wasm: '{d}'", .{bits});
},
.Int => blk: {
const info = ty.intInfo(self.target);
if (info.bits <= 32) break :blk wasm.Valtype.i32;
if (info.bits > 32 and info.bits <= 64) break :blk wasm.Valtype.i64;
- return self.fail(src, "Integer bit size not supported by wasm: '{d}'", .{info.bits});
+ return self.fail("Integer bit size not supported by wasm: '{d}'", .{info.bits});
},
.Enum => switch (ty.tag()) {
.enum_simple => wasm.Valtype.i32,
- else => self.typeToValtype(
- src,
- ty.cast(Type.Payload.EnumFull).?.data.tag_ty,
- ),
+ else => self.typeToValtype(ty.cast(Type.Payload.EnumFull).?.data.tag_ty),
},
.Bool,
.Pointer,
.ErrorSet,
=> wasm.Valtype.i32,
.Struct, .ErrorUnion => unreachable, // Multi typed, must be handled individually.
- else => self.fail(src, "TODO - Wasm valtype for type '{s}'", .{ty.zigTypeTag()}),
+ else => self.fail("TODO - Wasm valtype for type '{s}'", .{ty.zigTypeTag()}),
};
}
/// Using a given `Type`, returns the byte representation of its wasm value type
- fn genValtype(self: *Context, src: LazySrcLoc, ty: Type) InnerError!u8 {
- return wasm.valtype(try self.typeToValtype(src, ty));
+ fn genValtype(self: *Context, ty: Type) InnerError!u8 {
+ return wasm.valtype(try self.typeToValtype(ty));
}
/// Using a given `Type`, returns the corresponding wasm value type
/// Differently from `genValtype` this also allows `void` to create a block
/// with no return type
- fn genBlockType(self: *Context, src: LazySrcLoc, ty: Type) InnerError!u8 {
+ fn genBlockType(self: *Context, ty: Type) InnerError!u8 {
return switch (ty.tag()) {
.void, .noreturn => wasm.block_empty,
- else => self.genValtype(src, ty),
+ else => self.genValtype(ty),
};
}
@@ -610,7 +620,7 @@ pub const Context = struct {
try writer.writeByte(wasm.opcode(.local_get));
try leb.writeULEB128(writer, idx);
},
- .constant => |inst| try self.emitConstant(inst.src, inst.value().?, inst.ty), // creates a new constant onto the stack
+ .constant => |tv| try self.emitConstant(tv.val, tv.ty), // Creates a new constant on the stack
}
}
@@ -626,10 +636,7 @@ pub const Context = struct {
const fields_len = @intCast(u32, struct_data.fields.count());
try self.locals.ensureCapacity(self.gpa, self.locals.items.len + fields_len);
for (struct_data.fields.values()) |*value| {
- const val_type = try self.genValtype(
- .{ .node_offset = struct_data.node_offset },
- value.ty,
- );
+ const val_type = try self.genValtype(value.ty);
self.locals.appendAssumeCapacity(val_type);
self.local_index += 1;
}
@@ -640,7 +647,7 @@ pub const Context = struct {
},
.ErrorUnion => {
const payload_type = ty.errorUnionChild();
- const val_type = try self.genValtype(.{ .node_offset = 0 }, payload_type);
+ const val_type = try self.genValtype(payload_type);
// we emit the error value as the first local, and the payload as the following.
// The first local is also used to find the index of the error and payload.
@@ -657,7 +664,7 @@ pub const Context = struct {
} };
},
else => {
- const valtype = try self.genValtype(.{ .node_offset = 0 }, ty);
+ const valtype = try self.genValtype(ty);
try self.locals.append(self.gpa, valtype);
self.local_index += 1;
return WValue{ .local = initial_index };
@@ -680,7 +687,7 @@ pub const Context = struct {
ty.fnParamTypes(params);
for (params) |param_type| {
// Can we maybe get the source index of each param?
- const val_type = try self.genValtype(.{ .node_offset = 0 }, param_type);
+ const val_type = try self.genValtype(param_type);
try writer.writeByte(val_type);
}
}
@@ -689,13 +696,10 @@ pub const Context = struct {
const return_type = ty.fnReturnType();
switch (return_type.zigTypeTag()) {
.Void, .NoReturn => try leb.writeULEB128(writer, @as(u32, 0)),
- .Struct => return self.fail(.{ .node_offset = 0 }, "TODO: Implement struct as return type for wasm", .{}),
- .Optional => return self.fail(.{ .node_offset = 0 }, "TODO: Implement optionals as return type for wasm", .{}),
+ .Struct => return self.fail("TODO: Implement struct as return type for wasm", .{}),
+ .Optional => return self.fail("TODO: Implement optionals as return type for wasm", .{}),
.ErrorUnion => {
- const val_type = try self.genValtype(
- .{ .node_offset = 0 },
- return_type.errorUnionChild(),
- );
+ const val_type = try self.genValtype(return_type.errorUnionChild());
// write down the amount of return values
try leb.writeULEB128(writer, @as(u32, 2));
@@ -705,58 +709,57 @@ pub const Context = struct {
else => {
try leb.writeULEB128(writer, @as(u32, 1));
// Can we maybe get the source index of the return type?
- const val_type = try self.genValtype(.{ .node_offset = 0 }, return_type);
+ const val_type = try self.genValtype(return_type);
try writer.writeByte(val_type);
},
}
}
- /// Generates the wasm bytecode for the function declaration belonging to `Context`
- pub fn gen(self: *Context, typed_value: TypedValue) InnerError!Result {
- switch (typed_value.ty.zigTypeTag()) {
- .Fn => {
- try self.genFunctype();
+ pub fn genFunc(self: *Context) InnerError!Result {
+ try self.genFunctype();
+ // TODO: check for and handle death of instructions
- // Write instructions
- // TODO: check for and handle death of instructions
- const mod_fn = blk: {
- if (typed_value.val.castTag(.function)) |func| break :blk func.data;
- if (typed_value.val.castTag(.extern_fn)) |_| return Result.appended; // don't need code body for extern functions
- unreachable;
- };
-
- // Reserve space to write the size after generating the code as well as space for locals count
- try self.code.resize(10);
-
- try self.genBody(mod_fn.body);
-
- // finally, write our local types at the 'offset' position
- {
- leb.writeUnsignedFixed(5, self.code.items[5..10], @intCast(u32, self.locals.items.len));
-
- // offset into 'code' section where we will put our locals types
- var local_offset: usize = 10;
-
- // emit the actual locals amount
- for (self.locals.items) |local| {
- var buf: [6]u8 = undefined;
- leb.writeUnsignedFixed(5, buf[0..5], @as(u32, 1));
- buf[5] = local;
- try self.code.insertSlice(local_offset, &buf);
- local_offset += 6;
- }
- }
+ // Reserve space to write the size after generating the code as well as space for locals count
+ try self.code.resize(10);
+
+ try self.genBody(self.air.getMainBody());
- const writer = self.code.writer();
- try writer.writeByte(wasm.opcode(.end));
+ // finally, write our local types at the 'offset' position
+ {
+ leb.writeUnsignedFixed(5, self.code.items[5..10], @intCast(u32, self.locals.items.len));
- // Fill in the size of the generated code to the reserved space at the
- // beginning of the buffer.
- const size = self.code.items.len - 5 + self.decl.fn_link.wasm.idx_refs.items.len * 5;
- leb.writeUnsignedFixed(5, self.code.items[0..5], @intCast(u32, size));
+ // offset into 'code' section where we will put our locals types
+ var local_offset: usize = 10;
- // codegen data has been appended to `code`
- return Result.appended;
+ // emit the actual locals amount
+ for (self.locals.items) |local| {
+ var buf: [6]u8 = undefined;
+ leb.writeUnsignedFixed(5, buf[0..5], @as(u32, 1));
+ buf[5] = local;
+ try self.code.insertSlice(local_offset, &buf);
+ local_offset += 6;
+ }
+ }
+
+ const writer = self.code.writer();
+ try writer.writeByte(wasm.opcode(.end));
+
+ // Fill in the size of the generated code to the reserved space at the
+ // beginning of the buffer.
+ const size = self.code.items.len - 5 + self.decl.fn_link.wasm.idx_refs.items.len * 5;
+ leb.writeUnsignedFixed(5, self.code.items[0..5], @intCast(u32, size));
+
+ // codegen data has been appended to `code`
+ return Result.appended;
+ }
+
+ /// Generates the wasm bytecode for the declaration belonging to `Context`
+ pub fn gen(self: *Context, typed_value: TypedValue) InnerError!Result {
+ switch (typed_value.ty.zigTypeTag()) {
+ .Fn => {
+ try self.genFunctype();
+ if (typed_value.val.castTag(.extern_fn)) |_| return Result.appended; // don't need code body for extern functions
+ return self.fail("TODO implement wasm codegen for function pointers", .{});
},
.Array => {
if (typed_value.val.castTag(.bytes)) |payload| {
@@ -775,7 +778,7 @@ pub const Context = struct {
}
}
return Result{ .externally_managed = payload.data };
- } else return self.fail(.{ .node_offset = 0 }, "TODO implement gen for more kinds of arrays", .{});
+ } else return self.fail("TODO implement gen for more kinds of arrays", .{});
},
.Int => {
const info = typed_value.ty.intInfo(self.target);
@@ -784,85 +787,91 @@ pub const Context = struct {
try self.code.append(@intCast(u8, int_byte));
return Result.appended;
}
- return self.fail(.{ .node_offset = 0 }, "TODO: Implement codegen for int type: '{}'", .{typed_value.ty});
+ return self.fail("TODO: Implement codegen for int type: '{}'", .{typed_value.ty});
},
- else => |tag| return self.fail(.{ .node_offset = 0 }, "TODO: Implement zig type codegen for type: '{s}'", .{tag}),
+ else => |tag| return self.fail("TODO: Implement zig type codegen for type: '{s}'", .{tag}),
}
}
- fn genInst(self: *Context, inst: *Inst) InnerError!WValue {
- return switch (inst.tag) {
- .add => self.genBinOp(inst.castTag(.add).?, .add),
- .alloc => self.genAlloc(inst.castTag(.alloc).?),
- .arg => self.genArg(inst.castTag(.arg).?),
- .bit_and => self.genBinOp(inst.castTag(.bit_and).?, .@"and"),
- .bitcast => self.genBitcast(inst.castTag(.bitcast).?),
- .bit_or => self.genBinOp(inst.castTag(.bit_or).?, .@"or"),
- .block => self.genBlock(inst.castTag(.block).?),
- .bool_and => self.genBinOp(inst.castTag(.bool_and).?, .@"and"),
- .bool_or => self.genBinOp(inst.castTag(.bool_or).?, .@"or"),
- .breakpoint => self.genBreakpoint(inst.castTag(.breakpoint).?),
- .br => self.genBr(inst.castTag(.br).?),
- .call => self.genCall(inst.castTag(.call).?),
- .cmp_eq => self.genCmp(inst.castTag(.cmp_eq).?, .eq),
- .cmp_gte => self.genCmp(inst.castTag(.cmp_gte).?, .gte),
- .cmp_gt => self.genCmp(inst.castTag(.cmp_gt).?, .gt),
- .cmp_lte => self.genCmp(inst.castTag(.cmp_lte).?, .lte),
- .cmp_lt => self.genCmp(inst.castTag(.cmp_lt).?, .lt),
- .cmp_neq => self.genCmp(inst.castTag(.cmp_neq).?, .neq),
- .condbr => self.genCondBr(inst.castTag(.condbr).?),
+ fn genInst(self: *Context, inst: Air.Inst.Index) !WValue {
+ const air_tags = self.air.instructions.items(.tag);
+ return switch (air_tags[inst]) {
+ .add => self.airBinOp(inst, .add),
+ .sub => self.airBinOp(inst, .sub),
+ .mul => self.airBinOp(inst, .mul),
+ .div => self.airBinOp(inst, .div),
+ .bit_and => self.airBinOp(inst, .@"and"),
+ .bit_or => self.airBinOp(inst, .@"or"),
+ .bool_and => self.airBinOp(inst, .@"and"),
+ .bool_or => self.airBinOp(inst, .@"or"),
+ .xor => self.airBinOp(inst, .xor),
+
+ .cmp_eq => self.airCmp(inst, .eq),
+ .cmp_gte => self.airCmp(inst, .gte),
+ .cmp_gt => self.airCmp(inst, .gt),
+ .cmp_lte => self.airCmp(inst, .lte),
+ .cmp_lt => self.airCmp(inst, .lt),
+ .cmp_neq => self.airCmp(inst, .neq),
+
+ .alloc => self.airAlloc(inst),
+ .arg => self.airArg(inst),
+ .bitcast => self.airBitcast(inst),
+ .block => self.airBlock(inst),
+ .breakpoint => self.airBreakpoint(inst),
+ .br => self.airBr(inst),
+ .call => self.airCall(inst),
+ .cond_br => self.airCondBr(inst),
.constant => unreachable,
.dbg_stmt => WValue.none,
- .div => self.genBinOp(inst.castTag(.div).?, .div),
- .is_err => self.genIsErr(inst.castTag(.is_err).?, .i32_ne),
- .is_non_err => self.genIsErr(inst.castTag(.is_non_err).?, .i32_eq),
- .load => self.genLoad(inst.castTag(.load).?),
- .loop => self.genLoop(inst.castTag(.loop).?),
- .mul => self.genBinOp(inst.castTag(.mul).?, .mul),
- .not => self.genNot(inst.castTag(.not).?),
- .ret => self.genRet(inst.castTag(.ret).?),
- .retvoid => WValue.none,
- .store => self.genStore(inst.castTag(.store).?),
- .struct_field_ptr => self.genStructFieldPtr(inst.castTag(.struct_field_ptr).?),
- .sub => self.genBinOp(inst.castTag(.sub).?, .sub),
- .switchbr => self.genSwitchBr(inst.castTag(.switchbr).?),
- .unreach => self.genUnreachable(inst.castTag(.unreach).?),
- .unwrap_errunion_payload => self.genUnwrapErrUnionPayload(inst.castTag(.unwrap_errunion_payload).?),
- .wrap_errunion_payload => self.genWrapErrUnionPayload(inst.castTag(.wrap_errunion_payload).?),
- .xor => self.genBinOp(inst.castTag(.xor).?, .xor),
- else => self.fail(.{ .node_offset = 0 }, "TODO: Implement wasm inst: {s}", .{inst.tag}),
+ .is_err => self.airIsErr(inst, .i32_ne),
+ .is_non_err => self.airIsErr(inst, .i32_eq),
+ .load => self.airLoad(inst),
+ .loop => self.airLoop(inst),
+ .not => self.airNot(inst),
+ .ret => self.airRet(inst),
+ .store => self.airStore(inst),
+ .struct_field_ptr => self.airStructFieldPtr(inst),
+ .switch_br => self.airSwitchBr(inst),
+ .unreach => self.airUnreachable(inst),
+ .unwrap_errunion_payload => self.airUnwrapErrUnionPayload(inst),
+ .wrap_errunion_payload => self.airWrapErrUnionPayload(inst),
+ else => |tag| self.fail("TODO: Implement wasm inst: {s}", .{@tagName(tag)}),
};
}
- fn genBody(self: *Context, body: ir.Body) InnerError!void {
- for (body.instructions) |inst| {
+ fn genBody(self: *Context, body: []const Air.Inst.Index) InnerError!void {
+ for (body) |inst| {
const result = try self.genInst(inst);
try self.values.putNoClobber(self.gpa, inst, result);
}
}
- fn genRet(self: *Context, inst: *Inst.UnOp) InnerError!WValue {
- // TODO: Implement tail calls
- const operand = self.resolveInst(inst.operand);
+ fn airRet(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const un_op = self.air.instructions.items(.data)[inst].un_op;
+ const operand = self.resolveInst(un_op);
try self.emitWValue(operand);
try self.code.append(wasm.opcode(.@"return"));
return .none;
}
- fn genCall(self: *Context, inst: *Inst.Call) InnerError!WValue {
- const func_val = inst.func.value().?;
+ fn airCall(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const pl_op = self.air.instructions.items(.data)[inst].pl_op;
+ const extra = self.air.extraData(Air.Call, pl_op.payload);
+ const args = self.air.extra[extra.end..][0..extra.data.args_len];
const target: *Decl = blk: {
+ const func_val = self.air.value(pl_op.operand).?;
+
if (func_val.castTag(.function)) |func| {
break :blk func.data.owner_decl;
} else if (func_val.castTag(.extern_fn)) |ext_fn| {
break :blk ext_fn.data;
}
- return self.fail(inst.base.src, "Expected a function, but instead found type '{s}'", .{func_val.tag()});
+ return self.fail("Expected a function, but instead found type '{s}'", .{func_val.tag()});
};
- for (inst.args) |arg| {
- const arg_val = self.resolveInst(arg);
+ for (args) |arg| {
+ const arg_val = self.resolveInst(@intToEnum(Air.Inst.Ref, arg));
try self.emitWValue(arg_val);
}
@@ -878,16 +887,17 @@ pub const Context = struct {
return .none;
}
- fn genAlloc(self: *Context, inst: *Inst.NoOp) InnerError!WValue {
- const elem_type = inst.base.ty.elemType();
+ fn airAlloc(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const elem_type = self.air.typeOfIndex(inst).elemType();
return self.allocLocal(elem_type);
}
- fn genStore(self: *Context, inst: *Inst.BinOp) InnerError!WValue {
+ fn airStore(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const bin_op = self.air.instructions.items(.data)[inst].bin_op;
const writer = self.code.writer();
- const lhs = self.resolveInst(inst.lhs);
- const rhs = self.resolveInst(inst.rhs);
+ const lhs = self.resolveInst(bin_op.lhs);
+ const rhs = self.resolveInst(bin_op.rhs);
switch (lhs) {
.multi_value => |multi_value| switch (rhs) {
@@ -895,7 +905,7 @@ pub const Context = struct {
// we simply assign the local_index to the rhs one.
// This allows us to update struct fields without having to individually
// set each local as each field's index will be calculated off the struct's base index
- .multi_value => self.values.put(self.gpa, inst.lhs, rhs) catch unreachable, // Instruction does not dominate all uses!
+ .multi_value => self.values.put(self.gpa, Air.refToIndex(bin_op.lhs).?, rhs) catch unreachable, // Instruction does not dominate all uses!
.constant, .none => {
// emit all values onto the stack if constant
try self.emitWValue(rhs);
@@ -921,20 +931,22 @@ pub const Context = struct {
return .none;
}
- fn genLoad(self: *Context, inst: *Inst.UnOp) InnerError!WValue {
- return self.resolveInst(inst.operand);
+ fn airLoad(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ return self.resolveInst(ty_op.operand);
}
- fn genArg(self: *Context, inst: *Inst.Arg) InnerError!WValue {
+ fn airArg(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
_ = inst;
// arguments share the index with locals
defer self.local_index += 1;
return WValue{ .local = self.local_index };
}
- fn genBinOp(self: *Context, inst: *Inst.BinOp, op: Op) InnerError!WValue {
- const lhs = self.resolveInst(inst.lhs);
- const rhs = self.resolveInst(inst.rhs);
+ fn airBinOp(self: *Context, inst: Air.Inst.Index, op: Op) InnerError!WValue {
+ const bin_op = self.air.instructions.items(.data)[inst].bin_op;
+ const lhs = self.resolveInst(bin_op.lhs);
+ const rhs = self.resolveInst(bin_op.rhs);
// it's possible for both lhs and/or rhs to return an offset as well,
// in which case we return the first offset occurrence we find.
@@ -947,23 +959,24 @@ pub const Context = struct {
try self.emitWValue(lhs);
try self.emitWValue(rhs);
+ const bin_ty = self.air.typeOf(bin_op.lhs);
const opcode: wasm.Opcode = buildOpcode(.{
.op = op,
- .valtype1 = try self.typeToValtype(inst.base.src, inst.base.ty),
- .signedness = if (inst.base.ty.isSignedInt()) .signed else .unsigned,
+ .valtype1 = try self.typeToValtype(bin_ty),
+ .signedness = if (bin_ty.isSignedInt()) .signed else .unsigned,
});
try self.code.append(wasm.opcode(opcode));
return WValue{ .code_offset = offset };
}
- fn emitConstant(self: *Context, src: LazySrcLoc, value: Value, ty: Type) InnerError!void {
+ fn emitConstant(self: *Context, value: Value, ty: Type) InnerError!void {
const writer = self.code.writer();
switch (ty.zigTypeTag()) {
.Int => {
// write opcode
const opcode: wasm.Opcode = buildOpcode(.{
.op = .@"const",
- .valtype1 = try self.typeToValtype(src, ty),
+ .valtype1 = try self.typeToValtype(ty),
});
try writer.writeByte(wasm.opcode(opcode));
// write constant
@@ -982,14 +995,14 @@ pub const Context = struct {
// write opcode
const opcode: wasm.Opcode = buildOpcode(.{
.op = .@"const",
- .valtype1 = try self.typeToValtype(src, ty),
+ .valtype1 = try self.typeToValtype(ty),
});
try writer.writeByte(wasm.opcode(opcode));
// write constant
switch (ty.floatBits(self.target)) {
0...32 => try writer.writeIntLittle(u32, @bitCast(u32, value.toFloat(f32))),
64 => try writer.writeIntLittle(u64, @bitCast(u64, value.toFloat(f64))),
- else => |bits| return self.fail(src, "Wasm TODO: emitConstant for float with {d} bits", .{bits}),
+ else => |bits| return self.fail("Wasm TODO: emitConstant for float with {d} bits", .{bits}),
}
},
.Pointer => {
@@ -1006,7 +1019,7 @@ pub const Context = struct {
try writer.writeByte(wasm.opcode(.i32_load));
try leb.writeULEB128(writer, @as(u32, 0));
try leb.writeULEB128(writer, @as(u32, 0));
- } else return self.fail(src, "Wasm TODO: emitConstant for other const pointer tag {s}", .{value.tag()});
+ } else return self.fail("Wasm TODO: emitConstant for other const pointer tag {s}", .{value.tag()});
},
.Void => {},
.Enum => {
@@ -1020,7 +1033,7 @@ pub const Context = struct {
const enum_full = ty.cast(Type.Payload.EnumFull).?.data;
if (enum_full.values.count() != 0) {
const tag_val = enum_full.values.keys()[field_index.data];
- try self.emitConstant(src, tag_val, enum_full.tag_ty);
+ try self.emitConstant(tag_val, enum_full.tag_ty);
} else {
try writer.writeByte(wasm.opcode(.i32_const));
try leb.writeULEB128(writer, field_index.data);
@@ -1031,7 +1044,7 @@ pub const Context = struct {
} else {
var int_tag_buffer: Type.Payload.Bits = undefined;
const int_tag_ty = ty.intTagType(&int_tag_buffer);
- try self.emitConstant(src, value, int_tag_ty);
+ try self.emitConstant(value, int_tag_ty);
}
},
.ErrorSet => {
@@ -1045,12 +1058,12 @@ pub const Context = struct {
const payload_type = ty.errorUnionChild();
if (value.getError()) |_| {
// write the error value
- try self.emitConstant(src, data, error_type);
+ try self.emitConstant(data, error_type);
// no payload, so write a '0' const
const opcode: wasm.Opcode = buildOpcode(.{
.op = .@"const",
- .valtype1 = try self.typeToValtype(src, payload_type),
+ .valtype1 = try self.typeToValtype(payload_type),
});
try writer.writeByte(wasm.opcode(opcode));
try leb.writeULEB128(writer, @as(u32, 0));
@@ -1059,21 +1072,24 @@ pub const Context = struct {
try writer.writeByte(wasm.opcode(.i32_const));
try leb.writeULEB128(writer, @as(u32, 0));
// after the error code, we emit the payload
- try self.emitConstant(src, data, payload_type);
+ try self.emitConstant(data, payload_type);
}
},
- else => |zig_type| return self.fail(src, "Wasm TODO: emitConstant for zigTypeTag {s}", .{zig_type}),
+ else => |zig_type| return self.fail("Wasm TODO: emitConstant for zigTypeTag {s}", .{zig_type}),
}
}
- fn genBlock(self: *Context, block: *Inst.Block) InnerError!WValue {
- const block_ty = try self.genBlockType(block.base.src, block.base.ty);
+ fn airBlock(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
+ const block_ty = try self.genBlockType(self.air.getRefType(ty_pl.ty));
+ const extra = self.air.extraData(Air.Block, ty_pl.payload);
+ const body = self.air.extra[extra.end..][0..extra.data.body_len];
try self.startBlock(.block, block_ty, null);
// Here we set the current block idx, so breaks know the depth to jump
// to when breaking out.
- try self.blocks.putNoClobber(self.gpa, block, self.block_depth);
- try self.genBody(block.body);
+ try self.blocks.putNoClobber(self.gpa, inst, self.block_depth);
+ try self.genBody(body);
try self.endBlock();
return .none;
@@ -1097,11 +1113,15 @@ pub const Context = struct {
self.block_depth -= 1;
}
- fn genLoop(self: *Context, loop: *Inst.Loop) InnerError!WValue {
- const loop_ty = try self.genBlockType(loop.base.src, loop.base.ty);
+ fn airLoop(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
+ const loop = self.air.extraData(Air.Block, ty_pl.payload);
+ const body = self.air.extra[loop.end..][0..loop.data.body_len];
- try self.startBlock(.loop, loop_ty, null);
- try self.genBody(loop.body);
+ // result type of loop is always 'noreturn', meaning we can always
+ // emit the wasm type 'block_empty'.
+ try self.startBlock(.loop, wasm.block_empty, null);
+ try self.genBody(body);
// breaking to the index of a loop block will continue the loop instead
try self.code.append(wasm.opcode(.br));
@@ -1112,8 +1132,12 @@ pub const Context = struct {
return .none;
}
- fn genCondBr(self: *Context, condbr: *Inst.CondBr) InnerError!WValue {
- const condition = self.resolveInst(condbr.condition);
+ fn airCondBr(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const pl_op = self.air.instructions.items(.data)[inst].pl_op;
+ const condition = self.resolveInst(pl_op.operand);
+ const extra = self.air.extraData(Air.CondBr, pl_op.payload);
+ const then_body = self.air.extra[extra.end..][0..extra.data.then_body_len];
+ const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len];
const writer = self.code.writer();
// TODO: Handle death instructions for then and else body
@@ -1128,8 +1152,9 @@ pub const Context = struct {
break :blk offset;
},
};
- const block_ty = try self.genBlockType(condbr.base.src, condbr.base.ty);
- try self.startBlock(.block, block_ty, offset);
+
+ // result type is always noreturn, so use `block_empty` as type.
+ try self.startBlock(.block, wasm.block_empty, offset);
// we inserted the block in front of the condition
// so now check if condition matches. If not, break outside this block
@@ -1137,35 +1162,37 @@ pub const Context = struct {
try writer.writeByte(wasm.opcode(.br_if));
try leb.writeULEB128(writer, @as(u32, 0));
- try self.genBody(condbr.else_body);
+ try self.genBody(else_body);
try self.endBlock();
// Outer block that matches the condition
- try self.genBody(condbr.then_body);
+ try self.genBody(then_body);
return .none;
}
- fn genCmp(self: *Context, inst: *Inst.BinOp, op: std.math.CompareOperator) InnerError!WValue {
+ fn airCmp(self: *Context, inst: Air.Inst.Index, op: std.math.CompareOperator) InnerError!WValue {
// save offset, so potential conditions can insert blocks in front of
// the comparison that we can later jump back to
const offset = self.code.items.len;
- const lhs = self.resolveInst(inst.lhs);
- const rhs = self.resolveInst(inst.rhs);
+ const data: Air.Inst.Data = self.air.instructions.items(.data)[inst];
+ const lhs = self.resolveInst(data.bin_op.lhs);
+ const rhs = self.resolveInst(data.bin_op.rhs);
+ const lhs_ty = self.air.typeOf(data.bin_op.lhs);
try self.emitWValue(lhs);
try self.emitWValue(rhs);
const signedness: std.builtin.Signedness = blk: {
// by default we tell the operand type is unsigned (i.e. bools and enum values)
- if (inst.lhs.ty.zigTypeTag() != .Int) break :blk .unsigned;
+ if (lhs_ty.zigTypeTag() != .Int) break :blk .unsigned;
// in case of an actual integer, we emit the correct signedness
- break :blk inst.lhs.ty.intInfo(self.target).signedness;
+ break :blk lhs_ty.intInfo(self.target).signedness;
};
const opcode: wasm.Opcode = buildOpcode(.{
- .valtype1 = try self.typeToValtype(inst.base.src, inst.lhs.ty),
+ .valtype1 = try self.typeToValtype(lhs_ty),
.op = switch (op) {
.lt => .lt,
.lte => .le,
@@ -1180,16 +1207,17 @@ pub const Context = struct {
return WValue{ .code_offset = offset };
}
- fn genBr(self: *Context, br: *Inst.Br) InnerError!WValue {
+ fn airBr(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const br = self.air.instructions.items(.data)[inst].br;
+
// if operand has codegen bits we should break with a value
- if (br.operand.ty.hasCodeGenBits()) {
- const operand = self.resolveInst(br.operand);
- try self.emitWValue(operand);
+ if (self.air.typeOf(br.operand).hasCodeGenBits()) {
+ try self.emitWValue(self.resolveInst(br.operand));
}
// We map every block to its block index.
// We then determine how far we have to jump to it by subtracting it from current block depth
- const idx: u32 = self.block_depth - self.blocks.get(br.block).?;
+ const idx: u32 = self.block_depth - self.blocks.get(br.block_inst).?;
const writer = self.code.writer();
try writer.writeByte(wasm.opcode(.br));
try leb.writeULEB128(writer, idx);
@@ -1197,10 +1225,11 @@ pub const Context = struct {
return .none;
}
- fn genNot(self: *Context, not: *Inst.UnOp) InnerError!WValue {
+ fn airNot(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const offset = self.code.items.len;
- const operand = self.resolveInst(not.operand);
+ const operand = self.resolveInst(ty_op.operand);
try self.emitWValue(operand);
// wasm does not have booleans nor the `not` instruction, therefore compare with 0
@@ -1214,73 +1243,93 @@ pub const Context = struct {
return WValue{ .code_offset = offset };
}
- fn genBreakpoint(self: *Context, breakpoint: *Inst.NoOp) InnerError!WValue {
+ fn airBreakpoint(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
_ = self;
- _ = breakpoint;
+ _ = inst;
// unsupported by wasm itself. Can be implemented once we support DWARF
// for wasm
return .none;
}
- fn genUnreachable(self: *Context, unreach: *Inst.NoOp) InnerError!WValue {
- _ = unreach;
+ fn airUnreachable(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ _ = inst;
try self.code.append(wasm.opcode(.@"unreachable"));
return .none;
}
- fn genBitcast(self: *Context, bitcast: *Inst.UnOp) InnerError!WValue {
- return self.resolveInst(bitcast.operand);
+ fn airBitcast(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ return self.resolveInst(ty_op.operand);
}
- fn genStructFieldPtr(self: *Context, inst: *Inst.StructFieldPtr) InnerError!WValue {
- const struct_ptr = self.resolveInst(inst.struct_ptr);
+ fn airStructFieldPtr(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
+ const extra = self.air.extraData(Air.StructField, ty_pl.payload);
+ const struct_ptr = self.resolveInst(extra.data.struct_ptr);
- return WValue{ .local = struct_ptr.multi_value.index + @intCast(u32, inst.field_index) };
+ return WValue{ .local = struct_ptr.multi_value.index + @intCast(u32, extra.data.field_index) };
}
- fn genSwitchBr(self: *Context, inst: *Inst.SwitchBr) InnerError!WValue {
- const target = self.resolveInst(inst.target);
- const target_ty = inst.target.ty;
- const valtype = try self.typeToValtype(.{ .node_offset = 0 }, target_ty);
- const blocktype = try self.genBlockType(inst.base.src, inst.base.ty);
-
- const signedness: std.builtin.Signedness = blk: {
- // by default we tell the operand type is unsigned (i.e. bools and enum values)
- if (target_ty.zigTypeTag() != .Int) break :blk .unsigned;
-
- // incase of an actual integer, we emit the correct signedness
- break :blk target_ty.intInfo(self.target).signedness;
- };
- for (inst.cases) |case| {
- // create a block for each case, when the condition does not match we break out of it
- try self.startBlock(.block, blocktype, null);
- try self.emitWValue(target);
- try self.emitConstant(.{ .node_offset = 0 }, case.item, target_ty);
- const opcode = buildOpcode(.{
- .valtype1 = valtype,
- .op = .ne, // not equal because we jump out the block if it does not match the condition
- .signedness = signedness,
- });
- try self.code.append(wasm.opcode(opcode));
- try self.code.append(wasm.opcode(.br_if));
- try leb.writeULEB128(self.code.writer(), @as(u32, 0));
-
- // emit our block code
- try self.genBody(case.body);
-
- // end the block we created earlier
- try self.endBlock();
- }
-
- // finally, emit the else case if it exists. Here we will not have to
- // check for a condition, so also no need to emit a block.
- try self.genBody(inst.else_body);
-
- return .none;
+ fn airSwitchBr(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const pl_op = self.air.instructions.items(.data)[inst].pl_op;
+ const extra = self.air.extraData(Air.SwitchBr, pl_op.payload);
+ const cases = self.air.extra[extra.end..][0..extra.data.cases_len];
+ const else_body = self.air.extra[extra.end + cases.len ..][0..extra.data.else_body_len];
+
+ const target = self.resolveInst(pl_op.operand);
+ const target_ty = self.air.typeOf(pl_op.operand);
+ const valtype = try self.typeToValtype(target_ty);
+ // result type is always 'noreturn'
+ const blocktype = wasm.block_empty;
+
+ _ = valtype;
+ _ = blocktype;
+ _ = target;
+ _ = else_body;
+ return self.fail("TODO implement wasm codegen for switch", .{});
+ //const signedness: std.builtin.Signedness = blk: {
+ // // by default we tell the operand type is unsigned (i.e. bools and enum values)
+ // if (target_ty.zigTypeTag() != .Int) break :blk .unsigned;
+
+ // // in case of an actual integer, we emit the correct signedness
+ // break :blk target_ty.intInfo(self.target).signedness;
+ //};
+ //for (cases) |case_idx| {
+ // const case = self.air.extraData(Air.SwitchBr.Case, case_idx);
+ // const case_body = self.air.extra[case.end..][0..case.data.body_len];
+
+ // // create a block for each case, when the condition does not match we break out of it
+ // try self.startBlock(.block, blocktype, null);
+ // try self.emitWValue(target);
+
+ // const val = self.air.value(case.data.item).?;
+ // try self.emitConstant(val, target_ty);
+ // const opcode = buildOpcode(.{
+ // .valtype1 = valtype,
+ // .op = .ne, // not equal because we jump out the block if it does not match the condition
+ // .signedness = signedness,
+ // });
+ // try self.code.append(wasm.opcode(opcode));
+ // try self.code.append(wasm.opcode(.br_if));
+ // try leb.writeULEB128(self.code.writer(), @as(u32, 0));
+
+ // // emit our block code
+ // try self.genBody(case_body);
+
+ // // end the block we created earlier
+ // try self.endBlock();
+ //}
+
+ //// finally, emit the else case if it exists. Here we will not have to
+ //// check for a condition, so also no need to emit a block.
+ //try self.genBody(else_body);
+
+ //return .none;
}
- fn genIsErr(self: *Context, inst: *Inst.UnOp, opcode: wasm.Opcode) InnerError!WValue {
- const operand = self.resolveInst(inst.operand);
+ fn airIsErr(self: *Context, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerError!WValue {
+ const un_op = self.air.instructions.items(.data)[inst].un_op;
+ const operand = self.resolveInst(un_op);
const offset = self.code.items.len;
const writer = self.code.writer();
@@ -1295,8 +1344,9 @@ pub const Context = struct {
return WValue{ .code_offset = offset };
}
- fn genUnwrapErrUnionPayload(self: *Context, inst: *Inst.UnOp) InnerError!WValue {
- const operand = self.resolveInst(inst.operand);
+ fn airUnwrapErrUnionPayload(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const operand = self.resolveInst(ty_op.operand);
// The index of multi_value contains the error code. To get the initial index of the payload we get
// the following index. Next, convert it to a `WValue.local`
//
@@ -1304,7 +1354,8 @@ pub const Context = struct {
return WValue{ .local = operand.multi_value.index + 1 };
}
- fn genWrapErrUnionPayload(self: *Context, inst: *Inst.UnOp) InnerError!WValue {
- return self.resolveInst(inst.operand);
+ fn airWrapErrUnionPayload(self: *Context, inst: Air.Inst.Index) InnerError!WValue {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ return self.resolveInst(ty_op.operand);
}
};