-rw-r--r--  build.zig                                   |   5
-rw-r--r--  lib/std/heap/general_purpose_allocator.zig  |   9
-rw-r--r--  lib/std/special/init-exe/src/main.zig       |   2
-rw-r--r--  lib/std/special/test_runner.zig             |   2
-rw-r--r--  lib/std/zig.zig                             | 101
-rw-r--r--  src-self-hosted/Module.zig                  |  67
-rw-r--r--  src-self-hosted/astgen.zig                  | 364
-rw-r--r--  src-self-hosted/codegen.zig                 |   4
-rw-r--r--  src-self-hosted/codegen/wasm.zig            |  96
-rw-r--r--  src-self-hosted/link.zig                    |   7
-rw-r--r--  src-self-hosted/link/Wasm.zig               | 518
-rw-r--r--  src-self-hosted/test.zig                    |   2
-rw-r--r--  src-self-hosted/type.zig                    | 194
-rw-r--r--  src-self-hosted/value.zig                   |  35
-rw-r--r--  src-self-hosted/zir.zig                     |  58
-rw-r--r--  src-self-hosted/zir_sema.zig                |  80
-rw-r--r--  test/stage2/compare_output.zig              |  59
-rw-r--r--  test/stage2/zir.zig                         |  18
18 files changed, 1121 insertions, 500 deletions
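
One of the hunks below adds a `never_unmap` option to std.heap.GeneralPurposeAllocator's Config. For reference, opting into it looks roughly like this (a minimal illustrative sketch based on the new Config field and the allocator API used elsewhere in this patch, not code taken from the commit):

    const std = @import("std");

    // never_unmap keeps freed pages mapped so that, per the new doc comment,
    // segfaults become logged error messages with stack trace details; the
    // trade-off is that the memory backing every allocation is leaked.
    var gpa = std.heap.GeneralPurposeAllocator(.{ .never_unmap = true }){};

    pub fn main() !void {
        defer _ = gpa.deinit();
        const allocator = &gpa.allocator;
        const buf = try allocator.alloc(u8, 16);
        allocator.free(buf);
    }
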
@@ -77,7 +77,10 @@ pub fn build(b: *Builder) !void { } const tracy = b.option([]const u8, "tracy", "Enable Tracy integration. Supply path to Tracy source"); const link_libc = b.option(bool, "force-link-libc", "Force self-hosted compiler to link libc") orelse false; - if (link_libc) exe.linkLibC(); + if (link_libc) { + exe.linkLibC(); + test_stage2.linkLibC(); + } const log_scopes = b.option([]const []const u8, "log", "Which log scopes to enable") orelse &[0][]const u8{}; diff --git a/lib/std/heap/general_purpose_allocator.zig b/lib/std/heap/general_purpose_allocator.zig index 91a01bb837..cb53af113e 100644 --- a/lib/std/heap/general_purpose_allocator.zig +++ b/lib/std/heap/general_purpose_allocator.zig @@ -142,6 +142,11 @@ pub const Config = struct { /// Whether the allocator may be used simultaneously from multiple threads. thread_safe: bool = !std.builtin.single_threaded, + + /// This is a temporary debugging trick you can use to turn segfaults into more helpful + /// logged error messages with stack trace details. The downside is that every allocation + /// will be leaked! + never_unmap: bool = false, }; pub fn GeneralPurposeAllocator(comptime config: Config) type { @@ -416,7 +421,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type { bucket.prev.next = bucket.next; self.buckets[bucket_index] = bucket.prev; } - self.backing_allocator.free(bucket.page[0..page_size]); + if (!config.never_unmap) { + self.backing_allocator.free(bucket.page[0..page_size]); + } const bucket_size = bucketSize(size_class); const bucket_slice = @ptrCast([*]align(@alignOf(BucketHeader)) u8, bucket)[0..bucket_size]; self.backing_allocator.free(bucket_slice); diff --git a/lib/std/special/init-exe/src/main.zig b/lib/std/special/init-exe/src/main.zig index c6a70af56d..d29869ff88 100644 --- a/lib/std/special/init-exe/src/main.zig +++ b/lib/std/special/init-exe/src/main.zig @@ -1,5 +1,5 @@ const std = @import("std"); pub fn main() anyerror!void { - std.debug.warn("All your codebase are belong to us.\n", .{}); + std.log.info("All your codebase are belong to us.", .{}); } diff --git a/lib/std/special/test_runner.zig b/lib/std/special/test_runner.zig index f4c8c6de9d..4267151638 100644 --- a/lib/std/special/test_runner.zig +++ b/lib/std/special/test_runner.zig @@ -23,7 +23,7 @@ pub fn main() anyerror!void { var leaks: usize = 0; for (test_fn_list) |test_fn, i| { - std.testing.allocator_instance = std.heap.GeneralPurposeAllocator(.{}){}; + std.testing.allocator_instance = .{}; defer { if (std.testing.allocator_instance.deinit()) { leaks += 1; diff --git a/lib/std/zig.zig b/lib/std/zig.zig index b070fbdcd5..36dfe74086 100644 --- a/lib/std/zig.zig +++ b/lib/std/zig.zig @@ -80,6 +80,107 @@ pub fn binNameAlloc( } } +/// Only validates escape sequence characters. +/// Slice must be valid utf8 starting and ending with "'" and exactly one codepoint in between. 
+pub fn parseCharLiteral( + slice: []const u8, + bad_index: *usize, // populated if error.InvalidCharacter is returned) +) error{InvalidCharacter}!u32 { + std.debug.assert(slice.len >= 3 and slice[0] == '\'' and slice[slice.len - 1] == '\''); + + if (slice[1] == '\\') { + switch (slice[2]) { + 'n' => return '\n', + 'r' => return '\r', + '\\' => return '\\', + 't' => return '\t', + '\'' => return '\'', + '"' => return '"', + 'x' => { + if (slice.len != 6) { + bad_index.* = slice.len - 2; + return error.InvalidCharacter; + } + + var value: u32 = 0; + for (slice[3..5]) |c, i| { + switch (slice[3]) { + '0'...'9' => { + value *= 16; + value += c - '0'; + }, + 'a'...'f' => { + value *= 16; + value += c - 'a'; + }, + 'A'...'F' => { + value *= 16; + value += c - 'a'; + }, + else => { + bad_index.* = i; + return error.InvalidCharacter; + }, + } + } + return value; + }, + 'u' => { + if (slice.len < 6 or slice[3] != '{') { + bad_index.* = 2; + return error.InvalidCharacter; + } + var value: u32 = 0; + for (slice[4..]) |c, i| { + if (value > 0x10ffff) { + bad_index.* = i; + return error.InvalidCharacter; + } + switch (c) { + '0'...'9' => { + value *= 16; + value += c - '0'; + }, + 'a'...'f' => { + value *= 16; + value += c - 'a'; + }, + 'A'...'F' => { + value *= 16; + value += c - 'A'; + }, + '}' => break, + else => { + bad_index.* = i; + return error.InvalidCharacter; + }, + } + } + return value; + }, + else => { + bad_index.* = 2; + return error.InvalidCharacter; + } + } + } + return std.unicode.utf8Decode(slice[1 .. slice.len - 1]) catch unreachable; +} + +test "parseCharLiteral" { + var bad_index: usize = undefined; + std.testing.expectEqual(try parseCharLiteral("'a'", &bad_index), 'a'); + std.testing.expectEqual(try parseCharLiteral("'ä'", &bad_index), 'ä'); + std.testing.expectEqual(try parseCharLiteral("'\\x00'", &bad_index), 0); + std.testing.expectEqual(try parseCharLiteral("'ぁ'", &bad_index), 0x3041); + std.testing.expectEqual(try parseCharLiteral("'\\u{3041}'", &bad_index), 0x3041); + + std.testing.expectError(error.InvalidCharacter, parseCharLiteral("'\\x0'", &bad_index)); + std.testing.expectError(error.InvalidCharacter, parseCharLiteral("'\\y'", &bad_index)); + std.testing.expectError(error.InvalidCharacter, parseCharLiteral("'\\u'", &bad_index)); + std.testing.expectError(error.InvalidCharacter, parseCharLiteral("'\\u{FFFFFF}'", &bad_index)); +} + test "" { @import("std").meta.refAllDecls(@This()); } diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig index 6e33101e76..3c31c7da44 100644 --- a/src-self-hosted/Module.zig +++ b/src-self-hosted/Module.zig @@ -2902,7 +2902,7 @@ pub fn floatSub(self: *Module, scope: *Scope, float_type: Type, src: usize, lhs: return Value.initPayload(val_payload); } -pub fn singlePtrType(self: *Module, scope: *Scope, src: usize, mutable: bool, elem_ty: Type) error{OutOfMemory}!Type { +pub fn singlePtrType(self: *Module, scope: *Scope, src: usize, mutable: bool, elem_ty: Type) Allocator.Error!Type { const type_payload = try scope.arena().create(Type.Payload.Pointer); type_payload.* = .{ .base = .{ .tag = if (mutable) .single_mut_pointer else .single_const_pointer }, @@ -2911,6 +2911,71 @@ pub fn singlePtrType(self: *Module, scope: *Scope, src: usize, mutable: bool, el return Type.initPayload(&type_payload.base); } +pub fn optionalType(self: *Module, scope: *Scope, child_type: Type) Allocator.Error!Type { + return Type.initPayload(switch (child_type.tag()) { + .single_const_pointer => blk: { + const payload = try 
scope.arena().create(Type.Payload.Pointer); + payload.* = .{ + .base = .{ .tag = .optional_single_const_pointer }, + .pointee_type = child_type.elemType(), + }; + break :blk &payload.base; + }, + .single_mut_pointer => blk: { + const payload = try scope.arena().create(Type.Payload.Pointer); + payload.* = .{ + .base = .{ .tag = .optional_single_mut_pointer }, + .pointee_type = child_type.elemType(), + }; + break :blk &payload.base; + }, + else => blk: { + const payload = try scope.arena().create(Type.Payload.Optional); + payload.* = .{ + .child_type = child_type, + }; + break :blk &payload.base; + }, + }); +} + +pub fn arrayType(self: *Module, scope: *Scope, len: u64, sentinel: ?Value, elem_type: Type) Allocator.Error!Type { + if (elem_type.eql(Type.initTag(.u8))) { + if (sentinel) |some| { + if (some.eql(Value.initTag(.zero))) { + const payload = try scope.arena().create(Type.Payload.Array_u8_Sentinel0); + payload.* = .{ + .len = len, + }; + return Type.initPayload(&payload.base); + } + } else { + const payload = try scope.arena().create(Type.Payload.Array_u8); + payload.* = .{ + .len = len, + }; + return Type.initPayload(&payload.base); + } + } + + if (sentinel) |some| { + const payload = try scope.arena().create(Type.Payload.ArraySentinel); + payload.* = .{ + .len = len, + .sentinel = some, + .elem_type = elem_type, + }; + return Type.initPayload(&payload.base); + } + + const payload = try scope.arena().create(Type.Payload.Array); + payload.* = .{ + .len = len, + .elem_type = elem_type, + }; + return Type.initPayload(&payload.base); +} + pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void { const zir_module = scope.namespace(); const source = zir_module.getSource(self) catch @panic("dumpInst failed to get source"); diff --git a/src-self-hosted/astgen.zig b/src-self-hosted/astgen.zig index 3a9c7e6c59..9d40c7899a 100644 --- a/src-self-hosted/astgen.zig +++ b/src-self-hosted/astgen.zig @@ -20,6 +20,8 @@ pub const ResultLoc = union(enum) { /// The expression must generate a pointer rather than a value. For example, the left hand side /// of an assignment uses an "LValue" result location. lvalue, + /// The expression must generate a pointer + ref, /// The expression will be type coerced into this type, but it will be evaluated as an rvalue. ty: *zir.Inst, /// The expression must store its result into this typed pointer. @@ -46,6 +48,132 @@ pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*z /// Turn Zig AST into untyped ZIR istructions. 
pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst { + if (rl == .lvalue) { + switch (node.tag) { + .Root => unreachable, + .Use => unreachable, + .TestDecl => unreachable, + .DocComment => unreachable, + .VarDecl => unreachable, + .SwitchCase => unreachable, + .SwitchElse => unreachable, + .Else => unreachable, + .Payload => unreachable, + .PointerPayload => unreachable, + .PointerIndexPayload => unreachable, + .ErrorTag => unreachable, + .FieldInitializer => unreachable, + .ContainerField => unreachable, + + .Assign, + .AssignBitAnd, + .AssignBitOr, + .AssignBitShiftLeft, + .AssignBitShiftRight, + .AssignBitXor, + .AssignDiv, + .AssignSub, + .AssignSubWrap, + .AssignMod, + .AssignAdd, + .AssignAddWrap, + .AssignMul, + .AssignMulWrap, + .Add, + .AddWrap, + .Sub, + .SubWrap, + .Mul, + .MulWrap, + .Div, + .Mod, + .BitAnd, + .BitOr, + .BitShiftLeft, + .BitShiftRight, + .BitXor, + .BangEqual, + .EqualEqual, + .GreaterThan, + .GreaterOrEqual, + .LessThan, + .LessOrEqual, + .ArrayCat, + .ArrayMult, + .BoolAnd, + .BoolOr, + .Asm, + .StringLiteral, + .IntegerLiteral, + .Call, + .Unreachable, + .Return, + .If, + .While, + .BoolNot, + .AddressOf, + .FloatLiteral, + .UndefinedLiteral, + .BoolLiteral, + .NullLiteral, + .OptionalType, + .Block, + .LabeledBlock, + .Break, + .PtrType, + .GroupedExpression, + .ArrayType, + .ArrayTypeSentinel, + .EnumLiteral, + .MultilineStringLiteral, + .CharLiteral, + .Defer, + .Catch, + .ErrorUnion, + .MergeErrorSets, + .Range, + .OrElse, + .Await, + .BitNot, + .Negation, + .NegationWrap, + .Resume, + .Try, + .SliceType, + .Slice, + .ArrayInitializer, + .ArrayInitializerDot, + .StructInitializer, + .StructInitializerDot, + .Switch, + .For, + .Suspend, + .Continue, + .AnyType, + .ErrorType, + .FnProto, + .AnyFrameType, + .ErrorSetDecl, + .ContainerDecl, + .Comptime, + .Nosuspend, + => return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}), + + // @field can be assigned to + .BuiltinCall => { + const call = node.castTag(.BuiltinCall).?; + const tree = scope.tree(); + const builtin_name = tree.tokenSlice(call.builtin_token); + + if (!mem.eql(u8, builtin_name, "@field")) { + return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}); + } + }, + + // can be assigned to + .UnwrapOptional, .Deref, .Period, .ArrayAccess, .Identifier => {}, + } + } switch (node.tag) { .Root => unreachable, // Top-level declaration. .Use => unreachable, // Top-level declaration. @@ -60,6 +188,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .PointerIndexPayload => unreachable, // Handled explicitly. .ErrorTag => unreachable, // Handled explicitly. .FieldInitializer => unreachable, // Handled explicitly. + .ContainerField => unreachable, // Handled explicitly. 
.Assign => return rlWrapVoid(mod, scope, rl, node, try assign(mod, scope, node.castTag(.Assign).?)), .AssignBitAnd => return rlWrapVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitAnd).?, .bitand)), @@ -100,6 +229,9 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .ArrayCat => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayCat).?, .array_cat), .ArrayMult => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayMult).?, .array_mul), + .BoolAnd => return boolBinOp(mod, scope, rl, node.castTag(.BoolAnd).?), + .BoolOr => return boolBinOp(mod, scope, rl, node.castTag(.BoolOr).?), + .Identifier => return try identifier(mod, scope, rl, node.castTag(.Identifier).?), .Asm => return rlWrap(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)), .StringLiteral => return rlWrap(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)), @@ -124,11 +256,15 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?), .Break => return rlWrap(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)), .PtrType => return rlWrap(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)), + .GroupedExpression => return expr(mod, scope, rl, node.castTag(.GroupedExpression).?.expr), + .ArrayType => return rlWrap(mod, scope, rl, try arrayType(mod, scope, node.castTag(.ArrayType).?)), + .ArrayTypeSentinel => return rlWrap(mod, scope, rl, try arrayTypeSentinel(mod, scope, node.castTag(.ArrayTypeSentinel).?)), + .EnumLiteral => return rlWrap(mod, scope, rl, try enumLiteral(mod, scope, node.castTag(.EnumLiteral).?)), + .MultilineStringLiteral => return rlWrap(mod, scope, rl, try multilineStrLiteral(mod, scope, node.castTag(.MultilineStringLiteral).?)), + .CharLiteral => return rlWrap(mod, scope, rl, try charLiteral(mod, scope, node.castTag(.CharLiteral).?)), .Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}), .Catch => return mod.failNode(scope, node, "TODO implement astgen.expr for .Catch", .{}), - .BoolAnd => return mod.failNode(scope, node, "TODO implement astgen.expr for .BoolAnd", .{}), - .BoolOr => return mod.failNode(scope, node, "TODO implement astgen.expr for .BoolOr", .{}), .ErrorUnion => return mod.failNode(scope, node, "TODO implement astgen.expr for .ErrorUnion", .{}), .MergeErrorSets => return mod.failNode(scope, node, "TODO implement astgen.expr for .MergeErrorSets", .{}), .Range => return mod.failNode(scope, node, "TODO implement astgen.expr for .Range", .{}), @@ -139,8 +275,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .NegationWrap => return mod.failNode(scope, node, "TODO implement astgen.expr for .NegationWrap", .{}), .Resume => return mod.failNode(scope, node, "TODO implement astgen.expr for .Resume", .{}), .Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}), - .ArrayType => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayType", .{}), - .ArrayTypeSentinel => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayTypeSentinel", .{}), .SliceType => return mod.failNode(scope, node, "TODO implement astgen.expr for .SliceType", .{}), .Slice => return mod.failNode(scope, node, "TODO implement astgen.expr for .Slice", .{}), .ArrayAccess => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayAccess", .{}), @@ 
-156,15 +290,10 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr .ErrorType => return mod.failNode(scope, node, "TODO implement astgen.expr for .ErrorType", .{}), .FnProto => return mod.failNode(scope, node, "TODO implement astgen.expr for .FnProto", .{}), .AnyFrameType => return mod.failNode(scope, node, "TODO implement astgen.expr for .AnyFrameType", .{}), - .EnumLiteral => return mod.failNode(scope, node, "TODO implement astgen.expr for .EnumLiteral", .{}), - .MultilineStringLiteral => return mod.failNode(scope, node, "TODO implement astgen.expr for .MultilineStringLiteral", .{}), - .CharLiteral => return mod.failNode(scope, node, "TODO implement astgen.expr for .CharLiteral", .{}), - .GroupedExpression => return mod.failNode(scope, node, "TODO implement astgen.expr for .GroupedExpression", .{}), .ErrorSetDecl => return mod.failNode(scope, node, "TODO implement astgen.expr for .ErrorSetDecl", .{}), .ContainerDecl => return mod.failNode(scope, node, "TODO implement astgen.expr for .ContainerDecl", .{}), .Comptime => return mod.failNode(scope, node, "TODO implement astgen.expr for .Comptime", .{}), .Nosuspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Nosuspend", .{}), - .ContainerField => return mod.failNode(scope, node, "TODO implement astgen.expr for .ContainerField", .{}), } } @@ -187,7 +316,7 @@ fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpr // proper type inference requires peer type resolution on the block's // break operand expressions. const branch_rl: ResultLoc = switch (label.result_loc) { - .discard, .none, .ty, .ptr, .lvalue => label.result_loc, + .discard, .none, .ty, .ptr, .lvalue, .ref => label.result_loc, .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = label.block_inst }, }; const operand = try expr(mod, parent_scope, branch_rl, rhs); @@ -426,7 +555,7 @@ fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerErr } fn addressOf(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst { - return expr(mod, scope, .lvalue, node.rhs); + return expr(mod, scope, .ref, node.rhs); } fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst { @@ -484,13 +613,65 @@ fn ptrType(mod: *Module, scope: *Scope, node: *ast.Node.PtrType) InnerError!*zir return addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args); } +fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.ArrayType) !*zir.Inst { + const tree = scope.tree(); + const src = tree.token_locs[node.op_token].start; + const meta_type = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.type_type), + }); + const usize_type = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.usize_type), + }); + + // TODO check for [_]T + const len = try expr(mod, scope, .{ .ty = usize_type }, node.len_expr); + const child_type = try expr(mod, scope, .{ .ty = meta_type }, node.rhs); + + return addZIRBinOp(mod, scope, src, .array_type, len, child_type); +} + +fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.ArrayTypeSentinel) !*zir.Inst { + const tree = scope.tree(); + const src = tree.token_locs[node.op_token].start; + const meta_type = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.type_type), + }); + const usize_type = try addZIRInstConst(mod, scope, src, .{ + .ty = 
Type.initTag(.type), + .val = Value.initTag(.usize_type), + }); + + // TODO check for [_]T + const len = try expr(mod, scope, .{ .ty = usize_type }, node.len_expr); + const sentinel_uncasted = try expr(mod, scope, .none, node.sentinel); + const elem_type = try expr(mod, scope, .{ .ty = meta_type }, node.rhs); + const sentinel = try addZIRBinOp(mod, scope, src, .as, elem_type, sentinel_uncasted); + + return addZIRInst(mod, scope, src, zir.Inst.ArrayTypeSentinel, .{ + .len = len, + .sentinel = sentinel, + .elem_type = elem_type, + }, .{}); +} + +fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.EnumLiteral) !*zir.Inst { + const tree = scope.tree(); + const src = tree.token_locs[node.name].start; + const name = try identifierTokenString(mod, scope, node.name); + + return addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{}); +} + fn unwrapOptional(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst { const tree = scope.tree(); const src = tree.token_locs[node.rtoken].start; - const operand = try expr(mod, scope, .lvalue, node.lhs); + const operand = try expr(mod, scope, .ref, node.lhs); const unwrapped_ptr = try addZIRUnOp(mod, scope, src, .unwrap_optional_safe, operand); - if (rl == .lvalue) return unwrapped_ptr; + if (rl == .lvalue or rl == .ref) return unwrapped_ptr; return rlWrap(mod, scope, rl, try addZIRUnOp(mod, scope, src, .deref, unwrapped_ptr)); } @@ -568,6 +749,88 @@ fn simpleBinOp( return rlWrap(mod, scope, rl, result); } +fn boolBinOp( + mod: *Module, + scope: *Scope, + rl: ResultLoc, + infix_node: *ast.Node.SimpleInfixOp, +) InnerError!*zir.Inst { + const tree = scope.tree(); + const src = tree.token_locs[infix_node.op_token].start; + const bool_type = try addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.type), + .val = Value.initTag(.bool_type), + }); + + var block_scope: Scope.GenZIR = .{ + .parent = scope, + .decl = scope.decl().?, + .arena = scope.arena(), + .instructions = .{}, + }; + defer block_scope.instructions.deinit(mod.gpa); + + const lhs = try expr(mod, scope, .{ .ty = bool_type }, infix_node.lhs); + const condbr = try addZIRInstSpecial(mod, &block_scope.base, src, zir.Inst.CondBr, .{ + .condition = lhs, + .then_body = undefined, // populated below + .else_body = undefined, // populated below + }, .{}); + + const block = try addZIRInstBlock(mod, scope, src, .{ + .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items), + }); + + var rhs_scope: Scope.GenZIR = .{ + .parent = scope, + .decl = block_scope.decl, + .arena = block_scope.arena, + .instructions = .{}, + }; + defer rhs_scope.instructions.deinit(mod.gpa); + + const rhs = try expr(mod, &rhs_scope.base, .{ .ty = bool_type }, infix_node.rhs); + _ = try addZIRInst(mod, &rhs_scope.base, src, zir.Inst.Break, .{ + .block = block, + .operand = rhs, + }, .{}); + + var const_scope: Scope.GenZIR = .{ + .parent = scope, + .decl = block_scope.decl, + .arena = block_scope.arena, + .instructions = .{}, + }; + defer const_scope.instructions.deinit(mod.gpa); + + const is_bool_and = infix_node.base.tag == .BoolAnd; + _ = try addZIRInst(mod, &const_scope.base, src, zir.Inst.Break, .{ + .block = block, + .operand = try addZIRInstConst(mod, &const_scope.base, src, .{ + .ty = Type.initTag(.bool), + .val = if (is_bool_and) Value.initTag(.bool_false) else Value.initTag(.bool_true), + }), + }, .{}); + + if (is_bool_and) { + // if lhs // AND + // break rhs + // else + // break false + condbr.positionals.then_body = .{ 
.instructions = try rhs_scope.arena.dupe(*zir.Inst, rhs_scope.instructions.items) }; + condbr.positionals.else_body = .{ .instructions = try const_scope.arena.dupe(*zir.Inst, const_scope.instructions.items) }; + } else { + // if lhs // OR + // break true + // else + // break rhs + condbr.positionals.then_body = .{ .instructions = try const_scope.arena.dupe(*zir.Inst, const_scope.instructions.items) }; + condbr.positionals.else_body = .{ .instructions = try rhs_scope.arena.dupe(*zir.Inst, rhs_scope.instructions.items) }; + } + + return rlWrap(mod, scope, rl, &block.base); +} + const CondKind = union(enum) { bool, optional: ?*zir.Inst, @@ -583,13 +846,13 @@ const CondKind = union(enum) { return try expr(mod, &block_scope.base, .{ .ty = bool_type }, cond_node); }, .optional => { - const cond_ptr = try expr(mod, &block_scope.base, .lvalue, cond_node); + const cond_ptr = try expr(mod, &block_scope.base, .ref, cond_node); self.* = .{ .optional = cond_ptr }; const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, cond_ptr); return try addZIRUnOp(mod, &block_scope.base, src, .isnonnull, result); }, .err_union => { - const err_ptr = try expr(mod, &block_scope.base, .lvalue, cond_node); + const err_ptr = try expr(mod, &block_scope.base, .ref, cond_node); self.* = .{ .err_union = err_ptr }; const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, err_ptr); return try addZIRUnOp(mod, &block_scope.base, src, .iserr, result); @@ -600,7 +863,11 @@ const CondKind = union(enum) { fn thenSubScope(self: CondKind, mod: *Module, then_scope: *Scope.GenZIR, src: usize, payload_node: ?*ast.Node) !*Scope { if (self == .bool) return &then_scope.base; - const payload = payload_node.?.castTag(.PointerPayload).?; + const payload = payload_node.?.castTag(.PointerPayload) orelse { + // condition is error union and payload is not explicitly ignored + _ = try addZIRUnOp(mod, &then_scope.base, src, .ensure_err_payload_void, self.err_union.?); + return &then_scope.base; + }; const is_ptr = payload.ptr_token != null; const ident_node = payload.value_symbol.castTag(.Identifier).?; @@ -680,7 +947,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn // proper type inference requires peer type resolution on the if's // branches. const branch_rl: ResultLoc = switch (rl) { - .discard, .none, .ty, .ptr, .lvalue => rl, + .discard, .none, .ty, .ptr, .lvalue, .ref => rl, .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block }, }; @@ -810,7 +1077,7 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W // proper type inference requires peer type resolution on the while's // branches. 
const branch_rl: ResultLoc = switch (rl) { - .discard, .none, .ty, .ptr, .lvalue => rl, + .discard, .none, .ty, .ptr, .lvalue, .ref => rl, .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = while_block }, }; @@ -941,7 +1208,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo .local_ptr => { const local_ptr = s.cast(Scope.LocalPtr).?; if (mem.eql(u8, local_ptr.name, ident_name)) { - if (rl == .lvalue) { + if (rl == .lvalue or rl == .ref) { return local_ptr.ptr; } else { const result = try addZIRUnOp(mod, scope, src, .deref, local_ptr.ptr); @@ -983,6 +1250,53 @@ fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.OneToken) Inner return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{}); } +fn multilineStrLiteral(mod: *Module, scope: *Scope, node: *ast.Node.MultilineStringLiteral) !*zir.Inst { + const tree = scope.tree(); + const lines = node.linesConst(); + const src = tree.token_locs[lines[0]].start; + + // line lengths and new lines + var len = lines.len - 1; + for (lines) |line| { + len += tree.tokenSlice(line).len - 2; + } + + const bytes = try scope.arena().alloc(u8, len); + var i: usize = 0; + for (lines) |line, line_i| { + if (line_i != 0) { + bytes[i] = '\n'; + i += 1; + } + const slice = tree.tokenSlice(line)[2..]; + mem.copy(u8, bytes[i..], slice); + i += slice.len; + } + + return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{}); +} + +fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst { + const tree = scope.tree(); + const src = tree.token_locs[node.token].start; + const slice = tree.tokenSlice(node.token); + + var bad_index: usize = undefined; + const value = std.zig.parseCharLiteral(slice, &bad_index) catch |err| switch (err) { + error.InvalidCharacter => { + const bad_byte = slice[bad_index]; + return mod.fail(scope, src + bad_index, "invalid character: '{c}'\n", .{bad_byte}); + }, + }; + + const int_payload = try scope.arena().create(Value.Payload.Int_u64); + int_payload.* = .{ .int = value }; + return addZIRInstConst(mod, scope, src, .{ + .ty = Type.initTag(.comptime_int), + .val = Value.initPayload(&int_payload.base), + }); +} + fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) InnerError!*zir.Inst { const arena = scope.arena(); const tree = scope.tree(); @@ -1158,7 +1472,8 @@ fn as(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) I _ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result); return result; }, - .lvalue => { + .lvalue => unreachable, + .ref => { const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]); return addZIRUnOp(mod, scope, result.src, .ref, result); }, @@ -1209,9 +1524,10 @@ fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCa _ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result); return result; }, - .lvalue => { - const operand = try expr(mod, scope, .lvalue, params[1]); - const result = try addZIRBinOp(mod, scope, src, .bitcast_lvalue, dest_type, operand); + .lvalue => unreachable, + .ref => { + const operand = try expr(mod, scope, .ref, params[1]); + const result = try addZIRBinOp(mod, scope, src, .bitcast_ref, dest_type, operand); return result; }, .ty => |result_ty| { @@ -1476,7 +1792,7 @@ fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr _ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result); return result; }, - .lvalue => { + .lvalue, .ref => 
{ // We need a pointer but we have a value. return addZIRUnOp(mod, scope, result.src, .ref, result); }, diff --git a/src-self-hosted/codegen.zig b/src-self-hosted/codegen.zig index c59b8db825..8887a1e0ca 100644 --- a/src-self-hosted/codegen.zig +++ b/src-self-hosted/codegen.zig @@ -73,6 +73,8 @@ pub fn generateSymbol( switch (typed_value.ty.zigTypeTag()) { .Fn => { switch (bin_file.base.options.target.cpu.arch) { + .wasm32 => unreachable, // has its own code path + .wasm64 => unreachable, // has its own code path //.arm => return Function(.arm).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), //.armeb => return Function(.armeb).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), //.aarch64 => return Function(.aarch64).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), @@ -119,8 +121,6 @@ pub fn generateSymbol( //.kalimba => return Function(.kalimba).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), //.shave => return Function(.shave).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), //.lanai => return Function(.lanai).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), - //.wasm32 => return Function(.wasm32).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), - //.wasm64 => return Function(.wasm64).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), //.renderscript32 => return Function(.renderscript32).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), //.renderscript64 => return Function(.renderscript64).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), //.ve => return Function(.ve).generateSymbol(bin_file, src, typed_value, code, dbg_line, dbg_info, dbg_info_type_relocs), diff --git a/src-self-hosted/codegen/wasm.zig b/src-self-hosted/codegen/wasm.zig index 57eb002e82..e55e904934 100644 --- a/src-self-hosted/codegen/wasm.zig +++ b/src-self-hosted/codegen/wasm.zig @@ -62,58 +62,80 @@ pub fn genCode(buf: *ArrayList(u8), decl: *Decl) !void { // TODO: check for and handle death of instructions const tv = decl.typed_value.most_recent.typed_value; const mod_fn = tv.val.cast(Value.Payload.Function).?.func; - for (mod_fn.analysis.success.instructions) |inst| try genInst(writer, inst); + for (mod_fn.analysis.success.instructions) |inst| try genInst(buf, decl, inst); // Write 'end' opcode try writer.writeByte(0x0B); // Fill in the size of the generated code to the reserved space at the // beginning of the buffer. 
- leb.writeUnsignedFixed(5, buf.items[0..5], @intCast(u32, buf.items.len - 5)); + const size = buf.items.len - 5 + decl.fn_link.wasm.?.idx_refs.items.len * 5; + leb.writeUnsignedFixed(5, buf.items[0..5], @intCast(u32, size)); } -fn genInst(writer: ArrayList(u8).Writer, inst: *Inst) !void { +fn genInst(buf: *ArrayList(u8), decl: *Decl, inst: *Inst) !void { return switch (inst.tag) { + .call => genCall(buf, decl, inst.castTag(.call).?), + .constant => genConstant(buf, decl, inst.castTag(.constant).?), .dbg_stmt => {}, - .ret => genRet(writer, inst.castTag(.ret).?), + .ret => genRet(buf, decl, inst.castTag(.ret).?), + .retvoid => {}, else => error.TODOImplementMoreWasmCodegen, }; } -fn genRet(writer: ArrayList(u8).Writer, inst: *Inst.UnOp) !void { - switch (inst.operand.tag) { - .constant => { - const constant = inst.operand.castTag(.constant).?; - switch (inst.operand.ty.tag()) { - .u32 => { - try writer.writeByte(0x41); // i32.const - try leb.writeILEB128(writer, constant.val.toUnsignedInt()); - }, - .i32 => { - try writer.writeByte(0x41); // i32.const - try leb.writeILEB128(writer, constant.val.toSignedInt()); - }, - .u64 => { - try writer.writeByte(0x42); // i64.const - try leb.writeILEB128(writer, constant.val.toUnsignedInt()); - }, - .i64 => { - try writer.writeByte(0x42); // i64.const - try leb.writeILEB128(writer, constant.val.toSignedInt()); - }, - .f32 => { - try writer.writeByte(0x43); // f32.const - // TODO: enforce LE byte order - try writer.writeAll(mem.asBytes(&constant.val.toFloat(f32))); - }, - .f64 => { - try writer.writeByte(0x44); // f64.const - // TODO: enforce LE byte order - try writer.writeAll(mem.asBytes(&constant.val.toFloat(f64))); - }, - else => return error.TODOImplementMoreWasmCodegen, - } +fn genConstant(buf: *ArrayList(u8), decl: *Decl, inst: *Inst.Constant) !void { + const writer = buf.writer(); + switch (inst.base.ty.tag()) { + .u32 => { + try writer.writeByte(0x41); // i32.const + try leb.writeILEB128(writer, inst.val.toUnsignedInt()); + }, + .i32 => { + try writer.writeByte(0x41); // i32.const + try leb.writeILEB128(writer, inst.val.toSignedInt()); + }, + .u64 => { + try writer.writeByte(0x42); // i64.const + try leb.writeILEB128(writer, inst.val.toUnsignedInt()); + }, + .i64 => { + try writer.writeByte(0x42); // i64.const + try leb.writeILEB128(writer, inst.val.toSignedInt()); }, + .f32 => { + try writer.writeByte(0x43); // f32.const + // TODO: enforce LE byte order + try writer.writeAll(mem.asBytes(&inst.val.toFloat(f32))); + }, + .f64 => { + try writer.writeByte(0x44); // f64.const + // TODO: enforce LE byte order + try writer.writeAll(mem.asBytes(&inst.val.toFloat(f64))); + }, + .void => {}, else => return error.TODOImplementMoreWasmCodegen, } } + +fn genRet(buf: *ArrayList(u8), decl: *Decl, inst: *Inst.UnOp) !void { + try genInst(buf, decl, inst.operand); +} + +fn genCall(buf: *ArrayList(u8), decl: *Decl, inst: *Inst.Call) !void { + const func_inst = inst.func.castTag(.constant).?; + const func_val = func_inst.val.cast(Value.Payload.Function).?; + const target = func_val.func.owner_decl; + const target_ty = target.typed_value.most_recent.typed_value.ty; + + if (inst.args.len != 0) return error.TODOImplementMoreWasmCodegen; + + try buf.append(0x10); // call + + // The function index immediate argument will be filled in using this data + // in link.Wasm.flush(). 
+ try decl.fn_link.wasm.?.idx_refs.append(buf.allocator, .{ + .offset = @intCast(u32, buf.items.len), + .decl = target, + }); +} diff --git a/src-self-hosted/link.zig b/src-self-hosted/link.zig index 506efa85cc..39e6d27c58 100644 --- a/src-self-hosted/link.zig +++ b/src-self-hosted/link.zig @@ -109,6 +109,8 @@ pub const File = struct { } } + /// May be called before or after updateDeclExports but must be called + /// after allocateDeclIndexes for any given Decl. pub fn updateDecl(base: *File, module: *Module, decl: *Module.Decl) !void { switch (base.tag) { .elf => return @fieldParentPtr(Elf, "base", base).updateDecl(module, decl), @@ -126,6 +128,8 @@ pub const File = struct { } } + /// Must be called before any call to updateDecl or updateDeclExports for + /// any given Decl. pub fn allocateDeclIndexes(base: *File, decl: *Module.Decl) !void { switch (base.tag) { .elf => return @fieldParentPtr(Elf, "base", base).allocateDeclIndexes(decl), @@ -199,7 +203,8 @@ pub const File = struct { }; } - /// Must be called only after a successful call to `updateDecl`. + /// May be called before or after updateDecl, but must be called after + /// allocateDeclIndexes for any given Decl. pub fn updateDeclExports( base: *File, module: *Module, diff --git a/src-self-hosted/link/Wasm.zig b/src-self-hosted/link/Wasm.zig index a381cfad57..d8f172f584 100644 --- a/src-self-hosted/link/Wasm.zig +++ b/src-self-hosted/link/Wasm.zig @@ -32,19 +32,22 @@ const spec = struct { pub const base_tag = link.File.Tag.wasm; pub const FnData = struct { - funcidx: u32, + /// Generated code for the type of the function + functype: std.ArrayListUnmanaged(u8) = .{}, + /// Generated code for the body of the function + code: std.ArrayListUnmanaged(u8) = .{}, + /// Locations in the generated code where function indexes must be filled in. + /// This must be kept ordered by offset. + idx_refs: std.ArrayListUnmanaged(struct { offset: u32, decl: *Module.Decl }) = .{}, }; base: link.File, -types: Types, -funcs: Funcs, -exports: Exports, - -/// Array over the section structs used in the various sections above to -/// allow iteration when shifting sections to make space. -/// TODO: this should eventually be size 11 when we use all the sections. -sections: [4]*Section, +/// List of all function Decls to be written to the output file. The index of +/// each Decl in this list at the time of writing the binary is used as the +/// function index. +/// TODO: can/should we access some data structure in Module directly? 
+funcs: std.ArrayListUnmanaged(*Module.Decl) = .{}, pub fn openPath(allocator: *Allocator, dir: fs.Dir, sub_path: []const u8, options: link.Options) !*link.File { assert(options.object_format == .wasm); @@ -58,10 +61,6 @@ pub fn openPath(allocator: *Allocator, dir: fs.Dir, sub_path: []const u8, option try file.writeAll(&(spec.magic ++ spec.version)); - // TODO: this should vary depending on the section and be less arbitrary - const size = 1024; - const offset = @sizeOf(@TypeOf(spec.magic ++ spec.version)); - wasm.* = .{ .base = .{ .tag = .wasm, @@ -69,52 +68,42 @@ pub fn openPath(allocator: *Allocator, dir: fs.Dir, sub_path: []const u8, option .file = file, .allocator = allocator, }, - - .types = try Types.init(file, offset, size), - .funcs = try Funcs.init(file, offset + size, size, offset + 3 * size, size), - .exports = try Exports.init(file, offset + 2 * size, size), - - // These must be ordered as they will appear in the output file - .sections = [_]*Section{ - &wasm.types.typesec.section, - &wasm.funcs.funcsec, - &wasm.exports.exportsec, - &wasm.funcs.codesec.section, - }, }; - try file.setEndPos(offset + 4 * size); - return &wasm.base; } pub fn deinit(self: *Wasm) void { - self.types.deinit(); - self.funcs.deinit(); + for (self.funcs.items) |decl| { + decl.fn_link.wasm.?.functype.deinit(self.base.allocator); + decl.fn_link.wasm.?.code.deinit(self.base.allocator); + decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator); + } + self.funcs.deinit(self.base.allocator); } +// Generate code for the Decl, storing it in memory to be later written to +// the file on flush(). pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void { if (decl.typed_value.most_recent.typed_value.ty.zigTypeTag() != .Fn) return error.TODOImplementNonFnDeclsForWasm; - if (decl.fn_link.wasm) |fn_data| { - self.funcs.free(fn_data.funcidx); - } - - var buf = std.ArrayList(u8).init(self.base.allocator); - defer buf.deinit(); - - try codegen.genFunctype(&buf, decl); - const typeidx = try self.types.new(buf.items); - buf.items.len = 0; - - try codegen.genCode(&buf, decl); - const funcidx = try self.funcs.new(typeidx, buf.items); - - decl.fn_link.wasm = .{ .funcidx = funcidx }; - - // TODO: we should be more smart and set this only when needed - self.exports.dirty = true; + if (decl.fn_link.wasm) |*fn_data| { + fn_data.functype.items.len = 0; + fn_data.code.items.len = 0; + fn_data.idx_refs.items.len = 0; + } else { + decl.fn_link.wasm = .{}; + try self.funcs.append(self.base.allocator, decl); + } + const fn_data = &decl.fn_link.wasm.?; + + var managed_functype = fn_data.functype.toManaged(self.base.allocator); + var managed_code = fn_data.code.toManaged(self.base.allocator); + try codegen.genFunctype(&managed_functype, decl); + try codegen.genCode(&managed_code, decl); + fn_data.functype = managed_functype.toUnmanaged(); + fn_data.code = managed_code.toUnmanaged(); } pub fn updateDeclExports( @@ -122,332 +111,141 @@ pub fn updateDeclExports( module: *Module, decl: *const Module.Decl, exports: []const *Module.Export, -) !void { - self.exports.dirty = true; -} +) !void {} pub fn freeDecl(self: *Wasm, decl: *Module.Decl) void { // TODO: remove this assert when non-function Decls are implemented assert(decl.typed_value.most_recent.typed_value.ty.zigTypeTag() == .Fn); - if (decl.fn_link.wasm) |fn_data| { - self.funcs.free(fn_data.funcidx); - decl.fn_link.wasm = null; - } + _ = self.funcs.swapRemove(self.getFuncidx(decl).?); + decl.fn_link.wasm.?.functype.deinit(self.base.allocator); + 
decl.fn_link.wasm.?.code.deinit(self.base.allocator); + decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator); + decl.fn_link.wasm = null; } pub fn flush(self: *Wasm, module: *Module) !void { - if (self.exports.dirty) try self.exports.writeAll(module); -} - -/// This struct describes the location of a named section + custom section -/// padding in the output file. This is all the data we need to allow for -/// shifting sections around when padding runs out. -const Section = struct { - /// The size of a section header: 1 byte section id + 5 bytes - /// for the fixed-width ULEB128 encoded contents size. - const header_size = 1 + 5; - /// Offset of the section id byte from the start of the file. - offset: u64, - /// Size of the section, including the header and directly - /// following custom section used for padding if any. - size: u64, - - /// Resize the usable part of the section, handling the following custom - /// section used for padding. If there is not enough padding left, shift - /// all following sections to make space. Takes the current and target - /// contents sizes of the section as arguments. - fn resize(self: *Section, file: fs.File, current: u32, target: u32) !void { - // Section header + target contents size + custom section header - // + custom section name + empty custom section > owned chunk of the file - if (header_size + target + header_size + 1 + 0 > self.size) - return error.TODOImplementSectionShifting; - - const new_custom_start = self.offset + header_size + target; - const new_custom_contents_size = self.size - target - 2 * header_size; - assert(new_custom_contents_size >= 1); - // +1 for the name of the custom section, which we set to an empty string - var custom_header: [header_size + 1]u8 = undefined; - custom_header[0] = spec.custom_id; - leb.writeUnsignedFixed(5, custom_header[1..header_size], @intCast(u32, new_custom_contents_size)); - custom_header[header_size] = 0; - try file.pwriteAll(&custom_header, new_custom_start); - } -}; - -/// This can be used to manage the contents of any section which uses a vector -/// of contents. This interface maintains index stability while allowing for -/// reuse of "dead" indexes. -const VecSection = struct { - /// Represents a single entry in the vector (e.g. a type in the type section) - const Entry = struct { - /// Offset from the start of the section contents in bytes - offset: u32, - /// Size in bytes of the entry - size: u32, - }; - section: Section, - /// Size in bytes of the contents of the section. Does not include - /// the "header" containing the section id and this value. - contents_size: u32, - /// List of all entries in the contents of the section. - entries: std.ArrayListUnmanaged(Entry) = std.ArrayListUnmanaged(Entry){}, - /// List of indexes of unreferenced entries which may be - /// overwritten and reused. 
- dead_list: std.ArrayListUnmanaged(u32) = std.ArrayListUnmanaged(u32){}, - - /// Write the headers of the section and custom padding section - fn init(comptime section_id: u8, file: fs.File, offset: u64, initial_size: u64) !VecSection { - // section id, section size, empty vector, custom section id, - // custom section size, empty custom section name - var initial_data: [1 + 5 + 5 + 1 + 5 + 1]u8 = undefined; - - assert(initial_size >= initial_data.len); - - comptime var i = 0; - initial_data[i] = section_id; - i += 1; - leb.writeUnsignedFixed(5, initial_data[i..(i + 5)], 5); - i += 5; - leb.writeUnsignedFixed(5, initial_data[i..(i + 5)], 0); - i += 5; - initial_data[i] = spec.custom_id; - i += 1; - leb.writeUnsignedFixed(5, initial_data[i..(i + 5)], @intCast(u32, initial_size - @sizeOf(@TypeOf(initial_data)))); - i += 5; - initial_data[i] = 0; - - try file.pwriteAll(&initial_data, offset); - - return VecSection{ - .section = .{ - .offset = offset, - .size = initial_size, - }, - .contents_size = 5, - }; - } - - fn deinit(self: *VecSection, allocator: *Allocator) void { - self.entries.deinit(allocator); - self.dead_list.deinit(allocator); - } - - /// Write a new entry into the file, returning the index used. - fn addEntry(self: *VecSection, file: fs.File, allocator: *Allocator, data: []const u8) !u32 { - // First look for a dead entry we can reuse - for (self.dead_list.items) |dead_idx, i| { - const dead_entry = &self.entries.items[dead_idx]; - if (dead_entry.size == data.len) { - // Found a dead entry of the right length, overwrite it - try file.pwriteAll(data, self.section.offset + Section.header_size + dead_entry.offset); - _ = self.dead_list.swapRemove(i); - return dead_idx; - } + const file = self.base.file.?; + const header_size = 5 + 1; + + // No need to rewrite the magic/version header + try file.setEndPos(@sizeOf(@TypeOf(spec.magic ++ spec.version))); + try file.seekTo(@sizeOf(@TypeOf(spec.magic ++ spec.version))); + + // Type section + { + const header_offset = try reserveVecSectionHeader(file); + for (self.funcs.items) |decl| { + try file.writeAll(decl.fn_link.wasm.?.functype.items); } - - // TODO: We can be more efficient if we special-case one or - // more consecutive dead entries at the end of the vector. - - // We failed to find a dead entry to reuse, so write the new - // entry to the end of the section. - try self.section.resize(file, self.contents_size, self.contents_size + @intCast(u32, data.len)); - try file.pwriteAll(data, self.section.offset + Section.header_size + self.contents_size); - try self.entries.append(allocator, .{ - .offset = self.contents_size, - .size = @intCast(u32, data.len), - }); - self.contents_size += @intCast(u32, data.len); - // Make sure the dead list always has enough space to store all free'd - // entries. This makes it so that delEntry() cannot fail. - // TODO: figure out a better way that doesn't waste as much memory - try self.dead_list.ensureCapacity(allocator, self.entries.items.len); - - // Update the size in the section header and the item count of - // the contents vector. - var size_and_count: [10]u8 = undefined; - leb.writeUnsignedFixed(5, size_and_count[0..5], self.contents_size); - leb.writeUnsignedFixed(5, size_and_count[5..], @intCast(u32, self.entries.items.len)); - try file.pwriteAll(&size_and_count, self.section.offset + 1); - - return @intCast(u32, self.entries.items.len - 1); - } - - /// Mark the type referenced by the given index as dead. 
- fn delEntry(self: *VecSection, index: u32) void { - self.dead_list.appendAssumeCapacity(index); - } -}; - -const Types = struct { - typesec: VecSection, - - fn init(file: fs.File, offset: u64, initial_size: u64) !Types { - return Types{ .typesec = try VecSection.init(spec.types_id, file, offset, initial_size) }; - } - - fn deinit(self: *Types) void { - const wasm = @fieldParentPtr(Wasm, "types", self); - self.typesec.deinit(wasm.base.allocator); - } - - fn new(self: *Types, data: []const u8) !u32 { - const wasm = @fieldParentPtr(Wasm, "types", self); - return self.typesec.addEntry(wasm.base.file.?, wasm.base.allocator, data); - } - - fn free(self: *Types, typeidx: u32) void { - self.typesec.delEntry(typeidx); - } -}; - -const Funcs = struct { - /// This section needs special handling to keep the indexes matching with - /// the codesec, so we cant just use a VecSection. - funcsec: Section, - /// The typeidx stored for each function, indexed by funcidx. - func_types: std.ArrayListUnmanaged(u32) = std.ArrayListUnmanaged(u32){}, - codesec: VecSection, - - fn init(file: fs.File, funcs_offset: u64, funcs_size: u64, code_offset: u64, code_size: u64) !Funcs { - return Funcs{ - .funcsec = (try VecSection.init(spec.funcs_id, file, funcs_offset, funcs_size)).section, - .codesec = try VecSection.init(spec.code_id, file, code_offset, code_size), - }; - } - - fn deinit(self: *Funcs) void { - const wasm = @fieldParentPtr(Wasm, "funcs", self); - self.func_types.deinit(wasm.base.allocator); - self.codesec.deinit(wasm.base.allocator); - } - - /// Add a new function to the binary, first finding space for and writing - /// the code then writing the typeidx to the corresponding index in the - /// funcsec. Returns the function index used. - fn new(self: *Funcs, typeidx: u32, code: []const u8) !u32 { - const wasm = @fieldParentPtr(Wasm, "funcs", self); - const file = wasm.base.file.?; - const allocator = wasm.base.allocator; - - assert(self.func_types.items.len == self.codesec.entries.items.len); - - // TODO: consider nop-padding the code if there is a close but not perfect fit - const funcidx = try self.codesec.addEntry(file, allocator, code); - - if (self.func_types.items.len < self.codesec.entries.items.len) { - // u32 vector length + funcs_count u32s in the vector - const current = 5 + @intCast(u32, self.func_types.items.len) * 5; - try self.funcsec.resize(file, current, current + 5); - try self.func_types.append(allocator, typeidx); - - // Update the size in the section header and the item count of - // the contents vector. - const count = @intCast(u32, self.func_types.items.len); - var size_and_count: [10]u8 = undefined; - leb.writeUnsignedFixed(5, size_and_count[0..5], 5 + count * 5); - leb.writeUnsignedFixed(5, size_and_count[5..], count); - try file.pwriteAll(&size_and_count, self.funcsec.offset + 1); - } else { - // We are overwriting a dead function and may now free the type - wasm.types.free(self.func_types.items[funcidx]); - } - - assert(self.func_types.items.len == self.codesec.entries.items.len); - - var typeidx_leb: [5]u8 = undefined; - leb.writeUnsignedFixed(5, &typeidx_leb, typeidx); - try file.pwriteAll(&typeidx_leb, self.funcsec.offset + Section.header_size + 5 + funcidx * 5); - - return funcidx; - } - - fn free(self: *Funcs, funcidx: u32) void { - self.codesec.delEntry(funcidx); - } -}; - -/// Exports are tricky. We can't leave dead entries in the binary as they -/// would obviously be visible from the execution environment. 
The simplest -/// way to work around this is to re-emit the export section whenever -/// something changes. This also makes it easier to ensure exported function -/// and global indexes are updated as they change. -const Exports = struct { - exportsec: Section, - /// Size in bytes of the contents of the section. Does not include - /// the "header" containing the section id and this value. - contents_size: u32, - /// If this is true, then exports will be rewritten on flush() - dirty: bool, - - fn init(file: fs.File, offset: u64, initial_size: u64) !Exports { - return Exports{ - .exportsec = (try VecSection.init(spec.exports_id, file, offset, initial_size)).section, - .contents_size = 5, - .dirty = false, - }; - } - - fn writeAll(self: *Exports, module: *Module) !void { - const wasm = @fieldParentPtr(Wasm, "exports", self); - const file = wasm.base.file.?; - var buf: [5]u8 = undefined; - - // First ensure the section is the right size - var export_count: u32 = 0; - var new_contents_size: u32 = 5; + try writeVecSectionHeader( + file, + header_offset, + spec.types_id, + @intCast(u32, (try file.getPos()) - header_offset - header_size), + @intCast(u32, self.funcs.items.len), + ); + } + + // Function section + { + const header_offset = try reserveVecSectionHeader(file); + const writer = file.writer(); + for (self.funcs.items) |_, typeidx| try leb.writeULEB128(writer, @intCast(u32, typeidx)); + try writeVecSectionHeader( + file, + header_offset, + spec.funcs_id, + @intCast(u32, (try file.getPos()) - header_offset - header_size), + @intCast(u32, self.funcs.items.len), + ); + } + + // Export section + { + const header_offset = try reserveVecSectionHeader(file); + const writer = file.writer(); + var count: u32 = 0; for (module.decl_exports.entries.items) |entry| { - for (entry.value) |e| { - export_count += 1; - new_contents_size += calcSize(e); + for (entry.value) |exprt| { + // Export name length + name + try leb.writeULEB128(writer, @intCast(u32, exprt.options.name.len)); + try writer.writeAll(exprt.options.name); + + switch (exprt.exported_decl.typed_value.most_recent.typed_value.ty.zigTypeTag()) { + .Fn => { + // Type of the export + try writer.writeByte(0x00); + // Exported function index + try leb.writeULEB128(writer, self.getFuncidx(exprt.exported_decl).?); + }, + else => return error.TODOImplementNonFnDeclsForWasm, + } + + count += 1; } } - if (new_contents_size != self.contents_size) { - try self.exportsec.resize(file, self.contents_size, new_contents_size); - leb.writeUnsignedFixed(5, &buf, new_contents_size); - try file.pwriteAll(&buf, self.exportsec.offset + 1); - } - - try file.seekTo(self.exportsec.offset + Section.header_size); + try writeVecSectionHeader( + file, + header_offset, + spec.exports_id, + @intCast(u32, (try file.getPos()) - header_offset - header_size), + count, + ); + } + + // Code section + { + const header_offset = try reserveVecSectionHeader(file); const writer = file.writer(); + for (self.funcs.items) |decl| { + const fn_data = &decl.fn_link.wasm.?; + + // Write the already generated code to the file, inserting + // function indexes where required. + var current: u32 = 0; + for (fn_data.idx_refs.items) |idx_ref| { + try writer.writeAll(fn_data.code.items[current..idx_ref.offset]); + current = idx_ref.offset; + // Use a fixed width here to make calculating the code size + // in codegen.wasm.genCode() simpler. 
+ var buf: [5]u8 = undefined; + leb.writeUnsignedFixed(5, &buf, self.getFuncidx(idx_ref.decl).?); + try writer.writeAll(&buf); + } - // Length of the exports vec - leb.writeUnsignedFixed(5, &buf, export_count); - try writer.writeAll(&buf); - - for (module.decl_exports.entries.items) |entry| - for (entry.value) |e| try writeExport(writer, e); - - self.dirty = false; + try writer.writeAll(fn_data.code.items[current..]); + } + try writeVecSectionHeader( + file, + header_offset, + spec.code_id, + @intCast(u32, (try file.getPos()) - header_offset - header_size), + @intCast(u32, self.funcs.items.len), + ); } +} - /// Return the total number of bytes an export will take. - /// TODO: fixed-width LEB128 is currently used for simplicity, but should - /// be replaced with proper variable-length LEB128 as it is inefficient. - fn calcSize(e: *Module.Export) u32 { - // LEB128 name length + name bytes + export type + LEB128 index - return 5 + @intCast(u32, e.options.name.len) + 1 + 5; - } +/// Get the current index of a given Decl in the function list +/// TODO: we could maintain a hash map to potentially make this +fn getFuncidx(self: Wasm, decl: *Module.Decl) ?u32 { + return for (self.funcs.items) |func, idx| { + if (func == decl) break @intCast(u32, idx); + } else null; +} - /// Write the data for a single export to the given file at a given offset. - /// TODO: fixed-width LEB128 is currently used for simplicity, but should - /// be replaced with proper variable-length LEB128 as it is inefficient. - fn writeExport(writer: anytype, e: *Module.Export) !void { - var buf: [5]u8 = undefined; - - // Export name length + name - leb.writeUnsignedFixed(5, &buf, @intCast(u32, e.options.name.len)); - try writer.writeAll(&buf); - try writer.writeAll(e.options.name); - - switch (e.exported_decl.typed_value.most_recent.typed_value.ty.zigTypeTag()) { - .Fn => { - // Type of the export - try writer.writeByte(0x00); - // Exported function index - leb.writeUnsignedFixed(5, &buf, e.exported_decl.fn_link.wasm.?.funcidx); - try writer.writeAll(&buf); - }, - else => return error.TODOImplementNonFnDeclsForWasm, - } - } -}; +fn reserveVecSectionHeader(file: fs.File) !u64 { + // section id + fixed leb contents size + fixed leb vector length + const header_size = 1 + 5 + 5; + // TODO: this should be a single lseek(2) call, but fs.File does not + // currently provide a way to do this. 
+ try file.seekBy(header_size); + return (try file.getPos()) - header_size; +} + +fn writeVecSectionHeader(file: fs.File, offset: u64, section: u8, size: u32, items: u32) !void { + var buf: [1 + 5 + 5]u8 = undefined; + buf[0] = section; + leb.writeUnsignedFixed(5, buf[1..6], size); + leb.writeUnsignedFixed(5, buf[6..], items); + try file.pwriteAll(&buf, offset); +} diff --git a/src-self-hosted/test.zig b/src-self-hosted/test.zig index 265c26b2dc..b51cb313f8 100644 --- a/src-self-hosted/test.zig +++ b/src-self-hosted/test.zig @@ -421,7 +421,7 @@ pub const TestContext = struct { } fn runOneCase(self: *TestContext, allocator: *Allocator, root_node: *std.Progress.Node, case: Case) !void { - const target_info = try std.zig.system.NativeTargetInfo.detect(std.testing.allocator, case.target); + const target_info = try std.zig.system.NativeTargetInfo.detect(allocator, case.target); const target = target_info.target; var arena_allocator = std.heap.ArenaAllocator.init(allocator); diff --git a/src-self-hosted/type.zig b/src-self-hosted/type.zig index faba784f90..a6ec90a35b 100644 --- a/src-self-hosted/type.zig +++ b/src-self-hosted/type.zig @@ -65,7 +65,7 @@ pub const Type = extern union { .fn_ccc_void_no_args => return .Fn, .function => return .Fn, - .array, .array_u8_sentinel_0 => return .Array, + .array, .array_u8_sentinel_0, .array_u8, .array_sentinel => return .Array, .single_const_pointer => return .Pointer, .single_mut_pointer => return .Pointer, .single_const_pointer_to_comptime_int => return .Pointer, @@ -75,6 +75,7 @@ pub const Type = extern union { .optional_single_const_pointer, .optional_single_mut_pointer, => return .Optional, + .enum_literal => return .EnumLiteral, } } @@ -127,6 +128,7 @@ pub const Type = extern union { if (zig_tag_a != zig_tag_b) return false; switch (zig_tag_a) { + .EnumLiteral => return true, .Type => return true, .Void => return true, .Bool => return true, @@ -211,7 +213,6 @@ pub const Type = extern union { .Frame, .AnyFrame, .Vector, - .EnumLiteral, => std.debug.panic("TODO implement Type equality comparison of {} and {}", .{ a, b }), } } @@ -327,9 +328,11 @@ pub const Type = extern union { .fn_ccc_void_no_args, .single_const_pointer_to_comptime_int, .const_slice_u8, + .enum_literal, => unreachable, .array_u8_sentinel_0 => return self.copyPayloadShallow(allocator, Payload.Array_u8_Sentinel0), + .array_u8 => return self.copyPayloadShallow(allocator, Payload.Array_u8), .array => { const payload = @fieldParentPtr(Payload.Array, "base", self.ptr_otherwise); const new_payload = try allocator.create(Payload.Array); @@ -340,6 +343,17 @@ pub const Type = extern union { }; return Type{ .ptr_otherwise = &new_payload.base }; }, + .array_sentinel => { + const payload = @fieldParentPtr(Payload.ArraySentinel, "base", self.ptr_otherwise); + const new_payload = try allocator.create(Payload.ArraySentinel); + new_payload.* = .{ + .base = payload.base, + .len = payload.len, + .sentinel = try payload.sentinel.copy(allocator), + .elem_type = try payload.elem_type.copy(allocator), + }; + return Type{ .ptr_otherwise = &new_payload.base }; + }, .int_signed => return self.copyPayloadShallow(allocator, Payload.IntSigned), .int_unsigned => return self.copyPayloadShallow(allocator, Payload.IntUnsigned), .function => { @@ -425,6 +439,7 @@ pub const Type = extern union { .noreturn, => return out_stream.writeAll(@tagName(t)), + .enum_literal => return out_stream.writeAll("@TypeOf(.EnumLiteral)"), .@"null" => return out_stream.writeAll("@TypeOf(null)"), .@"undefined" => return 
out_stream.writeAll("@TypeOf(undefined)"), @@ -445,6 +460,10 @@ pub const Type = extern union { try payload.return_type.format("", .{}, out_stream); }, + .array_u8 => { + const payload = @fieldParentPtr(Payload.Array_u8, "base", ty.ptr_otherwise); + return out_stream.print("[{}]u8", .{payload.len}); + }, .array_u8_sentinel_0 => { const payload = @fieldParentPtr(Payload.Array_u8_Sentinel0, "base", ty.ptr_otherwise); return out_stream.print("[{}:0]u8", .{payload.len}); @@ -455,6 +474,12 @@ pub const Type = extern union { ty = payload.elem_type; continue; }, + .array_sentinel => { + const payload = @fieldParentPtr(Payload.ArraySentinel, "base", ty.ptr_otherwise); + try out_stream.print("[{}:{}]", .{ payload.len, payload.sentinel }); + ty = payload.elem_type; + continue; + }, .single_const_pointer => { const payload = @fieldParentPtr(Payload.Pointer, "base", ty.ptr_otherwise); try out_stream.writeAll("*const "); @@ -539,6 +564,7 @@ pub const Type = extern union { .fn_ccc_void_no_args => return Value.initTag(.fn_ccc_void_no_args_type), .single_const_pointer_to_comptime_int => return Value.initTag(.single_const_pointer_to_comptime_int_type), .const_slice_u8 => return Value.initTag(.const_slice_u8_type), + .enum_literal => return Value.initTag(.enum_literal_type), else => { const ty_payload = try allocator.create(Value.Payload.Ty); ty_payload.* = .{ .ty = self }; @@ -588,6 +614,8 @@ pub const Type = extern union { => true, // TODO lazy types .array => self.elemType().hasCodeGenBits() and self.arrayLen() != 0, + .array_u8 => self.arrayLen() != 0, + .array_sentinel => self.elemType().hasCodeGenBits(), .single_const_pointer => self.elemType().hasCodeGenBits(), .single_mut_pointer => self.elemType().hasCodeGenBits(), .int_signed => self.cast(Payload.IntSigned).?.bits == 0, @@ -601,6 +629,7 @@ pub const Type = extern union { .noreturn, .@"null", .@"undefined", + .enum_literal, => false, }; } @@ -616,6 +645,7 @@ pub const Type = extern union { .i8, .bool, .array_u8_sentinel_0, + .array_u8, => return 1, .fn_noreturn_no_args, // represents machine code; not a pointer @@ -659,7 +689,7 @@ pub const Type = extern union { .anyerror => return 2, // TODO revisit this when we have the concept of the error tag type - .array => return self.cast(Payload.Array).?.elem_type.abiAlignment(target), + .array, .array_sentinel => return self.elemType().abiAlignment(target), .int_signed, .int_unsigned => { const bits: u16 = if (self.cast(Payload.IntSigned)) |pl| @@ -691,6 +721,7 @@ pub const Type = extern union { .noreturn, .@"null", .@"undefined", + .enum_literal, => unreachable, }; } @@ -711,18 +742,25 @@ pub const Type = extern union { .noreturn => unreachable, .@"null" => unreachable, .@"undefined" => unreachable, + .enum_literal => unreachable, .u8, .i8, .bool, => return 1, - .array_u8_sentinel_0 => @fieldParentPtr(Payload.Array_u8_Sentinel0, "base", self.ptr_otherwise).len, + .array_u8 => @fieldParentPtr(Payload.Array_u8_Sentinel0, "base", self.ptr_otherwise).len, + .array_u8_sentinel_0 => @fieldParentPtr(Payload.Array_u8_Sentinel0, "base", self.ptr_otherwise).len + 1, .array => { const payload = @fieldParentPtr(Payload.Array, "base", self.ptr_otherwise); const elem_size = std.math.max(payload.elem_type.abiAlignment(target), payload.elem_type.abiSize(target)); return payload.len * elem_size; }, + .array_sentinel => { + const payload = @fieldParentPtr(Payload.ArraySentinel, "base", self.ptr_otherwise); + const elem_size = std.math.max(payload.elem_type.abiAlignment(target), payload.elem_type.abiSize(target)); + return 
(payload.len + 1) * elem_size; + }, .i16, .u16 => return 2, .i32, .u32 => return 4, .i64, .u64 => return 8, @@ -818,6 +856,8 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, .array_u8_sentinel_0, .const_slice_u8, .fn_noreturn_no_args, @@ -830,6 +870,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, .single_const_pointer, @@ -875,6 +916,8 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, @@ -889,6 +932,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, .const_slice_u8 => true, @@ -931,6 +975,8 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, .array_u8_sentinel_0, .fn_noreturn_no_args, .fn_void_no_args, @@ -943,6 +989,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, .single_const_pointer, @@ -988,6 +1035,8 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, .array_u8_sentinel_0, .fn_noreturn_no_args, .fn_void_no_args, @@ -1003,6 +1052,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, }; } @@ -1023,6 +1073,45 @@ pub const Type = extern union { } } + /// Returns if type can be used for a runtime variable + pub fn isValidVarType(self: Type) bool { + var ty = self; + while (true) switch (ty.zigTypeTag()) { + .Bool, + .Int, + .Float, + .ErrorSet, + .Enum, + .Frame, + .AnyFrame, + .Vector, + => return true, + + .BoundFn, + .ComptimeFloat, + .ComptimeInt, + .EnumLiteral, + .NoReturn, + .Type, + .Void, + .Undefined, + .Null, + .Opaque, + => return false, + + .Optional => { + var buf: Payload.Pointer = undefined; + return ty.optionalChild(&buf).isValidVarType(); + }, + .Pointer, .Array => ty = ty.elemType(), + + .ErrorUnion => @panic("TODO fn isValidVarType"), + .Fn => @panic("TODO fn isValidVarType"), + .Struct => @panic("TODO struct isValidVarType"), + .Union => @panic("TODO union isValidVarType"), + }; + } + /// Asserts the type is a pointer or array type. 
pub fn elemType(self: Type) Type { return switch (self.tag()) { @@ -1069,12 +1158,14 @@ pub const Type = extern union { .optional, .optional_single_const_pointer, .optional_single_mut_pointer, + .enum_literal, => unreachable, .array => self.cast(Payload.Array).?.elem_type, + .array_sentinel => self.cast(Payload.ArraySentinel).?.elem_type, .single_const_pointer => self.castPointer().?.pointee_type, .single_mut_pointer => self.castPointer().?.pointee_type, - .array_u8_sentinel_0, .const_slice_u8 => Type.initTag(.u8), + .array_u8, .array_u8_sentinel_0, .const_slice_u8 => Type.initTag(.u8), .single_const_pointer_to_comptime_int => Type.initTag(.comptime_int), }; } @@ -1173,9 +1264,12 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, .array => self.cast(Payload.Array).?.len, + .array_sentinel => self.cast(Payload.ArraySentinel).?.len, + .array_u8 => self.cast(Payload.Array_u8).?.len, .array_u8_sentinel_0 => self.cast(Payload.Array_u8_Sentinel0).?.len, }; } @@ -1230,9 +1324,11 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, - .array => return null, + .array, .array_u8 => return null, + .array_sentinel => return self.cast(Payload.ArraySentinel).?.sentinel, .array_u8_sentinel_0 => return Value.initTag(.zero), }; } @@ -1266,10 +1362,12 @@ pub const Type = extern union { .fn_ccc_void_no_args, .function, .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .int_unsigned, .u8, @@ -1284,6 +1382,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, .int_signed, @@ -1324,10 +1423,12 @@ pub const Type = extern union { .fn_ccc_void_no_args, .function, .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .int_signed, .i8, @@ -1342,6 +1443,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, .int_unsigned, @@ -1382,14 +1484,17 @@ pub const Type = extern union { .fn_ccc_void_no_args, .function, .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, .int_unsigned => .{ .signed = false, .bits = self.cast(Payload.IntUnsigned).?.bits }, @@ -1438,10 +1543,12 @@ pub const Type = extern union { .fn_ccc_void_no_args, .function, .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .int_unsigned, .int_signed, @@ -1456,6 +1563,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, .usize, @@ -1523,10 +1631,12 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .u8, .i8, 
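Below is a minimal sketch (not part of this change) of how the new array_sentinel payload is expected to behave with the Type queries touched in this file. It assumes a test compiled alongside src-self-hosted/type.zig and value.zig and uses only declarations visible in this diff (initPayload, initTag, zigTypeTag, arrayLen, elemType, tag):

    const std = @import("std");
    const Type = @import("type.zig").Type;
    const Value = @import("value.zig").Value;

    test "array_sentinel payload answers the basic Type queries" {
        // Model a `[4:0]u8` using the new ArraySentinel payload.
        var payload = Type.Payload.ArraySentinel{
            .len = 4,
            .sentinel = Value.initTag(.zero),
            .elem_type = Type.initTag(.u8),
        };
        const ty = Type.initPayload(&payload.base);

        std.testing.expect(ty.zigTypeTag() == .Array); // .array_sentinel now reports .Array
        std.testing.expect(ty.arrayLen() == 4); // length comes straight from the payload
        std.testing.expect(ty.elemType().tag() == .u8); // element type is preserved
    }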
@@ -1551,6 +1661,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, }; } @@ -1584,10 +1695,12 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .u8, .i8, @@ -1612,6 +1725,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, } } @@ -1644,10 +1758,12 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .u8, .i8, @@ -1672,6 +1788,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, } } @@ -1704,10 +1821,12 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .u8, .i8, @@ -1732,6 +1851,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, }; } @@ -1761,10 +1881,12 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .u8, .i8, @@ -1789,6 +1911,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, }; } @@ -1818,10 +1941,12 @@ pub const Type = extern union { .@"null", .@"undefined", .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .u8, .i8, @@ -1846,6 +1971,7 @@ pub const Type = extern union { .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => unreachable, }; } @@ -1895,14 +2021,17 @@ pub const Type = extern union { .fn_ccc_void_no_args, .function, .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => false, }; } @@ -1944,12 +2073,14 @@ pub const Type = extern union { .fn_ccc_void_no_args, .function, .single_const_pointer_to_comptime_int, + .array_sentinel, .array_u8_sentinel_0, .const_slice_u8, .c_void, .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => return null, .void => return Value.initTag(.void_value), @@ -1971,11 +2102,10 @@ pub const Type = extern union { return null; } }, - .array => { - const array = ty.cast(Payload.Array).?; - if (array.len == 0) + .array, .array_u8 => { + if (ty.arrayLen() == 0) return Value.initTag(.empty_array); - ty = array.elem_type; + ty = ty.elemType(); continue; }, .single_const_pointer, .single_mut_pointer => { @@ -2022,7 +2152,6 @@ pub const Type = extern union { .fn_ccc_void_no_args, .function, 
.single_const_pointer_to_comptime_int, - .array_u8_sentinel_0, .const_slice_u8, .c_void, .void, @@ -2032,11 +2161,15 @@ pub const Type = extern union { .int_unsigned, .int_signed, .array, + .array_sentinel, + .array_u8, + .array_u8_sentinel_0, .single_const_pointer, .single_mut_pointer, .optional, .optional_single_mut_pointer, .optional_single_const_pointer, + .enum_literal, => return false, }; } @@ -2080,6 +2213,7 @@ pub const Type = extern union { comptime_int, comptime_float, noreturn, + enum_literal, @"null", @"undefined", fn_noreturn_no_args, @@ -2090,8 +2224,10 @@ pub const Type = extern union { const_slice_u8, // See last_no_payload_tag below. // After this, the tag requires a payload. + array_u8, array_u8_sentinel_0, array, + array_sentinel, single_const_pointer, single_mut_pointer, int_signed, @@ -2114,11 +2250,25 @@ pub const Type = extern union { len: u64, }; + pub const Array_u8 = struct { + base: Payload = Payload{ .tag = .array_u8 }, + + len: u64, + }; + pub const Array = struct { base: Payload = Payload{ .tag = .array }, + len: u64, elem_type: Type, + }; + + pub const ArraySentinel = struct { + base: Payload = Payload{ .tag = .array_sentinel }, + len: u64, + sentinel: Value, + elem_type: Type, }; pub const Pointer = struct { diff --git a/src-self-hosted/value.zig b/src-self-hosted/value.zig index c8af4716a6..b6356d9b17 100644 --- a/src-self-hosted/value.zig +++ b/src-self-hosted/value.zig @@ -60,6 +60,7 @@ pub const Value = extern union { fn_ccc_void_no_args_type, single_const_pointer_to_comptime_int_type, const_slice_u8_type, + enum_literal_type, undef, zero, @@ -87,6 +88,7 @@ pub const Value = extern union { float_32, float_64, float_128, + enum_literal, pub const last_no_payload_tag = Tag.bool_false; pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1; @@ -164,6 +166,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .undef, .zero, .void_value, @@ -213,7 +216,7 @@ pub const Value = extern union { }; return Value{ .ptr_otherwise = &new_payload.base }; }, - .bytes => return self.copyPayloadShallow(allocator, Payload.Bytes), + .enum_literal, .bytes => return self.copyPayloadShallow(allocator, Payload.Bytes), .repeated => { const payload = @fieldParentPtr(Payload.Repeated, "base", self.ptr_otherwise); const new_payload = try allocator.create(Payload.Repeated); @@ -285,6 +288,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type => return out_stream.writeAll("fn() callconv(.C) void"), .single_const_pointer_to_comptime_int_type => return out_stream.writeAll("*const comptime_int"), .const_slice_u8_type => return out_stream.writeAll("[]const u8"), + .enum_literal_type => return out_stream.writeAll("@TypeOf(.EnumLiteral)"), .null_value => return out_stream.writeAll("null"), .undef => return out_stream.writeAll("undefined"), @@ -318,7 +322,7 @@ pub const Value = extern union { val = elem_ptr.array_ptr; }, .empty_array => return out_stream.writeAll(".{}"), - .bytes => return std.zig.renderStringLiteral(self.cast(Payload.Bytes).?.data, out_stream), + .enum_literal, .bytes => return std.zig.renderStringLiteral(self.cast(Payload.Bytes).?.data, out_stream), .repeated => { try out_stream.writeAll("(repeated) "); val = val.cast(Payload.Repeated).?.val; @@ -391,6 +395,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type => Type.initTag(.fn_ccc_void_no_args), .single_const_pointer_to_comptime_int_type => Type.initTag(.single_const_pointer_to_comptime_int), 
.const_slice_u8_type => Type.initTag(.const_slice_u8), + .enum_literal_type => Type.initTag(.enum_literal), .undef, .zero, @@ -414,6 +419,7 @@ pub const Value = extern union { .float_32, .float_64, .float_128, + .enum_literal, => unreachable, }; } @@ -462,6 +468,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .null_value, .function, .ref_val, @@ -476,6 +483,7 @@ pub const Value = extern union { .void_value, .unreachable_value, .empty_array, + .enum_literal, => unreachable, .undef => unreachable, @@ -537,6 +545,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .null_value, .function, .ref_val, @@ -551,6 +560,7 @@ pub const Value = extern union { .void_value, .unreachable_value, .empty_array, + .enum_literal, => unreachable, .undef => unreachable, @@ -612,6 +622,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .null_value, .function, .ref_val, @@ -626,6 +637,7 @@ pub const Value = extern union { .void_value, .unreachable_value, .empty_array, + .enum_literal, => unreachable, .undef => unreachable, @@ -713,6 +725,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .null_value, .function, .ref_val, @@ -728,6 +741,7 @@ pub const Value = extern union { .void_value, .unreachable_value, .empty_array, + .enum_literal, => unreachable, .zero, @@ -793,6 +807,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .null_value, .function, .ref_val, @@ -807,6 +822,7 @@ pub const Value = extern union { .void_value, .unreachable_value, .empty_array, + .enum_literal, => unreachable, .zero, @@ -953,6 +969,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .bool_true, .bool_false, .null_value, @@ -970,6 +987,7 @@ pub const Value = extern union { .empty_array, .void_value, .unreachable_value, + .enum_literal, => unreachable, .zero => false, @@ -1025,6 +1043,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .null_value, .function, .ref_val, @@ -1036,6 +1055,7 @@ pub const Value = extern union { .void_value, .unreachable_value, .empty_array, + .enum_literal, => unreachable, .zero, @@ -1102,6 +1122,11 @@ pub const Value = extern union { } pub fn eql(a: Value, b: Value) bool { + if (a.tag() == b.tag() and a.tag() == .enum_literal) { + const a_name = @fieldParentPtr(Payload.Bytes, "base", a.ptr_otherwise).data; + const b_name = @fieldParentPtr(Payload.Bytes, "base", b.ptr_otherwise).data; + return std.mem.eql(u8, a_name, b_name); + } // TODO non numerical comparisons return compare(a, .eq, b); } @@ -1151,6 +1176,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .zero, .bool_true, .bool_false, @@ -1170,6 +1196,7 @@ pub const Value = extern union { .void_value, .unreachable_value, .empty_array, + .enum_literal, => unreachable, .ref_val => self.cast(Payload.RefVal).?.val, @@ -1227,6 +1254,7 @@ pub const Value = 
extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .zero, .bool_true, .bool_false, @@ -1246,6 +1274,7 @@ pub const Value = extern union { .float_128, .void_value, .unreachable_value, + .enum_literal, => unreachable, .empty_array => unreachable, // out of bounds array index @@ -1320,6 +1349,7 @@ pub const Value = extern union { .fn_ccc_void_no_args_type, .single_const_pointer_to_comptime_int_type, .const_slice_u8_type, + .enum_literal_type, .zero, .empty_array, .bool_true, @@ -1339,6 +1369,7 @@ pub const Value = extern union { .float_64, .float_128, .void_value, + .enum_literal, => false, .undef => unreachable, diff --git a/src-self-hosted/zir.zig b/src-self-hosted/zir.zig index 276db6d522..1a2c5cebaa 100644 --- a/src-self-hosted/zir.zig +++ b/src-self-hosted/zir.zig @@ -47,6 +47,10 @@ pub const Inst = struct { array_cat, /// Array multiplication `a ** b` array_mul, + /// Create an array type + array_type, + /// Create an array type with sentinel + array_type_sentinel, /// Function parameter value. These must be first in a function's main block, /// in respective order with the parameters. arg, @@ -58,11 +62,11 @@ pub const Inst = struct { bitand, /// TODO delete this instruction, it has no purpose. bitcast, - /// An arbitrary typed pointer, which is to be used as an L-Value, is pointer-casted - /// to a new L-Value. The destination type is given by LHS. The cast is to be evaluated + /// An arbitrary typed pointer is pointer-casted to a new Pointer. + /// The destination type is given by LHS. The cast is to be evaluated /// as if it were a bit-cast operation from the operand pointer element type to the /// provided destination type. - bitcast_lvalue, + bitcast_ref, /// A typed result location pointer is bitcasted to a new result location pointer. /// The new result location pointer has an inferred type. bitcast_result_ptr, @@ -225,6 +229,10 @@ pub const Inst = struct { unwrap_err_safe, /// Same as previous, but without safety checks. 
Used for orelse, if and while unwrap_err_unsafe, + /// Takes a *E!T and raises a compiler error if T != void + ensure_err_payload_void, + /// Enum literal + enum_literal, pub fn Type(tag: Tag) type { return switch (tag) { @@ -250,7 +258,7 @@ pub const Inst = struct { .ensure_result_non_error, .bitcast_result_ptr, .ref, - .bitcast_lvalue, + .bitcast_ref, .typeof, .single_const_ptr_type, .single_mut_ptr_type, @@ -259,12 +267,14 @@ pub const Inst = struct { .unwrap_optional_unsafe, .unwrap_err_safe, .unwrap_err_unsafe, + .ensure_err_payload_void, => UnOp, .add, .addwrap, .array_cat, .array_mul, + .array_type, .bitand, .bitor, .div, @@ -291,6 +301,7 @@ pub const Inst = struct { => BinOp, .arg => Arg, + .array_type_sentinel => ArrayTypeSentinel, .block => Block, .@"break" => Break, .breakvoid => BreakVoid, @@ -317,6 +328,7 @@ pub const Inst = struct { .elemptr => ElemPtr, .condbr => CondBr, .ptr_type => PtrType, + .enum_literal => EnumLiteral, }; } @@ -330,12 +342,14 @@ pub const Inst = struct { .alloc_inferred, .array_cat, .array_mul, + .array_type, + .array_type_sentinel, .arg, .as, .@"asm", .bitand, .bitcast, - .bitcast_lvalue, + .bitcast_ref, .bitcast_result_ptr, .bitor, .block, @@ -398,6 +412,8 @@ pub const Inst = struct { .unwrap_err_safe, .unwrap_err_unsafe, .ptr_type, + .ensure_err_payload_void, + .enum_literal, => false, .@"break", @@ -845,6 +861,28 @@ pub const Inst = struct { sentinel: ?*Inst = null, }, }; + + pub const ArrayTypeSentinel = struct { + pub const base_tag = Tag.array_type_sentinel; + base: Inst, + + positionals: struct { + len: *Inst, + sentinel: *Inst, + elem_type: *Inst, + }, + kw_args: struct {}, + }; + + pub const EnumLiteral = struct { + pub const base_tag = Tag.enum_literal; + base: Inst, + + positionals: struct { + name: []const u8, + }, + kw_args: struct {}, + }; }; pub const ErrorMsg = struct { @@ -872,6 +910,8 @@ pub const Module = struct { }; pub fn deinit(self: *Module, allocator: *Allocator) void { + self.metadata.deinit(); + self.body_metadata.deinit(); allocator.free(self.decls); self.arena.deinit(); self.* = undefined; @@ -1543,8 +1583,8 @@ pub fn emit(allocator: *Allocator, old_module: IrModule) !Module { .metadata = std.AutoHashMap(*Inst, Module.MetaData).init(allocator), .body_metadata = std.AutoHashMap(*Module.Body, Module.BodyMetaData).init(allocator), }; - defer ctx.metadata.deinit(); - defer ctx.body_metadata.deinit(); + errdefer ctx.metadata.deinit(); + errdefer ctx.body_metadata.deinit(); defer ctx.block_table.deinit(); defer ctx.loop_table.deinit(); defer ctx.decls.deinit(allocator); @@ -1920,6 +1960,10 @@ const EmitZIR = struct { return self.emitUnnamedDecl(&str_inst.base); }, .Void => return self.emitPrimitive(src, .void_value), + .Bool => if (typed_value.val.toBool()) + return self.emitPrimitive(src, .@"true") + else + return self.emitPrimitive(src, .@"false"), else => |t| std.debug.panic("TODO implement emitTypedValue for {}", .{@tagName(t)}), } } diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig index 94c3a19677..fb39a1e077 100644 --- a/src-self-hosted/zir_sema.zig +++ b/src-self-hosted/zir_sema.zig @@ -29,7 +29,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError! 
.alloc => return analyzeInstAlloc(mod, scope, old_inst.castTag(.alloc).?), .alloc_inferred => return analyzeInstAllocInferred(mod, scope, old_inst.castTag(.alloc_inferred).?), .arg => return analyzeInstArg(mod, scope, old_inst.castTag(.arg).?), - .bitcast_lvalue => return analyzeInstBitCastLValue(mod, scope, old_inst.castTag(.bitcast_lvalue).?), + .bitcast_ref => return analyzeInstBitCastRef(mod, scope, old_inst.castTag(.bitcast_ref).?), .bitcast_result_ptr => return analyzeInstBitCastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?), .block => return analyzeInstBlock(mod, scope, old_inst.castTag(.block).?), .@"break" => return analyzeInstBreak(mod, scope, old_inst.castTag(.@"break").?), @@ -112,6 +112,10 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError! .unwrap_optional_unsafe => return analyzeInstUnwrapOptional(mod, scope, old_inst.castTag(.unwrap_optional_unsafe).?, false), .unwrap_err_safe => return analyzeInstUnwrapErr(mod, scope, old_inst.castTag(.unwrap_err_safe).?, true), .unwrap_err_unsafe => return analyzeInstUnwrapErr(mod, scope, old_inst.castTag(.unwrap_err_unsafe).?, false), + .ensure_err_payload_void => return analyzeInstEnsureErrPayloadVoid(mod, scope, old_inst.castTag(.ensure_err_payload_void).?), + .array_type => return analyzeInstArrayType(mod, scope, old_inst.castTag(.array_type).?), + .array_type_sentinel => return analyzeInstArrayTypeSentinel(mod, scope, old_inst.castTag(.array_type_sentinel).?), + .enum_literal => return analyzeInstEnumLiteral(mod, scope, old_inst.castTag(.enum_literal).?), } } @@ -295,8 +299,8 @@ fn analyzeInstCoerceResultBlockPtr( return mod.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultBlockPtr", .{}); } -fn analyzeInstBitCastLValue(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst { - return mod.fail(scope, inst.base.src, "TODO implement analyzeInstBitCastLValue", .{}); +fn analyzeInstBitCastRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst { + return mod.fail(scope, inst.base.src, "TODO implement analyzeInstBitCastRef", .{}); } fn analyzeInstBitCastResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst { @@ -361,6 +365,10 @@ fn analyzeInstEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst. 
fn analyzeInstAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst { const var_type = try resolveType(mod, scope, inst.positionals.operand); + // TODO this should happen only for var allocs + if (!var_type.isValidVarType()) { + return mod.fail(scope, inst.base.src, "variable of type '{}' must be const or comptime", .{var_type}); + } const ptr_type = try mod.singlePtrType(scope, inst.base.src, true, var_type); const b = try mod.requireRuntimeBlock(scope, inst.base.src); return mod.addNoOp(b, inst.base.src, ptr_type, .alloc); @@ -675,31 +683,36 @@ fn analyzeInstIntType(mod: *Module, scope: *Scope, inttype: *zir.Inst.IntType) I fn analyzeInstOptionalType(mod: *Module, scope: *Scope, optional: *zir.Inst.UnOp) InnerError!*Inst { const child_type = try resolveType(mod, scope, optional.positionals.operand); - return mod.constType(scope, optional.base.src, Type.initPayload(switch (child_type.tag()) { - .single_const_pointer => blk: { - const payload = try scope.arena().create(Type.Payload.Pointer); - payload.* = .{ - .base = .{ .tag = .optional_single_const_pointer }, - .pointee_type = child_type.elemType(), - }; - break :blk &payload.base; - }, - .single_mut_pointer => blk: { - const payload = try scope.arena().create(Type.Payload.Pointer); - payload.* = .{ - .base = .{ .tag = .optional_single_mut_pointer }, - .pointee_type = child_type.elemType(), - }; - break :blk &payload.base; - }, - else => blk: { - const payload = try scope.arena().create(Type.Payload.Optional); - payload.* = .{ - .child_type = child_type, - }; - break :blk &payload.base; - }, - })); + return mod.constType(scope, optional.base.src, try mod.optionalType(scope, child_type)); +} + +fn analyzeInstArrayType(mod: *Module, scope: *Scope, array: *zir.Inst.BinOp) InnerError!*Inst { + // TODO these should be lazily evaluated + const len = try resolveInstConst(mod, scope, array.positionals.lhs); + const elem_type = try resolveType(mod, scope, array.positionals.rhs); + + return mod.constType(scope, array.base.src, try mod.arrayType(scope, len.val.toUnsignedInt(), null, elem_type)); +} + +fn analyzeInstArrayTypeSentinel(mod: *Module, scope: *Scope, array: *zir.Inst.ArrayTypeSentinel) InnerError!*Inst { + // TODO these should be lazily evaluated + const len = try resolveInstConst(mod, scope, array.positionals.len); + const sentinel = try resolveInstConst(mod, scope, array.positionals.sentinel); + const elem_type = try resolveType(mod, scope, array.positionals.elem_type); + + return mod.constType(scope, array.base.src, try mod.arrayType(scope, len.val.toUnsignedInt(), sentinel.val, elem_type)); +} + +fn analyzeInstEnumLiteral(mod: *Module, scope: *Scope, inst: *zir.Inst.EnumLiteral) InnerError!*Inst { + const payload = try scope.arena().create(Value.Payload.Bytes); + payload.* = .{ + .base = .{ .tag = .enum_literal }, + .data = try scope.arena().dupe(u8, inst.positionals.name), + }; + return mod.constInst(scope, inst.base.src, .{ + .ty = Type.initTag(.enum_literal), + .val = Value.initPayload(&payload.base), + }); } fn analyzeInstUnwrapOptional(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, safety_check: bool) InnerError!*Inst { @@ -735,6 +748,10 @@ fn analyzeInstUnwrapErr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, saf return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstUnwrapErr", .{}); } +fn analyzeInstEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst { + return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstEnsureErrPayloadVoid", 
.{}); +} + fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst { const return_type = try resolveType(mod, scope, fntype.positionals.return_type); @@ -760,7 +777,12 @@ fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) Inne const arena = scope.arena(); const param_types = try arena.alloc(Type, fntype.positionals.param_types.len); for (fntype.positionals.param_types) |param_type, i| { - param_types[i] = try resolveType(mod, scope, param_type); + const resolved = try resolveType(mod, scope, param_type); + // TODO skip for comptime params + if (!resolved.isValidVarType()) { + return mod.fail(scope, param_type.src, "parameter of type '{}' must be declared comptime", .{resolved}); + } + param_types[i] = resolved; } const payload = try arena.create(Type.Payload.Function); diff --git a/test/stage2/compare_output.zig b/test/stage2/compare_output.zig index 4208cc3911..ec9d1b9f9e 100644 --- a/test/stage2/compare_output.zig +++ b/test/stage2/compare_output.zig @@ -543,31 +543,88 @@ pub fn addCases(ctx: *TestContext) !void { , "", ); + + case.addCompareOutput( + \\export fn _start() noreturn { + \\ const ignore = + \\ \\ cool thx + \\ \\ + \\ ; + \\ add('ぁ', '\x03'); + \\ + \\ exit(); + \\} + \\ + \\fn add(a: u32, b: u32) void { + \\ assert(a + b == 12356); + \\} + \\ + \\pub fn assert(ok: bool) void { + \\ if (!ok) unreachable; // assertion failure + \\} + \\ + \\fn exit() noreturn { + \\ asm volatile ("syscall" + \\ : + \\ : [number] "{rax}" (231), + \\ [arg1] "{rdi}" (0) + \\ : "rcx", "r11", "memory" + \\ ); + \\ unreachable; + \\} + , + "", + ); } { - var case = ctx.exe("wasm returns", wasi); + var case = ctx.exe("wasm function calls", wasi); case.addCompareOutput( \\export fn _start() u32 { + \\ foo(); + \\ bar(); \\ return 42; \\} + \\fn foo() void { + \\ bar(); + \\ bar(); + \\} + \\fn bar() void {} , "42\n", ); case.addCompareOutput( \\export fn _start() i64 { + \\ bar(); + \\ foo(); + \\ foo(); + \\ bar(); + \\ foo(); + \\ bar(); \\ return 42; \\} + \\fn foo() void { + \\ bar(); + \\} + \\fn bar() void {} , "42\n", ); case.addCompareOutput( \\export fn _start() f32 { + \\ bar(); + \\ foo(); \\ return 42.0; \\} + \\fn foo() void { + \\ bar(); + \\ bar(); + \\ bar(); + \\} + \\fn bar() void {} , // This is what you get when you take the bits of the IEE-754 // representation of 42.0 and reinterpret them as an unsigned diff --git a/test/stage2/zir.zig b/test/stage2/zir.zig index f77c950052..225a7f58cd 100644 --- a/test/stage2/zir.zig +++ b/test/stage2/zir.zig @@ -28,7 +28,7 @@ pub fn addCases(ctx: *TestContext) !void { \\@unnamed$5 = export(@unnamed$4, "entry") \\@unnamed$6 = fntype([], @void, cc=C) \\@entry = fn(@unnamed$6, { - \\ %0 = returnvoid() + \\ %0 = returnvoid() ; deaths=0b1000000000000000 \\}) \\ ); @@ -75,7 +75,7 @@ pub fn addCases(ctx: *TestContext) !void { \\@3 = int(3) \\@unnamed$6 = fntype([], @void, cc=C) \\@entry = fn(@unnamed$6, { - \\ %0 = returnvoid() + \\ %0 = returnvoid() ; deaths=0b1000000000000000 \\}) \\@entry__anon_1 = str("2\x08\x01\n") \\@9 = declref("9__anon_0") @@ -117,18 +117,18 @@ pub fn addCases(ctx: *TestContext) !void { \\@unnamed$5 = export(@unnamed$4, "entry") \\@unnamed$6 = fntype([], @void, cc=C) \\@entry = fn(@unnamed$6, { - \\ %0 = call(@a, [], modifier=auto) - \\ %1 = returnvoid() + \\ %0 = call(@a, [], modifier=auto) ; deaths=0b1000000000000001 + \\ %1 = returnvoid() ; deaths=0b1000000000000000 \\}) \\@unnamed$8 = fntype([], @void, cc=C) \\@a = fn(@unnamed$8, { - \\ %0 = call(@b, [], 
modifier=auto) - \\ %1 = returnvoid() + \\ %0 = call(@b, [], modifier=auto) ; deaths=0b1000000000000001 + \\ %1 = returnvoid() ; deaths=0b1000000000000000 \\}) \\@unnamed$10 = fntype([], @void, cc=C) \\@b = fn(@unnamed$10, { - \\ %0 = call(@a, [], modifier=auto) - \\ %1 = returnvoid() + \\ %0 = call(@a, [], modifier=auto) ; deaths=0b1000000000000001 + \\ %1 = returnvoid() ; deaths=0b1000000000000000 \\}) \\ ); @@ -193,7 +193,7 @@ pub fn addCases(ctx: *TestContext) !void { \\@unnamed$5 = export(@unnamed$4, "entry") \\@unnamed$6 = fntype([], @void, cc=C) \\@entry = fn(@unnamed$6, { - \\ %0 = returnvoid() + \\ %0 = returnvoid() ; deaths=0b1000000000000000 \\}) \\ ); |
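As a rough illustration (not taken from this change), these are the kinds of source constructs the new ZIR instructions in src-self-hosted/zir.zig are meant to represent, assuming the usual astgen lowering:

    const A = [2]u8; // would lower to the new `array_type` instruction
    const B = [2:0]u8; // would lower to `array_type_sentinel` (len, sentinel, elem_type)
    const lit = .foo; // would lower to `enum_literal`; its type prints as @TypeOf(.EnumLiteral)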
