aboutsummaryrefslogtreecommitdiff
path: root/src-self-hosted
diff options
context:
space:
mode:
authorAndrew Kelley <andrew@ziglang.org>2020-07-16 05:57:32 +0000
committerGitHub <noreply@github.com>2020-07-16 05:57:32 +0000
commite079fdeee78d37a50c4e2a9fafe77903d62dbc34 (patch)
treec7b06e08f3696ae2b4f70a6783a7573aef8347b6 /src-self-hosted
parent82562b205f9d99c27c4d5224311734e141bf2fda (diff)
parentd29dd5834b9d7386bb88e44bd2852428863cae81 (diff)
downloadzig-e079fdeee78d37a50c4e2a9fafe77903d62dbc34.tar.gz
zig-e079fdeee78d37a50c4e2a9fafe77903d62dbc34.zip
Merge pull request #5885 from ziglang/stage2-locals
self-hosted compiler local consts
Diffstat (limited to 'src-self-hosted')
-rw-r--r--src-self-hosted/Module.zig567
-rw-r--r--src-self-hosted/astgen.zig643
-rw-r--r--src-self-hosted/codegen.zig6
-rw-r--r--src-self-hosted/ir.zig5
-rw-r--r--src-self-hosted/translate_c.zig188
-rw-r--r--src-self-hosted/zir.zig15
6 files changed, 861 insertions, 563 deletions
diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig
index 0a8a5152cf..72e5f6cd63 100644
--- a/src-self-hosted/Module.zig
+++ b/src-self-hosted/Module.zig
@@ -19,6 +19,7 @@ const Body = ir.Body;
const ast = std.zig.ast;
const trace = @import("tracy.zig").trace;
const liveness = @import("liveness.zig");
+const astgen = @import("astgen.zig");
/// General-purpose allocator. Used for both temporary and long-term storage.
gpa: *Allocator,
@@ -76,6 +77,8 @@ deletion_set: std.ArrayListUnmanaged(*Decl) = .{},
keep_source_files_loaded: bool,
+pub const InnerError = error{ OutOfMemory, AnalysisFail };
+
const WorkItem = union(enum) {
/// Write the machine code for a Decl to the output file.
codegen_decl: *Decl,
@@ -209,6 +212,7 @@ pub const Decl = struct {
},
.block => unreachable,
.gen_zir => unreachable,
+ .local_var => unreachable,
.decl => unreachable,
}
}
@@ -304,6 +308,7 @@ pub const Scope = struct {
.block => return self.cast(Block).?.arena,
.decl => return &self.cast(DeclAnalysis).?.arena.allocator,
.gen_zir => return self.cast(GenZIR).?.arena,
+ .local_var => return self.cast(LocalVar).?.gen_zir.arena,
.zir_module => return &self.cast(ZIRModule).?.contents.module.arena.allocator,
.file => unreachable,
}
@@ -315,6 +320,7 @@ pub const Scope = struct {
return switch (self.tag) {
.block => self.cast(Block).?.decl,
.gen_zir => self.cast(GenZIR).?.decl,
+ .local_var => return self.cast(LocalVar).?.gen_zir.decl,
.decl => self.cast(DeclAnalysis).?.decl,
.zir_module => null,
.file => null,
@@ -327,6 +333,7 @@ pub const Scope = struct {
switch (self.tag) {
.block => return self.cast(Block).?.decl.scope,
.gen_zir => return self.cast(GenZIR).?.decl.scope,
+ .local_var => return self.cast(LocalVar).?.gen_zir.decl.scope,
.decl => return self.cast(DeclAnalysis).?.decl.scope,
.zir_module, .file => return self,
}
@@ -339,6 +346,7 @@ pub const Scope = struct {
switch (self.tag) {
.block => unreachable,
.gen_zir => unreachable,
+ .local_var => unreachable,
.decl => unreachable,
.zir_module => return self.cast(ZIRModule).?.fullyQualifiedNameHash(name),
.file => return self.cast(File).?.fullyQualifiedNameHash(name),
@@ -353,9 +361,22 @@ pub const Scope = struct {
.decl => return self.cast(DeclAnalysis).?.decl.scope.cast(File).?.contents.tree,
.block => return self.cast(Block).?.decl.scope.cast(File).?.contents.tree,
.gen_zir => return self.cast(GenZIR).?.decl.scope.cast(File).?.contents.tree,
+ .local_var => return self.cast(LocalVar).?.gen_zir.decl.scope.cast(File).?.contents.tree,
}
}
+ /// Asserts the scope is a child of a `GenZIR` and returns it.
+ pub fn getGenZIR(self: *Scope) *GenZIR {
+ return switch (self.tag) {
+ .block => unreachable,
+ .gen_zir => self.cast(GenZIR).?,
+ .local_var => return self.cast(LocalVar).?.gen_zir,
+ .decl => unreachable,
+ .zir_module => unreachable,
+ .file => unreachable,
+ };
+ }
+
pub fn dumpInst(self: *Scope, inst: *Inst) void {
const zir_module = self.namespace();
const loc = std.zig.findLineColumn(zir_module.source.bytes, inst.src);
@@ -376,6 +397,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).sub_file_path,
.block => unreachable,
.gen_zir => unreachable,
+ .local_var => unreachable,
.decl => unreachable,
}
}
@@ -386,6 +408,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).unload(gpa),
.block => unreachable,
.gen_zir => unreachable,
+ .local_var => unreachable,
.decl => unreachable,
}
}
@@ -395,6 +418,7 @@ pub const Scope = struct {
.file => return @fieldParentPtr(File, "base", base).getSource(module),
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).getSource(module),
.gen_zir => unreachable,
+ .local_var => unreachable,
.block => unreachable,
.decl => unreachable,
}
@@ -407,6 +431,7 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).removeDecl(child),
.block => unreachable,
.gen_zir => unreachable,
+ .local_var => unreachable,
.decl => unreachable,
}
}
@@ -426,6 +451,7 @@ pub const Scope = struct {
},
.block => unreachable,
.gen_zir => unreachable,
+ .local_var => unreachable,
.decl => unreachable,
}
}
@@ -446,6 +472,7 @@ pub const Scope = struct {
block,
decl,
gen_zir,
+ local_var,
};
pub const File = struct {
@@ -673,10 +700,25 @@ pub const Scope = struct {
pub const GenZIR = struct {
pub const base_tag: Tag = .gen_zir;
base: Scope = Scope{ .tag = base_tag },
+ /// Parents can be: `GenZIR`, `ZIRModule`, `File`
+ parent: *Scope,
decl: *Decl,
arena: *Allocator,
+ /// The first N instructions in a function body ZIR are arg instructions.
instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
};
+
+ /// This structure lives as long as the AST generation of the Block
+ /// node that contains the variable.
+ pub const LocalVar = struct {
+ pub const base_tag: Tag = .local_var;
+ base: Scope = Scope{ .tag = base_tag },
+ /// Parents can be: `LocalVar`, `GenZIR`.
+ parent: *Scope,
+ gen_zir: *GenZIR,
+ name: []const u8,
+ inst: *zir.Inst,
+ };
};
pub const AllErrors = struct {
@@ -944,8 +986,6 @@ pub fn getAllErrorsAlloc(self: *Module) !AllErrors {
};
}
-const InnerError = error{ OutOfMemory, AnalysisFail };
-
pub fn performAllTheWork(self: *Module) error{OutOfMemory}!void {
while (self.work_queue.readItem()) |work_item| switch (work_item) {
.codegen_decl => |decl| switch (decl.analysis) {
@@ -1113,7 +1153,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const file_scope = decl.scope.cast(Scope.File).?;
const tree = try self.getAstTree(file_scope);
const ast_node = tree.root_node.decls()[decl.src_index];
- switch (ast_node.id) {
+ switch (ast_node.tag) {
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", ast_node);
@@ -1127,6 +1167,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var fn_type_scope: Scope.GenZIR = .{
.decl = decl,
.arena = &fn_type_scope_arena.allocator,
+ .parent = decl.scope,
};
defer fn_type_scope.instructions.deinit(self.gpa);
@@ -1140,7 +1181,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.any_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
.type_expr => |node| node,
};
- param_types[i] = try self.astGenExpr(&fn_type_scope.base, param_type_node);
+ param_types[i] = try astgen.expr(self, &fn_type_scope.base, param_type_node);
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{});
@@ -1168,7 +1209,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.Invalid => |tok| return self.failTok(&fn_type_scope.base, tok, "unable to parse return type", .{}),
};
- const return_type_inst = try self.astGenExpr(&fn_type_scope.base, return_type_expr);
+ const return_type_inst = try astgen.expr(self, &fn_type_scope.base, return_type_expr);
const fn_src = tree.token_locs[fn_proto.fn_token].start;
const fn_type_inst = try self.addZIRInst(&fn_type_scope.base, fn_src, zir.Inst.FnType, .{
.return_type = return_type_inst,
@@ -1204,12 +1245,32 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
var gen_scope: Scope.GenZIR = .{
.decl = decl,
.arena = &gen_scope_arena.allocator,
+ .parent = decl.scope,
};
defer gen_scope.instructions.deinit(self.gpa);
+ // We need an instruction for each parameter, and they must be first in the body.
+ try gen_scope.instructions.resize(self.gpa, fn_proto.params_len);
+ var params_scope = &gen_scope.base;
+ for (fn_proto.params()) |param, i| {
+ const name_token = param.name_token.?;
+ const src = tree.token_locs[name_token].start;
+ const param_name = tree.tokenSlice(name_token);
+ const arg = try newZIRInst(&gen_scope_arena.allocator, src, zir.Inst.Arg, .{}, .{});
+ gen_scope.instructions.items[i] = &arg.base;
+ const sub_scope = try gen_scope_arena.allocator.create(Scope.LocalVar);
+ sub_scope.* = .{
+ .parent = params_scope,
+ .gen_zir = &gen_scope,
+ .name = param_name,
+ .inst = &arg.base,
+ };
+ params_scope = &sub_scope.base;
+ }
+
const body_block = body_node.cast(ast.Node.Block).?;
- try self.astGenBlock(&gen_scope.base, body_block);
+ try astgen.blockExpr(self, params_scope, body_block);
if (!fn_type.fnReturnType().isNoReturn() and (gen_scope.instructions.items.len == 0 or
!gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn()))
@@ -1298,465 +1359,6 @@ fn analyzeBodyValueAsType(self: *Module, block_scope: *Scope.Block, body: zir.Mo
unreachable;
}
-fn astGenExpr(self: *Module, scope: *Scope, ast_node: *ast.Node) InnerError!*zir.Inst {
- switch (ast_node.id) {
- .Identifier => return self.astGenIdent(scope, @fieldParentPtr(ast.Node.Identifier, "base", ast_node)),
- .Asm => return self.astGenAsm(scope, @fieldParentPtr(ast.Node.Asm, "base", ast_node)),
- .StringLiteral => return self.astGenStringLiteral(scope, @fieldParentPtr(ast.Node.StringLiteral, "base", ast_node)),
- .IntegerLiteral => return self.astGenIntegerLiteral(scope, @fieldParentPtr(ast.Node.IntegerLiteral, "base", ast_node)),
- .BuiltinCall => return self.astGenBuiltinCall(scope, @fieldParentPtr(ast.Node.BuiltinCall, "base", ast_node)),
- .Call => return self.astGenCall(scope, @fieldParentPtr(ast.Node.Call, "base", ast_node)),
- .Unreachable => return self.astGenUnreachable(scope, @fieldParentPtr(ast.Node.Unreachable, "base", ast_node)),
- .ControlFlowExpression => return self.astGenControlFlowExpression(scope, @fieldParentPtr(ast.Node.ControlFlowExpression, "base", ast_node)),
- .If => return self.astGenIf(scope, @fieldParentPtr(ast.Node.If, "base", ast_node)),
- .InfixOp => return self.astGenInfixOp(scope, @fieldParentPtr(ast.Node.InfixOp, "base", ast_node)),
- .BoolNot => return self.astGenBoolNot(scope, @fieldParentPtr(ast.Node.BoolNot, "base", ast_node)),
- else => return self.failNode(scope, ast_node, "TODO implement astGenExpr for {}", .{@tagName(ast_node.id)}),
- }
-}
-
-fn astGenBoolNot(self: *Module, scope: *Scope, node: *ast.Node.BoolNot) InnerError!*zir.Inst {
- const operand = try self.astGenExpr(scope, node.rhs);
- const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
- return self.addZIRInst(scope, src, zir.Inst.BoolNot, .{ .operand = operand }, .{});
-}
-
-fn astGenInfixOp(self: *Module, scope: *Scope, infix_node: *ast.Node.InfixOp) InnerError!*zir.Inst {
- switch (infix_node.op) {
- .Assign => {
- if (infix_node.lhs.id == .Identifier) {
- const ident = @fieldParentPtr(ast.Node.Identifier, "base", infix_node.lhs);
- const tree = scope.tree();
- const ident_name = tree.tokenSlice(ident.token);
- if (std.mem.eql(u8, ident_name, "_")) {
- return self.astGenExpr(scope, infix_node.rhs);
- } else {
- return self.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
- }
- } else {
- return self.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
- }
- },
- .Add => {
- const lhs = try self.astGenExpr(scope, infix_node.lhs);
- const rhs = try self.astGenExpr(scope, infix_node.rhs);
-
- const tree = scope.tree();
- const src = tree.token_locs[infix_node.op_token].start;
-
- return self.addZIRInst(scope, src, zir.Inst.Add, .{ .lhs = lhs, .rhs = rhs }, .{});
- },
- .BangEqual,
- .EqualEqual,
- .GreaterThan,
- .GreaterOrEqual,
- .LessThan,
- .LessOrEqual,
- => {
- const lhs = try self.astGenExpr(scope, infix_node.lhs);
- const rhs = try self.astGenExpr(scope, infix_node.rhs);
-
- const tree = scope.tree();
- const src = tree.token_locs[infix_node.op_token].start;
-
- const op: std.math.CompareOperator = switch (infix_node.op) {
- .BangEqual => .neq,
- .EqualEqual => .eq,
- .GreaterThan => .gt,
- .GreaterOrEqual => .gte,
- .LessThan => .lt,
- .LessOrEqual => .lte,
- else => unreachable,
- };
-
- return self.addZIRInst(scope, src, zir.Inst.Cmp, .{
- .lhs = lhs,
- .op = op,
- .rhs = rhs,
- }, .{});
- },
- else => |op| {
- return self.failNode(scope, &infix_node.base, "TODO implement infix operator {}", .{op});
- },
- }
-}
-
-fn astGenIf(self: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.Inst {
- if (if_node.payload) |payload| {
- return self.failNode(scope, payload, "TODO implement astGenIf for optionals", .{});
- }
- if (if_node.@"else") |else_node| {
- if (else_node.payload) |payload| {
- return self.failNode(scope, payload, "TODO implement astGenIf for error unions", .{});
- }
- }
- var block_scope: Scope.GenZIR = .{
- .decl = scope.decl().?,
- .arena = scope.arena(),
- .instructions = .{},
- };
- defer block_scope.instructions.deinit(self.gpa);
-
- const cond = try self.astGenExpr(&block_scope.base, if_node.condition);
-
- const tree = scope.tree();
- const if_src = tree.token_locs[if_node.if_token].start;
- const condbr = try self.addZIRInstSpecial(&block_scope.base, if_src, zir.Inst.CondBr, .{
- .condition = cond,
- .true_body = undefined, // populated below
- .false_body = undefined, // populated below
- }, .{});
-
- const block = try self.addZIRInstBlock(scope, if_src, .{
- .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
- });
- var then_scope: Scope.GenZIR = .{
- .decl = block_scope.decl,
- .arena = block_scope.arena,
- .instructions = .{},
- };
- defer then_scope.instructions.deinit(self.gpa);
-
- const then_result = try self.astGenExpr(&then_scope.base, if_node.body);
- if (!then_result.tag.isNoReturn()) {
- const then_src = tree.token_locs[if_node.body.lastToken()].start;
- _ = try self.addZIRInst(&then_scope.base, then_src, zir.Inst.Break, .{
- .block = block,
- .operand = then_result,
- }, .{});
- }
- condbr.positionals.true_body = .{
- .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
- };
-
- var else_scope: Scope.GenZIR = .{
- .decl = block_scope.decl,
- .arena = block_scope.arena,
- .instructions = .{},
- };
- defer else_scope.instructions.deinit(self.gpa);
-
- if (if_node.@"else") |else_node| {
- const else_result = try self.astGenExpr(&else_scope.base, else_node.body);
- if (!else_result.tag.isNoReturn()) {
- const else_src = tree.token_locs[else_node.body.lastToken()].start;
- _ = try self.addZIRInst(&else_scope.base, else_src, zir.Inst.Break, .{
- .block = block,
- .operand = else_result,
- }, .{});
- }
- } else {
- // TODO Optimization opportunity: we can avoid an allocation and a memcpy here
- // by directly allocating the body for this one instruction.
- const else_src = tree.token_locs[if_node.lastToken()].start;
- _ = try self.addZIRInst(&else_scope.base, else_src, zir.Inst.BreakVoid, .{
- .block = block,
- }, .{});
- }
- condbr.positionals.false_body = .{
- .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
- };
-
- return &block.base;
-}
-
-fn astGenControlFlowExpression(
- self: *Module,
- scope: *Scope,
- cfe: *ast.Node.ControlFlowExpression,
-) InnerError!*zir.Inst {
- switch (cfe.kind) {
- .Break => return self.failNode(scope, &cfe.base, "TODO implement astGenExpr for Break", .{}),
- .Continue => return self.failNode(scope, &cfe.base, "TODO implement astGenExpr for Continue", .{}),
- .Return => {},
- }
- const tree = scope.tree();
- const src = tree.token_locs[cfe.ltoken].start;
- if (cfe.rhs) |rhs_node| {
- const operand = try self.astGenExpr(scope, rhs_node);
- return self.addZIRInst(scope, src, zir.Inst.Return, .{ .operand = operand }, .{});
- } else {
- return self.addZIRInst(scope, src, zir.Inst.ReturnVoid, .{}, .{});
- }
-}
-
-fn astGenIdent(self: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerError!*zir.Inst {
- const tree = scope.tree();
- const ident_name = tree.tokenSlice(ident.token);
- const src = tree.token_locs[ident.token].start;
- if (mem.eql(u8, ident_name, "_")) {
- return self.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});
- }
-
- if (getSimplePrimitiveValue(ident_name)) |typed_value| {
- return self.addZIRInstConst(scope, src, typed_value);
- }
-
- if (ident_name.len >= 2) integer: {
- const first_c = ident_name[0];
- if (first_c == 'i' or first_c == 'u') {
- const is_signed = first_c == 'i';
- const bit_count = std.fmt.parseInt(u16, ident_name[1..], 10) catch |err| switch (err) {
- error.Overflow => return self.failNode(
- scope,
- &ident.base,
- "primitive integer type '{}' exceeds maximum bit width of 65535",
- .{ident_name},
- ),
- error.InvalidCharacter => break :integer,
- };
- const val = switch (bit_count) {
- 8 => if (is_signed) Value.initTag(.i8_type) else Value.initTag(.u8_type),
- 16 => if (is_signed) Value.initTag(.i16_type) else Value.initTag(.u16_type),
- 32 => if (is_signed) Value.initTag(.i32_type) else Value.initTag(.u32_type),
- 64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
- else => return self.failNode(scope, &ident.base, "TODO implement arbitrary integer bitwidth types", .{}),
- };
- return self.addZIRInstConst(scope, src, .{
- .ty = Type.initTag(.type),
- .val = val,
- });
- }
- }
-
- if (self.lookupDeclName(scope, ident_name)) |decl| {
- return try self.addZIRInst(scope, src, zir.Inst.DeclValInModule, .{ .decl = decl }, .{});
- }
-
- // Function parameter
- if (scope.decl()) |decl| {
- if (tree.root_node.decls()[decl.src_index].cast(ast.Node.FnProto)) |fn_proto| {
- for (fn_proto.params()) |param, i| {
- const param_name = tree.tokenSlice(param.name_token.?);
- if (mem.eql(u8, param_name, ident_name)) {
- return try self.addZIRInst(scope, src, zir.Inst.Arg, .{ .index = i }, .{});
- }
- }
- }
- }
-
- return self.failNode(scope, &ident.base, "TODO implement local variable identifier lookup", .{});
-}
-
-fn astGenStringLiteral(self: *Module, scope: *Scope, str_lit: *ast.Node.StringLiteral) InnerError!*zir.Inst {
- const tree = scope.tree();
- const unparsed_bytes = tree.tokenSlice(str_lit.token);
- const arena = scope.arena();
-
- var bad_index: usize = undefined;
- const bytes = std.zig.parseStringLiteral(arena, unparsed_bytes, &bad_index) catch |err| switch (err) {
- error.InvalidCharacter => {
- const bad_byte = unparsed_bytes[bad_index];
- const src = tree.token_locs[str_lit.token].start;
- return self.fail(scope, src + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
- },
- else => |e| return e,
- };
-
- const src = tree.token_locs[str_lit.token].start;
- return self.addZIRInst(scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
-}
-
-fn astGenIntegerLiteral(self: *Module, scope: *Scope, int_lit: *ast.Node.IntegerLiteral) InnerError!*zir.Inst {
- const arena = scope.arena();
- const tree = scope.tree();
- const prefixed_bytes = tree.tokenSlice(int_lit.token);
- const base = if (mem.startsWith(u8, prefixed_bytes, "0x"))
- 16
- else if (mem.startsWith(u8, prefixed_bytes, "0o"))
- 8
- else if (mem.startsWith(u8, prefixed_bytes, "0b"))
- 2
- else
- @as(u8, 10);
-
- const bytes = if (base == 10)
- prefixed_bytes
- else
- prefixed_bytes[2..];
-
- if (std.fmt.parseInt(u64, bytes, base)) |small_int| {
- const int_payload = try arena.create(Value.Payload.Int_u64);
- int_payload.* = .{ .int = small_int };
- const src = tree.token_locs[int_lit.token].start;
- return self.addZIRInstConst(scope, src, .{
- .ty = Type.initTag(.comptime_int),
- .val = Value.initPayload(&int_payload.base),
- });
- } else |err| {
- return self.failTok(scope, int_lit.token, "TODO implement int literals that don't fit in a u64", .{});
- }
-}
-
-fn astGenBlock(self: *Module, scope: *Scope, block_node: *ast.Node.Block) !void {
- const tracy = trace(@src());
- defer tracy.end();
-
- if (block_node.label) |label| {
- return self.failTok(scope, label, "TODO implement labeled blocks", .{});
- }
- for (block_node.statements()) |statement| {
- _ = try self.astGenExpr(scope, statement);
- }
-}
-
-fn astGenAsm(self: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zir.Inst {
- if (asm_node.outputs.len != 0) {
- return self.failNode(scope, &asm_node.base, "TODO implement asm with an output", .{});
- }
- const arena = scope.arena();
- const tree = scope.tree();
-
- const inputs = try arena.alloc(*zir.Inst, asm_node.inputs.len);
- const args = try arena.alloc(*zir.Inst, asm_node.inputs.len);
-
- for (asm_node.inputs) |input, i| {
- // TODO semantically analyze constraints
- inputs[i] = try self.astGenExpr(scope, input.constraint);
- args[i] = try self.astGenExpr(scope, input.expr);
- }
-
- const src = tree.token_locs[asm_node.asm_token].start;
- const return_type = try self.addZIRInstConst(scope, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.void_type),
- });
- const asm_inst = try self.addZIRInst(scope, src, zir.Inst.Asm, .{
- .asm_source = try self.astGenExpr(scope, asm_node.template),
- .return_type = return_type,
- }, .{
- .@"volatile" = asm_node.volatile_token != null,
- //.clobbers = TODO handle clobbers
- .inputs = inputs,
- .args = args,
- });
- return asm_inst;
-}
-
-fn astGenBuiltinCall(self: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- const tree = scope.tree();
- const builtin_name = tree.tokenSlice(call.builtin_token);
- const src = tree.token_locs[call.builtin_token].start;
-
- inline for (std.meta.declarations(zir.Inst)) |inst| {
- if (inst.data != .Type) continue;
- const T = inst.data.Type;
- if (!@hasDecl(T, "builtin_name")) continue;
- if (std.mem.eql(u8, builtin_name, T.builtin_name)) {
- var value: T = undefined;
- const positionals = @typeInfo(std.meta.fieldInfo(T, "positionals").field_type).Struct;
- if (positionals.fields.len == 0) {
- return self.addZIRInst(scope, src, T, value.positionals, value.kw_args);
- }
- const arg_count: ?usize = if (positionals.fields[0].field_type == []*zir.Inst) null else positionals.fields.len;
- if (arg_count) |some| {
- if (call.params_len != some) {
- return self.failTok(
- scope,
- call.builtin_token,
- "expected {} parameter{}, found {}",
- .{ some, if (some == 1) "" else "s", call.params_len },
- );
- }
- const params = call.params();
- inline for (positionals.fields) |p, i| {
- @field(value.positionals, p.name) = try self.astGenExpr(scope, params[i]);
- }
- } else {
- return self.failTok(scope, call.builtin_token, "TODO var args builtin '{}'", .{builtin_name});
- }
-
- return self.addZIRInst(scope, src, T, value.positionals, .{});
- }
- }
- return self.failTok(scope, call.builtin_token, "TODO implement builtin call for '{}'", .{builtin_name});
-}
-
-fn astGenCall(self: *Module, scope: *Scope, call: *ast.Node.Call) InnerError!*zir.Inst {
- const tree = scope.tree();
- const lhs = try self.astGenExpr(scope, call.lhs);
-
- const param_nodes = call.params();
- const args = try scope.cast(Scope.GenZIR).?.arena.alloc(*zir.Inst, param_nodes.len);
- for (param_nodes) |param_node, i| {
- args[i] = try self.astGenExpr(scope, param_node);
- }
-
- const src = tree.token_locs[call.lhs.firstToken()].start;
- return self.addZIRInst(scope, src, zir.Inst.Call, .{
- .func = lhs,
- .args = args,
- }, .{});
-}
-
-fn astGenUnreachable(self: *Module, scope: *Scope, unreach_node: *ast.Node.Unreachable) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[unreach_node.token].start;
- return self.addZIRInst(scope, src, zir.Inst.Unreachable, .{}, .{});
-}
-
-fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
- const simple_types = std.ComptimeStringMap(Value.Tag, .{
- .{ "u8", .u8_type },
- .{ "i8", .i8_type },
- .{ "isize", .isize_type },
- .{ "usize", .usize_type },
- .{ "c_short", .c_short_type },
- .{ "c_ushort", .c_ushort_type },
- .{ "c_int", .c_int_type },
- .{ "c_uint", .c_uint_type },
- .{ "c_long", .c_long_type },
- .{ "c_ulong", .c_ulong_type },
- .{ "c_longlong", .c_longlong_type },
- .{ "c_ulonglong", .c_ulonglong_type },
- .{ "c_longdouble", .c_longdouble_type },
- .{ "f16", .f16_type },
- .{ "f32", .f32_type },
- .{ "f64", .f64_type },
- .{ "f128", .f128_type },
- .{ "c_void", .c_void_type },
- .{ "bool", .bool_type },
- .{ "void", .void_type },
- .{ "type", .type_type },
- .{ "anyerror", .anyerror_type },
- .{ "comptime_int", .comptime_int_type },
- .{ "comptime_float", .comptime_float_type },
- .{ "noreturn", .noreturn_type },
- });
- if (simple_types.get(name)) |tag| {
- return TypedValue{
- .ty = Type.initTag(.type),
- .val = Value.initTag(tag),
- };
- }
- if (mem.eql(u8, name, "null")) {
- return TypedValue{
- .ty = Type.initTag(.@"null"),
- .val = Value.initTag(.null_value),
- };
- }
- if (mem.eql(u8, name, "undefined")) {
- return TypedValue{
- .ty = Type.initTag(.@"undefined"),
- .val = Value.initTag(.undef),
- };
- }
- if (mem.eql(u8, name, "true")) {
- return TypedValue{
- .ty = Type.initTag(.bool),
- .val = Value.initTag(.bool_true),
- };
- }
- if (mem.eql(u8, name, "false")) {
- return TypedValue{
- .ty = Type.initTag(.bool),
- .val = Value.initTag(.bool_false),
- };
- }
- return null;
-}
-
fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void {
try depender.dependencies.ensureCapacity(self.gpa, depender.dependencies.items().len + 1);
try dependee.dependants.ensureCapacity(self.gpa, dependee.dependants.items().len + 1);
@@ -2368,7 +1970,7 @@ fn newZIRInst(
return inst;
}
-fn addZIRInstSpecial(
+pub fn addZIRInstSpecial(
self: *Module,
scope: *Scope,
src: usize,
@@ -2376,14 +1978,14 @@ fn addZIRInstSpecial(
positionals: std.meta.fieldInfo(T, "positionals").field_type,
kw_args: std.meta.fieldInfo(T, "kw_args").field_type,
) !*T {
- const gen_zir = scope.cast(Scope.GenZIR).?;
+ const gen_zir = scope.getGenZIR();
try gen_zir.instructions.ensureCapacity(self.gpa, gen_zir.instructions.items.len + 1);
const inst = try newZIRInst(gen_zir.arena, src, T, positionals, kw_args);
gen_zir.instructions.appendAssumeCapacity(&inst.base);
return inst;
}
-fn addZIRInst(
+pub fn addZIRInst(
self: *Module,
scope: *Scope,
src: usize,
@@ -2396,13 +1998,13 @@ fn addZIRInst(
}
/// TODO The existence of this function is a workaround for a bug in stage1.
-fn addZIRInstConst(self: *Module, scope: *Scope, src: usize, typed_value: TypedValue) !*zir.Inst {
+pub fn addZIRInstConst(self: *Module, scope: *Scope, src: usize, typed_value: TypedValue) !*zir.Inst {
const P = std.meta.fieldInfo(zir.Inst.Const, "positionals").field_type;
return self.addZIRInst(scope, src, zir.Inst.Const, P{ .typed_value = typed_value }, .{});
}
/// TODO The existence of this function is a workaround for a bug in stage1.
-fn addZIRInstBlock(self: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Block {
+pub fn addZIRInstBlock(self: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Block {
const P = std.meta.fieldInfo(zir.Inst.Block, "positionals").field_type;
return self.addZIRInstSpecial(scope, src, zir.Inst.Block, P{ .body = body }, .{});
}
@@ -2637,7 +2239,7 @@ fn getNextAnonNameIndex(self: *Module) usize {
return @atomicRmw(usize, &self.next_anon_name_index, .Add, 1, .Monotonic);
}
-fn lookupDeclName(self: *Module, scope: *Scope, ident_name: []const u8) ?*Decl {
+pub fn lookupDeclName(self: *Module, scope: *Scope, ident_name: []const u8) ?*Decl {
const namespace = scope.namespace();
const name_hash = namespace.fullyQualifiedNameHash(ident_name);
return self.decl_table.get(name_hash);
@@ -2658,17 +2260,16 @@ fn analyzeInstCompileError(self: *Module, scope: *Scope, inst: *zir.Inst.Compile
fn analyzeInstArg(self: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const b = try self.requireRuntimeBlock(scope, inst.base.src);
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
+ const param_index = b.instructions.items.len;
const param_count = fn_ty.fnParamLen();
- if (inst.positionals.index >= param_count) {
+ if (param_index >= param_count) {
return self.fail(scope, inst.base.src, "parameter index {} outside list of length {}", .{
- inst.positionals.index,
+ param_index,
param_count,
});
}
- const param_type = fn_ty.fnParamType(inst.positionals.index);
- return self.addNewInstArgs(b, inst.base.src, param_type, Inst.Arg, .{
- .index = inst.positionals.index,
- });
+ const param_type = fn_ty.fnParamType(param_index);
+ return self.addNewInstArgs(b, inst.base.src, param_type, Inst.Arg, {});
}
fn analyzeInstBlock(self: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerError!*Inst {
@@ -3646,13 +3247,13 @@ fn coerceArrayPtrToSlice(self: *Module, scope: *Scope, dest_type: Type, inst: *I
return self.fail(scope, inst.src, "TODO implement coerceArrayPtrToSlice runtime instruction", .{});
}
-fn fail(self: *Module, scope: *Scope, src: usize, comptime format: []const u8, args: anytype) InnerError {
+pub fn fail(self: *Module, scope: *Scope, src: usize, comptime format: []const u8, args: anytype) InnerError {
@setCold(true);
const err_msg = try ErrorMsg.create(self.gpa, src, format, args);
return self.failWithOwnedErrorMsg(scope, src, err_msg);
}
-fn failTok(
+pub fn failTok(
self: *Module,
scope: *Scope,
token_index: ast.TokenIndex,
@@ -3664,7 +3265,7 @@ fn failTok(
return self.fail(scope, src, format, args);
}
-fn failNode(
+pub fn failNode(
self: *Module,
scope: *Scope,
ast_node: *ast.Node,
@@ -3705,6 +3306,12 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Err
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
+ .local_var => {
+ const gen_zir = scope.cast(Scope.LocalVar).?.gen_zir;
+ gen_zir.decl.analysis = .sema_failure;
+ gen_zir.decl.generation = self.generation;
+ self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
+ },
.zir_module => {
const zir_module = scope.cast(Scope.ZIRModule).?;
zir_module.status = .loaded_sema_failure;
diff --git a/src-self-hosted/astgen.zig b/src-self-hosted/astgen.zig
new file mode 100644
index 0000000000..be70a724c2
--- /dev/null
+++ b/src-self-hosted/astgen.zig
@@ -0,0 +1,643 @@
+const std = @import("std");
+const mem = std.mem;
+const Value = @import("value.zig").Value;
+const Type = @import("type.zig").Type;
+const TypedValue = @import("TypedValue.zig");
+const assert = std.debug.assert;
+const zir = @import("zir.zig");
+const Module = @import("Module.zig");
+const ast = std.zig.ast;
+const trace = @import("tracy.zig").trace;
+const Scope = Module.Scope;
+const InnerError = Module.InnerError;
+
+/// Turn Zig AST into untyped ZIR instructions.
+pub fn expr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
+ switch (node.tag) {
+ .VarDecl => unreachable, // Handled in `blockExpr`.
+
+ .Identifier => return identifier(mod, scope, node.castTag(.Identifier).?),
+ .Asm => return assembly(mod, scope, node.castTag(.Asm).?),
+ .StringLiteral => return stringLiteral(mod, scope, node.castTag(.StringLiteral).?),
+ .IntegerLiteral => return integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?),
+ .BuiltinCall => return builtinCall(mod, scope, node.castTag(.BuiltinCall).?),
+ .Call => return callExpr(mod, scope, node.castTag(.Call).?),
+ .Unreachable => return unreach(mod, scope, node.castTag(.Unreachable).?),
+ .ControlFlowExpression => return controlFlowExpr(mod, scope, node.castTag(.ControlFlowExpression).?),
+ .If => return ifExpr(mod, scope, node.castTag(.If).?),
+ .Assign => return assign(mod, scope, node.castTag(.Assign).?),
+ .Add => return add(mod, scope, node.castTag(.Add).?),
+ .BangEqual => return cmp(mod, scope, node.castTag(.BangEqual).?, .neq),
+ .EqualEqual => return cmp(mod, scope, node.castTag(.EqualEqual).?, .eq),
+ .GreaterThan => return cmp(mod, scope, node.castTag(.GreaterThan).?, .gt),
+ .GreaterOrEqual => return cmp(mod, scope, node.castTag(.GreaterOrEqual).?, .gte),
+ .LessThan => return cmp(mod, scope, node.castTag(.LessThan).?, .lt),
+ .LessOrEqual => return cmp(mod, scope, node.castTag(.LessOrEqual).?, .lte),
+ .BoolNot => return boolNot(mod, scope, node.castTag(.BoolNot).?),
+ else => return mod.failNode(scope, node, "TODO implement astgen.Expr for {}", .{@tagName(node.tag)}),
+ }
+}
+
+pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.Block) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ if (block_node.label) |label| {
+ return mod.failTok(parent_scope, label, "TODO implement labeled blocks", .{});
+ }
+
+ var block_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer block_arena.deinit();
+
+ var scope = parent_scope;
+ for (block_node.statements()) |statement| {
+ switch (statement.tag) {
+ .VarDecl => {
+ const sub_scope = try block_arena.allocator.create(Scope.LocalVar);
+ const var_decl_node = @fieldParentPtr(ast.Node.VarDecl, "base", statement);
+ sub_scope.* = try varDecl(mod, scope, var_decl_node);
+ scope = &sub_scope.base;
+ },
+ else => _ = try expr(mod, scope, statement),
+ }
+ }
+}
+
+fn varDecl(mod: *Module, scope: *Scope, node: *ast.Node.VarDecl) InnerError!Scope.LocalVar {
+ // TODO implement detection of shadowing
+ if (node.getTrailer("comptime_token")) |comptime_token| {
+ return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{});
+ }
+ if (node.getTrailer("align_node")) |align_node| {
+ return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{});
+ }
+ if (node.getTrailer("type_node")) |type_node| {
+ return mod.failNode(scope, type_node, "TODO implement typed locals", .{});
+ }
+ const tree = scope.tree();
+ switch (tree.token_ids[node.mut_token]) {
+ .Keyword_const => {},
+ .Keyword_var => {
+ return mod.failTok(scope, node.mut_token, "TODO implement mutable locals", .{});
+ },
+ else => unreachable,
+ }
+ // Depending on the type of AST the initialization expression is, we may need an lvalue
+ // or an rvalue as a result location. If it is an rvalue, we can use the instruction as
+ // the variable, no memory location needed.
+ const init_node = node.getTrailer("init_node").?;
+ if (nodeNeedsMemoryLocation(init_node)) {
+ return mod.failNode(scope, init_node, "TODO implement result locations", .{});
+ }
+ const init_inst = try expr(mod, scope, init_node);
+ const ident_name = tree.tokenSlice(node.name_token); // TODO support @"aoeu" identifiers
+ return Scope.LocalVar{
+ .parent = scope,
+ .gen_zir = scope.getGenZIR(),
+ .name = ident_name,
+ .inst = init_inst,
+ };
+}
+
+fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
+ const operand = try expr(mod, scope, node.rhs);
+ const tree = scope.tree();
+ const src = tree.token_locs[node.op_token].start;
+ return mod.addZIRInst(scope, src, zir.Inst.BoolNot, .{ .operand = operand }, .{});
+}
+
+fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
+ if (infix_node.lhs.tag == .Identifier) {
+ const ident = @fieldParentPtr(ast.Node.Identifier, "base", infix_node.lhs);
+ const tree = scope.tree();
+ const ident_name = tree.tokenSlice(ident.token);
+ if (std.mem.eql(u8, ident_name, "_")) {
+ return expr(mod, scope, infix_node.rhs);
+ } else {
+ return mod.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
+ }
+ } else {
+ return mod.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
+ }
+}
+
+fn add(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
+ const lhs = try expr(mod, scope, infix_node.lhs);
+ const rhs = try expr(mod, scope, infix_node.rhs);
+
+ const tree = scope.tree();
+ const src = tree.token_locs[infix_node.op_token].start;
+
+ return mod.addZIRInst(scope, src, zir.Inst.Add, .{ .lhs = lhs, .rhs = rhs }, .{});
+}
+
+fn cmp(
+ mod: *Module,
+ scope: *Scope,
+ infix_node: *ast.Node.SimpleInfixOp,
+ op: std.math.CompareOperator,
+) InnerError!*zir.Inst {
+ const lhs = try expr(mod, scope, infix_node.lhs);
+ const rhs = try expr(mod, scope, infix_node.rhs);
+
+ const tree = scope.tree();
+ const src = tree.token_locs[infix_node.op_token].start;
+
+ return mod.addZIRInst(scope, src, zir.Inst.Cmp, .{
+ .lhs = lhs,
+ .op = op,
+ .rhs = rhs,
+ }, .{});
+}
+
+fn ifExpr(mod: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.Inst {
+ if (if_node.payload) |payload| {
+ return mod.failNode(scope, payload, "TODO implement astgen.IfExpr for optionals", .{});
+ }
+ if (if_node.@"else") |else_node| {
+ if (else_node.payload) |payload| {
+ return mod.failNode(scope, payload, "TODO implement astgen.IfExpr for error unions", .{});
+ }
+ }
+ var block_scope: Scope.GenZIR = .{
+ .parent = scope,
+ .decl = scope.decl().?,
+ .arena = scope.arena(),
+ .instructions = .{},
+ };
+ defer block_scope.instructions.deinit(mod.gpa);
+
+ const cond = try expr(mod, &block_scope.base, if_node.condition);
+
+ const tree = scope.tree();
+ const if_src = tree.token_locs[if_node.if_token].start;
+ const condbr = try mod.addZIRInstSpecial(&block_scope.base, if_src, zir.Inst.CondBr, .{
+ .condition = cond,
+ .true_body = undefined, // populated below
+ .false_body = undefined, // populated below
+ }, .{});
+
+ const block = try mod.addZIRInstBlock(scope, if_src, .{
+ .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+ });
+ var then_scope: Scope.GenZIR = .{
+ .parent = scope,
+ .decl = block_scope.decl,
+ .arena = block_scope.arena,
+ .instructions = .{},
+ };
+ defer then_scope.instructions.deinit(mod.gpa);
+
+ const then_result = try expr(mod, &then_scope.base, if_node.body);
+ if (!then_result.tag.isNoReturn()) {
+ const then_src = tree.token_locs[if_node.body.lastToken()].start;
+ _ = try mod.addZIRInst(&then_scope.base, then_src, zir.Inst.Break, .{
+ .block = block,
+ .operand = then_result,
+ }, .{});
+ }
+ condbr.positionals.true_body = .{
+ .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
+ };
+
+ var else_scope: Scope.GenZIR = .{
+ .parent = scope,
+ .decl = block_scope.decl,
+ .arena = block_scope.arena,
+ .instructions = .{},
+ };
+ defer else_scope.instructions.deinit(mod.gpa);
+
+ if (if_node.@"else") |else_node| {
+ const else_result = try expr(mod, &else_scope.base, else_node.body);
+ if (!else_result.tag.isNoReturn()) {
+ const else_src = tree.token_locs[else_node.body.lastToken()].start;
+ _ = try mod.addZIRInst(&else_scope.base, else_src, zir.Inst.Break, .{
+ .block = block,
+ .operand = else_result,
+ }, .{});
+ }
+ } else {
+ // TODO Optimization opportunity: we can avoid an allocation and a memcpy here
+ // by directly allocating the body for this one instruction.
+ const else_src = tree.token_locs[if_node.lastToken()].start;
+ _ = try mod.addZIRInst(&else_scope.base, else_src, zir.Inst.BreakVoid, .{
+ .block = block,
+ }, .{});
+ }
+ condbr.positionals.false_body = .{
+ .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
+ };
+
+ return &block.base;
+}
+
+fn controlFlowExpr(
+ mod: *Module,
+ scope: *Scope,
+ cfe: *ast.Node.ControlFlowExpression,
+) InnerError!*zir.Inst {
+ switch (cfe.kind) {
+ .Break => return mod.failNode(scope, &cfe.base, "TODO implement astgen.Expr for Break", .{}),
+ .Continue => return mod.failNode(scope, &cfe.base, "TODO implement astgen.Expr for Continue", .{}),
+ .Return => {},
+ }
+ const tree = scope.tree();
+ const src = tree.token_locs[cfe.ltoken].start;
+ if (cfe.rhs) |rhs_node| {
+ const operand = try expr(mod, scope, rhs_node);
+ return mod.addZIRInst(scope, src, zir.Inst.Return, .{ .operand = operand }, .{});
+ } else {
+ return mod.addZIRInst(scope, src, zir.Inst.ReturnVoid, .{}, .{});
+ }
+}
+
+fn identifier(mod: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerError!*zir.Inst {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const tree = scope.tree();
+ // TODO implement @"aoeu" identifiers
+ const ident_name = tree.tokenSlice(ident.token);
+ const src = tree.token_locs[ident.token].start;
+ if (mem.eql(u8, ident_name, "_")) {
+ return mod.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});
+ }
+
+ if (getSimplePrimitiveValue(ident_name)) |typed_value| {
+ return mod.addZIRInstConst(scope, src, typed_value);
+ }
+
+ if (ident_name.len >= 2) integer: {
+ const first_c = ident_name[0];
+ if (first_c == 'i' or first_c == 'u') {
+ const is_signed = first_c == 'i';
+ const bit_count = std.fmt.parseInt(u16, ident_name[1..], 10) catch |err| switch (err) {
+ error.Overflow => return mod.failNode(
+ scope,
+ &ident.base,
+ "primitive integer type '{}' exceeds maximum bit width of 65535",
+ .{ident_name},
+ ),
+ error.InvalidCharacter => break :integer,
+ };
+ const val = switch (bit_count) {
+ 8 => if (is_signed) Value.initTag(.i8_type) else Value.initTag(.u8_type),
+ 16 => if (is_signed) Value.initTag(.i16_type) else Value.initTag(.u16_type),
+ 32 => if (is_signed) Value.initTag(.i32_type) else Value.initTag(.u32_type),
+ 64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
+ else => return mod.failNode(scope, &ident.base, "TODO implement arbitrary integer bitwidth types", .{}),
+ };
+ return mod.addZIRInstConst(scope, src, .{
+ .ty = Type.initTag(.type),
+ .val = val,
+ });
+ }
+ }
+
+ // Local variables, including function parameters.
+ {
+ var s = scope;
+ while (true) switch (s.tag) {
+ .local_var => {
+ const local_var = s.cast(Scope.LocalVar).?;
+ if (mem.eql(u8, local_var.name, ident_name)) {
+ return local_var.inst;
+ }
+ s = local_var.parent;
+ },
+ .gen_zir => s = s.cast(Scope.GenZIR).?.parent,
+ else => break,
+ };
+ }
+
+ if (mod.lookupDeclName(scope, ident_name)) |decl| {
+ return try mod.addZIRInst(scope, src, zir.Inst.DeclValInModule, .{ .decl = decl }, .{});
+ }
+
+ return mod.failNode(scope, &ident.base, "use of undeclared identifier '{}'", .{ident_name});
+}
+
+fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.StringLiteral) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const unparsed_bytes = tree.tokenSlice(str_lit.token);
+ const arena = scope.arena();
+
+ var bad_index: usize = undefined;
+ const bytes = std.zig.parseStringLiteral(arena, unparsed_bytes, &bad_index) catch |err| switch (err) {
+ error.InvalidCharacter => {
+ const bad_byte = unparsed_bytes[bad_index];
+ const src = tree.token_locs[str_lit.token].start;
+ return mod.fail(scope, src + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
+ },
+ else => |e| return e,
+ };
+
+ const src = tree.token_locs[str_lit.token].start;
+ return mod.addZIRInst(scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
+}
+
+fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.IntegerLiteral) InnerError!*zir.Inst {
+ const arena = scope.arena();
+ const tree = scope.tree();
+ const prefixed_bytes = tree.tokenSlice(int_lit.token);
+ const base = if (mem.startsWith(u8, prefixed_bytes, "0x"))
+ 16
+ else if (mem.startsWith(u8, prefixed_bytes, "0o"))
+ 8
+ else if (mem.startsWith(u8, prefixed_bytes, "0b"))
+ 2
+ else
+ @as(u8, 10);
+
+ const bytes = if (base == 10)
+ prefixed_bytes
+ else
+ prefixed_bytes[2..];
+
+ if (std.fmt.parseInt(u64, bytes, base)) |small_int| {
+ const int_payload = try arena.create(Value.Payload.Int_u64);
+ int_payload.* = .{ .int = small_int };
+ const src = tree.token_locs[int_lit.token].start;
+ return mod.addZIRInstConst(scope, src, .{
+ .ty = Type.initTag(.comptime_int),
+ .val = Value.initPayload(&int_payload.base),
+ });
+ } else |err| {
+ return mod.failTok(scope, int_lit.token, "TODO implement int literals that don't fit in a u64", .{});
+ }
+}
+
+fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zir.Inst {
+ if (asm_node.outputs.len != 0) {
+ return mod.failNode(scope, &asm_node.base, "TODO implement asm with an output", .{});
+ }
+ const arena = scope.arena();
+ const tree = scope.tree();
+
+ const inputs = try arena.alloc(*zir.Inst, asm_node.inputs.len);
+ const args = try arena.alloc(*zir.Inst, asm_node.inputs.len);
+
+ for (asm_node.inputs) |input, i| {
+ // TODO semantically analyze constraints
+ inputs[i] = try expr(mod, scope, input.constraint);
+ args[i] = try expr(mod, scope, input.expr);
+ }
+
+ const src = tree.token_locs[asm_node.asm_token].start;
+ const return_type = try mod.addZIRInstConst(scope, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.void_type),
+ });
+ const asm_inst = try mod.addZIRInst(scope, src, zir.Inst.Asm, .{
+ .asm_source = try expr(mod, scope, asm_node.template),
+ .return_type = return_type,
+ }, .{
+ .@"volatile" = asm_node.volatile_token != null,
+ //.clobbers = TODO handle clobbers
+ .inputs = inputs,
+ .args = args,
+ });
+ return asm_inst;
+}
+
+fn builtinCall(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const builtin_name = tree.tokenSlice(call.builtin_token);
+ const src = tree.token_locs[call.builtin_token].start;
+
+ inline for (std.meta.declarations(zir.Inst)) |inst| {
+ if (inst.data != .Type) continue;
+ const T = inst.data.Type;
+ if (!@hasDecl(T, "builtin_name")) continue;
+ if (std.mem.eql(u8, builtin_name, T.builtin_name)) {
+ var value: T = undefined;
+ const positionals = @typeInfo(std.meta.fieldInfo(T, "positionals").field_type).Struct;
+ if (positionals.fields.len == 0) {
+ return mod.addZIRInst(scope, src, T, value.positionals, value.kw_args);
+ }
+ const arg_count: ?usize = if (positionals.fields[0].field_type == []*zir.Inst) null else positionals.fields.len;
+ if (arg_count) |some| {
+ if (call.params_len != some) {
+ return mod.failTok(
+ scope,
+ call.builtin_token,
+ "expected {} parameter{}, found {}",
+ .{ some, if (some == 1) "" else "s", call.params_len },
+ );
+ }
+ const params = call.params();
+ inline for (positionals.fields) |p, i| {
+ @field(value.positionals, p.name) = try expr(mod, scope, params[i]);
+ }
+ } else {
+ return mod.failTok(scope, call.builtin_token, "TODO var args builtin '{}'", .{builtin_name});
+ }
+
+ return mod.addZIRInst(scope, src, T, value.positionals, .{});
+ }
+ }
+ return mod.failTok(scope, call.builtin_token, "TODO implement builtin call for '{}'", .{builtin_name});
+}
+
+fn callExpr(mod: *Module, scope: *Scope, node: *ast.Node.Call) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const lhs = try expr(mod, scope, node.lhs);
+
+ const param_nodes = node.params();
+ const args = try scope.getGenZIR().arena.alloc(*zir.Inst, param_nodes.len);
+ for (param_nodes) |param_node, i| {
+ args[i] = try expr(mod, scope, param_node);
+ }
+
+ const src = tree.token_locs[node.lhs.firstToken()].start;
+ return mod.addZIRInst(scope, src, zir.Inst.Call, .{
+ .func = lhs,
+ .args = args,
+ }, .{});
+}
+
+fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.Unreachable) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const src = tree.token_locs[unreach_node.token].start;
+ return mod.addZIRInst(scope, src, zir.Inst.Unreachable, .{}, .{});
+}
+
+fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
+ const simple_types = std.ComptimeStringMap(Value.Tag, .{
+ .{ "u8", .u8_type },
+ .{ "i8", .i8_type },
+ .{ "isize", .isize_type },
+ .{ "usize", .usize_type },
+ .{ "c_short", .c_short_type },
+ .{ "c_ushort", .c_ushort_type },
+ .{ "c_int", .c_int_type },
+ .{ "c_uint", .c_uint_type },
+ .{ "c_long", .c_long_type },
+ .{ "c_ulong", .c_ulong_type },
+ .{ "c_longlong", .c_longlong_type },
+ .{ "c_ulonglong", .c_ulonglong_type },
+ .{ "c_longdouble", .c_longdouble_type },
+ .{ "f16", .f16_type },
+ .{ "f32", .f32_type },
+ .{ "f64", .f64_type },
+ .{ "f128", .f128_type },
+ .{ "c_void", .c_void_type },
+ .{ "bool", .bool_type },
+ .{ "void", .void_type },
+ .{ "type", .type_type },
+ .{ "anyerror", .anyerror_type },
+ .{ "comptime_int", .comptime_int_type },
+ .{ "comptime_float", .comptime_float_type },
+ .{ "noreturn", .noreturn_type },
+ });
+ if (simple_types.get(name)) |tag| {
+ return TypedValue{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(tag),
+ };
+ }
+ if (mem.eql(u8, name, "null")) {
+ return TypedValue{
+ .ty = Type.initTag(.@"null"),
+ .val = Value.initTag(.null_value),
+ };
+ }
+ if (mem.eql(u8, name, "undefined")) {
+ return TypedValue{
+ .ty = Type.initTag(.@"undefined"),
+ .val = Value.initTag(.undef),
+ };
+ }
+ if (mem.eql(u8, name, "true")) {
+ return TypedValue{
+ .ty = Type.initTag(.bool),
+ .val = Value.initTag(.bool_true),
+ };
+ }
+ if (mem.eql(u8, name, "false")) {
+ return TypedValue{
+ .ty = Type.initTag(.bool),
+ .val = Value.initTag(.bool_false),
+ };
+ }
+ return null;
+}
+
+fn nodeNeedsMemoryLocation(node: *ast.Node) bool {
+ return switch (node.tag) {
+ .Root,
+ .Use,
+ .TestDecl,
+ .DocComment,
+ .SwitchCase,
+ .SwitchElse,
+ .Else,
+ .Payload,
+ .PointerPayload,
+ .PointerIndexPayload,
+ .ContainerField,
+ .ErrorTag,
+ .FieldInitializer,
+ => unreachable,
+
+ .ControlFlowExpression,
+ .BitNot,
+ .BoolNot,
+ .VarDecl,
+ .Defer,
+ .AddressOf,
+ .OptionalType,
+ .Negation,
+ .NegationWrap,
+ .Resume,
+ .ArrayType,
+ .ArrayTypeSentinel,
+ .PtrType,
+ .SliceType,
+ .Suspend,
+ .AnyType,
+ .ErrorType,
+ .FnProto,
+ .AnyFrameType,
+ .IntegerLiteral,
+ .FloatLiteral,
+ .EnumLiteral,
+ .StringLiteral,
+ .MultilineStringLiteral,
+ .CharLiteral,
+ .BoolLiteral,
+ .NullLiteral,
+ .UndefinedLiteral,
+ .Unreachable,
+ .Identifier,
+ .ErrorSetDecl,
+ .ContainerDecl,
+ .Asm,
+ .Add,
+ .AddWrap,
+ .ArrayCat,
+ .ArrayMult,
+ .Assign,
+ .AssignBitAnd,
+ .AssignBitOr,
+ .AssignBitShiftLeft,
+ .AssignBitShiftRight,
+ .AssignBitXor,
+ .AssignDiv,
+ .AssignSub,
+ .AssignSubWrap,
+ .AssignMod,
+ .AssignAdd,
+ .AssignAddWrap,
+ .AssignMul,
+ .AssignMulWrap,
+ .BangEqual,
+ .BitAnd,
+ .BitOr,
+ .BitShiftLeft,
+ .BitShiftRight,
+ .BitXor,
+ .BoolAnd,
+ .BoolOr,
+ .Div,
+ .EqualEqual,
+ .ErrorUnion,
+ .GreaterOrEqual,
+ .GreaterThan,
+ .LessOrEqual,
+ .LessThan,
+ .MergeErrorSets,
+ .Mod,
+ .Mul,
+ .MulWrap,
+ .Range,
+ .Period,
+ .Sub,
+ .SubWrap,
+ => false,
+
+ .ArrayInitializer,
+ .ArrayInitializerDot,
+ .StructInitializer,
+ .StructInitializerDot,
+ => true,
+
+ .GroupedExpression => nodeNeedsMemoryLocation(node.castTag(.GroupedExpression).?.expr),
+
+ .UnwrapOptional => @panic("TODO nodeNeedsMemoryLocation for UnwrapOptional"),
+ .Catch => @panic("TODO nodeNeedsMemoryLocation for Catch"),
+ .Await => @panic("TODO nodeNeedsMemoryLocation for Await"),
+ .Try => @panic("TODO nodeNeedsMemoryLocation for Try"),
+ .If => @panic("TODO nodeNeedsMemoryLocation for If"),
+ .SuffixOp => @panic("TODO nodeNeedsMemoryLocation for SuffixOp"),
+ .Call => @panic("TODO nodeNeedsMemoryLocation for Call"),
+ .Switch => @panic("TODO nodeNeedsMemoryLocation for Switch"),
+ .While => @panic("TODO nodeNeedsMemoryLocation for While"),
+ .For => @panic("TODO nodeNeedsMemoryLocation for For"),
+ .BuiltinCall => @panic("TODO nodeNeedsMemoryLocation for BuiltinCall"),
+ .Comptime => @panic("TODO nodeNeedsMemoryLocation for Comptime"),
+ .Nosuspend => @panic("TODO nodeNeedsMemoryLocation for Nosuspend"),
+ .Block => @panic("TODO nodeNeedsMemoryLocation for Block"),
+ };
+}
diff --git a/src-self-hosted/codegen.zig b/src-self-hosted/codegen.zig
index 314d497808..e78ee28b5d 100644
--- a/src-self-hosted/codegen.zig
+++ b/src-self-hosted/codegen.zig
@@ -73,6 +73,7 @@ pub fn generateSymbol(
.code = code,
.err_msg = null,
.args = mc_args,
+ .arg_index = 0,
.branch_stack = &branch_stack,
.src = src,
};
@@ -255,6 +256,7 @@ const Function = struct {
code: *std.ArrayList(u8),
err_msg: ?*ErrorMsg,
args: []MCValue,
+ arg_index: usize,
src: usize,
/// Whenever there is a runtime branch, we push a Branch onto this stack,
@@ -603,7 +605,9 @@ const Function = struct {
}
fn genArg(self: *Function, inst: *ir.Inst.Arg) !MCValue {
- return self.args[inst.args.index];
+ const i = self.arg_index;
+ self.arg_index += 1;
+ return self.args[i];
}
fn genBreakpoint(self: *Function, src: usize, comptime arch: std.Target.Cpu.Arch) !MCValue {
diff --git a/src-self-hosted/ir.zig b/src-self-hosted/ir.zig
index c654bef611..a150957de0 100644
--- a/src-self-hosted/ir.zig
+++ b/src-self-hosted/ir.zig
@@ -101,10 +101,7 @@ pub const Inst = struct {
pub const Arg = struct {
pub const base_tag = Tag.arg;
base: Inst,
-
- args: struct {
- index: usize,
- },
+ args: void,
};
pub const Assembly = struct {
diff --git a/src-self-hosted/translate_c.zig b/src-self-hosted/translate_c.zig
index b9ab28cc17..1da52cda96 100644
--- a/src-self-hosted/translate_c.zig
+++ b/src-self-hosted/translate_c.zig
@@ -1103,11 +1103,11 @@ fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.No
const enum_ident = try transCreateNodeIdentifier(c, name);
const period_tok = try appendToken(c, .Period, ".");
const field_ident = try transCreateNodeIdentifier(c, field_name);
- const field_access_node = try c.arena.create(ast.Node.InfixOp);
+ const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp);
field_access_node.* = .{
+ .base = .{ .tag = .Period },
.op_token = period_tok,
.lhs = enum_ident,
- .op = .Period,
.rhs = field_ident,
};
cast_node.params()[0] = &field_access_node.base;
@@ -1219,7 +1219,7 @@ fn transStmt(
.StringLiteralClass => return transStringLiteral(rp, scope, @ptrCast(*const ZigClangStringLiteral, stmt), result_used),
.ParenExprClass => {
const expr = try transExpr(rp, scope, ZigClangParenExpr_getSubExpr(@ptrCast(*const ZigClangParenExpr, stmt)), .used, lrvalue);
- if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
+ if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
@@ -1264,7 +1264,7 @@ fn transStmt(
.OpaqueValueExprClass => {
const source_expr = ZigClangOpaqueValueExpr_getSourceExpr(@ptrCast(*const ZigClangOpaqueValueExpr, stmt)).?;
const expr = try transExpr(rp, scope, source_expr, .used, lrvalue);
- if (expr.id == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
+ if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
const node = try rp.c.arena.create(ast.Node.GroupedExpression);
node.* = .{
.lparen = try appendToken(rp.c, .LParen, "("),
@@ -1294,7 +1294,7 @@ fn transBinaryOperator(
const op = ZigClangBinaryOperator_getOpcode(stmt);
const qt = ZigClangBinaryOperator_getType(stmt);
var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.InfixOp.Op = undefined;
+ var op_id: ast.Node.Tag = undefined;
switch (op) {
.Assign => return try transCreateNodeAssign(rp, scope, result_used, ZigClangBinaryOperator_getLHS(stmt), ZigClangBinaryOperator_getRHS(stmt)),
.Comma => {
@@ -1693,7 +1693,7 @@ fn transBoolExpr(
var res = try transExpr(rp, scope, expr, used, lrvalue);
if (isBoolRes(res)) {
- if (!grouped and res.id == .GroupedExpression) {
+ if (!grouped and res.tag == .GroupedExpression) {
const group = @fieldParentPtr(ast.Node.GroupedExpression, "base", res);
res = group.expr;
// get zig fmt to work properly
@@ -1736,26 +1736,23 @@ fn exprIsStringLiteral(expr: *const ZigClangExpr) bool {
}
fn isBoolRes(res: *ast.Node) bool {
- switch (res.id) {
- .InfixOp => switch (@fieldParentPtr(ast.Node.InfixOp, "base", res).op) {
- .BoolOr,
- .BoolAnd,
- .EqualEqual,
- .BangEqual,
- .LessThan,
- .GreaterThan,
- .LessOrEqual,
- .GreaterOrEqual,
- => return true,
+ switch (res.tag) {
+ .BoolOr,
+ .BoolAnd,
+ .EqualEqual,
+ .BangEqual,
+ .LessThan,
+ .GreaterThan,
+ .LessOrEqual,
+ .GreaterOrEqual,
+ .BoolNot,
+ .BoolLiteral,
+ => return true,
- else => {},
- },
- .BoolNot => return true,
- .BoolLiteral => return true,
.GroupedExpression => return isBoolRes(@fieldParentPtr(ast.Node.GroupedExpression, "base", res).expr),
- else => {},
+
+ else => return false,
}
- return false;
}
fn finishBoolExpr(
@@ -2312,11 +2309,11 @@ fn transInitListExprArray(
&filler_init_node.base
else blk: {
const mul_tok = try appendToken(rp.c, .AsteriskAsterisk, "**");
- const mul_node = try rp.c.arena.create(ast.Node.InfixOp);
+ const mul_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
mul_node.* = .{
+ .base = .{ .tag = .ArrayMult },
.op_token = mul_tok,
.lhs = &filler_init_node.base,
- .op = .ArrayMult,
.rhs = try transCreateNodeInt(rp.c, leftover_count),
};
break :blk &mul_node.base;
@@ -2326,11 +2323,11 @@ fn transInitListExprArray(
return rhs_node;
}
- const cat_node = try rp.c.arena.create(ast.Node.InfixOp);
+ const cat_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
cat_node.* = .{
+ .base = .{ .tag = .ArrayCat },
.op_token = cat_tok,
.lhs = &init_node.base,
- .op = .ArrayCat,
.rhs = rhs_node,
};
return &cat_node.base;
@@ -2723,11 +2720,11 @@ fn transCase(
const ellips = try appendToken(rp.c, .Ellipsis3, "...");
const rhs_node = try transExpr(rp, scope, rhs, .used, .r_value);
- const node = try rp.c.arena.create(ast.Node.InfixOp);
+ const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
node.* = .{
+ .base = .{ .tag = .Range },
.op_token = ellips,
.lhs = lhs_node,
- .op = .Range,
.rhs = rhs_node,
};
break :blk &node.base;
@@ -3153,7 +3150,7 @@ fn transCreatePreCrement(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangUnaryOperator,
- op: ast.Node.InfixOp.Op,
+ op: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
used: ResultUsed,
@@ -3227,7 +3224,7 @@ fn transCreatePostCrement(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangUnaryOperator,
- op: ast.Node.InfixOp.Op,
+ op: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
used: ResultUsed,
@@ -3349,10 +3346,10 @@ fn transCreateCompoundAssign(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangCompoundAssignOperator,
- assign_op: ast.Node.InfixOp.Op,
+ assign_op: ast.Node.Tag,
assign_tok_id: std.zig.Token.Id,
assign_bytes: []const u8,
- bin_op: ast.Node.InfixOp.Op,
+ bin_op: ast.Node.Tag,
bin_tok_id: std.zig.Token.Id,
bin_bytes: []const u8,
used: ResultUsed,
@@ -3377,7 +3374,7 @@ fn transCreateCompoundAssign(
// zig: lhs += rhs
if ((is_mod or is_div) and is_signed) {
const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.arena.create(ast.Node.InfixOp);
+ const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
const builtin = if (is_mod) "@rem" else "@divTrunc";
const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value);
@@ -3386,9 +3383,9 @@ fn transCreateCompoundAssign(
builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value);
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
op_node.* = .{
+ .base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = lhs_node,
- .op = .Assign,
.rhs = &builtin_node.base,
};
_ = try appendToken(rp.c, .Semicolon, ";");
@@ -3452,7 +3449,7 @@ fn transCreateCompoundAssign(
if ((is_mod or is_div) and is_signed) {
const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.arena.create(ast.Node.InfixOp);
+ const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
const builtin = if (is_mod) "@rem" else "@divTrunc";
const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
builtin_node.params()[0] = try transCreateNodePtrDeref(rp.c, lhs_node);
@@ -3461,9 +3458,9 @@ fn transCreateCompoundAssign(
builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
_ = try appendToken(rp.c, .Semicolon, ";");
op_node.* = .{
+ .base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = ref_node,
- .op = .Assign,
.rhs = &builtin_node.base,
};
_ = try appendToken(rp.c, .Semicolon, ";");
@@ -3716,11 +3713,11 @@ fn maybeSuppressResult(
}
const lhs = try transCreateNodeIdentifier(rp.c, "_");
const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.arena.create(ast.Node.InfixOp);
+ const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
op_node.* = .{
+ .base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = lhs,
- .op = .Assign,
.rhs = result,
};
return &op_node.base;
@@ -4095,11 +4092,11 @@ fn transCreateNodeAssign(
}
fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []const u8) !*ast.Node {
- const field_access_node = try c.arena.create(ast.Node.InfixOp);
+ const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp);
field_access_node.* = .{
+ .base = .{ .tag = .Period },
.op_token = try appendToken(c, .Period, "."),
.lhs = container,
- .op = .Period,
.rhs = try transCreateNodeIdentifier(c, field_name),
};
return &field_access_node.base;
@@ -4107,12 +4104,13 @@ fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []c
fn transCreateNodeSimplePrefixOp(
c: *Context,
- comptime tag: ast.Node.Id,
+ comptime tag: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
-) !*ast.Node.SimplePrefixOp(tag) {
- const node = try c.arena.create(ast.Node.SimplePrefixOp(tag));
+) !*ast.Node.SimplePrefixOp {
+ const node = try c.arena.create(ast.Node.SimplePrefixOp);
node.* = .{
+ .base = .{ .tag = tag },
.op_token = try appendToken(c, op_tok_id, bytes),
.rhs = undefined, // translate and set afterward
};
@@ -4123,7 +4121,7 @@ fn transCreateNodeInfixOp(
rp: RestorePoint,
scope: *Scope,
lhs_node: *ast.Node,
- op: ast.Node.InfixOp.Op,
+ op: ast.Node.Tag,
op_token: ast.TokenIndex,
rhs_node: *ast.Node,
used: ResultUsed,
@@ -4133,11 +4131,11 @@ fn transCreateNodeInfixOp(
try appendToken(rp.c, .LParen, "(")
else
null;
- const node = try rp.c.arena.create(ast.Node.InfixOp);
+ const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
node.* = .{
+ .base = .{ .tag = op },
.op_token = op_token,
.lhs = lhs_node,
- .op = op,
.rhs = rhs_node,
};
if (!grouped) return maybeSuppressResult(rp, scope, used, &node.base);
@@ -4155,7 +4153,7 @@ fn transCreateNodeBoolInfixOp(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangBinaryOperator,
- op: ast.Node.InfixOp.Op,
+ op: ast.Node.Tag,
used: ResultUsed,
grouped: bool,
) !*ast.Node {
@@ -4535,7 +4533,7 @@ fn transCreateNodeShiftOp(
rp: RestorePoint,
scope: *Scope,
stmt: *const ZigClangBinaryOperator,
- op: ast.Node.InfixOp.Op,
+ op: ast.Node.Tag,
op_tok_id: std.zig.Token.Id,
bytes: []const u8,
) !*ast.Node {
@@ -4557,11 +4555,11 @@ fn transCreateNodeShiftOp(
cast_node.params()[1] = rhs;
cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- const node = try rp.c.arena.create(ast.Node.InfixOp);
+ const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
node.* = .{
+ .base = .{ .tag = op },
.op_token = op_token,
.lhs = lhs,
- .op = op,
.rhs = &cast_node.base,
};
@@ -5338,10 +5336,10 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.{@tagName(last.id)},
);
_ = try appendToken(c, .Semicolon, ";");
- const type_of_arg = if (expr.id != .Block) expr else blk: {
+ const type_of_arg = if (expr.tag != .Block) expr else blk: {
const blk = @fieldParentPtr(ast.Node.Block, "base", expr);
const blk_last = blk.statements()[blk.statements_len - 1];
- std.debug.assert(blk_last.id == .ControlFlowExpression);
+ std.debug.assert(blk_last.tag == .ControlFlowExpression);
const br = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", blk_last);
break :blk br.rhs.?;
};
@@ -5403,11 +5401,11 @@ fn parseCExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_
// suppress result
const lhs = try transCreateNodeIdentifier(c, "_");
const op_token = try appendToken(c, .Equal, "=");
- const op_node = try c.arena.create(ast.Node.InfixOp);
+ const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
op_node.* = .{
+ .base = .{ .tag = .Assign },
.op_token = op_token,
.lhs = lhs,
- .op = .Assign,
.rhs = last,
};
try block_scope.statements.append(&op_node.base);
@@ -5786,9 +5784,60 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
}
}
+fn nodeIsInfixOp(tag: ast.Node.Tag) bool {
+ return switch (tag) {
+ .Add,
+ .AddWrap,
+ .ArrayCat,
+ .ArrayMult,
+ .Assign,
+ .AssignBitAnd,
+ .AssignBitOr,
+ .AssignBitShiftLeft,
+ .AssignBitShiftRight,
+ .AssignBitXor,
+ .AssignDiv,
+ .AssignSub,
+ .AssignSubWrap,
+ .AssignMod,
+ .AssignAdd,
+ .AssignAddWrap,
+ .AssignMul,
+ .AssignMulWrap,
+ .BangEqual,
+ .BitAnd,
+ .BitOr,
+ .BitShiftLeft,
+ .BitShiftRight,
+ .BitXor,
+ .BoolAnd,
+ .BoolOr,
+ .Div,
+ .EqualEqual,
+ .ErrorUnion,
+ .GreaterOrEqual,
+ .GreaterThan,
+ .LessOrEqual,
+ .LessThan,
+ .MergeErrorSets,
+ .Mod,
+ .Mul,
+ .MulWrap,
+ .Period,
+ .Range,
+ .Sub,
+ .SubWrap,
+ .UnwrapOptional,
+ .Catch,
+ => true,
+
+ else => false,
+ };
+}
+
fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
if (!isBoolRes(node)) {
- if (node.id != .InfixOp) return node;
+ if (!nodeIsInfixOp(node.tag)) return node;
const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
@@ -5807,7 +5856,7 @@ fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
if (isBoolRes(node)) {
- if (node.id != .InfixOp) return node;
+ if (!nodeIsInfixOp(node.tag)) return node;
const group_node = try c.arena.create(ast.Node.GroupedExpression);
group_node.* = .{
@@ -5820,11 +5869,11 @@ fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
const op_token = try appendToken(c, .BangEqual, "!=");
const zero = try transCreateNodeInt(c, 0);
- const res = try c.arena.create(ast.Node.InfixOp);
+ const res = try c.arena.create(ast.Node.SimpleInfixOp);
res.* = .{
+ .base = .{ .tag = .BangEqual },
.op_token = op_token,
.lhs = node,
- .op = .BangEqual,
.rhs = zero,
};
const group_node = try c.arena.create(ast.Node.GroupedExpression);
@@ -5841,7 +5890,7 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
while (true) {
const tok = it.next().?;
var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.InfixOp.Op = undefined;
+ var op_id: ast.Node.Tag = undefined;
var bool_op = false;
switch (tok.id) {
.Period => {
@@ -6048,11 +6097,11 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const cast_fn = if (bool_op) macroIntToBool else macroBoolToInt;
const lhs_node = try cast_fn(c, node);
const rhs_node = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
- const op_node = try c.arena.create(ast.Node.InfixOp);
+ const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
op_node.* = .{
+ .base = .{ .tag = op_id },
.op_token = op_token,
.lhs = lhs_node,
- .op = op_id,
.rhs = try cast_fn(c, rhs_node),
};
node = &op_node.base;
@@ -6105,7 +6154,7 @@ fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 {
}
fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
- switch (node.id) {
+ switch (node.tag) {
.ContainerDecl,
.AddressOf,
.Await,
@@ -6130,10 +6179,9 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
}
},
- .InfixOp => {
- const infix = node.cast(ast.Node.InfixOp).?;
- if (infix.op != .Period)
- return null;
+ .Period => {
+ const infix = node.castTag(.Period).?;
+
if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
for (container.fieldsAndDecls()) |field_ref| {
@@ -6160,9 +6208,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
return getContainer(c, ty);
}
}
- } else if (ref.cast(ast.Node.InfixOp)) |infix| {
- if (infix.op != .Period)
- return null;
+ } else if (ref.castTag(.Period)) |infix| {
if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
for (container.fieldsAndDecls()) |field_ref| {
@@ -6182,7 +6228,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto {
const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getTrailer("init_node").? else return null;
if (getContainerTypeOf(c, init)) |ty_node| {
- if (ty_node.cast(ast.Node.OptionalType)) |prefix| {
+ if (ty_node.castTag(.OptionalType)) |prefix| {
if (prefix.rhs.cast(ast.Node.FnProto)) |fn_proto| {
return fn_proto;
}
diff --git a/src-self-hosted/zir.zig b/src-self-hosted/zir.zig
index 45ced54255..2f696d1787 100644
--- a/src-self-hosted/zir.zig
+++ b/src-self-hosted/zir.zig
@@ -34,7 +34,8 @@ pub const Inst = struct {
/// These names are used directly as the instruction names in the text format.
pub const Tag = enum {
- /// Function parameter value.
+ /// Function parameter value. These must be first in a function's main block,
+ /// in the same order as the parameters.
arg,
/// A labeled block of code, which can return a value.
block,
@@ -184,9 +185,7 @@ pub const Inst = struct {
pub const base_tag = Tag.arg;
base: Inst,
- positionals: struct {
- index: usize,
- },
+ positionals: struct {},
kw_args: struct {},
};
@@ -1384,15 +1383,17 @@ const EmitZIR = struct {
for (src_decls.items) |ir_decl| {
switch (ir_decl.analysis) {
.unreferenced => continue,
+
.complete => {},
+ .codegen_failure => {}, // We can still emit the ZIR.
+ .codegen_failure_retryable => {}, // We can still emit the ZIR.
+
.in_progress => unreachable,
.outdated => unreachable,
.sema_failure,
.sema_failure_retryable,
- .codegen_failure,
.dependency_failure,
- .codegen_failure_retryable,
=> if (self.old_module.failed_decls.get(ir_decl)) |err_msg| {
const fail_inst = try self.arena.allocator.create(Inst.CompileError);
fail_inst.* = .{
@@ -1728,7 +1729,7 @@ const EmitZIR = struct {
.src = inst.src,
.tag = Inst.Arg.base_tag,
},
- .positionals = .{ .index = old_inst.args.index },
+ .positionals = .{},
.kw_args = .{},
};
break :blk &new_inst.base;