aboutsummaryrefslogtreecommitdiff
path: root/src/Module.zig
diff options
context:
space:
mode:
Diffstat (limited to 'src/Module.zig')
-rw-r--r--src/Module.zig3126
1 files changed, 1952 insertions, 1174 deletions
diff --git a/src/Module.zig b/src/Module.zig
index 20cb7bf195..21dc953262 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -22,6 +22,9 @@ const ast = std.zig.ast;
const trace = @import("tracy.zig").trace;
const astgen = @import("astgen.zig");
const zir_sema = @import("zir_sema.zig");
+const target_util = @import("target.zig");
+
+const default_eval_branch_quota = 1000;
/// General-purpose allocator. Used for both temporary and long-term storage.
gpa: *Allocator,
@@ -32,11 +35,11 @@ zig_cache_artifact_directory: Compilation.Directory,
/// Pointer to externally managed resource. `null` if there is no zig file being compiled.
root_pkg: *Package,
/// Module owns this resource.
-/// The `Scope` is either a `Scope.ZIRModule` or `Scope.File`.
-root_scope: *Scope,
+root_scope: *Scope.File,
/// It's rare for a decl to be exported, so we save memory by having a sparse map of
/// Decl pointers to details about them being exported.
/// The Export memory is owned by the `export_owners` table; the slice itself is owned by this table.
+/// The slice is guaranteed to not be empty.
decl_exports: std.AutoArrayHashMapUnmanaged(*Decl, []*Export) = .{},
/// We track which export is associated with the given symbol name for quick
/// detection of symbol collisions.
@@ -53,13 +56,19 @@ decl_table: std.ArrayHashMapUnmanaged(Scope.NameHash, *Decl, Scope.name_hash_has
/// The ErrorMsg memory is owned by the decl, using Module's general purpose allocator.
/// Note that a Decl can succeed but the Fn it represents can fail. In this case,
/// a Decl can have a failed_decls entry but have analysis status of success.
-failed_decls: std.AutoArrayHashMapUnmanaged(*Decl, *Compilation.ErrorMsg) = .{},
+failed_decls: std.AutoArrayHashMapUnmanaged(*Decl, *ErrorMsg) = .{},
+/// When emit_h is non-null, each Decl gets one more compile error slot for
+/// emit-h failing for that Decl. This table is also how we tell if a Decl has
+/// failed emit-h or succeeded.
+emit_h_failed_decls: std.AutoArrayHashMapUnmanaged(*Decl, *ErrorMsg) = .{},
+/// Keep track of one `@compileLog` callsite per owner Decl.
+compile_log_decls: std.AutoArrayHashMapUnmanaged(*Decl, SrcLoc) = .{},
/// Using a map here for consistency with the other fields here.
/// The ErrorMsg memory is owned by the `Scope`, using Module's general purpose allocator.
-failed_files: std.AutoArrayHashMapUnmanaged(*Scope, *Compilation.ErrorMsg) = .{},
+failed_files: std.AutoArrayHashMapUnmanaged(*Scope, *ErrorMsg) = .{},
/// Using a map here for consistency with the other fields here.
/// The ErrorMsg memory is owned by the `Export`, using Module's general purpose allocator.
-failed_exports: std.AutoArrayHashMapUnmanaged(*Export, *Compilation.ErrorMsg) = .{},
+failed_exports: std.AutoArrayHashMapUnmanaged(*Export, *ErrorMsg) = .{},
next_anon_name_index: usize = 0,
@@ -91,6 +100,10 @@ stage1_flags: packed struct {
reserved: u2 = 0,
} = .{},
+emit_h: ?Compilation.EmitLoc,
+
+compile_log_text: std.ArrayListUnmanaged(u8) = .{},
+
pub const Export = struct {
options: std.builtin.ExportOptions,
/// Byte offset into the file that contains the export directive.
@@ -111,6 +124,13 @@ pub const Export = struct {
},
};
+/// When Module emit_h field is non-null, each Decl is allocated via this struct, so that
+/// there can be EmitH state attached to each Decl.
+pub const DeclPlusEmitH = struct {
+ decl: Decl,
+ emit_h: EmitH,
+};
+
pub const Decl = struct {
/// This name is relative to the containing namespace of the decl. It uses a null-termination
/// to save bytes, since there can be a lot of decls in a compilation. The null byte is not allowed
@@ -119,9 +139,9 @@ pub const Decl = struct {
/// mapping them to an address in the output file.
/// Memory owned by this decl, using Module's allocator.
name: [*:0]const u8,
- /// The direct parent container of the Decl. This is either a `Scope.Container` or `Scope.ZIRModule`.
+ /// The direct parent container of the Decl.
/// Reference to externally owned memory.
- scope: *Scope,
+ container: *Scope.Container,
/// The AST Node decl index or ZIR Inst index that contains this declaration.
/// Must be recomputed when the corresponding source file is modified.
src_index: usize,
@@ -199,41 +219,38 @@ pub const Decl = struct {
/// stage1 compiler giving me: `error: struct 'Module.Decl' depends on itself`
pub const DepsTable = std.ArrayHashMapUnmanaged(*Decl, void, std.array_hash_map.getAutoHashFn(*Decl), std.array_hash_map.getAutoEqlFn(*Decl), false);
- pub fn destroy(self: *Decl, gpa: *Allocator) void {
+ pub fn destroy(self: *Decl, module: *Module) void {
+ const gpa = module.gpa;
gpa.free(mem.spanZ(self.name));
if (self.typedValueManaged()) |tvm| {
tvm.deinit(gpa);
}
self.dependants.deinit(gpa);
self.dependencies.deinit(gpa);
- gpa.destroy(self);
+ if (module.emit_h != null) {
+ const decl_plus_emit_h = @fieldParentPtr(DeclPlusEmitH, "decl", self);
+ decl_plus_emit_h.emit_h.fwd_decl.deinit(gpa);
+ gpa.destroy(decl_plus_emit_h);
+ } else {
+ gpa.destroy(self);
+ }
+ }
+
+ pub fn srcLoc(self: Decl) SrcLoc {
+ return .{
+ .byte_offset = self.src(),
+ .file_scope = self.getFileScope(),
+ };
}
pub fn src(self: Decl) usize {
- switch (self.scope.tag) {
- .container => {
- const container = @fieldParentPtr(Scope.Container, "base", self.scope);
- const tree = container.file_scope.contents.tree;
- // TODO Container should have its own decls()
- const decl_node = tree.root_node.decls()[self.src_index];
- return tree.token_locs[decl_node.firstToken()].start;
- },
- .zir_module => {
- const zir_module = @fieldParentPtr(Scope.ZIRModule, "base", self.scope);
- const module = zir_module.contents.module;
- const src_decl = module.decls[self.src_index];
- return src_decl.inst.src;
- },
- .file, .block => unreachable,
- .gen_zir => unreachable,
- .local_val => unreachable,
- .local_ptr => unreachable,
- .decl => unreachable,
- }
+ const tree = &self.container.file_scope.tree;
+ const decl_node = tree.rootDecls()[self.src_index];
+ return tree.tokens.items(.start)[tree.firstToken(decl_node)];
}
pub fn fullyQualifiedNameHash(self: Decl) Scope.NameHash {
- return self.scope.fullyQualifiedNameHash(mem.spanZ(self.name));
+ return self.container.fullyQualifiedNameHash(mem.spanZ(self.name));
}
pub fn typedValue(self: *Decl) error{AnalysisFail}!TypedValue {
@@ -247,7 +264,7 @@ pub const Decl = struct {
pub fn dump(self: *Decl) void {
const loc = std.zig.findLineColumn(self.scope.source.bytes, self.src);
- std.debug.print("{}:{}:{} name={} status={}", .{
+ std.debug.print("{s}:{d}:{d} name={s} status={s}", .{
self.scope.sub_file_path,
loc.line + 1,
loc.column + 1,
@@ -267,6 +284,16 @@ pub const Decl = struct {
}
}
+ pub fn getFileScope(self: Decl) *Scope.File {
+ return self.container.file_scope;
+ }
+
+ pub fn getEmitH(decl: *Decl, module: *Module) *EmitH {
+ assert(module.emit_h != null);
+ const decl_plus_emit_h = @fieldParentPtr(DeclPlusEmitH, "decl", decl);
+ return &decl_plus_emit_h.emit_h;
+ }
+
fn removeDependant(self: *Decl, other: *Decl) void {
self.dependants.removeAssertDiscard(other);
}
@@ -276,48 +303,41 @@ pub const Decl = struct {
}
};
+/// This state is attached to every Decl when Module emit_h is non-null.
+pub const EmitH = struct {
+ fwd_decl: std.ArrayListUnmanaged(u8) = .{},
+};
+
/// Fn struct memory is owned by the Decl's TypedValue.Managed arena allocator.
+/// Extern functions do not have this data structure; they are represented by
+/// the `Decl` only, with a `Value` tag of `extern_fn`.
pub const Fn = struct {
- /// This memory owned by the Decl's TypedValue.Managed arena allocator.
- analysis: union(enum) {
- queued: *ZIR,
+ owner_decl: *Decl,
+ /// Contains un-analyzed ZIR instructions generated from Zig source AST.
+ /// Even after we finish analysis, the ZIR is kept in memory, so that
+ /// comptime and inline function calls can happen.
+ zir: zir.Body,
+ /// undefined unless analysis state is `success`.
+ body: Body,
+ state: Analysis,
+
+ pub const Analysis = enum {
+ queued,
+ /// This function intentionally only has ZIR generated because it is marked
+ /// inline, which means no runtime version of the function will be generated.
+ inline_only,
in_progress,
/// There will be a corresponding ErrorMsg in Module.failed_decls
sema_failure,
- /// This Fn might be OK but it depends on another Decl which did not successfully complete
- /// semantic analysis.
+ /// This Fn might be OK but it depends on another Decl which did not
+ /// successfully complete semantic analysis.
dependency_failure,
- success: Body,
- },
- owner_decl: *Decl,
-
- /// This memory is temporary and points to stack memory for the duration
- /// of Fn analysis.
- pub const Analysis = struct {
- inner_block: Scope.Block,
- };
-
- /// Contains un-analyzed ZIR instructions generated from Zig source AST.
- pub const ZIR = struct {
- body: zir.Module.Body,
- arena: std.heap.ArenaAllocator.State,
+ success,
};
/// For debugging purposes.
pub fn dump(self: *Fn, mod: Module) void {
- std.debug.print("Module.Function(name={}) ", .{self.owner_decl.name});
- switch (self.analysis) {
- .queued => {
- std.debug.print("queued\n", .{});
- },
- .in_progress => {
- std.debug.print("in_progress\n", .{});
- },
- else => {
- std.debug.print("\n", .{});
- zir.dumpFn(mod, self);
- },
- }
+ zir.dumpFn(mod, self);
}
};
@@ -343,47 +363,53 @@ pub const Scope = struct {
return @fieldParentPtr(T, "base", base);
}
- /// Asserts the scope has a parent which is a DeclAnalysis and
- /// returns the arena Allocator.
+ /// Returns the arena Allocator associated with the Decl of the Scope.
pub fn arena(self: *Scope) *Allocator {
switch (self.tag) {
.block => return self.cast(Block).?.arena,
- .decl => return &self.cast(DeclAnalysis).?.arena.allocator,
.gen_zir => return self.cast(GenZIR).?.arena,
.local_val => return self.cast(LocalVal).?.gen_zir.arena,
.local_ptr => return self.cast(LocalPtr).?.gen_zir.arena,
- .zir_module => return &self.cast(ZIRModule).?.contents.module.arena.allocator,
.file => unreachable,
.container => unreachable,
}
}
- /// If the scope has a parent which is a `DeclAnalysis`,
- /// returns the `Decl`, otherwise returns `null`.
- pub fn decl(self: *Scope) ?*Decl {
+ pub fn isComptime(self: *Scope) bool {
+ return self.getGenZIR().force_comptime;
+ }
+
+ pub fn ownerDecl(self: *Scope) ?*Decl {
return switch (self.tag) {
- .block => self.cast(Block).?.decl,
+ .block => self.cast(Block).?.owner_decl,
.gen_zir => self.cast(GenZIR).?.decl,
.local_val => self.cast(LocalVal).?.gen_zir.decl,
.local_ptr => self.cast(LocalPtr).?.gen_zir.decl,
- .decl => self.cast(DeclAnalysis).?.decl,
- .zir_module => null,
.file => null,
.container => null,
};
}
- /// Asserts the scope has a parent which is a ZIRModule or Container and
- /// returns it.
- pub fn namespace(self: *Scope) *Scope {
+ pub fn srcDecl(self: *Scope) ?*Decl {
+ return switch (self.tag) {
+ .block => self.cast(Block).?.src_decl,
+ .gen_zir => self.cast(GenZIR).?.decl,
+ .local_val => self.cast(LocalVal).?.gen_zir.decl,
+ .local_ptr => self.cast(LocalPtr).?.gen_zir.decl,
+ .file => null,
+ .container => null,
+ };
+ }
+
+ /// Asserts the scope has a parent which is a Container and returns it.
+ pub fn namespace(self: *Scope) *Container {
switch (self.tag) {
- .block => return self.cast(Block).?.decl.scope,
- .gen_zir => return self.cast(GenZIR).?.decl.scope,
- .local_val => return self.cast(LocalVal).?.gen_zir.decl.scope,
- .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.scope,
- .decl => return self.cast(DeclAnalysis).?.decl.scope,
- .file => return &self.cast(File).?.root_container.base,
- .zir_module, .container => return self,
+ .block => return self.cast(Block).?.owner_decl.container,
+ .gen_zir => return self.cast(GenZIR).?.decl.container,
+ .local_val => return self.cast(LocalVal).?.gen_zir.decl.container,
+ .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.container,
+ .file => return &self.cast(File).?.root_container,
+ .container => return self.cast(Container).?,
}
}
@@ -396,24 +422,20 @@ pub const Scope = struct {
.gen_zir => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
- .decl => unreachable,
.file => unreachable,
- .zir_module => return self.cast(ZIRModule).?.fullyQualifiedNameHash(name),
.container => return self.cast(Container).?.fullyQualifiedNameHash(name),
}
}
/// Asserts the scope is a child of a File and has an AST tree and returns the tree.
- pub fn tree(self: *Scope) *ast.Tree {
+ pub fn tree(self: *Scope) *const ast.Tree {
switch (self.tag) {
- .file => return self.cast(File).?.contents.tree,
- .zir_module => unreachable,
- .decl => return self.cast(DeclAnalysis).?.decl.scope.cast(Container).?.file_scope.contents.tree,
- .block => return self.cast(Block).?.decl.scope.cast(Container).?.file_scope.contents.tree,
- .gen_zir => return self.cast(GenZIR).?.decl.scope.cast(Container).?.file_scope.contents.tree,
- .local_val => return self.cast(LocalVal).?.gen_zir.decl.scope.cast(Container).?.file_scope.contents.tree,
- .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.scope.cast(Container).?.file_scope.contents.tree,
- .container => return self.cast(Container).?.file_scope.contents.tree,
+ .file => return &self.cast(File).?.tree,
+ .block => return &self.cast(Block).?.src_decl.container.file_scope.tree,
+ .gen_zir => return &self.cast(GenZIR).?.decl.container.file_scope.tree,
+ .local_val => return &self.cast(LocalVal).?.gen_zir.decl.container.file_scope.tree,
+ .local_ptr => return &self.cast(LocalPtr).?.gen_zir.decl.container.file_scope.tree,
+ .container => return &self.cast(Container).?.file_scope.tree,
}
}
@@ -424,38 +446,21 @@ pub const Scope = struct {
.gen_zir => self.cast(GenZIR).?,
.local_val => return self.cast(LocalVal).?.gen_zir,
.local_ptr => return self.cast(LocalPtr).?.gen_zir,
- .decl => unreachable,
- .zir_module => unreachable,
.file => unreachable,
.container => unreachable,
};
}
- /// Asserts the scope has a parent which is a ZIRModule, Contaienr or File and
+ /// Asserts the scope has a parent which is a Container or File and
/// returns the sub_file_path field.
pub fn subFilePath(base: *Scope) []const u8 {
switch (base.tag) {
.container => return @fieldParentPtr(Container, "base", base).file_scope.sub_file_path,
.file => return @fieldParentPtr(File, "base", base).sub_file_path,
- .zir_module => return @fieldParentPtr(ZIRModule, "base", base).sub_file_path,
.block => unreachable,
.gen_zir => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
- .decl => unreachable,
- }
- }
-
- pub fn unload(base: *Scope, gpa: *Allocator) void {
- switch (base.tag) {
- .file => return @fieldParentPtr(File, "base", base).unload(gpa),
- .zir_module => return @fieldParentPtr(ZIRModule, "base", base).unload(gpa),
- .block => unreachable,
- .gen_zir => unreachable,
- .local_val => unreachable,
- .local_ptr => unreachable,
- .decl => unreachable,
- .container => unreachable,
}
}
@@ -463,67 +468,28 @@ pub const Scope = struct {
switch (base.tag) {
.container => return @fieldParentPtr(Container, "base", base).file_scope.getSource(module),
.file => return @fieldParentPtr(File, "base", base).getSource(module),
- .zir_module => return @fieldParentPtr(ZIRModule, "base", base).getSource(module),
.gen_zir => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.block => unreachable,
- .decl => unreachable,
}
}
- pub fn getOwnerPkg(base: *Scope) *Package {
+ /// When called from inside a Block Scope, chases the src_decl, not the owner_decl.
+ pub fn getFileScope(base: *Scope) *Scope.File {
var cur = base;
while (true) {
cur = switch (cur.tag) {
- .container => return @fieldParentPtr(Container, "base", cur).file_scope.pkg,
- .file => return @fieldParentPtr(File, "base", cur).pkg,
- .zir_module => unreachable, // TODO are zir modules allowed to import packages?
+ .container => return @fieldParentPtr(Container, "base", cur).file_scope,
+ .file => return @fieldParentPtr(File, "base", cur),
.gen_zir => @fieldParentPtr(GenZIR, "base", cur).parent,
.local_val => @fieldParentPtr(LocalVal, "base", cur).parent,
.local_ptr => @fieldParentPtr(LocalPtr, "base", cur).parent,
- .block => @fieldParentPtr(Block, "base", cur).decl.scope,
- .decl => @fieldParentPtr(DeclAnalysis, "base", cur).decl.scope,
+ .block => return @fieldParentPtr(Block, "base", cur).src_decl.container.file_scope,
};
}
}
- /// Asserts the scope is a namespace Scope and removes the Decl from the namespace.
- pub fn removeDecl(base: *Scope, child: *Decl) void {
- switch (base.tag) {
- .container => return @fieldParentPtr(Container, "base", base).removeDecl(child),
- .zir_module => return @fieldParentPtr(ZIRModule, "base", base).removeDecl(child),
- .file => unreachable,
- .block => unreachable,
- .gen_zir => unreachable,
- .local_val => unreachable,
- .local_ptr => unreachable,
- .decl => unreachable,
- }
- }
-
- /// Asserts the scope is a File or ZIRModule and deinitializes it, then deallocates it.
- pub fn destroy(base: *Scope, gpa: *Allocator) void {
- switch (base.tag) {
- .file => {
- const scope_file = @fieldParentPtr(File, "base", base);
- scope_file.deinit(gpa);
- gpa.destroy(scope_file);
- },
- .zir_module => {
- const scope_zir_module = @fieldParentPtr(ZIRModule, "base", base);
- scope_zir_module.deinit(gpa);
- gpa.destroy(scope_zir_module);
- },
- .block => unreachable,
- .gen_zir => unreachable,
- .local_val => unreachable,
- .local_ptr => unreachable,
- .decl => unreachable,
- .container => unreachable,
- }
- }
-
fn name_hash_hash(x: NameHash) u32 {
return @truncate(u32, @bitCast(u128, x));
}
@@ -533,14 +499,11 @@ pub const Scope = struct {
}
pub const Tag = enum {
- /// .zir source code.
- zir_module,
/// .zig source code.
file,
/// struct, enum or union, every .file contains one of these.
container,
block,
- decl,
gen_zir,
local_val,
local_ptr,
@@ -553,19 +516,19 @@ pub const Scope = struct {
file_scope: *Scope.File,
/// Direct children of the file.
- decls: std.AutoArrayHashMapUnmanaged(*Decl, void),
+ decls: std.AutoArrayHashMapUnmanaged(*Decl, void) = .{},
ty: Type,
pub fn deinit(self: *Container, gpa: *Allocator) void {
self.decls.deinit(gpa);
// TODO either Container of File should have an arena for sub_file_path and ty
- gpa.destroy(self.ty.cast(Type.Payload.EmptyStruct).?);
+ gpa.destroy(self.ty.castTag(.empty_struct).?);
gpa.free(self.file_scope.sub_file_path);
self.* = undefined;
}
pub fn removeDecl(self: *Container, child: *Decl) void {
- _ = self.decls.remove(child);
+ _ = self.decls.swapRemove(child);
}
pub fn fullyQualifiedNameHash(self: *Container, name: []const u8) NameHash {
@@ -577,6 +540,12 @@ pub const Scope = struct {
pub const File = struct {
pub const base_tag: Tag = .file;
base: Scope = Scope{ .tag = base_tag },
+ status: enum {
+ never_loaded,
+ unloaded_success,
+ unloaded_parse_failure,
+ loaded_success,
+ },
/// Relative to the owning package's root_src_dir.
/// Reference to external memory, not owned by File.
@@ -585,16 +554,8 @@ pub const Scope = struct {
unloaded: void,
bytes: [:0]const u8,
},
- contents: union {
- not_available: void,
- tree: *ast.Tree,
- },
- status: enum {
- never_loaded,
- unloaded_success,
- unloaded_parse_failure,
- loaded_success,
- },
+ /// Whether this is populated or not depends on `status`.
+ tree: ast.Tree,
/// Package that this file is a part of, managed externally.
pkg: *Package,
@@ -608,7 +569,7 @@ pub const Scope = struct {
=> {},
.loaded_success => {
- self.contents.tree.deinit();
+ self.tree.deinit(gpa);
self.status = .unloaded_success;
},
}
@@ -627,9 +588,14 @@ pub const Scope = struct {
self.* = undefined;
}
+ pub fn destroy(self: *File, gpa: *Allocator) void {
+ self.deinit(gpa);
+ gpa.destroy(self);
+ }
+
pub fn dumpSrc(self: *File, src: usize) void {
const loc = std.zig.findLineColumn(self.source.bytes, src);
- std.debug.print("{}:{}:{}\n", .{ self.sub_file_path, loc.line + 1, loc.column + 1 });
+ std.debug.print("{s}:{d}:{d}\n", .{ self.sub_file_path, loc.line + 1, loc.column + 1 });
}
pub fn getSource(self: *File, module: *Module) ![:0]const u8 {
@@ -651,137 +617,92 @@ pub const Scope = struct {
}
};
- pub const ZIRModule = struct {
- pub const base_tag: Tag = .zir_module;
- base: Scope = Scope{ .tag = base_tag },
- /// Relative to the owning package's root_src_dir.
- /// Reference to external memory, not owned by ZIRModule.
- sub_file_path: []const u8,
- source: union(enum) {
- unloaded: void,
- bytes: [:0]const u8,
- },
- contents: union {
- not_available: void,
- module: *zir.Module,
- },
- status: enum {
- never_loaded,
- unloaded_success,
- unloaded_parse_failure,
- unloaded_sema_failure,
-
- loaded_sema_failure,
- loaded_success,
- },
-
- /// Even though .zir files only have 1 module, this set is still needed
- /// because of anonymous Decls, which can exist in the global set, but
- /// not this one.
- decls: ArrayListUnmanaged(*Decl),
-
- pub fn unload(self: *ZIRModule, gpa: *Allocator) void {
- switch (self.status) {
- .never_loaded,
- .unloaded_parse_failure,
- .unloaded_sema_failure,
- .unloaded_success,
- => {},
-
- .loaded_success => {
- self.contents.module.deinit(gpa);
- gpa.destroy(self.contents.module);
- self.contents = .{ .not_available = {} };
- self.status = .unloaded_success;
- },
- .loaded_sema_failure => {
- self.contents.module.deinit(gpa);
- gpa.destroy(self.contents.module);
- self.contents = .{ .not_available = {} };
- self.status = .unloaded_sema_failure;
- },
- }
- switch (self.source) {
- .bytes => |bytes| {
- gpa.free(bytes);
- self.source = .{ .unloaded = {} };
- },
- .unloaded => {},
- }
- }
-
- pub fn deinit(self: *ZIRModule, gpa: *Allocator) void {
- self.decls.deinit(gpa);
- self.unload(gpa);
- self.* = undefined;
- }
-
- pub fn removeDecl(self: *ZIRModule, child: *Decl) void {
- for (self.decls.items) |item, i| {
- if (item == child) {
- _ = self.decls.swapRemove(i);
- return;
- }
- }
- }
-
- pub fn dumpSrc(self: *ZIRModule, src: usize) void {
- const loc = std.zig.findLineColumn(self.source.bytes, src);
- std.debug.print("{}:{}:{}\n", .{ self.sub_file_path, loc.line + 1, loc.column + 1 });
- }
-
- pub fn getSource(self: *ZIRModule, module: *Module) ![:0]const u8 {
- switch (self.source) {
- .unloaded => {
- const source = try module.root_pkg.root_src_directory.handle.readFileAllocOptions(
- module.gpa,
- self.sub_file_path,
- std.math.maxInt(u32),
- null,
- 1,
- 0,
- );
- self.source = .{ .bytes = source };
- return source;
- },
- .bytes => |bytes| return bytes,
- }
- }
-
- pub fn fullyQualifiedNameHash(self: *ZIRModule, name: []const u8) NameHash {
- // ZIR modules only have 1 file with all decls global in the same namespace.
- return std.zig.hashSrc(name);
- }
- };
-
/// This is a temporary structure, references to it are valid only
/// during semantic analysis of the block.
pub const Block = struct {
pub const base_tag: Tag = .block;
+
base: Scope = Scope{ .tag = base_tag },
parent: ?*Block,
+ /// Maps ZIR to TZIR. Shared to sub-blocks.
+ inst_table: *InstTable,
func: ?*Fn,
- decl: *Decl,
+ /// When analyzing an inline function call, owner_decl is the Decl of the caller
+ /// and src_decl is the Decl of the callee.
+ /// This Decl owns the arena memory of this Block.
+ owner_decl: *Decl,
+ /// This Decl is the Decl according to the Zig source code corresponding to this Block.
+ src_decl: *Decl,
instructions: ArrayListUnmanaged(*Inst),
- /// Points to the arena allocator of DeclAnalysis
+ /// Points to the arena allocator of the Decl.
arena: *Allocator,
label: ?Label = null,
+ inlining: ?*Inlining,
is_comptime: bool,
+ /// Shared to sub-blocks.
+ branch_quota: *u32,
+
+ pub const InstTable = std.AutoHashMap(*zir.Inst, *Inst);
+ /// This `Block` maps a block ZIR instruction to the corresponding
+ /// TZIR instruction for break instruction analysis.
pub const Label = struct {
zir_block: *zir.Inst.Block,
- results: ArrayListUnmanaged(*Inst),
+ merges: Merges,
+ };
+
+ /// This `Block` indicates that an inline function call is happening
+ /// and return instructions should be analyzed as a break instruction
+ /// to this TZIR block instruction.
+ /// It is shared among all the blocks in an inline or comptime called
+ /// function.
+ pub const Inlining = struct {
+ /// Shared state among the entire inline/comptime call stack.
+ shared: *Shared,
+ /// We use this to count from 0 so that arg instructions know
+ /// which parameter index they are, without having to store
+ /// a parameter index with each arg instruction.
+ param_index: usize,
+ casted_args: []*Inst,
+ merges: Merges,
+
+ pub const Shared = struct {
+ caller: ?*Fn,
+ branch_count: u32,
+ };
+ };
+
+ pub const Merges = struct {
block_inst: *Inst.Block,
+ /// Separate array list from break_inst_list so that it can be passed directly
+ /// to resolvePeerTypes.
+ results: ArrayListUnmanaged(*Inst),
+ /// Keeps track of the break instructions so that the operand can be replaced
+ /// if we need to add type coercion at the end of block analysis.
+ /// Same indexes, capacity, length as `results`.
+ br_list: ArrayListUnmanaged(*Inst.Br),
};
- };
- /// This is a temporary structure, references to it are valid only
- /// during semantic analysis of the decl.
- pub const DeclAnalysis = struct {
- pub const base_tag: Tag = .decl;
- base: Scope = Scope{ .tag = base_tag },
- decl: *Decl,
- arena: std.heap.ArenaAllocator,
+ /// For debugging purposes.
+ pub fn dump(self: *Block, mod: Module) void {
+ zir.dumpBlock(mod, self);
+ }
+
+ pub fn makeSubBlock(parent: *Block) Block {
+ return .{
+ .parent = parent,
+ .inst_table = parent.inst_table,
+ .func = parent.func,
+ .owner_decl = parent.owner_decl,
+ .src_decl = parent.src_decl,
+ .instructions = .{},
+ .arena = parent.arena,
+ .label = null,
+ .inlining = parent.inlining,
+ .is_comptime = parent.is_comptime,
+ .branch_quota = parent.branch_quota,
+ };
+ }
};
/// This is a temporary structure, references to it are valid only
@@ -789,18 +710,41 @@ pub const Scope = struct {
pub const GenZIR = struct {
pub const base_tag: Tag = .gen_zir;
base: Scope = Scope{ .tag = base_tag },
- /// Parents can be: `GenZIR`, `ZIRModule`, `File`
+ /// Parents can be: `GenZIR`, `File`
parent: *Scope,
decl: *Decl,
arena: *Allocator,
+ force_comptime: bool,
/// The first N instructions in a function body ZIR are arg instructions.
instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
label: ?Label = null,
+ break_block: ?*zir.Inst.Block = null,
+ continue_block: ?*zir.Inst.Block = null,
+ /// Only valid when setBlockResultLoc is called.
+ break_result_loc: astgen.ResultLoc = undefined,
+ /// When a block has a pointer result location, here it is.
+ rl_ptr: ?*zir.Inst = null,
+ /// Keeps track of how many branches of a block did not actually
+ /// consume the result location. astgen uses this to figure out
+ /// whether to rely on break instructions or writing to the result
+ /// pointer for the result instruction.
+ rvalue_rl_count: usize = 0,
+ /// Keeps track of how many break instructions there are. When astgen is finished
+ /// with a block, it can check this against rvalue_rl_count to find out whether
+ /// the break instructions should be downgraded to break_void.
+ break_count: usize = 0,
+ /// Tracks `break :foo bar` instructions so they can possibly be elided later if
+ /// the labeled block ends up not needing a result location pointer.
+ labeled_breaks: std.ArrayListUnmanaged(*zir.Inst.Break) = .{},
+ /// Tracks `store_to_block_ptr` instructions that correspond to break instructions
+ /// so they can possibly be elided later if the labeled block ends up not needing
+ /// a result location pointer.
+ labeled_store_to_block_ptr_list: std.ArrayListUnmanaged(*zir.Inst.BinOp) = .{},
pub const Label = struct {
token: ast.TokenIndex,
block_inst: *zir.Inst.Block,
- result_loc: astgen.ResultLoc,
+ used: bool = false,
};
};
@@ -831,17 +775,79 @@ pub const Scope = struct {
};
};
+/// This struct holds data necessary to construct API-facing `AllErrors.Message`.
+/// Its memory is managed with the general purpose allocator so that they
+/// can be created and destroyed in response to incremental updates.
+/// In some cases, the Scope.File could have been inferred from where the ErrorMsg
+/// is stored. For example, if it is stored in Module.failed_decls, then the Scope.File
+/// would be determined by the Decl Scope. However, the data structure contains the field
+/// anyway so that `ErrorMsg` can be reused for error notes, which may be in a different
+/// file than the parent error message. It also simplifies processing of error messages.
+pub const ErrorMsg = struct {
+ src_loc: SrcLoc,
+ msg: []const u8,
+ notes: []ErrorMsg = &.{},
+
+ pub fn create(
+ gpa: *Allocator,
+ src_loc: SrcLoc,
+ comptime format: []const u8,
+ args: anytype,
+ ) !*ErrorMsg {
+ const self = try gpa.create(ErrorMsg);
+ errdefer gpa.destroy(self);
+ self.* = try init(gpa, src_loc, format, args);
+ return self;
+ }
+
+ /// Assumes the ErrorMsg struct and msg were both allocated with `gpa`,
+ /// as well as all notes.
+ pub fn destroy(self: *ErrorMsg, gpa: *Allocator) void {
+ self.deinit(gpa);
+ gpa.destroy(self);
+ }
+
+ pub fn init(
+ gpa: *Allocator,
+ src_loc: SrcLoc,
+ comptime format: []const u8,
+ args: anytype,
+ ) !ErrorMsg {
+ return ErrorMsg{
+ .src_loc = src_loc,
+ .msg = try std.fmt.allocPrint(gpa, format, args),
+ };
+ }
+
+ pub fn deinit(self: *ErrorMsg, gpa: *Allocator) void {
+ for (self.notes) |*note| {
+ note.deinit(gpa);
+ }
+ gpa.free(self.notes);
+ gpa.free(self.msg);
+ self.* = undefined;
+ }
+};
+
+/// Canonical reference to a position within a source file.
+pub const SrcLoc = struct {
+ file_scope: *Scope.File,
+ byte_offset: usize,
+};
+
pub const InnerError = error{ OutOfMemory, AnalysisFail };
pub fn deinit(self: *Module) void {
const gpa = self.gpa;
+ self.compile_log_text.deinit(gpa);
+
self.zig_cache_artifact_directory.handle.close();
self.deletion_set.deinit(gpa);
for (self.decl_table.items()) |entry| {
- entry.value.destroy(gpa);
+ entry.value.destroy(self);
}
self.decl_table.deinit(gpa);
@@ -850,6 +856,11 @@ pub fn deinit(self: *Module) void {
}
self.failed_decls.deinit(gpa);
+ for (self.emit_h_failed_decls.items()) |entry| {
+ entry.value.destroy(gpa);
+ }
+ self.emit_h_failed_decls.deinit(gpa);
+
for (self.failed_files.items()) |entry| {
entry.value.destroy(gpa);
}
@@ -860,6 +871,8 @@ pub fn deinit(self: *Module) void {
}
self.failed_exports.deinit(gpa);
+ self.compile_log_decls.deinit(gpa);
+
for (self.decl_exports.items()) |entry| {
const export_list = entry.value;
gpa.free(export_list);
@@ -881,7 +894,7 @@ pub fn deinit(self: *Module) void {
self.global_error_set.deinit(gpa);
for (self.import_table.items()) |entry| {
- entry.value.base.destroy(gpa);
+ entry.value.destroy(gpa);
}
self.import_table.deinit(gpa);
}
@@ -894,7 +907,7 @@ fn freeExportList(gpa: *Allocator, export_list: []*Export) void {
gpa.free(export_list);
}
-pub fn ensureDeclAnalyzed(self: *Module, decl: *Decl) InnerError!void {
+pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void {
const tracy = trace(@src());
defer tracy.end();
@@ -911,11 +924,11 @@ pub fn ensureDeclAnalyzed(self: *Module, decl: *Decl) InnerError!void {
.complete => return,
.outdated => blk: {
- log.debug("re-analyzing {}\n", .{decl.name});
+ log.debug("re-analyzing {s}", .{decl.name});
// The exports this Decl performs will be re-discovered, so we remove them here
// prior to re-analysis.
- self.deleteDeclExports(decl);
+ mod.deleteDeclExports(decl);
// Dependencies will be re-discovered, so we remove them here prior to re-analysis.
for (decl.dependencies.items()) |entry| {
const dep = entry.key;
@@ -924,7 +937,7 @@ pub fn ensureDeclAnalyzed(self: *Module, decl: *Decl) InnerError!void {
// We don't perform a deletion here, because this Decl or another one
// may end up referencing it before the update is complete.
dep.deletion_flag = true;
- try self.deletion_set.append(self.gpa, dep);
+ try mod.deletion_set.append(mod.gpa, dep);
}
}
decl.dependencies.clearRetainingCapacity();
@@ -935,24 +948,21 @@ pub fn ensureDeclAnalyzed(self: *Module, decl: *Decl) InnerError!void {
.unreferenced => false,
};
- const type_changed = if (self.root_scope.cast(Scope.ZIRModule)) |zir_module|
- try zir_sema.analyzeZirDecl(self, decl, zir_module.contents.module.decls[decl.src_index])
- else
- self.astGenAndAnalyzeDecl(decl) catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.AnalysisFail => return error.AnalysisFail,
- else => {
- try self.failed_decls.ensureCapacity(self.gpa, self.failed_decls.items().len + 1);
- self.failed_decls.putAssumeCapacityNoClobber(decl, try Compilation.ErrorMsg.create(
- self.gpa,
- decl.src(),
- "unable to analyze: {}",
- .{@errorName(err)},
- ));
- decl.analysis = .sema_failure_retryable;
- return error.AnalysisFail;
- },
- };
+ const type_changed = mod.astgenAndSemaDecl(decl) catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.AnalysisFail => return error.AnalysisFail,
+ else => {
+ decl.analysis = .sema_failure_retryable;
+ try mod.failed_decls.ensureCapacity(mod.gpa, mod.failed_decls.items().len + 1);
+ mod.failed_decls.putAssumeCapacityNoClobber(decl, try ErrorMsg.create(
+ mod.gpa,
+ decl.srcLoc(),
+ "unable to analyze: {s}",
+ .{@errorName(err)},
+ ));
+ return error.AnalysisFail;
+ },
+ };
if (subsequent_analysis) {
// We may need to chase the dependants and re-analyze them.
@@ -971,8 +981,8 @@ pub fn ensureDeclAnalyzed(self: *Module, decl: *Decl) InnerError!void {
.codegen_failure,
.codegen_failure_retryable,
.complete,
- => if (dep.generation != self.generation) {
- try self.markOutdatedDecl(dep);
+ => if (dep.generation != mod.generation) {
+ try mod.markOutdatedDecl(dep);
},
}
}
@@ -980,495 +990,717 @@ pub fn ensureDeclAnalyzed(self: *Module, decl: *Decl) InnerError!void {
}
}
-fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
+/// Returns `true` if the Decl type changed.
+/// Returns `true` if this is the first time analyzing the Decl.
+/// Returns `false` otherwise.
+fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
const tracy = trace(@src());
defer tracy.end();
- const container_scope = decl.scope.cast(Scope.Container).?;
- const tree = try self.getAstTree(container_scope);
- const ast_node = tree.root_node.decls()[decl.src_index];
- switch (ast_node.tag) {
- .FnProto => {
- const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", ast_node);
+ const tree = try mod.getAstTree(decl.container.file_scope);
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const decl_node = tree.rootDecls()[decl.src_index];
+ switch (node_tags[decl_node]) {
+ .fn_decl => {
+ const fn_proto = node_datas[decl_node].lhs;
+ const body = node_datas[decl_node].rhs;
+ switch (node_tags[fn_proto]) {
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoSimple(&params, fn_proto));
+ },
+ .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoMulti(fn_proto)),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoOne(&params, fn_proto));
+ },
+ .fn_proto => return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProto(fn_proto)),
+ else => unreachable,
+ }
+ },
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoSimple(&params, decl_node));
+ },
+ .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoMulti(decl_node)),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoOne(&params, decl_node));
+ },
+ .fn_proto => return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProto(decl_node)),
+
+ .global_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.globalVarDecl(decl_node)),
+ .local_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.localVarDecl(decl_node)),
+ .simple_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.simpleVarDecl(decl_node)),
+ .aligned_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.alignedVarDecl(decl_node)),
+ .@"comptime" => {
decl.analysis = .in_progress;
- // This arena allocator's memory is discarded at the end of this function. It is used
- // to determine the type of the function, and hence the type of the decl, which is needed
- // to complete the Decl analysis.
- var fn_type_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer fn_type_scope_arena.deinit();
- var fn_type_scope: Scope.GenZIR = .{
+ // A comptime decl does not store any value so we can just deinit this arena after analysis is done.
+ var analysis_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer analysis_arena.deinit();
+ var gen_scope: Scope.GenZIR = .{
.decl = decl,
- .arena = &fn_type_scope_arena.allocator,
- .parent = decl.scope,
+ .arena = &analysis_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = true,
};
- defer fn_type_scope.instructions.deinit(self.gpa);
+ defer gen_scope.instructions.deinit(mod.gpa);
- decl.is_pub = fn_proto.getVisibToken() != null;
- const body_node = fn_proto.getBodyNode() orelse
- return self.failTok(&fn_type_scope.base, fn_proto.fn_token, "TODO implement extern functions", .{});
+ const block_expr = node_datas[decl_node].lhs;
+ _ = try astgen.comptimeExpr(mod, &gen_scope.base, .none, block_expr);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
+ }
- const param_decls = fn_proto.params();
- const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_decls.len);
+ var inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer inst_table.deinit();
- const fn_src = tree.token_locs[fn_proto.fn_token].start;
- const type_type = try astgen.addZIRInstConst(self, &fn_type_scope.base, fn_src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.type_type),
+ var branch_quota: u32 = default_eval_branch_quota;
+
+ var block_scope: Scope.Block = .{
+ .parent = null,
+ .inst_table = &inst_table,
+ .func = null,
+ .owner_decl = decl,
+ .src_decl = decl,
+ .instructions = .{},
+ .arena = &analysis_arena.allocator,
+ .inlining = null,
+ .is_comptime = true,
+ .branch_quota = &branch_quota,
+ };
+ defer block_scope.instructions.deinit(mod.gpa);
+
+ _ = try zir_sema.analyzeBody(mod, &block_scope, .{
+ .instructions = gen_scope.instructions.items,
});
- const type_type_rl: astgen.ResultLoc = .{ .ty = type_type };
- for (param_decls) |param_decl, i| {
- const param_type_node = switch (param_decl.param_type) {
- .any_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
- .type_expr => |node| node,
- };
- param_types[i] = try astgen.expr(self, &fn_type_scope.base, type_type_rl, param_type_node);
- }
- if (fn_proto.getVarArgsToken()) |var_args_token| {
- return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{});
- }
- if (fn_proto.getLibName()) |lib_name| {
- return self.failNode(&fn_type_scope.base, lib_name, "TODO implement function library name", .{});
- }
- if (fn_proto.getAlignExpr()) |align_expr| {
- return self.failNode(&fn_type_scope.base, align_expr, "TODO implement function align expression", .{});
+
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
+ return true;
+ },
+ .@"usingnamespace" => @panic("TODO usingnamespace decl"),
+ else => unreachable,
+ }
+}
+
+fn astgenAndSemaFn(
+ mod: *Module,
+ decl: *Decl,
+ tree: ast.Tree,
+ body_node: ast.Node.Index,
+ fn_proto: ast.full.FnProto,
+) !bool {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ decl.analysis = .in_progress;
+
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ // This arena allocator's memory is discarded at the end of this function. It is used
+ // to determine the type of the function, and hence the type of the decl, which is needed
+ // to complete the Decl analysis.
+ var fn_type_scope_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer fn_type_scope_arena.deinit();
+ var fn_type_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &fn_type_scope_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = true,
+ };
+ defer fn_type_scope.instructions.deinit(mod.gpa);
+
+ decl.is_pub = fn_proto.visib_token != null;
+
+ // The AST params array does not contain anytype and ... parameters.
+ // We must iterate to count how many param types to allocate.
+ const param_count = blk: {
+ var count: usize = 0;
+ var it = fn_proto.iterate(tree);
+ while (it.next()) |_| {
+ count += 1;
+ }
+ break :blk count;
+ };
+ const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_count);
+ const fn_src = token_starts[fn_proto.ast.fn_token];
+ const type_type = try astgen.addZIRInstConst(mod, &fn_type_scope.base, fn_src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.type_type),
+ });
+ const type_type_rl: astgen.ResultLoc = .{ .ty = type_type };
+
+ {
+ var param_type_i: usize = 0;
+ var it = fn_proto.iterate(tree);
+ while (it.next()) |param| : (param_type_i += 1) {
+ if (param.anytype_ellipsis3) |token| {
+ switch (token_tags[token]) {
+ .keyword_anytype => return mod.failTok(
+ &fn_type_scope.base,
+ token,
+ "TODO implement anytype parameter",
+ .{},
+ ),
+ .ellipsis3 => return mod.failTok(
+ &fn_type_scope.base,
+ token,
+ "TODO implement var args",
+ .{},
+ ),
+ else => unreachable,
+ }
}
- if (fn_proto.getSectionExpr()) |sect_expr| {
- return self.failNode(&fn_type_scope.base, sect_expr, "TODO implement function section expression", .{});
+ const param_type_node = param.type_expr;
+ assert(param_type_node != 0);
+ param_types[param_type_i] =
+ try astgen.expr(mod, &fn_type_scope.base, type_type_rl, param_type_node);
+ }
+ assert(param_type_i == param_count);
+ }
+ if (fn_proto.lib_name) |lib_name_token| blk: {
+ // TODO call std.zig.parseStringLiteral
+ const lib_name_str = mem.trim(u8, tree.tokenSlice(lib_name_token), "\"");
+ log.debug("extern fn symbol expected in lib '{s}'", .{lib_name_str});
+ const target = mod.comp.getTarget();
+ if (target_util.is_libc_lib_name(target, lib_name_str)) {
+ if (!mod.comp.bin_file.options.link_libc) {
+ return mod.failTok(
+ &fn_type_scope.base,
+ lib_name_token,
+ "dependency on libc must be explicitly specified in the build command",
+ .{},
+ );
}
- if (fn_proto.getCallconvExpr()) |callconv_expr| {
- return self.failNode(
+ break :blk;
+ }
+ if (target_util.is_libcpp_lib_name(target, lib_name_str)) {
+ if (!mod.comp.bin_file.options.link_libcpp) {
+ return mod.failTok(
&fn_type_scope.base,
- callconv_expr,
- "TODO implement function calling convention expression",
+ lib_name_token,
+ "dependency on libc++ must be explicitly specified in the build command",
.{},
);
}
- const return_type_expr = switch (fn_proto.return_type) {
- .Explicit => |node| node,
- .InferErrorSet => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement inferred error sets", .{}),
- .Invalid => |tok| return self.failTok(&fn_type_scope.base, tok, "unable to parse return type", .{}),
- };
-
- const return_type_inst = try astgen.expr(self, &fn_type_scope.base, type_type_rl, return_type_expr);
- const fn_type_inst = try astgen.addZIRInst(self, &fn_type_scope.base, fn_src, zir.Inst.FnType, .{
- .return_type = return_type_inst,
- .param_types = param_types,
- }, .{});
+ break :blk;
+ }
+ if (!target.isWasm() and !mod.comp.bin_file.options.pic) {
+ return mod.failTok(
+ &fn_type_scope.base,
+ lib_name_token,
+ "dependency on dynamic library '{s}' requires enabling Position Independent Code. Fixed by `-l{s}` or `-fPIC`.",
+ .{ lib_name_str, lib_name_str },
+ );
+ }
+ mod.comp.stage1AddLinkLib(lib_name_str) catch |err| {
+ return mod.failTok(
+ &fn_type_scope.base,
+ lib_name_token,
+ "unable to add link lib '{s}': {s}",
+ .{ lib_name_str, @errorName(err) },
+ );
+ };
+ }
+ if (fn_proto.ast.align_expr != 0) {
+ return mod.failNode(
+ &fn_type_scope.base,
+ fn_proto.ast.align_expr,
+ "TODO implement function align expression",
+ .{},
+ );
+ }
+ if (fn_proto.ast.section_expr != 0) {
+ return mod.failNode(
+ &fn_type_scope.base,
+ fn_proto.ast.section_expr,
+ "TODO implement function section expression",
+ .{},
+ );
+ }
- if (self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
- }
+ const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
+ if (token_tags[maybe_bang] == .bang) {
+ return mod.failTok(&fn_type_scope.base, maybe_bang, "TODO implement inferred error sets", .{});
+ }
+ const return_type_inst = try astgen.expr(
+ mod,
+ &fn_type_scope.base,
+ type_type_rl,
+ fn_proto.ast.return_type,
+ );
+ const fn_type_inst = if (fn_proto.ast.callconv_expr != 0) cc: {
+ // TODO instead of enum literal type, this needs to be the
+ // std.builtin.CallingConvention enum. We need to implement importing other files
+ // and enums in order to fix this.
+ const src = token_starts[tree.firstToken(fn_proto.ast.callconv_expr)];
+ const enum_lit_ty = try astgen.addZIRInstConst(mod, &fn_type_scope.base, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.enum_literal_type),
+ });
+ const cc = try astgen.comptimeExpr(mod, &fn_type_scope.base, .{
+ .ty = enum_lit_ty,
+ }, fn_proto.ast.callconv_expr);
+ break :cc try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type_cc, .{
+ .return_type = return_type_inst,
+ .param_types = param_types,
+ .cc = cc,
+ });
+ } else try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type, .{
+ .return_type = return_type_inst,
+ .param_types = param_types,
+ });
- // We need the memory for the Type to go into the arena for the Decl
- var decl_arena = std.heap.ArenaAllocator.init(self.gpa);
- errdefer decl_arena.deinit();
- const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
+ }
- var block_scope: Scope.Block = .{
- .parent = null,
- .func = null,
- .decl = decl,
- .instructions = .{},
- .arena = &decl_arena.allocator,
- .is_comptime = false,
- };
- defer block_scope.instructions.deinit(self.gpa);
+ // We need the memory for the Type to go into the arena for the Decl
+ var decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ errdefer decl_arena.deinit();
+ const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
- const fn_type = try zir_sema.analyzeBodyValueAsType(self, &block_scope, fn_type_inst, .{
- .instructions = fn_type_scope.instructions.items,
- });
- const new_func = try decl_arena.allocator.create(Fn);
- const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
-
- const fn_zir = blk: {
- // This scope's arena memory is discarded after the ZIR generation
- // pass completes, and semantic analysis of it completes.
- var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- errdefer gen_scope_arena.deinit();
- var gen_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &gen_scope_arena.allocator,
- .parent = decl.scope,
- };
- defer gen_scope.instructions.deinit(self.gpa);
-
- // We need an instruction for each parameter, and they must be first in the body.
- try gen_scope.instructions.resize(self.gpa, fn_proto.params_len);
- var params_scope = &gen_scope.base;
- for (fn_proto.params()) |param, i| {
- const name_token = param.name_token.?;
- const src = tree.token_locs[name_token].start;
- const param_name = tree.tokenSlice(name_token); // TODO: call identifierTokenString
- const arg = try gen_scope_arena.allocator.create(zir.Inst.Arg);
- arg.* = .{
- .base = .{
- .tag = .arg,
- .src = src,
- },
- .positionals = .{
- .name = param_name,
- },
- .kw_args = .{},
- };
- gen_scope.instructions.items[i] = &arg.base;
- const sub_scope = try gen_scope_arena.allocator.create(Scope.LocalVal);
- sub_scope.* = .{
- .parent = params_scope,
- .gen_zir = &gen_scope,
- .name = param_name,
- .inst = &arg.base,
- };
- params_scope = &sub_scope.base;
- }
+ var inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer inst_table.deinit();
- const body_block = body_node.cast(ast.Node.Block).?;
+ var branch_quota: u32 = default_eval_branch_quota;
- try astgen.blockExpr(self, params_scope, body_block);
+ var block_scope: Scope.Block = .{
+ .parent = null,
+ .inst_table = &inst_table,
+ .func = null,
+ .owner_decl = decl,
+ .src_decl = decl,
+ .instructions = .{},
+ .arena = &decl_arena.allocator,
+ .inlining = null,
+ .is_comptime = false,
+ .branch_quota = &branch_quota,
+ };
+ defer block_scope.instructions.deinit(mod.gpa);
- if (gen_scope.instructions.items.len == 0 or
- !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
- {
- const src = tree.token_locs[body_block.rbrace].start;
- _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid);
- }
+ const fn_type = try zir_sema.analyzeBodyValueAsType(mod, &block_scope, fn_type_inst, .{
+ .instructions = fn_type_scope.instructions.items,
+ });
+ if (body_node == 0) {
+ // Extern function.
+ var type_changed = true;
+ if (decl.typedValueManaged()) |tvm| {
+ type_changed = !tvm.typed_value.ty.eql(fn_type);
- if (self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
- }
+ tvm.deinit(mod.gpa);
+ }
+ const fn_val = try Value.Tag.extern_fn.create(&decl_arena.allocator, decl);
- const fn_zir = try gen_scope_arena.allocator.create(Fn.ZIR);
- fn_zir.* = .{
- .body = .{
- .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
- },
- .arena = gen_scope_arena.state,
- };
- break :blk fn_zir;
- };
+ decl_arena_state.* = decl_arena.state;
+ decl.typed_value = .{
+ .most_recent = .{
+ .typed_value = .{ .ty = fn_type, .val = fn_val },
+ .arena = decl_arena_state,
+ },
+ };
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
- new_func.* = .{
- .analysis = .{ .queued = fn_zir },
- .owner_decl = decl,
- };
- fn_payload.* = .{ .func = new_func };
+ try mod.comp.bin_file.allocateDeclIndexes(decl);
+ try mod.comp.work_queue.writeItem(.{ .codegen_decl = decl });
- var prev_type_has_bits = false;
- var type_changed = true;
+ if (type_changed and mod.emit_h != null) {
+ try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl });
+ }
- if (decl.typedValueManaged()) |tvm| {
- prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits();
- type_changed = !tvm.typed_value.ty.eql(fn_type);
+ return type_changed;
+ }
- tvm.deinit(self.gpa);
- }
+ const new_func = try decl_arena.allocator.create(Fn);
+ const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
- decl_arena_state.* = decl_arena.state;
- decl.typed_value = .{
- .most_recent = .{
- .typed_value = .{
- .ty = fn_type,
- .val = Value.initPayload(&fn_payload.base),
- },
- .arena = decl_arena_state,
+ const fn_zir: zir.Body = blk: {
+ // We put the ZIR inside the Decl arena.
+ var gen_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &decl_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = false,
+ };
+ defer gen_scope.instructions.deinit(mod.gpa);
+
+ // We need an instruction for each parameter, and they must be first in the body.
+ try gen_scope.instructions.resize(mod.gpa, param_count);
+ var params_scope = &gen_scope.base;
+ var i: usize = 0;
+ var it = fn_proto.iterate(tree);
+ while (it.next()) |param| : (i += 1) {
+ const name_token = param.name_token.?;
+ const src = token_starts[name_token];
+ const param_name = try mod.identifierTokenString(&gen_scope.base, name_token);
+ const arg = try decl_arena.allocator.create(zir.Inst.Arg);
+ arg.* = .{
+ .base = .{
+ .tag = .arg,
+ .src = src,
+ },
+ .positionals = .{
+ .name = param_name,
},
+ .kw_args = .{},
};
- decl.analysis = .complete;
- decl.generation = self.generation;
-
- if (fn_type.hasCodeGenBits()) {
- // We don't fully codegen the decl until later, but we do need to reserve a global
- // offset table index for it. This allows us to codegen decls out of dependency order,
- // increasing how many computations can be done in parallel.
- try self.comp.bin_file.allocateDeclIndexes(decl);
- try self.comp.work_queue.writeItem(.{ .codegen_decl = decl });
- } else if (prev_type_has_bits) {
- self.comp.bin_file.freeDecl(decl);
- }
+ gen_scope.instructions.items[i] = &arg.base;
+ const sub_scope = try decl_arena.allocator.create(Scope.LocalVal);
+ sub_scope.* = .{
+ .parent = params_scope,
+ .gen_zir = &gen_scope,
+ .name = param_name,
+ .inst = &arg.base,
+ };
+ params_scope = &sub_scope.base;
+ }
- if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- const export_src = tree.token_locs[maybe_export_token].start;
- const name_loc = tree.token_locs[fn_proto.getNameToken().?];
- const name = tree.tokenSliceLoc(name_loc);
- // The scope needs to have the decl in it.
- try self.analyzeExport(&block_scope.base, export_src, name, decl);
- }
- }
- return type_changed;
- },
- .VarDecl => {
- const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", ast_node);
+ _ = try astgen.expr(mod, params_scope, .none, body_node);
- decl.analysis = .in_progress;
+ if (gen_scope.instructions.items.len == 0 or
+ !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
+ {
+ const src = token_starts[tree.lastToken(body_node)];
+ _ = try astgen.addZIRNoOp(mod, &gen_scope.base, src, .return_void);
+ }
- // We need the memory for the Type to go into the arena for the Decl
- var decl_arena = std.heap.ArenaAllocator.init(self.gpa);
- errdefer decl_arena.deinit();
- const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
+ }
- var block_scope: Scope.Block = .{
- .parent = null,
- .func = null,
- .decl = decl,
- .instructions = .{},
- .arena = &decl_arena.allocator,
- .is_comptime = true,
- };
- defer block_scope.instructions.deinit(self.gpa);
-
- decl.is_pub = var_decl.getVisibToken() != null;
- const is_extern = blk: {
- const maybe_extern_token = var_decl.getExternExportToken() orelse
- break :blk false;
- if (tree.token_ids[maybe_extern_token] != .Keyword_extern) break :blk false;
- if (var_decl.getInitNode()) |some| {
- return self.failNode(&block_scope.base, some, "extern variables have no initializers", .{});
- }
- break :blk true;
- };
- if (var_decl.getLibName()) |lib_name| {
- assert(is_extern);
- return self.failNode(&block_scope.base, lib_name, "TODO implement function library name", .{});
- }
- const is_mutable = tree.token_ids[var_decl.mut_token] == .Keyword_var;
- const is_threadlocal = if (var_decl.getThreadLocalToken()) |some| blk: {
- if (!is_mutable) {
- return self.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{});
- }
- break :blk true;
- } else false;
- assert(var_decl.getComptimeToken() == null);
- if (var_decl.getAlignNode()) |align_expr| {
- return self.failNode(&block_scope.base, align_expr, "TODO implement function align expression", .{});
- }
- if (var_decl.getSectionNode()) |sect_expr| {
- return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{});
- }
+ break :blk .{
+ .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
+ };
+ };
- const var_info: struct { ty: Type, val: ?Value } = if (var_decl.getInitNode()) |init_node| vi: {
- var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer gen_scope_arena.deinit();
- var gen_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &gen_scope_arena.allocator,
- .parent = decl.scope,
- };
- defer gen_scope.instructions.deinit(self.gpa);
+ const is_inline = fn_type.fnCallingConvention() == .Inline;
+ const anal_state: Fn.Analysis = if (is_inline) .inline_only else .queued;
- const init_result_loc: astgen.ResultLoc = if (var_decl.getTypeNode()) |type_node| rl: {
- const src = tree.token_locs[type_node.firstToken()].start;
- const type_type = try astgen.addZIRInstConst(self, &gen_scope.base, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.type_type),
- });
- const var_type = try astgen.expr(self, &gen_scope.base, .{ .ty = type_type }, type_node);
- break :rl .{ .ty = var_type };
- } else .none;
-
- const src = tree.token_locs[init_node.firstToken()].start;
- const init_inst = try astgen.expr(self, &gen_scope.base, init_result_loc, init_node);
- if (self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {};
- }
+ new_func.* = .{
+ .state = anal_state,
+ .zir = fn_zir,
+ .body = undefined,
+ .owner_decl = decl,
+ };
+ fn_payload.* = .{
+ .base = .{ .tag = .function },
+ .data = new_func,
+ };
- var inner_block: Scope.Block = .{
- .parent = null,
- .func = null,
- .decl = decl,
- .instructions = .{},
- .arena = &gen_scope_arena.allocator,
- .is_comptime = true,
- };
- defer inner_block.instructions.deinit(self.gpa);
- try zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items });
-
- // The result location guarantees the type coercion.
- const analyzed_init_inst = init_inst.analyzed_inst.?;
- // The is_comptime in the Scope.Block guarantees the result is comptime-known.
- const val = analyzed_init_inst.value().?;
-
- const ty = try analyzed_init_inst.ty.copy(block_scope.arena);
- break :vi .{
- .ty = ty,
- .val = try val.copy(block_scope.arena),
- };
- } else if (!is_extern) {
- return self.failTok(&block_scope.base, var_decl.firstToken(), "variables must be initialized", .{});
- } else if (var_decl.getTypeNode()) |type_node| vi: {
- // Temporary arena for the zir instructions.
- var type_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer type_scope_arena.deinit();
- var type_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &type_scope_arena.allocator,
- .parent = decl.scope,
- };
- defer type_scope.instructions.deinit(self.gpa);
+ var prev_type_has_bits = false;
+ var prev_is_inline = false;
+ var type_changed = true;
- const src = tree.token_locs[type_node.firstToken()].start;
- const type_type = try astgen.addZIRInstConst(self, &type_scope.base, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.type_type),
- });
- const var_type = try astgen.expr(self, &type_scope.base, .{ .ty = type_type }, type_node);
- if (self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {};
- }
+ if (decl.typedValueManaged()) |tvm| {
+ prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits();
+ type_changed = !tvm.typed_value.ty.eql(fn_type);
+ if (tvm.typed_value.val.castTag(.function)) |payload| {
+ const prev_func = payload.data;
+ prev_is_inline = prev_func.state == .inline_only;
+ }
- const ty = try zir_sema.analyzeBodyValueAsType(self, &block_scope, var_type, .{
- .instructions = type_scope.instructions.items,
- });
- break :vi .{
- .ty = ty,
- .val = null,
- };
- } else {
- return self.failTok(&block_scope.base, var_decl.firstToken(), "unable to infer variable type", .{});
- };
+ tvm.deinit(mod.gpa);
+ }
- if (is_mutable and !var_info.ty.isValidVarType(is_extern)) {
- return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_info.ty});
+ decl_arena_state.* = decl_arena.state;
+ decl.typed_value = .{
+ .most_recent = .{
+ .typed_value = .{
+ .ty = fn_type,
+ .val = Value.initPayload(&fn_payload.base),
+ },
+ .arena = decl_arena_state,
+ },
+ };
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
+
+ if (!is_inline and fn_type.hasCodeGenBits()) {
+ // We don't fully codegen the decl until later, but we do need to reserve a global
+ // offset table index for it. This allows us to codegen decls out of dependency order,
+ // increasing how many computations can be done in parallel.
+ try mod.comp.bin_file.allocateDeclIndexes(decl);
+ try mod.comp.work_queue.writeItem(.{ .codegen_decl = decl });
+ if (type_changed and mod.emit_h != null) {
+ try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl });
+ }
+ } else if (!prev_is_inline and prev_type_has_bits) {
+ mod.comp.bin_file.freeDecl(decl);
+ }
+
+ if (fn_proto.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ if (is_inline) {
+ return mod.failTok(
+ &block_scope.base,
+ maybe_export_token,
+ "export of inline function",
+ .{},
+ );
}
+ const export_src = token_starts[maybe_export_token];
+ const name = tree.tokenSlice(fn_proto.name_token.?); // TODO identifierTokenString
+ // The scope needs to have the decl in it.
+ try mod.analyzeExport(&block_scope.base, export_src, name, decl);
+ }
+ }
+ return type_changed or is_inline != prev_is_inline;
+}
- var type_changed = true;
- if (decl.typedValueManaged()) |tvm| {
- type_changed = !tvm.typed_value.ty.eql(var_info.ty);
+fn astgenAndSemaVarDecl(
+ mod: *Module,
+ decl: *Decl,
+ tree: ast.Tree,
+ var_decl: ast.full.VarDecl,
+) !bool {
+ const tracy = trace(@src());
+ defer tracy.end();
- tvm.deinit(self.gpa);
- }
+ decl.analysis = .in_progress;
- const new_variable = try decl_arena.allocator.create(Var);
- const var_payload = try decl_arena.allocator.create(Value.Payload.Variable);
- new_variable.* = .{
- .owner_decl = decl,
- .init = var_info.val orelse undefined,
- .is_extern = is_extern,
- .is_mutable = is_mutable,
- .is_threadlocal = is_threadlocal,
- };
- var_payload.* = .{ .variable = new_variable };
-
- decl_arena_state.* = decl_arena.state;
- decl.typed_value = .{
- .most_recent = .{
- .typed_value = .{
- .ty = var_info.ty,
- .val = Value.initPayload(&var_payload.base),
- },
- .arena = decl_arena_state,
- },
- };
- decl.analysis = .complete;
- decl.generation = self.generation;
-
- if (var_decl.getExternExportToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- const export_src = tree.token_locs[maybe_export_token].start;
- const name_loc = tree.token_locs[var_decl.name_token];
- const name = tree.tokenSliceLoc(name_loc);
- // The scope needs to have the decl in it.
- try self.analyzeExport(&block_scope.base, export_src, name, decl);
- }
- }
- return type_changed;
- },
- .Comptime => {
- const comptime_decl = @fieldParentPtr(ast.Node.Comptime, "base", ast_node);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
- decl.analysis = .in_progress;
+ // We need the memory for the Type to go into the arena for the Decl
+ var decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ errdefer decl_arena.deinit();
+ const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
- // A comptime decl does not store any value so we can just deinit this arena after analysis is done.
- var analysis_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer analysis_arena.deinit();
- var gen_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &analysis_arena.allocator,
- .parent = decl.scope,
- };
- defer gen_scope.instructions.deinit(self.gpa);
+ var decl_inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer decl_inst_table.deinit();
- _ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr);
- if (self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
- }
+ var branch_quota: u32 = default_eval_branch_quota;
- var block_scope: Scope.Block = .{
- .parent = null,
- .func = null,
- .decl = decl,
- .instructions = .{},
- .arena = &analysis_arena.allocator,
- .is_comptime = true,
- };
- defer block_scope.instructions.deinit(self.gpa);
+ var block_scope: Scope.Block = .{
+ .parent = null,
+ .inst_table = &decl_inst_table,
+ .func = null,
+ .owner_decl = decl,
+ .src_decl = decl,
+ .instructions = .{},
+ .arena = &decl_arena.allocator,
+ .inlining = null,
+ .is_comptime = true,
+ .branch_quota = &branch_quota,
+ };
+ defer block_scope.instructions.deinit(mod.gpa);
+
+ decl.is_pub = var_decl.visib_token != null;
+ const is_extern = blk: {
+ const maybe_extern_token = var_decl.extern_export_token orelse break :blk false;
+ if (token_tags[maybe_extern_token] != .keyword_extern) break :blk false;
+ if (var_decl.ast.init_node != 0) {
+ return mod.failNode(
+ &block_scope.base,
+ var_decl.ast.init_node,
+ "extern variables have no initializers",
+ .{},
+ );
+ }
+ break :blk true;
+ };
+ if (var_decl.lib_name) |lib_name| {
+ assert(is_extern);
+ return mod.failTok(&block_scope.base, lib_name, "TODO implement function library name", .{});
+ }
+ const is_mutable = token_tags[var_decl.ast.mut_token] == .keyword_var;
+ const is_threadlocal = if (var_decl.threadlocal_token) |some| blk: {
+ if (!is_mutable) {
+ return mod.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{});
+ }
+ break :blk true;
+ } else false;
+ assert(var_decl.comptime_token == null);
+ if (var_decl.ast.align_node != 0) {
+ return mod.failNode(
+ &block_scope.base,
+ var_decl.ast.align_node,
+ "TODO implement function align expression",
+ .{},
+ );
+ }
+ if (var_decl.ast.section_node != 0) {
+ return mod.failNode(
+ &block_scope.base,
+ var_decl.ast.section_node,
+ "TODO implement function section expression",
+ .{},
+ );
+ }
- _ = try zir_sema.analyzeBody(self, &block_scope.base, .{
- .instructions = gen_scope.instructions.items,
+ const var_info: struct { ty: Type, val: ?Value } = if (var_decl.ast.init_node != 0) vi: {
+ var gen_scope_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer gen_scope_arena.deinit();
+ var gen_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &gen_scope_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = true,
+ };
+ defer gen_scope.instructions.deinit(mod.gpa);
+
+ const init_result_loc: astgen.ResultLoc = if (var_decl.ast.type_node != 0) rl: {
+ const type_node = var_decl.ast.type_node;
+ const src = token_starts[tree.firstToken(type_node)];
+ const type_type = try astgen.addZIRInstConst(mod, &gen_scope.base, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.type_type),
});
+ const var_type = try astgen.expr(mod, &gen_scope.base, .{ .ty = type_type }, type_node);
+ break :rl .{ .ty = var_type };
+ } else .none;
+
+ const init_inst = try astgen.comptimeExpr(
+ mod,
+ &gen_scope.base,
+ init_result_loc,
+ var_decl.ast.init_node,
+ );
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {};
+ }
- decl.analysis = .complete;
- decl.generation = self.generation;
- return true;
- },
- .Use => @panic("TODO usingnamespace decl"),
- else => unreachable,
- }
-}
+ var var_inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer var_inst_table.deinit();
+
+ var branch_quota_vi: u32 = default_eval_branch_quota;
+ var inner_block: Scope.Block = .{
+ .parent = null,
+ .inst_table = &var_inst_table,
+ .func = null,
+ .owner_decl = decl,
+ .src_decl = decl,
+ .instructions = .{},
+ .arena = &gen_scope_arena.allocator,
+ .inlining = null,
+ .is_comptime = true,
+ .branch_quota = &branch_quota_vi,
+ };
+ defer inner_block.instructions.deinit(mod.gpa);
+ try zir_sema.analyzeBody(mod, &inner_block, .{
+ .instructions = gen_scope.instructions.items,
+ });
-fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void {
- try depender.dependencies.ensureCapacity(self.gpa, depender.dependencies.items().len + 1);
- try dependee.dependants.ensureCapacity(self.gpa, dependee.dependants.items().len + 1);
+ // The result location guarantees the type coercion.
+ const analyzed_init_inst = var_inst_table.get(init_inst).?;
+ // The is_comptime in the Scope.Block guarantees the result is comptime-known.
+ const val = analyzed_init_inst.value().?;
- depender.dependencies.putAssumeCapacity(dependee, {});
- dependee.dependants.putAssumeCapacity(depender, {});
-}
+ const ty = try analyzed_init_inst.ty.copy(block_scope.arena);
+ break :vi .{
+ .ty = ty,
+ .val = try val.copy(block_scope.arena),
+ };
+ } else if (!is_extern) {
+ return mod.failTok(
+ &block_scope.base,
+ var_decl.ast.mut_token,
+ "variables must be initialized",
+ .{},
+ );
+ } else if (var_decl.ast.type_node != 0) vi: {
+ const type_node = var_decl.ast.type_node;
+ // Temporary arena for the zir instructions.
+ var type_scope_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer type_scope_arena.deinit();
+ var type_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &type_scope_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = true,
+ };
+ defer type_scope.instructions.deinit(mod.gpa);
-fn getSrcModule(self: *Module, root_scope: *Scope.ZIRModule) !*zir.Module {
- switch (root_scope.status) {
- .never_loaded, .unloaded_success => {
- try self.failed_files.ensureCapacity(self.gpa, self.failed_files.items().len + 1);
+ const var_type = try astgen.typeExpr(mod, &type_scope.base, type_node);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "var_type", decl.name, type_scope.instructions.items) catch {};
+ }
- const source = try root_scope.getSource(self);
+ const ty = try zir_sema.analyzeBodyValueAsType(mod, &block_scope, var_type, .{
+ .instructions = type_scope.instructions.items,
+ });
+ break :vi .{
+ .ty = ty,
+ .val = null,
+ };
+ } else {
+ return mod.failTok(
+ &block_scope.base,
+ var_decl.ast.mut_token,
+ "unable to infer variable type",
+ .{},
+ );
+ };
- var keep_zir_module = false;
- const zir_module = try self.gpa.create(zir.Module);
- defer if (!keep_zir_module) self.gpa.destroy(zir_module);
+ if (is_mutable and !var_info.ty.isValidVarType(is_extern)) {
+ return mod.failTok(
+ &block_scope.base,
+ var_decl.ast.mut_token,
+ "variable of type '{}' must be const",
+ .{var_info.ty},
+ );
+ }
- zir_module.* = try zir.parse(self.gpa, source);
- defer if (!keep_zir_module) zir_module.deinit(self.gpa);
+ var type_changed = true;
+ if (decl.typedValueManaged()) |tvm| {
+ type_changed = !tvm.typed_value.ty.eql(var_info.ty);
- if (zir_module.error_msg) |src_err_msg| {
- self.failed_files.putAssumeCapacityNoClobber(
- &root_scope.base,
- try Compilation.ErrorMsg.create(self.gpa, src_err_msg.byte_offset, "{}", .{src_err_msg.msg}),
- );
- root_scope.status = .unloaded_parse_failure;
- return error.AnalysisFail;
- }
+ tvm.deinit(mod.gpa);
+ }
- root_scope.status = .loaded_success;
- root_scope.contents = .{ .module = zir_module };
- keep_zir_module = true;
+ const new_variable = try decl_arena.allocator.create(Var);
+ new_variable.* = .{
+ .owner_decl = decl,
+ .init = var_info.val orelse undefined,
+ .is_extern = is_extern,
+ .is_mutable = is_mutable,
+ .is_threadlocal = is_threadlocal,
+ };
+ const var_val = try Value.Tag.variable.create(&decl_arena.allocator, new_variable);
- return zir_module;
+ decl_arena_state.* = decl_arena.state;
+ decl.typed_value = .{
+ .most_recent = .{
+ .typed_value = .{
+ .ty = var_info.ty,
+ .val = var_val,
+ },
+ .arena = decl_arena_state,
},
+ };
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
+
+ if (var_decl.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ const export_src = token_starts[maybe_export_token];
+ const name_token = var_decl.ast.mut_token + 1;
+ const name = tree.tokenSlice(name_token); // TODO identifierTokenString
+ // The scope needs to have the decl in it.
+ try mod.analyzeExport(&block_scope.base, export_src, name, decl);
+ }
+ }
+ return type_changed;
+}
- .unloaded_parse_failure,
- .unloaded_sema_failure,
- => return error.AnalysisFail,
+fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void {
+ try depender.dependencies.ensureCapacity(self.gpa, depender.dependencies.items().len + 1);
+ try dependee.dependants.ensureCapacity(self.gpa, dependee.dependants.items().len + 1);
- .loaded_success, .loaded_sema_failure => return root_scope.contents.module,
- }
+ depender.dependencies.putAssumeCapacity(dependee, {});
+ dependee.dependants.putAssumeCapacity(depender, {});
}
-fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree {
+pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*const ast.Tree {
const tracy = trace(@src());
defer tracy.end();
- const root_scope = container_scope.file_scope;
-
switch (root_scope.status) {
.never_loaded, .unloaded_success => {
try self.failed_files.ensureCapacity(self.gpa, self.failed_files.items().len + 1);
@@ -1476,8 +1708,10 @@ fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree {
const source = try root_scope.getSource(self);
var keep_tree = false;
- const tree = try std.zig.parse(self.gpa, source);
- defer if (!keep_tree) tree.deinit();
+ root_scope.tree = try std.zig.parse(self.gpa, source);
+ defer if (!keep_tree) root_scope.tree.deinit(self.gpa);
+
+ const tree = &root_scope.tree;
if (tree.errors.len != 0) {
const parse_err = tree.errors[0];
@@ -1485,11 +1719,14 @@ fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree {
var msg = std.ArrayList(u8).init(self.gpa);
defer msg.deinit();
- try parse_err.render(tree.token_ids, msg.outStream());
- const err_msg = try self.gpa.create(Compilation.ErrorMsg);
+ try tree.renderError(parse_err, msg.writer());
+ const err_msg = try self.gpa.create(ErrorMsg);
err_msg.* = .{
+ .src_loc = .{
+ .file_scope = root_scope,
+ .byte_offset = tree.tokens.items(.start)[parse_err.token],
+ },
.msg = msg.toOwnedSlice(),
- .byte_offset = tree.token_locs[parse_err.loc()].start,
};
self.failed_files.putAssumeCapacityNoClobber(&root_scope.base, err_msg);
@@ -1498,7 +1735,6 @@ fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree {
}
root_scope.status = .loaded_success;
- root_scope.contents = .{ .tree = tree };
keep_tree = true;
return tree;
@@ -1506,196 +1742,353 @@ fn getAstTree(self: *Module, container_scope: *Scope.Container) !*ast.Tree {
.unloaded_parse_failure => return error.AnalysisFail,
- .loaded_success => return root_scope.contents.tree,
+ .loaded_success => return &root_scope.tree,
}
}
-pub fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void {
+pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
const tracy = trace(@src());
defer tracy.end();
// We may be analyzing it for the first time, or this may be
// an incremental update. This code handles both cases.
- const tree = try self.getAstTree(container_scope);
- const decls = tree.root_node.decls();
+ const tree = try mod.getAstTree(container_scope.file_scope);
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const decls = tree.rootDecls();
- try self.comp.work_queue.ensureUnusedCapacity(decls.len);
- try container_scope.decls.ensureCapacity(self.gpa, decls.len);
+ try mod.comp.work_queue.ensureUnusedCapacity(decls.len);
+ try container_scope.decls.ensureCapacity(mod.gpa, decls.len);
// Keep track of the decls that we expect to see in this file so that
// we know which ones have been deleted.
- var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa);
+ var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(mod.gpa);
defer deleted_decls.deinit();
try deleted_decls.ensureCapacity(container_scope.decls.items().len);
for (container_scope.decls.items()) |entry| {
deleted_decls.putAssumeCapacityNoClobber(entry.key, {});
}
- for (decls) |src_decl, decl_i| {
- if (src_decl.cast(ast.Node.FnProto)) |fn_proto| {
- // We will create a Decl for it regardless of analysis status.
- const name_tok = fn_proto.getNameToken() orelse {
- @panic("TODO missing function name");
- };
-
- const name_loc = tree.token_locs[name_tok];
- const name = tree.tokenSliceLoc(name_loc);
- const name_hash = container_scope.fullyQualifiedNameHash(name);
- const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
- if (self.decl_table.get(name_hash)) |decl| {
- // Update the AST Node index of the decl, even if its contents are unchanged, it may
- // have been re-ordered.
- decl.src_index = decl_i;
- if (deleted_decls.remove(decl) == null) {
- decl.analysis = .sema_failure;
- const err_msg = try Compilation.ErrorMsg.create(self.gpa, tree.token_locs[name_tok].start, "redefinition of '{}'", .{decl.name});
- errdefer err_msg.destroy(self.gpa);
- try self.failed_decls.putNoClobber(self.gpa, decl, err_msg);
- } else {
- if (!srcHashEql(decl.contents_hash, contents_hash)) {
- try self.markOutdatedDecl(decl);
- decl.contents_hash = contents_hash;
- } else switch (self.comp.bin_file.tag) {
- .coff => {
- // TODO Implement for COFF
- },
- .elf => if (decl.fn_link.elf.len != 0) {
- // TODO Look into detecting when this would be unnecessary by storing enough state
- // in `Decl` to notice that the line number did not change.
- self.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
- },
- .macho => {
- // TODO Implement for MachO
- },
- .c, .wasm => {},
- }
- }
- } else {
- const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
- container_scope.decls.putAssumeCapacity(new_decl, {});
- if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
- }
- }
- }
- } else if (src_decl.castTag(.VarDecl)) |var_decl| {
- const name_loc = tree.token_locs[var_decl.name_token];
- const name = tree.tokenSliceLoc(name_loc);
- const name_hash = container_scope.fullyQualifiedNameHash(name);
- const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
- if (self.decl_table.get(name_hash)) |decl| {
- // Update the AST Node index of the decl, even if its contents are unchanged, it may
- // have been re-ordered.
- decl.src_index = decl_i;
- if (deleted_decls.remove(decl) == null) {
- decl.analysis = .sema_failure;
- const err_msg = try Compilation.ErrorMsg.create(self.gpa, name_loc.start, "redefinition of '{}'", .{decl.name});
- errdefer err_msg.destroy(self.gpa);
- try self.failed_decls.putNoClobber(self.gpa, decl, err_msg);
- } else if (!srcHashEql(decl.contents_hash, contents_hash)) {
- try self.markOutdatedDecl(decl);
- decl.contents_hash = contents_hash;
- }
- } else {
- const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
- container_scope.decls.putAssumeCapacity(new_decl, {});
- if (var_decl.getExternExportToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
- }
- }
+ for (decls) |decl_node, decl_i| switch (node_tags[decl_node]) {
+ .fn_decl => {
+ const fn_proto = node_datas[decl_node].lhs;
+ const body = node_datas[decl_node].rhs;
+ switch (node_tags[fn_proto]) {
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProtoSimple(&params, fn_proto),
+ );
+ },
+ .fn_proto_multi => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProtoMulti(fn_proto),
+ ),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProtoOne(&params, fn_proto),
+ );
+ },
+ .fn_proto => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProto(fn_proto),
+ ),
+ else => unreachable,
}
- } else if (src_decl.castTag(.Comptime)) |comptime_node| {
- const name_index = self.getNextAnonNameIndex();
- const name = try std.fmt.allocPrint(self.gpa, "__comptime_{}", .{name_index});
- defer self.gpa.free(name);
+ },
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProtoSimple(&params, decl_node),
+ );
+ },
+ .fn_proto_multi => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProtoMulti(decl_node),
+ ),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProtoOne(&params, decl_node),
+ );
+ },
+ .fn_proto => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProto(decl_node),
+ ),
+
+ .global_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.globalVarDecl(decl_node),
+ ),
+ .local_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.localVarDecl(decl_node),
+ ),
+ .simple_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.simpleVarDecl(decl_node),
+ ),
+ .aligned_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.alignedVarDecl(decl_node),
+ ),
+
+ .@"comptime" => {
+ const name_index = mod.getNextAnonNameIndex();
+ const name = try std.fmt.allocPrint(mod.gpa, "__comptime_{d}", .{name_index});
+ defer mod.gpa.free(name);
const name_hash = container_scope.fullyQualifiedNameHash(name);
- const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
- const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
container_scope.decls.putAssumeCapacity(new_decl, {});
- self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
- } else if (src_decl.castTag(.ContainerField)) |container_field| {
- log.err("TODO: analyze container field", .{});
- } else if (src_decl.castTag(.TestDecl)) |test_decl| {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ },
+
+ .container_field_init => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerFieldInit(decl_node),
+ ),
+ .container_field_align => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerFieldAlign(decl_node),
+ ),
+ .container_field => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerField(decl_node),
+ ),
+
+ .test_decl => {
log.err("TODO: analyze test decl", .{});
- } else if (src_decl.castTag(.Use)) |use_decl| {
+ },
+ .@"usingnamespace" => {
log.err("TODO: analyze usingnamespace decl", .{});
- } else {
- unreachable;
- }
- }
+ },
+ else => unreachable,
+ };
// Handle explicitly deleted decls from the source code. Not to be confused
// with when we delete decls because they are no longer referenced.
for (deleted_decls.items()) |entry| {
- log.debug("noticed '{}' deleted from source\n", .{entry.key.name});
- try self.deleteDecl(entry.key);
+ log.debug("noticed '{s}' deleted from source", .{entry.key.name});
+ try mod.deleteDecl(entry.key);
}
}
-pub fn analyzeRootZIRModule(self: *Module, root_scope: *Scope.ZIRModule) !void {
- // We may be analyzing it for the first time, or this may be
- // an incremental update. This code handles both cases.
- const src_module = try self.getSrcModule(root_scope);
-
- try self.comp.work_queue.ensureUnusedCapacity(src_module.decls.len);
- try root_scope.decls.ensureCapacity(self.gpa, src_module.decls.len);
-
- var exports_to_resolve = std.ArrayList(*zir.Decl).init(self.gpa);
- defer exports_to_resolve.deinit();
+fn semaContainerFn(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ body_node: ast.Node.Index,
+ fn_proto: ast.full.FnProto,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
- // Keep track of the decls that we expect to see in this file so that
- // we know which ones have been deleted.
- var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa);
- defer deleted_decls.deinit();
- try deleted_decls.ensureCapacity(self.decl_table.items().len);
- for (self.decl_table.items()) |entry| {
- deleted_decls.putAssumeCapacityNoClobber(entry.value, {});
- }
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
- for (src_module.decls) |src_decl, decl_i| {
- const name_hash = root_scope.fullyQualifiedNameHash(src_decl.name);
- if (self.decl_table.get(name_hash)) |decl| {
- deleted_decls.removeAssertDiscard(decl);
- if (!srcHashEql(src_decl.contents_hash, decl.contents_hash)) {
- try self.markOutdatedDecl(decl);
- decl.contents_hash = src_decl.contents_hash;
- }
+ // We will create a Decl for it regardless of analysis status.
+ const name_tok = fn_proto.name_token orelse {
+ @panic("TODO missing function name");
+ };
+ const name = tree.tokenSlice(name_tok); // TODO use identifierTokenString
+ const name_hash = container_scope.fullyQualifiedNameHash(name);
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
+ if (mod.decl_table.get(name_hash)) |decl| {
+ // Update the AST Node index of the decl, even if its contents are unchanged, it may
+ // have been re-ordered.
+ decl.src_index = decl_i;
+ if (deleted_decls.swapRemove(decl) == null) {
+ decl.analysis = .sema_failure;
+ const msg = try ErrorMsg.create(mod.gpa, .{
+ .file_scope = container_scope.file_scope,
+ .byte_offset = token_starts[name_tok],
+ }, "redefinition of '{s}'", .{decl.name});
+ errdefer msg.destroy(mod.gpa);
+ try mod.failed_decls.putNoClobber(mod.gpa, decl, msg);
} else {
- const new_decl = try self.createNewDecl(
- &root_scope.base,
- src_decl.name,
- decl_i,
- name_hash,
- src_decl.contents_hash,
- );
- root_scope.decls.appendAssumeCapacity(new_decl);
- if (src_decl.inst.cast(zir.Inst.Export)) |export_inst| {
- try exports_to_resolve.append(src_decl);
+ if (!srcHashEql(decl.contents_hash, contents_hash)) {
+ try mod.markOutdatedDecl(decl);
+ decl.contents_hash = contents_hash;
+ } else switch (mod.comp.bin_file.tag) {
+ .coff => {
+ // TODO Implement for COFF
+ },
+ .elf => if (decl.fn_link.elf.len != 0) {
+ // TODO Look into detecting when this would be unnecessary by storing enough state
+ // in `Decl` to notice that the line number did not change.
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
+ },
+ .macho => if (decl.fn_link.macho.len != 0) {
+ // TODO Look into detecting when this would be unnecessary by storing enough state
+ // in `Decl` to notice that the line number did not change.
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
+ },
+ .c, .wasm, .spirv => {},
+ }
+ }
+ } else {
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ container_scope.decls.putAssumeCapacity(new_decl, {});
+ if (fn_proto.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
}
}
}
- for (exports_to_resolve.items) |export_decl| {
- _ = try zir_sema.resolveZirDecl(self, &root_scope.base, export_decl);
- }
- // Handle explicitly deleted decls from the source code. Not to be confused
- // with when we delete decls because they are no longer referenced.
- for (deleted_decls.items()) |entry| {
- log.debug("noticed '{}' deleted from source\n", .{entry.key.name});
- try self.deleteDecl(entry.key);
+}
+
+fn semaContainerVar(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ var_decl: ast.full.VarDecl,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ const name_token = var_decl.ast.mut_token + 1;
+ const name_src = token_starts[name_token];
+ const name = tree.tokenSlice(name_token); // TODO identifierTokenString
+ const name_hash = container_scope.fullyQualifiedNameHash(name);
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
+ if (mod.decl_table.get(name_hash)) |decl| {
+ // Update the AST Node index of the decl, even if its contents are unchanged, it may
+ // have been re-ordered.
+ decl.src_index = decl_i;
+ if (deleted_decls.swapRemove(decl) == null) {
+ decl.analysis = .sema_failure;
+ const err_msg = try ErrorMsg.create(mod.gpa, .{
+ .file_scope = container_scope.file_scope,
+ .byte_offset = name_src,
+ }, "redefinition of '{s}'", .{decl.name});
+ errdefer err_msg.destroy(mod.gpa);
+ try mod.failed_decls.putNoClobber(mod.gpa, decl, err_msg);
+ } else if (!srcHashEql(decl.contents_hash, contents_hash)) {
+ try mod.markOutdatedDecl(decl);
+ decl.contents_hash = contents_hash;
+ }
+ } else {
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ container_scope.decls.putAssumeCapacity(new_decl, {});
+ if (var_decl.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ }
+ }
}
}
+fn semaContainerField(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ field: ast.full.ContainerField,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ log.err("TODO: analyze container field", .{});
+}
+
pub fn deleteDecl(self: *Module, decl: *Decl) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
try self.deletion_set.ensureCapacity(self.gpa, self.deletion_set.items.len + decl.dependencies.items().len);
// Remove from the namespace it resides in. In the case of an anonymous Decl it will
// not be present in the set, and this does nothing.
- decl.scope.removeDecl(decl);
+ decl.container.removeDecl(decl);
- log.debug("deleting decl '{}'\n", .{decl.name});
+ log.debug("deleting decl '{s}'", .{decl.name});
const name_hash = decl.fullyQualifiedNameHash();
self.decl_table.removeAssertDiscard(name_hash);
// Remove itself from its dependencies, because we are about to destroy the decl pointer.
@@ -1718,18 +2111,23 @@ pub fn deleteDecl(self: *Module, decl: *Decl) !void {
try self.markOutdatedDecl(dep);
}
}
- if (self.failed_decls.remove(decl)) |entry| {
+ if (self.failed_decls.swapRemove(decl)) |entry| {
entry.value.destroy(self.gpa);
}
+ if (self.emit_h_failed_decls.swapRemove(decl)) |entry| {
+ entry.value.destroy(self.gpa);
+ }
+ _ = self.compile_log_decls.swapRemove(decl);
self.deleteDeclExports(decl);
self.comp.bin_file.freeDecl(decl);
- decl.destroy(self.gpa);
+
+ decl.destroy(self);
}
/// Delete all the Export objects that are caused by this Decl. Re-analysis of
/// this Decl will cause them to be re-created (or not).
fn deleteDeclExports(self: *Module, decl: *Decl) void {
- const kv = self.export_owners.remove(decl) orelse return;
+ const kv = self.export_owners.swapRemove(decl) orelse return;
for (kv.value) |exp| {
if (self.decl_exports.getEntry(exp.exported_decl)) |decl_exports_kv| {
@@ -1756,10 +2154,10 @@ fn deleteDeclExports(self: *Module, decl: *Decl) void {
if (self.comp.bin_file.cast(link.File.MachO)) |macho| {
macho.deleteExport(exp.link.macho);
}
- if (self.failed_exports.remove(exp)) |entry| {
+ if (self.failed_exports.swapRemove(exp)) |entry| {
entry.value.destroy(self.gpa);
}
- _ = self.symbol_exports.remove(exp.options.name);
+ _ = self.symbol_exports.swapRemove(exp.options.name);
self.gpa.free(exp.options.name);
self.gpa.destroy(exp);
}
@@ -1773,65 +2171,87 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
// Use the Decl's arena for function memory.
var arena = decl.typed_value.most_recent.arena.?.promote(self.gpa);
defer decl.typed_value.most_recent.arena.?.* = arena.state;
+ var inst_table = Scope.Block.InstTable.init(self.gpa);
+ defer inst_table.deinit();
+ var branch_quota: u32 = default_eval_branch_quota;
+
var inner_block: Scope.Block = .{
.parent = null,
+ .inst_table = &inst_table,
.func = func,
- .decl = decl,
+ .owner_decl = decl,
+ .src_decl = decl,
.instructions = .{},
.arena = &arena.allocator,
+ .inlining = null,
.is_comptime = false,
+ .branch_quota = &branch_quota,
};
defer inner_block.instructions.deinit(self.gpa);
- const fn_zir = func.analysis.queued;
- defer fn_zir.arena.promote(self.gpa).deinit();
- func.analysis = .{ .in_progress = {} };
- log.debug("set {} to in_progress\n", .{decl.name});
+ func.state = .in_progress;
+ log.debug("set {s} to in_progress", .{decl.name});
- try zir_sema.analyzeBody(self, &inner_block.base, fn_zir.body);
+ try zir_sema.analyzeBody(self, &inner_block, func.zir);
const instructions = try arena.allocator.dupe(*Inst, inner_block.instructions.items);
- func.analysis = .{ .success = .{ .instructions = instructions } };
- log.debug("set {} to success\n", .{decl.name});
+ func.state = .success;
+ func.body = .{ .instructions = instructions };
+ log.debug("set {s} to success", .{decl.name});
}
fn markOutdatedDecl(self: *Module, decl: *Decl) !void {
- log.debug("mark {} outdated\n", .{decl.name});
+ log.debug("mark {s} outdated", .{decl.name});
try self.comp.work_queue.writeItem(.{ .analyze_decl = decl });
- if (self.failed_decls.remove(decl)) |entry| {
+ if (self.failed_decls.swapRemove(decl)) |entry| {
entry.value.destroy(self.gpa);
}
+ if (self.emit_h_failed_decls.swapRemove(decl)) |entry| {
+ entry.value.destroy(self.gpa);
+ }
+ _ = self.compile_log_decls.swapRemove(decl);
decl.analysis = .outdated;
}
fn allocateNewDecl(
- self: *Module,
+ mod: *Module,
scope: *Scope,
src_index: usize,
contents_hash: std.zig.SrcHash,
) !*Decl {
- const new_decl = try self.gpa.create(Decl);
+ // If we have emit-h then we must allocate a bigger structure to store the emit-h state.
+ const new_decl: *Decl = if (mod.emit_h != null) blk: {
+ const parent_struct = try mod.gpa.create(DeclPlusEmitH);
+ parent_struct.* = .{
+ .emit_h = .{},
+ .decl = undefined,
+ };
+ break :blk &parent_struct.decl;
+ } else try mod.gpa.create(Decl);
+
new_decl.* = .{
.name = "",
- .scope = scope.namespace(),
+ .container = scope.namespace(),
.src_index = src_index,
.typed_value = .{ .never_succeeded = {} },
.analysis = .unreferenced,
.deletion_flag = false,
.contents_hash = contents_hash,
- .link = switch (self.comp.bin_file.tag) {
+ .link = switch (mod.comp.bin_file.tag) {
.coff => .{ .coff = link.File.Coff.TextBlock.empty },
.elf => .{ .elf = link.File.Elf.TextBlock.empty },
.macho => .{ .macho = link.File.MachO.TextBlock.empty },
- .c => .{ .c = {} },
+ .c => .{ .c = link.File.C.DeclBlock.empty },
.wasm => .{ .wasm = {} },
+ .spirv => .{ .spirv = {} },
},
- .fn_link = switch (self.comp.bin_file.tag) {
+ .fn_link = switch (mod.comp.bin_file.tag) {
.coff => .{ .coff = {} },
.elf => .{ .elf = link.File.Elf.SrcFn.empty },
.macho => .{ .macho = link.File.MachO.SrcFn.empty },
- .c => .{ .c = {} },
+ .c => .{ .c = link.File.C.FnBlock.empty },
.wasm => .{ .wasm = null },
+ .spirv => .{ .spirv = .{} },
},
.generation = 0,
.is_pub = false,
@@ -1895,34 +2315,41 @@ pub fn resolveDefinedValue(self: *Module, scope: *Scope, base: *Inst) !?Value {
return null;
}
-pub fn analyzeExport(self: *Module, scope: *Scope, src: usize, borrowed_symbol_name: []const u8, exported_decl: *Decl) !void {
- try self.ensureDeclAnalyzed(exported_decl);
+pub fn analyzeExport(
+ mod: *Module,
+ scope: *Scope,
+ src: usize,
+ borrowed_symbol_name: []const u8,
+ exported_decl: *Decl,
+) !void {
+ try mod.ensureDeclAnalyzed(exported_decl);
const typed_value = exported_decl.typed_value.most_recent.typed_value;
switch (typed_value.ty.zigTypeTag()) {
.Fn => {},
- else => return self.fail(scope, src, "unable to export type '{}'", .{typed_value.ty}),
+ else => return mod.fail(scope, src, "unable to export type '{}'", .{typed_value.ty}),
}
- try self.decl_exports.ensureCapacity(self.gpa, self.decl_exports.items().len + 1);
- try self.export_owners.ensureCapacity(self.gpa, self.export_owners.items().len + 1);
+ try mod.decl_exports.ensureCapacity(mod.gpa, mod.decl_exports.items().len + 1);
+ try mod.export_owners.ensureCapacity(mod.gpa, mod.export_owners.items().len + 1);
- const new_export = try self.gpa.create(Export);
- errdefer self.gpa.destroy(new_export);
+ const new_export = try mod.gpa.create(Export);
+ errdefer mod.gpa.destroy(new_export);
- const symbol_name = try self.gpa.dupe(u8, borrowed_symbol_name);
- errdefer self.gpa.free(symbol_name);
+ const symbol_name = try mod.gpa.dupe(u8, borrowed_symbol_name);
+ errdefer mod.gpa.free(symbol_name);
- const owner_decl = scope.decl().?;
+ const owner_decl = scope.ownerDecl().?;
new_export.* = .{
.options = .{ .name = symbol_name },
.src = src,
- .link = switch (self.comp.bin_file.tag) {
+ .link = switch (mod.comp.bin_file.tag) {
.coff => .{ .coff = {} },
.elf => .{ .elf = link.File.Elf.Export{} },
.macho => .{ .macho = link.File.MachO.Export{} },
.c => .{ .c = {} },
.wasm => .{ .wasm = {} },
+ .spirv => .{ .spirv = {} },
},
.owner_decl = owner_decl,
.exported_decl = exported_decl,
@@ -1930,48 +2357,53 @@ pub fn analyzeExport(self: *Module, scope: *Scope, src: usize, borrowed_symbol_n
};
// Add to export_owners table.
- const eo_gop = self.export_owners.getOrPutAssumeCapacity(owner_decl);
+ const eo_gop = mod.export_owners.getOrPutAssumeCapacity(owner_decl);
if (!eo_gop.found_existing) {
eo_gop.entry.value = &[0]*Export{};
}
- eo_gop.entry.value = try self.gpa.realloc(eo_gop.entry.value, eo_gop.entry.value.len + 1);
+ eo_gop.entry.value = try mod.gpa.realloc(eo_gop.entry.value, eo_gop.entry.value.len + 1);
eo_gop.entry.value[eo_gop.entry.value.len - 1] = new_export;
- errdefer eo_gop.entry.value = self.gpa.shrink(eo_gop.entry.value, eo_gop.entry.value.len - 1);
+ errdefer eo_gop.entry.value = mod.gpa.shrink(eo_gop.entry.value, eo_gop.entry.value.len - 1);
// Add to exported_decl table.
- const de_gop = self.decl_exports.getOrPutAssumeCapacity(exported_decl);
+ const de_gop = mod.decl_exports.getOrPutAssumeCapacity(exported_decl);
if (!de_gop.found_existing) {
de_gop.entry.value = &[0]*Export{};
}
- de_gop.entry.value = try self.gpa.realloc(de_gop.entry.value, de_gop.entry.value.len + 1);
+ de_gop.entry.value = try mod.gpa.realloc(de_gop.entry.value, de_gop.entry.value.len + 1);
de_gop.entry.value[de_gop.entry.value.len - 1] = new_export;
- errdefer de_gop.entry.value = self.gpa.shrink(de_gop.entry.value, de_gop.entry.value.len - 1);
+ errdefer de_gop.entry.value = mod.gpa.shrink(de_gop.entry.value, de_gop.entry.value.len - 1);
- if (self.symbol_exports.get(symbol_name)) |_| {
- try self.failed_exports.ensureCapacity(self.gpa, self.failed_exports.items().len + 1);
- self.failed_exports.putAssumeCapacityNoClobber(new_export, try Compilation.ErrorMsg.create(
- self.gpa,
+ if (mod.symbol_exports.get(symbol_name)) |other_export| {
+ new_export.status = .failed_retryable;
+ try mod.failed_exports.ensureCapacity(mod.gpa, mod.failed_exports.items().len + 1);
+ const msg = try mod.errMsg(
+ scope,
src,
- "exported symbol collision: {}",
+ "exported symbol collision: {s}",
.{symbol_name},
- ));
- // TODO: add a note
+ );
+ errdefer msg.destroy(mod.gpa);
+ try mod.errNote(
+ &other_export.owner_decl.container.base,
+ other_export.src,
+ msg,
+ "other symbol here",
+ .{},
+ );
+ mod.failed_exports.putAssumeCapacityNoClobber(new_export, msg);
new_export.status = .failed;
return;
}
- try self.symbol_exports.putNoClobber(self.gpa, symbol_name, new_export);
- self.comp.bin_file.updateDeclExports(self, exported_decl, de_gop.entry.value) catch |err| switch (err) {
+ try mod.symbol_exports.putNoClobber(mod.gpa, symbol_name, new_export);
+ mod.comp.bin_file.updateDeclExports(mod, exported_decl, de_gop.entry.value) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => {
- try self.failed_exports.ensureCapacity(self.gpa, self.failed_exports.items().len + 1);
- self.failed_exports.putAssumeCapacityNoClobber(new_export, try Compilation.ErrorMsg.create(
- self.gpa,
- src,
- "unable to export: {}",
- .{@errorName(err)},
- ));
new_export.status = .failed_retryable;
+ try mod.failed_exports.ensureCapacity(mod.gpa, mod.failed_exports.items().len + 1);
+ const msg = try mod.errMsg(scope, src, "unable to export: {s}", .{@errorName(err)});
+ mod.failed_exports.putAssumeCapacityNoClobber(new_export, msg);
},
};
}
@@ -2059,7 +2491,7 @@ pub fn addBr(
src: usize,
target_block: *Inst.Block,
operand: *Inst,
-) !*Inst {
+) !*Inst.Br {
const inst = try scope_block.arena.create(Inst.Br);
inst.* = .{
.base = .{
@@ -2071,7 +2503,7 @@ pub fn addBr(
.block = target_block,
};
try scope_block.instructions.append(self.gpa, &inst.base);
- return &inst.base;
+ return inst;
}
pub fn addCondBr(
@@ -2123,7 +2555,7 @@ pub fn addSwitchBr(
self: *Module,
block: *Scope.Block,
src: usize,
- target_ptr: *Inst,
+ target: *Inst,
cases: []Inst.SwitchBr.Case,
else_body: ir.Body,
) !*Inst {
@@ -2134,7 +2566,7 @@ pub fn addSwitchBr(
.ty = Type.initTag(.noreturn),
.src = src,
},
- .target_ptr = target_ptr,
+ .target = target,
.cases = cases,
.else_body = else_body,
};
@@ -2191,52 +2623,43 @@ pub fn constBool(self: *Module, scope: *Scope, src: usize, v: bool) !*Inst {
}
pub fn constIntUnsigned(self: *Module, scope: *Scope, src: usize, ty: Type, int: u64) !*Inst {
- const int_payload = try scope.arena().create(Value.Payload.Int_u64);
- int_payload.* = .{ .int = int };
-
return self.constInst(scope, src, .{
.ty = ty,
- .val = Value.initPayload(&int_payload.base),
+ .val = try Value.Tag.int_u64.create(scope.arena(), int),
});
}
pub fn constIntSigned(self: *Module, scope: *Scope, src: usize, ty: Type, int: i64) !*Inst {
- const int_payload = try scope.arena().create(Value.Payload.Int_i64);
- int_payload.* = .{ .int = int };
-
return self.constInst(scope, src, .{
.ty = ty,
- .val = Value.initPayload(&int_payload.base),
+ .val = try Value.Tag.int_i64.create(scope.arena(), int),
});
}
pub fn constIntBig(self: *Module, scope: *Scope, src: usize, ty: Type, big_int: BigIntConst) !*Inst {
- const val_payload = if (big_int.positive) blk: {
+ if (big_int.positive) {
if (big_int.to(u64)) |x| {
return self.constIntUnsigned(scope, src, ty, x);
} else |err| switch (err) {
error.NegativeIntoUnsigned => unreachable,
error.TargetTooSmall => {}, // handled below
}
- const big_int_payload = try scope.arena().create(Value.Payload.IntBigPositive);
- big_int_payload.* = .{ .limbs = big_int.limbs };
- break :blk &big_int_payload.base;
- } else blk: {
+ return self.constInst(scope, src, .{
+ .ty = ty,
+ .val = try Value.Tag.int_big_positive.create(scope.arena(), big_int.limbs),
+ });
+ } else {
if (big_int.to(i64)) |x| {
return self.constIntSigned(scope, src, ty, x);
} else |err| switch (err) {
error.NegativeIntoUnsigned => unreachable,
error.TargetTooSmall => {}, // handled below
}
- const big_int_payload = try scope.arena().create(Value.Payload.IntBigNegative);
- big_int_payload.* = .{ .limbs = big_int.limbs };
- break :blk &big_int_payload.base;
- };
-
- return self.constInst(scope, src, .{
- .ty = ty,
- .val = Value.initPayload(val_payload),
- });
+ return self.constInst(scope, src, .{
+ .ty = ty,
+ .val = try Value.Tag.int_big_negative.create(scope.arena(), big_int.limbs),
+ });
+ }
}
pub fn createAnonymousDecl(
@@ -2246,8 +2669,8 @@ pub fn createAnonymousDecl(
typed_value: TypedValue,
) !*Decl {
const name_index = self.getNextAnonNameIndex();
- const scope_decl = scope.decl().?;
- const name = try std.fmt.allocPrint(self.gpa, "{}__anon_{}", .{ scope_decl.name, name_index });
+ const scope_decl = scope.ownerDecl().?;
+ const name = try std.fmt.allocPrint(self.gpa, "{s}__anon_{d}", .{ scope_decl.name, name_index });
defer self.gpa.free(name);
const name_hash = scope.namespace().fullyQualifiedNameHash(name);
const src_hash: std.zig.SrcHash = undefined;
@@ -2275,6 +2698,49 @@ pub fn createAnonymousDecl(
return new_decl;
}
+pub fn createContainerDecl(
+ self: *Module,
+ scope: *Scope,
+ base_token: std.zig.ast.TokenIndex,
+ decl_arena: *std.heap.ArenaAllocator,
+ typed_value: TypedValue,
+) !*Decl {
+ const scope_decl = scope.ownerDecl().?;
+ const name = try self.getAnonTypeName(scope, base_token);
+ defer self.gpa.free(name);
+ const name_hash = scope.namespace().fullyQualifiedNameHash(name);
+ const src_hash: std.zig.SrcHash = undefined;
+ const new_decl = try self.createNewDecl(scope, name, scope_decl.src_index, name_hash, src_hash);
+ const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+
+ decl_arena_state.* = decl_arena.state;
+ new_decl.typed_value = .{
+ .most_recent = .{
+ .typed_value = typed_value,
+ .arena = decl_arena_state,
+ },
+ };
+ new_decl.analysis = .complete;
+ new_decl.generation = self.generation;
+
+ return new_decl;
+}
+
+fn getAnonTypeName(self: *Module, scope: *Scope, base_token: std.zig.ast.TokenIndex) ![]u8 {
+ // TODO add namespaces, generic function signatures
+ const tree = scope.tree();
+ const token_tags = tree.tokens.items(.tag);
+ const base_name = switch (token_tags[base_token]) {
+ .keyword_struct => "struct",
+ .keyword_enum => "enum",
+ .keyword_union => "union",
+ .keyword_opaque => "opaque",
+ else => unreachable,
+ };
+ const loc = tree.tokenLocation(0, base_token);
+ return std.fmt.allocPrint(self.gpa, "{s}:{d}:{d}", .{ base_name, loc.line, loc.column });
+}
+
fn getNextAnonNameIndex(self: *Module) usize {
return @atomicRmw(usize, &self.next_anon_name_index, .Add, 1, .Monotonic);
}
@@ -2285,15 +2751,20 @@ pub fn lookupDeclName(self: *Module, scope: *Scope, ident_name: []const u8) ?*De
return self.decl_table.get(name_hash);
}
+pub fn analyzeDeclVal(mod: *Module, scope: *Scope, src: usize, decl: *Decl) InnerError!*Inst {
+ const decl_ref = try mod.analyzeDeclRef(scope, src, decl);
+ return mod.analyzeDeref(scope, src, decl_ref, src);
+}
+
pub fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) InnerError!*Inst {
- const scope_decl = scope.decl().?;
+ const scope_decl = scope.ownerDecl().?;
try self.declareDeclDependency(scope_decl, decl);
self.ensureDeclAnalyzed(decl) catch |err| {
if (scope.cast(Scope.Block)) |block| {
if (block.func) |func| {
- func.analysis = .dependency_failure;
+ func.state = .dependency_failure;
} else {
- block.decl.analysis = .dependency_failure;
+ block.owner_decl.analysis = .dependency_failure;
}
} else {
scope_decl.analysis = .dependency_failure;
@@ -2305,26 +2776,20 @@ pub fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) Inn
if (decl_tv.val.tag() == .variable) {
return self.analyzeVarRef(scope, src, decl_tv);
}
- const ty = try self.simplePtrType(scope, src, decl_tv.ty, false, .One);
- const val_payload = try scope.arena().create(Value.Payload.DeclRef);
- val_payload.* = .{ .decl = decl };
-
return self.constInst(scope, src, .{
- .ty = ty,
- .val = Value.initPayload(&val_payload.base),
+ .ty = try self.simplePtrType(scope, src, decl_tv.ty, false, .One),
+ .val = try Value.Tag.decl_ref.create(scope.arena(), decl),
});
}
fn analyzeVarRef(self: *Module, scope: *Scope, src: usize, tv: TypedValue) InnerError!*Inst {
- const variable = tv.val.cast(Value.Payload.Variable).?.variable;
+ const variable = tv.val.castTag(.variable).?.data;
const ty = try self.simplePtrType(scope, src, tv.ty, variable.is_mutable, .One);
if (!variable.is_mutable and !variable.is_extern) {
- const val_payload = try scope.arena().create(Value.Payload.RefVal);
- val_payload.* = .{ .val = variable.init };
return self.constInst(scope, src, .{
.ty = ty,
- .val = Value.initPayload(&val_payload.base),
+ .val = try Value.Tag.ref_val.create(scope.arena(), variable.init),
});
}
@@ -2342,6 +2807,20 @@ fn analyzeVarRef(self: *Module, scope: *Scope, src: usize, tv: TypedValue) Inner
return &inst.base;
}
+pub fn analyzeRef(mod: *Module, scope: *Scope, src: usize, operand: *Inst) InnerError!*Inst {
+ const ptr_type = try mod.simplePtrType(scope, src, operand.ty, false, .One);
+
+ if (operand.value()) |val| {
+ return mod.constInst(scope, src, .{
+ .ty = ptr_type,
+ .val = try Value.Tag.ref_val.create(scope.arena(), val),
+ });
+ }
+
+ const b = try mod.requireRuntimeBlock(scope, src);
+ return mod.addUnOp(b, src, ptr_type, .ref, operand);
+}
+
pub fn analyzeDeref(self: *Module, scope: *Scope, src: usize, ptr: *Inst, ptr_src: usize) InnerError!*Inst {
const elem_ty = switch (ptr.ty.zigTypeTag()) {
.Pointer => ptr.ty.elemType(),
@@ -2360,7 +2839,7 @@ pub fn analyzeDeref(self: *Module, scope: *Scope, src: usize, ptr: *Inst, ptr_sr
pub fn analyzeDeclRefByName(self: *Module, scope: *Scope, src: usize, decl_name: []const u8) InnerError!*Inst {
const decl = self.lookupDeclName(scope, decl_name) orelse
- return self.fail(scope, src, "decl '{}' not found", .{decl_name});
+ return self.fail(scope, src, "decl '{s}' not found", .{decl_name});
return self.analyzeDeclRef(scope, src, decl);
}
@@ -2387,12 +2866,20 @@ pub fn analyzeIsNull(
return self.constBool(scope, src, bool_value);
}
const b = try self.requireRuntimeBlock(scope, src);
- const inst_tag: Inst.Tag = if (invert_logic) .isnonnull else .isnull;
+ const inst_tag: Inst.Tag = if (invert_logic) .is_non_null else .is_null;
return self.addUnOp(b, src, Type.initTag(.bool), inst_tag, operand);
}
pub fn analyzeIsErr(self: *Module, scope: *Scope, src: usize, operand: *Inst) InnerError!*Inst {
- return self.fail(scope, src, "TODO implement analysis of iserr", .{});
+ const ot = operand.ty.zigTypeTag();
+ if (ot != .ErrorSet and ot != .ErrorUnion) return self.constBool(scope, src, false);
+ if (ot == .ErrorSet) return self.constBool(scope, src, true);
+ assert(ot == .ErrorUnion);
+ if (operand.value()) |err_union| {
+ return self.constBool(scope, src, err_union.getError() != null);
+ }
+ const b = try self.requireRuntimeBlock(scope, src);
+ return self.addUnOp(b, src, Type.initTag(.bool), .is_err, operand);
}
pub fn analyzeSlice(self: *Module, scope: *Scope, src: usize, array_ptr: *Inst, start: *Inst, end_opt: ?*Inst, sentinel_opt: ?*Inst) InnerError!*Inst {
@@ -2462,7 +2949,7 @@ pub fn analyzeSlice(self: *Module, scope: *Scope, src: usize, array_ptr: *Inst,
}
pub fn analyzeImport(self: *Module, scope: *Scope, src: usize, target_string: []const u8) !*Scope.File {
- const cur_pkg = scope.getOwnerPkg();
+ const cur_pkg = scope.getFileScope().pkg;
const cur_pkg_dir_path = cur_pkg.root_src_directory.path orelse ".";
const found_pkg = cur_pkg.table.get(target_string);
@@ -2487,22 +2974,21 @@ pub fn analyzeImport(self: *Module, scope: *Scope, src: usize, target_string: []
}
// TODO Scope.Container arena for ty and sub_file_path
- const struct_payload = try self.gpa.create(Type.Payload.EmptyStruct);
- errdefer self.gpa.destroy(struct_payload);
const file_scope = try self.gpa.create(Scope.File);
errdefer self.gpa.destroy(file_scope);
+ const struct_ty = try Type.Tag.empty_struct.create(self.gpa, &file_scope.root_container);
+ errdefer self.gpa.destroy(struct_ty.castTag(.empty_struct).?);
- struct_payload.* = .{ .scope = &file_scope.root_container };
file_scope.* = .{
.sub_file_path = resolved_path,
.source = .{ .unloaded = {} },
- .contents = .{ .not_available = {} },
+ .tree = undefined,
.status = .never_loaded,
.pkg = found_pkg orelse cur_pkg,
.root_container = .{
.file_scope = file_scope,
.decls = .{},
- .ty = Type.initPayload(&struct_payload.base),
+ .ty = struct_ty,
},
};
self.analyzeContainer(&file_scope.root_container) catch |err| switch (err) {
@@ -2523,7 +3009,7 @@ pub fn cmpNumeric(
lhs: *Inst,
rhs: *Inst,
op: std.math.CompareOperator,
-) !*Inst {
+) InnerError!*Inst {
assert(lhs.ty.isNumeric());
assert(rhs.ty.isNumeric());
@@ -2532,7 +3018,7 @@ pub fn cmpNumeric(
if (lhs_ty_tag == .Vector and rhs_ty_tag == .Vector) {
if (lhs.ty.arrayLen() != rhs.ty.arrayLen()) {
- return self.fail(scope, src, "vector length mismatch: {} and {}", .{
+ return self.fail(scope, src, "vector length mismatch: {d} and {d}", .{
lhs.ty.arrayLen(),
rhs.ty.arrayLen(),
});
@@ -2677,7 +3163,7 @@ pub fn cmpNumeric(
const dest_type = if (dest_float_type) |ft| ft else blk: {
const max_bits = std.math.max(lhs_bits, rhs_bits);
const casted_bits = std.math.cast(u16, max_bits) catch |err| switch (err) {
- error.Overflow => return self.fail(scope, src, "{} exceeds maximum integer bit count", .{max_bits}),
+ error.Overflow => return self.fail(scope, src, "{d} exceeds maximum integer bit count", .{max_bits}),
};
break :blk try self.makeIntType(scope, dest_int_is_signed, casted_bits);
};
@@ -2696,18 +3182,63 @@ fn wrapOptional(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*In
return self.addUnOp(b, inst.src, dest_type, .wrap_optional, inst);
}
-fn makeIntType(self: *Module, scope: *Scope, signed: bool, bits: u16) !Type {
- if (signed) {
- const int_payload = try scope.arena().create(Type.Payload.IntSigned);
- int_payload.* = .{ .bits = bits };
- return Type.initPayload(&int_payload.base);
+fn wrapErrorUnion(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst {
+ // TODO deal with inferred error sets
+ const err_union = dest_type.castTag(.error_union).?;
+ if (inst.value()) |val| {
+ const to_wrap = if (inst.ty.zigTypeTag() != .ErrorSet) blk: {
+ _ = try self.coerce(scope, err_union.data.payload, inst);
+ break :blk val;
+ } else switch (err_union.data.error_set.tag()) {
+ .anyerror => val,
+ .error_set_single => blk: {
+ const n = err_union.data.error_set.castTag(.error_set_single).?.data;
+ if (!mem.eql(u8, val.castTag(.@"error").?.data.name, n))
+ return self.fail(scope, inst.src, "expected type '{}', found type '{}'", .{ err_union.data.error_set, inst.ty });
+ break :blk val;
+ },
+ .error_set => blk: {
+ const f = err_union.data.error_set.castTag(.error_set).?.data.typed_value.most_recent.typed_value.val.castTag(.error_set).?.data.fields;
+ if (f.get(val.castTag(.@"error").?.data.name) == null)
+ return self.fail(scope, inst.src, "expected type '{}', found type '{}'", .{ err_union.data.error_set, inst.ty });
+ break :blk val;
+ },
+ else => unreachable,
+ };
+
+ return self.constInst(scope, inst.src, .{
+ .ty = dest_type,
+ // creating a SubValue for the error_union payload
+ .val = try Value.Tag.error_union.create(
+ scope.arena(),
+ to_wrap,
+ ),
+ });
+ }
+
+ const b = try self.requireRuntimeBlock(scope, inst.src);
+
+ // we are coercing from E to E!T
+ if (inst.ty.zigTypeTag() == .ErrorSet) {
+ var coerced = try self.coerce(scope, err_union.data.error_set, inst);
+ return self.addUnOp(b, inst.src, dest_type, .wrap_errunion_err, coerced);
} else {
- const int_payload = try scope.arena().create(Type.Payload.IntUnsigned);
- int_payload.* = .{ .bits = bits };
- return Type.initPayload(&int_payload.base);
+ var coerced = try self.coerce(scope, err_union.data.payload, inst);
+ return self.addUnOp(b, inst.src, dest_type, .wrap_errunion_payload, coerced);
}
}
+fn makeIntType(self: *Module, scope: *Scope, signed: bool, bits: u16) !Type {
+ const int_payload = try scope.arena().create(Type.Payload.Bits);
+ int_payload.* = .{
+ .base = .{
+ .tag = if (signed) .int_signed else .int_unsigned,
+ },
+ .data = bits,
+ };
+ return Type.initPayload(&int_payload.base);
+}
+
pub fn resolvePeerTypes(self: *Module, scope: *Scope, instructions: []*Inst) !Type {
if (instructions.len == 0)
return Type.initTag(.noreturn);
@@ -2763,7 +3294,7 @@ pub fn resolvePeerTypes(self: *Module, scope: *Scope, instructions: []*Inst) !Ty
return chosen.ty;
}
-pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst {
+pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) InnerError!*Inst {
// If the types are the same, we can return the operand.
if (dest_type.eql(inst.ty))
return inst;
@@ -2788,7 +3319,7 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst
// T to ?T
if (dest_type.zigTypeTag() == .Optional) {
- var buf: Type.Payload.PointerSimple = undefined;
+ var buf: Type.Payload.ElemType = undefined;
const child_type = dest_type.optionalChild(&buf);
if (child_type.eql(inst.ty)) {
return self.wrapOptional(scope, dest_type, inst);
@@ -2797,16 +3328,52 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst
}
}
- // *[N]T to []T
- if (inst.ty.isSinglePointer() and dest_type.isSlice() and
- (!inst.ty.isConstPtr() or dest_type.isConstPtr()))
- {
+ // T to E!T or E to E!T
+ if (dest_type.tag() == .error_union) {
+ return try self.wrapErrorUnion(scope, dest_type, inst);
+ }
+
+ // Coercions where the source is a single pointer to an array.
+ src_array_ptr: {
+ if (!inst.ty.isSinglePointer()) break :src_array_ptr;
const array_type = inst.ty.elemType();
+ if (array_type.zigTypeTag() != .Array) break :src_array_ptr;
+ const array_elem_type = array_type.elemType();
+ if (inst.ty.isConstPtr() and !dest_type.isConstPtr()) break :src_array_ptr;
+ if (inst.ty.isVolatilePtr() and !dest_type.isVolatilePtr()) break :src_array_ptr;
+
const dst_elem_type = dest_type.elemType();
- if (array_type.zigTypeTag() == .Array and
- coerceInMemoryAllowed(dst_elem_type, array_type.elemType()) == .ok)
- {
- return self.coerceArrayPtrToSlice(scope, dest_type, inst);
+ switch (coerceInMemoryAllowed(dst_elem_type, array_elem_type)) {
+ .ok => {},
+ .no_match => break :src_array_ptr,
+ }
+
+ switch (dest_type.ptrSize()) {
+ .Slice => {
+ // *[N]T to []T
+ return self.coerceArrayPtrToSlice(scope, dest_type, inst);
+ },
+ .C => {
+ // *[N]T to [*c]T
+ return self.coerceArrayPtrToMany(scope, dest_type, inst);
+ },
+ .Many => {
+ // *[N]T to [*]T
+ // *[N:s]T to [*:s]T
+ const src_sentinel = array_type.sentinel();
+ const dst_sentinel = dest_type.sentinel();
+ if (src_sentinel == null and dst_sentinel == null)
+ return self.coerceArrayPtrToMany(scope, dest_type, inst);
+
+ if (src_sentinel) |src_s| {
+ if (dst_sentinel) |dst_s| {
+ if (src_s.eql(dst_s)) {
+ return self.coerceArrayPtrToMany(scope, dest_type, inst);
+ }
+ }
+ }
+ },
+ .One => {},
}
}
@@ -2844,7 +3411,7 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst
return self.fail(scope, inst.src, "expected {}, found {}", .{ dest_type, inst.ty });
}
-pub fn coerceNum(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !?*Inst {
+pub fn coerceNum(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) InnerError!?*Inst {
const val = inst.value() orelse return null;
const src_zig_tag = inst.ty.zigTypeTag();
const dst_zig_tag = dest_type.zigTypeTag();
@@ -2914,10 +3481,59 @@ fn coerceArrayPtrToSlice(self: *Module, scope: *Scope, dest_type: Type, inst: *I
return self.fail(scope, inst.src, "TODO implement coerceArrayPtrToSlice runtime instruction", .{});
}
-pub fn fail(self: *Module, scope: *Scope, src: usize, comptime format: []const u8, args: anytype) InnerError {
- @setCold(true);
- const err_msg = try Compilation.ErrorMsg.create(self.gpa, src, format, args);
- return self.failWithOwnedErrorMsg(scope, src, err_msg);
+fn coerceArrayPtrToMany(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst {
+ if (inst.value()) |val| {
+ // The comptime Value representation is compatible with both types.
+ return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = val });
+ }
+ return self.fail(scope, inst.src, "TODO implement coerceArrayPtrToMany runtime instruction", .{});
+}
+
+/// We don't return a pointer to the new error note because the pointer
+/// becomes invalid when you add another one.
+pub fn errNote(
+ mod: *Module,
+ scope: *Scope,
+ src: usize,
+ parent: *ErrorMsg,
+ comptime format: []const u8,
+ args: anytype,
+) error{OutOfMemory}!void {
+ const msg = try std.fmt.allocPrint(mod.gpa, format, args);
+ errdefer mod.gpa.free(msg);
+
+ parent.notes = try mod.gpa.realloc(parent.notes, parent.notes.len + 1);
+ parent.notes[parent.notes.len - 1] = .{
+ .src_loc = .{
+ .file_scope = scope.getFileScope(),
+ .byte_offset = src,
+ },
+ .msg = msg,
+ };
+}
+
+pub fn errMsg(
+ mod: *Module,
+ scope: *Scope,
+ src_byte_offset: usize,
+ comptime format: []const u8,
+ args: anytype,
+) error{OutOfMemory}!*ErrorMsg {
+ return ErrorMsg.create(mod.gpa, .{
+ .file_scope = scope.getFileScope(),
+ .byte_offset = src_byte_offset,
+ }, format, args);
+}
+
+pub fn fail(
+ mod: *Module,
+ scope: *Scope,
+ src_byte_offset: usize,
+ comptime format: []const u8,
+ args: anytype,
+) InnerError {
+ const err_msg = try mod.errMsg(scope, src_byte_offset, format, args);
+ return mod.failWithOwnedErrorMsg(scope, err_msg);
}
pub fn failTok(
@@ -2927,45 +3543,48 @@ pub fn failTok(
comptime format: []const u8,
args: anytype,
) InnerError {
- @setCold(true);
- const src = scope.tree().token_locs[token_index].start;
+ const src = scope.tree().tokens.items(.start)[token_index];
return self.fail(scope, src, format, args);
}
pub fn failNode(
self: *Module,
scope: *Scope,
- ast_node: *ast.Node,
+ ast_node: ast.Node.Index,
comptime format: []const u8,
args: anytype,
) InnerError {
- @setCold(true);
- const src = scope.tree().token_locs[ast_node.firstToken()].start;
+ const tree = scope.tree();
+ const src = tree.tokens.items(.start)[tree.firstToken(ast_node)];
return self.fail(scope, src, format, args);
}
-fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Compilation.ErrorMsg) InnerError {
+pub fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, err_msg: *ErrorMsg) InnerError {
+ @setCold(true);
{
errdefer err_msg.destroy(self.gpa);
try self.failed_decls.ensureCapacity(self.gpa, self.failed_decls.items().len + 1);
try self.failed_files.ensureCapacity(self.gpa, self.failed_files.items().len + 1);
}
switch (scope.tag) {
- .decl => {
- const decl = scope.cast(Scope.DeclAnalysis).?.decl;
- decl.analysis = .sema_failure;
- decl.generation = self.generation;
- self.failed_decls.putAssumeCapacityNoClobber(decl, err_msg);
- },
.block => {
const block = scope.cast(Scope.Block).?;
- if (block.func) |func| {
- func.analysis = .sema_failure;
+ if (block.inlining) |inlining| {
+ if (inlining.shared.caller) |func| {
+ func.state = .sema_failure;
+ } else {
+ block.owner_decl.analysis = .sema_failure;
+ block.owner_decl.generation = self.generation;
+ }
} else {
- block.decl.analysis = .sema_failure;
- block.decl.generation = self.generation;
+ if (block.func) |func| {
+ func.state = .sema_failure;
+ } else {
+ block.owner_decl.analysis = .sema_failure;
+ block.owner_decl.generation = self.generation;
+ }
}
- self.failed_decls.putAssumeCapacityNoClobber(block.decl, err_msg);
+ self.failed_decls.putAssumeCapacityNoClobber(block.owner_decl, err_msg);
},
.gen_zir => {
const gen_zir = scope.cast(Scope.GenZIR).?;
@@ -2985,11 +3604,6 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Com
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
- .zir_module => {
- const zir_module = scope.cast(Scope.ZIRModule).?;
- zir_module.status = .loaded_sema_failure;
- self.failed_files.putAssumeCapacityNoClobber(scope, err_msg);
- },
.file => unreachable,
.container => unreachable,
}
@@ -3029,17 +3643,11 @@ pub fn intAdd(allocator: *Allocator, lhs: Value, rhs: Value) !Value {
result_bigint.add(lhs_bigint, rhs_bigint);
const result_limbs = result_bigint.limbs[0..result_bigint.len];
- const val_payload = if (result_bigint.positive) blk: {
- const val_payload = try allocator.create(Value.Payload.IntBigPositive);
- val_payload.* = .{ .limbs = result_limbs };
- break :blk &val_payload.base;
- } else blk: {
- const val_payload = try allocator.create(Value.Payload.IntBigNegative);
- val_payload.* = .{ .limbs = result_limbs };
- break :blk &val_payload.base;
- };
-
- return Value.initPayload(val_payload);
+ if (result_bigint.positive) {
+ return Value.Tag.int_big_positive.create(allocator, result_limbs);
+ } else {
+ return Value.Tag.int_big_negative.create(allocator, result_limbs);
+ }
}
pub fn intSub(allocator: *Allocator, lhs: Value, rhs: Value) !Value {
@@ -3057,95 +3665,98 @@ pub fn intSub(allocator: *Allocator, lhs: Value, rhs: Value) !Value {
result_bigint.sub(lhs_bigint, rhs_bigint);
const result_limbs = result_bigint.limbs[0..result_bigint.len];
- const val_payload = if (result_bigint.positive) blk: {
- const val_payload = try allocator.create(Value.Payload.IntBigPositive);
- val_payload.* = .{ .limbs = result_limbs };
- break :blk &val_payload.base;
- } else blk: {
- const val_payload = try allocator.create(Value.Payload.IntBigNegative);
- val_payload.* = .{ .limbs = result_limbs };
- break :blk &val_payload.base;
- };
-
- return Value.initPayload(val_payload);
+ if (result_bigint.positive) {
+ return Value.Tag.int_big_positive.create(allocator, result_limbs);
+ } else {
+ return Value.Tag.int_big_negative.create(allocator, result_limbs);
+ }
}
-pub fn floatAdd(self: *Module, scope: *Scope, float_type: Type, src: usize, lhs: Value, rhs: Value) !Value {
- var bit_count = switch (float_type.tag()) {
- .comptime_float => 128,
- else => float_type.floatBits(self.getTarget()),
- };
-
- const allocator = scope.arena();
- const val_payload = switch (bit_count) {
- 16 => {
- return self.fail(scope, src, "TODO Implement addition for soft floats", .{});
+pub fn floatAdd(
+ self: *Module,
+ scope: *Scope,
+ float_type: Type,
+ src: usize,
+ lhs: Value,
+ rhs: Value,
+) !Value {
+ const arena = scope.arena();
+ switch (float_type.tag()) {
+ .f16 => {
+ @panic("TODO add __trunctfhf2 to compiler-rt");
+ //const lhs_val = lhs.toFloat(f16);
+ //const rhs_val = rhs.toFloat(f16);
+ //return Value.Tag.float_16.create(arena, lhs_val + rhs_val);
},
- 32 => blk: {
+ .f32 => {
const lhs_val = lhs.toFloat(f32);
const rhs_val = rhs.toFloat(f32);
- const val_payload = try allocator.create(Value.Payload.Float_32);
- val_payload.* = .{ .val = lhs_val + rhs_val };
- break :blk &val_payload.base;
+ return Value.Tag.float_32.create(arena, lhs_val + rhs_val);
},
- 64 => blk: {
+ .f64 => {
const lhs_val = lhs.toFloat(f64);
const rhs_val = rhs.toFloat(f64);
- const val_payload = try allocator.create(Value.Payload.Float_64);
- val_payload.* = .{ .val = lhs_val + rhs_val };
- break :blk &val_payload.base;
+ return Value.Tag.float_64.create(arena, lhs_val + rhs_val);
},
- 128 => {
- return self.fail(scope, src, "TODO Implement addition for big floats", .{});
+ .f128, .comptime_float, .c_longdouble => {
+ const lhs_val = lhs.toFloat(f128);
+ const rhs_val = rhs.toFloat(f128);
+ return Value.Tag.float_128.create(arena, lhs_val + rhs_val);
},
else => unreachable,
- };
-
- return Value.initPayload(val_payload);
+ }
}
-pub fn floatSub(self: *Module, scope: *Scope, float_type: Type, src: usize, lhs: Value, rhs: Value) !Value {
- var bit_count = switch (float_type.tag()) {
- .comptime_float => 128,
- else => float_type.floatBits(self.getTarget()),
- };
-
- const allocator = scope.arena();
- const val_payload = switch (bit_count) {
- 16 => {
- return self.fail(scope, src, "TODO Implement substraction for soft floats", .{});
+pub fn floatSub(
+ self: *Module,
+ scope: *Scope,
+ float_type: Type,
+ src: usize,
+ lhs: Value,
+ rhs: Value,
+) !Value {
+ const arena = scope.arena();
+ switch (float_type.tag()) {
+ .f16 => {
+ @panic("TODO add __trunctfhf2 to compiler-rt");
+ //const lhs_val = lhs.toFloat(f16);
+ //const rhs_val = rhs.toFloat(f16);
+ //return Value.Tag.float_16.create(arena, lhs_val - rhs_val);
},
- 32 => blk: {
+ .f32 => {
const lhs_val = lhs.toFloat(f32);
const rhs_val = rhs.toFloat(f32);
- const val_payload = try allocator.create(Value.Payload.Float_32);
- val_payload.* = .{ .val = lhs_val - rhs_val };
- break :blk &val_payload.base;
+ return Value.Tag.float_32.create(arena, lhs_val - rhs_val);
},
- 64 => blk: {
+ .f64 => {
const lhs_val = lhs.toFloat(f64);
const rhs_val = rhs.toFloat(f64);
- const val_payload = try allocator.create(Value.Payload.Float_64);
- val_payload.* = .{ .val = lhs_val - rhs_val };
- break :blk &val_payload.base;
+ return Value.Tag.float_64.create(arena, lhs_val - rhs_val);
},
- 128 => {
- return self.fail(scope, src, "TODO Implement substraction for big floats", .{});
+ .f128, .comptime_float, .c_longdouble => {
+ const lhs_val = lhs.toFloat(f128);
+ const rhs_val = rhs.toFloat(f128);
+ return Value.Tag.float_128.create(arena, lhs_val - rhs_val);
},
else => unreachable,
- };
-
- return Value.initPayload(val_payload);
+ }
}
-pub fn simplePtrType(self: *Module, scope: *Scope, src: usize, elem_ty: Type, mutable: bool, size: std.builtin.TypeInfo.Pointer.Size) Allocator.Error!Type {
+pub fn simplePtrType(
+ self: *Module,
+ scope: *Scope,
+ src: usize,
+ elem_ty: Type,
+ mutable: bool,
+ size: std.builtin.TypeInfo.Pointer.Size,
+) Allocator.Error!Type {
if (!mutable and size == .Slice and elem_ty.eql(Type.initTag(.u8))) {
return Type.initTag(.const_slice_u8);
}
// TODO stage1 type inference bug
const T = Type.Tag;
- const type_payload = try scope.arena().create(Type.Payload.PointerSimple);
+ const type_payload = try scope.arena().create(Type.Payload.ElemType);
type_payload.* = .{
.base = .{
.tag = switch (size) {
@@ -3155,7 +3766,7 @@ pub fn simplePtrType(self: *Module, scope: *Scope, src: usize, elem_ty: Type, mu
.Slice => if (mutable) T.mut_slice else T.const_slice,
},
},
- .pointee_type = elem_ty,
+ .data = elem_ty,
};
return Type.initPayload(&type_payload.base);
}
@@ -3177,8 +3788,7 @@ pub fn ptrType(
assert(host_size == 0 or bit_offset < host_size * 8);
// TODO check if type can be represented by simplePtrType
- const type_payload = try scope.arena().create(Type.Payload.Pointer);
- type_payload.* = .{
+ return Type.Tag.pointer.create(scope.arena(), .{
.pointee_type = elem_ty,
.sentinel = sentinel,
.@"align" = @"align",
@@ -3188,95 +3798,73 @@ pub fn ptrType(
.mutable = mutable,
.@"volatile" = @"volatile",
.size = size,
- };
- return Type.initPayload(&type_payload.base);
+ });
}
pub fn optionalType(self: *Module, scope: *Scope, child_type: Type) Allocator.Error!Type {
- return Type.initPayload(switch (child_type.tag()) {
- .single_const_pointer => blk: {
- const payload = try scope.arena().create(Type.Payload.PointerSimple);
- payload.* = .{
- .base = .{ .tag = .optional_single_const_pointer },
- .pointee_type = child_type.elemType(),
- };
- break :blk &payload.base;
- },
- .single_mut_pointer => blk: {
- const payload = try scope.arena().create(Type.Payload.PointerSimple);
- payload.* = .{
- .base = .{ .tag = .optional_single_mut_pointer },
- .pointee_type = child_type.elemType(),
- };
- break :blk &payload.base;
- },
- else => blk: {
- const payload = try scope.arena().create(Type.Payload.Optional);
- payload.* = .{
- .child_type = child_type,
- };
- break :blk &payload.base;
- },
- });
+ switch (child_type.tag()) {
+ .single_const_pointer => return Type.Tag.optional_single_const_pointer.create(
+ scope.arena(),
+ child_type.elemType(),
+ ),
+ .single_mut_pointer => return Type.Tag.optional_single_mut_pointer.create(
+ scope.arena(),
+ child_type.elemType(),
+ ),
+ else => return Type.Tag.optional.create(scope.arena(), child_type),
+ }
}
-pub fn arrayType(self: *Module, scope: *Scope, len: u64, sentinel: ?Value, elem_type: Type) Allocator.Error!Type {
+pub fn arrayType(
+ self: *Module,
+ scope: *Scope,
+ len: u64,
+ sentinel: ?Value,
+ elem_type: Type,
+) Allocator.Error!Type {
if (elem_type.eql(Type.initTag(.u8))) {
if (sentinel) |some| {
if (some.eql(Value.initTag(.zero))) {
- const payload = try scope.arena().create(Type.Payload.Array_u8_Sentinel0);
- payload.* = .{
- .len = len,
- };
- return Type.initPayload(&payload.base);
+ return Type.Tag.array_u8_sentinel_0.create(scope.arena(), len);
}
} else {
- const payload = try scope.arena().create(Type.Payload.Array_u8);
- payload.* = .{
- .len = len,
- };
- return Type.initPayload(&payload.base);
+ return Type.Tag.array_u8.create(scope.arena(), len);
}
}
if (sentinel) |some| {
- const payload = try scope.arena().create(Type.Payload.ArraySentinel);
- payload.* = .{
+ return Type.Tag.array_sentinel.create(scope.arena(), .{
.len = len,
.sentinel = some,
.elem_type = elem_type,
- };
- return Type.initPayload(&payload.base);
+ });
}
- const payload = try scope.arena().create(Type.Payload.Array);
- payload.* = .{
+ return Type.Tag.array.create(scope.arena(), .{
.len = len,
.elem_type = elem_type,
- };
- return Type.initPayload(&payload.base);
+ });
}
-pub fn errorUnionType(self: *Module, scope: *Scope, error_set: Type, payload: Type) Allocator.Error!Type {
+pub fn errorUnionType(
+ self: *Module,
+ scope: *Scope,
+ error_set: Type,
+ payload: Type,
+) Allocator.Error!Type {
assert(error_set.zigTypeTag() == .ErrorSet);
if (error_set.eql(Type.initTag(.anyerror)) and payload.eql(Type.initTag(.void))) {
return Type.initTag(.anyerror_void_error_union);
}
- const result = try scope.arena().create(Type.Payload.ErrorUnion);
- result.* = .{
+ return Type.Tag.error_union.create(scope.arena(), .{
.error_set = error_set,
.payload = payload,
- };
- return Type.initPayload(&result.base);
+ });
}
pub fn anyframeType(self: *Module, scope: *Scope, return_type: Type) Allocator.Error!Type {
- const result = try scope.arena().create(Type.Payload.AnyFrame);
- result.* = .{
- .return_type = return_type,
- };
- return Type.initPayload(&result.base);
+ return Type.Tag.anyframe_T.create(scope.arena(), return_type);
}
pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {
@@ -3284,7 +3872,7 @@ pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {
const source = zir_module.getSource(self) catch @panic("dumpInst failed to get source");
const loc = std.zig.findLineColumn(source, inst.src);
if (inst.tag == .constant) {
- std.debug.print("constant ty={} val={} src={}:{}:{}\n", .{
+ std.debug.print("constant ty={} val={} src={s}:{d}:{d}\n", .{
inst.ty,
inst.castTag(.constant).?.val,
zir_module.subFilePath(),
@@ -3292,7 +3880,7 @@ pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {
loc.column + 1,
});
} else if (inst.deaths == 0) {
- std.debug.print("{} ty={} src={}:{}:{}\n", .{
+ std.debug.print("{s} ty={} src={s}:{d}:{d}\n", .{
@tagName(inst.tag),
inst.ty,
zir_module.subFilePath(),
@@ -3300,7 +3888,7 @@ pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {
loc.column + 1,
});
} else {
- std.debug.print("{} ty={} deaths={b} src={}:{}:{}\n", .{
+ std.debug.print("{s} ty={} deaths={b} src={s}:{d}:{d}\n", .{
@tagName(inst.tag),
inst.ty,
inst.deaths,
@@ -3314,6 +3902,7 @@ pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {
pub const PanicId = enum {
unreach,
unwrap_null,
+ unwrap_errunion,
};
pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic_id: PanicId) !void {
@@ -3330,27 +3919,32 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
};
const ok_body: ir.Body = .{
- .instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the brvoid.
+ .instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the br_void.
};
- const brvoid = try parent_block.arena.create(Inst.BrVoid);
- brvoid.* = .{
+ const br_void = try parent_block.arena.create(Inst.BrVoid);
+ br_void.* = .{
.base = .{
- .tag = .brvoid,
+ .tag = .br_void,
.ty = Type.initTag(.noreturn),
.src = ok.src,
},
.block = block_inst,
};
- ok_body.instructions[0] = &brvoid.base;
+ ok_body.instructions[0] = &br_void.base;
var fail_block: Scope.Block = .{
.parent = parent_block,
+ .inst_table = parent_block.inst_table,
.func = parent_block.func,
- .decl = parent_block.decl,
+ .owner_decl = parent_block.owner_decl,
+ .src_decl = parent_block.src_decl,
.instructions = .{},
.arena = parent_block.arena,
+ .inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
+ .branch_quota = parent_block.branch_quota,
};
+
defer fail_block.instructions.deinit(mod.gpa);
_ = try mod.safetyPanic(&fail_block, ok.src, panic_id);
@@ -3386,3 +3980,187 @@ pub fn getTarget(self: Module) Target {
pub fn optimizeMode(self: Module) std.builtin.Mode {
return self.comp.bin_file.options.optimize_mode;
}
+
+pub fn validateVarType(mod: *Module, scope: *Scope, src: usize, ty: Type) !void {
+ if (!ty.isValidVarType(false)) {
+ return mod.fail(scope, src, "variable of type '{}' must be const or comptime", .{ty});
+ }
+}
+
+/// Identifier token -> String (allocated in scope.arena())
+pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
+ const tree = scope.tree();
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+ assert(token_tags[token] == .identifier);
+
+ const ident_name = tree.tokenSlice(token);
+ if (mem.startsWith(u8, ident_name, "@")) {
+ const raw_string = ident_name[1..];
+ var bad_index: usize = undefined;
+ return std.zig.parseStringLiteral(scope.arena(), raw_string, &bad_index) catch |err| switch (err) {
+ error.InvalidCharacter => {
+ const bad_byte = raw_string[bad_index];
+ const src = token_starts[token];
+ return mod.fail(scope, src + 1 + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
+ },
+ else => |e| return e,
+ };
+ }
+ return ident_name;
+}
+
+pub fn emitBackwardBranch(mod: *Module, block: *Scope.Block, src: usize) !void {
+ const shared = block.inlining.?.shared;
+ shared.branch_count += 1;
+ if (shared.branch_count > block.branch_quota.*) {
+ // TODO show the "called from here" stack
+ return mod.fail(&block.base, src, "evaluation exceeded {d} backwards branches", .{
+ block.branch_quota.*,
+ });
+ }
+}
+
+pub fn namedFieldPtr(
+ mod: *Module,
+ scope: *Scope,
+ src: usize,
+ object_ptr: *Inst,
+ field_name: []const u8,
+ field_name_src: usize,
+) InnerError!*Inst {
+ const elem_ty = switch (object_ptr.ty.zigTypeTag()) {
+ .Pointer => object_ptr.ty.elemType(),
+ else => return mod.fail(scope, object_ptr.src, "expected pointer, found '{}'", .{object_ptr.ty}),
+ };
+ switch (elem_ty.zigTypeTag()) {
+ .Array => {
+ if (mem.eql(u8, field_name, "len")) {
+ return mod.constInst(scope, src, .{
+ .ty = Type.initTag(.single_const_pointer_to_comptime_int),
+ .val = try Value.Tag.ref_val.create(
+ scope.arena(),
+ try Value.Tag.int_u64.create(scope.arena(), elem_ty.arrayLen()),
+ ),
+ });
+ } else {
+ return mod.fail(
+ scope,
+ field_name_src,
+ "no member named '{s}' in '{}'",
+ .{ field_name, elem_ty },
+ );
+ }
+ },
+ .Pointer => {
+ const ptr_child = elem_ty.elemType();
+ switch (ptr_child.zigTypeTag()) {
+ .Array => {
+ if (mem.eql(u8, field_name, "len")) {
+ return mod.constInst(scope, src, .{
+ .ty = Type.initTag(.single_const_pointer_to_comptime_int),
+ .val = try Value.Tag.ref_val.create(
+ scope.arena(),
+ try Value.Tag.int_u64.create(scope.arena(), ptr_child.arrayLen()),
+ ),
+ });
+ } else {
+ return mod.fail(
+ scope,
+ field_name_src,
+ "no member named '{s}' in '{}'",
+ .{ field_name, elem_ty },
+ );
+ }
+ },
+ else => {},
+ }
+ },
+ .Type => {
+ _ = try mod.resolveConstValue(scope, object_ptr);
+ const result = try mod.analyzeDeref(scope, src, object_ptr, object_ptr.src);
+ const val = result.value().?;
+ const child_type = try val.toType(scope.arena());
+ switch (child_type.zigTypeTag()) {
+ .ErrorSet => {
+ // TODO resolve inferred error sets
+ const entry = if (val.castTag(.error_set)) |payload|
+ (payload.data.fields.getEntry(field_name) orelse
+ return mod.fail(scope, src, "no error named '{s}' in '{}'", .{ field_name, child_type })).*
+ else
+ try mod.getErrorValue(field_name);
+
+ const result_type = if (child_type.tag() == .anyerror)
+ try Type.Tag.error_set_single.create(scope.arena(), entry.key)
+ else
+ child_type;
+
+ return mod.constInst(scope, src, .{
+ .ty = try mod.simplePtrType(scope, src, result_type, false, .One),
+ .val = try Value.Tag.ref_val.create(
+ scope.arena(),
+ try Value.Tag.@"error".create(scope.arena(), .{
+ .name = entry.key,
+ .value = entry.value,
+ }),
+ ),
+ });
+ },
+ .Struct => {
+ const container_scope = child_type.getContainerScope();
+ if (mod.lookupDeclName(&container_scope.base, field_name)) |decl| {
+ // TODO if !decl.is_pub and inDifferentFiles() "{} is private"
+ return mod.analyzeDeclRef(scope, src, decl);
+ }
+
+ if (container_scope.file_scope == mod.root_scope) {
+ return mod.fail(scope, src, "root source file has no member called '{s}'", .{field_name});
+ } else {
+ return mod.fail(scope, src, "container '{}' has no member called '{s}'", .{ child_type, field_name });
+ }
+ },
+ else => return mod.fail(scope, src, "type '{}' does not support field access", .{child_type}),
+ }
+ },
+ else => {},
+ }
+ return mod.fail(scope, src, "type '{}' does not support field access", .{elem_ty});
+}
+
+pub fn elemPtr(
+ mod: *Module,
+ scope: *Scope,
+ src: usize,
+ array_ptr: *Inst,
+ elem_index: *Inst,
+) InnerError!*Inst {
+ const elem_ty = switch (array_ptr.ty.zigTypeTag()) {
+ .Pointer => array_ptr.ty.elemType(),
+ else => return mod.fail(scope, array_ptr.src, "expected pointer, found '{}'", .{array_ptr.ty}),
+ };
+ if (!elem_ty.isIndexable()) {
+ return mod.fail(scope, src, "array access of non-array type '{}'", .{elem_ty});
+ }
+
+ if (elem_ty.isSinglePointer() and elem_ty.elemType().zigTypeTag() == .Array) {
+ // we have to deref the ptr operand to get the actual array pointer
+ const array_ptr_deref = try mod.analyzeDeref(scope, src, array_ptr, array_ptr.src);
+ if (array_ptr_deref.value()) |array_ptr_val| {
+ if (elem_index.value()) |index_val| {
+ // Both array pointer and index are compile-time known.
+ const index_u64 = index_val.toUnsignedInt();
+ // @intCast here because it would have been impossible to construct a value that
+ // required a larger index.
+ const elem_ptr = try array_ptr_val.elemPtr(scope.arena(), @intCast(usize, index_u64));
+ const pointee_type = elem_ty.elemType().elemType();
+
+ return mod.constInst(scope, src, .{
+ .ty = try Type.Tag.single_const_pointer.create(scope.arena(), pointee_type),
+ .val = elem_ptr,
+ });
+ }
+ }
+ }
+
+ return mod.fail(scope, src, "TODO implement more analyze elemptr", .{});
+}