aboutsummaryrefslogtreecommitdiff
path: root/src/Zcu.zig
diff options
context:
space:
mode:
authormlugg <mlugg@mlugg.co.uk>2025-05-04 17:02:25 +0100
committermlugg <mlugg@mlugg.co.uk>2025-05-18 17:37:02 +0100
commit37a9a4e0f16c1df8de3a4add3a9566b24f024a95 (patch)
tree0fc8f1fc15193e8e3c3ccd5e97408cef7372f394 /src/Zcu.zig
parentd32829e053af2a0f382d4d692ede85c176c9f803 (diff)
downloadzig-37a9a4e0f16c1df8de3a4add3a9566b24f024a95.tar.gz
zig-37a9a4e0f16c1df8de3a4add3a9566b24f024a95.zip
compiler: refactor `Zcu.File` and path representation
This commit makes some big changes to how we track state for Zig source files. In particular, it changes: * How `File` tracks its path on-disk * How AstGen discovers files * How file-level errors are tracked * How `builtin.zig` files and modules are created The original motivation here was to address incremental compilation bugs with the handling of files, such as #22696. To fix this, a few changes are necessary. Just like declarations may become unreferenced on an incremental update, meaning we suppress analysis errors associated with them, it is also possible for all imports of a file to be removed on an incremental update, in which case file-level errors for that file should be suppressed. As such, after AstGen, the compiler must traverse files (starting from analysis roots) and discover the set of "live files" for this update. Additionally, the compiler's previous handling of retryable file errors was not very good; the source location the error was reported as was based only on the first discovered import of that file. This source location also disappeared on future incremental updates. So, as a part of the file traversal above, we also need to figure out the source locations of imports which errors should be reported against. Another observation I made is that the "file exists in multiple modules" error was not implemented in a particularly good way (I get to say that because I wrote it!). It was subject to races, where the order in which different imports of a file were discovered affects both how errors are printed, and which module the file is arbitrarily assigned, with the latter in turn affecting which other files are considered for import. The thing I realised here is that while the AstGen worker pool is running, we cannot know for sure which module(s) a file is in; we could always discover an import later which changes the answer. So, here's how the AstGen workers have changed. 
We initially ensure that `zcu.import_table` contains the root files for all modules in this Zcu, even if we don't know any imports for them yet. Then, the AstGen workers do not need to be aware of modules. Instead, they simply ignore module imports, and only spin off more workers when they see a by-path import. During AstGen, we can't use module-root-relative paths, since we don't know which modules files are in; but we don't want to unnecessarily use absolute paths either, because those are non-portable and can make `error.NameTooLong` more likely. As such, I have introduced a new abstraction, `Compilation.Path`. This type is a way of representing a filesystem path which has a *canonical form*. The path is represented relative to one of a few special directories: the lib directory, the global cache directory, or the local cache directory. As a fallback, we use absolute (or cwd-relative on WASI) paths. This is kind of similar to `std.Build.Cache.Path` with a pre-defined list of possible `std.Build.Cache.Directory`, but has stricter canonicalization rules based on path resolution to make sure deduplicating files works properly. A `Compilation.Path` can be trivially converted to a `std.Build.Cache.Path` from a `Compilation`, but is smaller, has a canonical form, and has a digest which will be consistent across different compiler processes with the same lib and cache directories (important when we serialize incremental compilation state in the future). `Zcu.File` and `Zcu.EmbedFile` both contain a `Compilation.Path`, which is used to access the file on-disk; module-relative sub paths are used quite rarely (`EmbedFile` doesn't even have one now for simplicity). After the AstGen workers all complete, we know that any file which might be imported is definitely in `import_table` and up-to-date. So, we perform a single-threaded graph traversal; similar to the role `resolveReferences` plays for `AnalUnit`s, but for files instead. 
We figure out which files are alive, and which module each file is in. If a file turns out to be in multiple modules, we set a field on `Zcu` to indicate this error. If a file is in a different module to a prior update, we set a flag instructing `updateZirRefs` to invalidate all dependencies on the file. This traversal also discovers "import errors"; these are errors associated with a specific `@import`. With Zig's current design, there is only one possible error here: "import outside of module root". This must be identified during this traversal instead of during AstGen, because it depends on which module the file is in. I tried also representing "module not found" errors in this same way, but it turns out to be much more useful to report those in Sema, because of use cases like optional dependencies where a module import is behind a comptime-known build option. For simplicity, `failed_files` now just maps to `?[]u8`, since the source location is always the whole file. In fact, this allows removing `LazySrcLoc.Offset.entire_file` completely, slightly simplifying some error reporting logic. File-level errors are now directly built in the `std.zig.ErrorBundle.Wip`. If the payload is not `null`, it is the message for a retryable error (i.e. an error loading the source file), and will be reported with a "file imported here" note pointing to the import site discovered during the single-threaded file traversal. The last piece of fallout here is how `Builtin` works. Rather than constructing "builtin" modules when creating `Package.Module`s, they are now constructed on-the-fly by `Zcu`. The map `Zcu.builtin_modules` maps from digests to `*Package.Module`s. These digests are abstract hashes of the `Builtin` value; i.e. all of the options which are placed into "builtin.zig". During the file traversal, we populate `builtin_modules` as needed, so that when we see these imports in Sema, we just grab the relevant entry from this map. 
This eliminates a bunch of awkward state tracking during construction of the module graph. It's also now clearer exactly what options the builtin module has, since previously it inherited some options arbitrarily from the first-created module with that "builtin" module! The user-visible effects of this commit are: * retryable file errors are now consistently reported against the whole file, with a note pointing to a live import of that file * some theoretical bugs where imports are wrongly considered distinct (when the import path moves out of the cwd and then back in) are fixed * some consistency issues with how file-level errors are reported are fixed; these errors will now always be printed in the same order regardless of how the AstGen pass assigns file indices * incremental updates do not print retryable file errors differently between updates or depending on file structure/contents * incremental updates support files changing modules * incremental updates support files becoming unreferenced Resolves: #22696
Diffstat (limited to 'src/Zcu.zig')
-rw-r--r--src/Zcu.zig703
1 files changed, 441 insertions, 262 deletions
diff --git a/src/Zcu.zig b/src/Zcu.zig
index 584eb8ca3d..59adfbe025 100644
--- a/src/Zcu.zig
+++ b/src/Zcu.zig
@@ -72,9 +72,9 @@ sema_prog_node: std.Progress.Node = std.Progress.Node.none,
codegen_prog_node: std.Progress.Node = std.Progress.Node.none,
/// Used by AstGen worker to load and store ZIR cache.
-global_zir_cache: Compilation.Directory,
+global_zir_cache: Cache.Directory,
/// Used by AstGen worker to load and store ZIR cache.
-local_zir_cache: Compilation.Directory,
+local_zir_cache: Cache.Directory,
/// This is where all `Export` values are stored. Not all values here are necessarily valid exports;
/// to enumerate all exports, `single_exports` and `multi_exports` must be consulted.
@@ -93,27 +93,72 @@ multi_exports: std.AutoArrayHashMapUnmanaged(AnalUnit, extern struct {
len: u32,
}) = .{},
+/// Key is the digest returned by `Builtin.hash`; value is the corresponding module.
+builtin_modules: std.AutoArrayHashMapUnmanaged(Cache.BinDigest, *Package.Module) = .empty,
+
+/// Populated as soon as the `Compilation` is created. Guaranteed to contain all modules, even builtin ones.
+/// Modules whose root file is not a Zig or ZON file have the value `.none`.
+module_roots: std.AutoArrayHashMapUnmanaged(*Package.Module, File.Index.Optional) = .empty,
+
/// The set of all the Zig source files in the Zig Compilation Unit. Tracked in
/// order to iterate over it and check which source files have been modified on
/// the file system when an update is requested, as well as to cache `@import`
/// results.
///
-/// Keys are fully resolved file paths. This table owns the keys and values.
+/// Always accessed through `ImportTableAdapter`, where keys are fully resolved
+/// file paths in order to ensure files are properly deduplicated. This table owns
+/// the keys and values.
///
/// Protected by Compilation's mutex.
///
/// Not serialized. This state is reconstructed during the first call to
/// `Compilation.update` of the process for a given `Compilation`.
-///
-/// Indexes correspond 1:1 to `files`.
-import_table: std.StringArrayHashMapUnmanaged(File.Index) = .empty,
+import_table: std.ArrayHashMapUnmanaged(
+ File.Index,
+ void,
+ struct {
+ pub const hash = @compileError("all accesses should be through ImportTableAdapter");
+ pub const eql = @compileError("all accesses should be through ImportTableAdapter");
+ },
+ true, // This is necessary! Without it, the map tries to use its Context to rehash. #21918
+) = .empty,
+
+/// The set of all files in `import_table` which are "alive" this update, meaning
+/// they are reachable by traversing imports starting from an analysis root. This
+/// is usually all files in `import_table`, but some could be omitted if an incremental
+/// update removes an import, or if a module specified on the CLI is never imported.
+/// Reconstructed on every update, after AstGen and before Sema.
+/// Value is why the file is alive.
+alive_files: std.AutoArrayHashMapUnmanaged(File.Index, File.Reference) = .empty,
+
+/// If this is populated, a "file exists in multiple modules" error should be emitted.
+/// This causes file errors to not be shown, because we don't really know which files
+/// should be alive (because the user has messed up their imports somewhere!).
+/// Cleared and recomputed every update, after AstGen and before Sema.
+multi_module_err: ?struct {
+ file: File.Index,
+ modules: [2]*Package.Module,
+ refs: [2]File.Reference,
+} = null,
/// The set of all the files which have been loaded with `@embedFile` in the Module.
/// We keep track of this in order to iterate over it and check which files have been
/// modified on the file system when an update is requested, as well as to cache
/// `@embedFile` results.
-/// Keys are fully resolved file paths. This table owns the keys and values.
-embed_table: std.StringArrayHashMapUnmanaged(*EmbedFile) = .empty,
+///
+/// Like `import_table`, this is accessed through `EmbedTableAdapter`, so that it is keyed
+/// on the `Compilation.Path` of the `EmbedFile`.
+///
+/// This table owns all of the `*EmbedFile` memory, which is allocated into gpa.
+embed_table: std.ArrayHashMapUnmanaged(
+ *EmbedFile,
+ void,
+ struct {
+ pub const hash = @compileError("all accesses should be through EmbedTableAdapter");
+ pub const eql = @compileError("all accesses should be through EmbedTableAdapter");
+ },
+ true, // This is necessary! Without it, the map tries to use its Context to rehash. #21918
+) = .empty,
/// Stores all Type and Value objects.
/// The idea is that this will be periodically garbage-collected, but such logic
@@ -147,9 +192,41 @@ compile_logs: std.AutoArrayHashMapUnmanaged(AnalUnit, extern struct {
}) = .empty,
compile_log_lines: std.ArrayListUnmanaged(CompileLogLine) = .empty,
free_compile_log_lines: std.ArrayListUnmanaged(CompileLogLine.Index) = .empty,
-/// Using a map here for consistency with the other fields here.
-/// The ErrorMsg memory is owned by the `File`, using Module's general purpose allocator.
-failed_files: std.AutoArrayHashMapUnmanaged(*File, ?*ErrorMsg) = .empty,
+/// This tracks files which triggered errors when generating AST/ZIR/ZOIR.
+/// If not `null`, the value is a retryable error (the file status is guaranteed
+/// to be `.retryable_failure`). Otherwise, the file status is `.astgen_failure`
+/// or `.success`, and there are ZIR/ZOIR errors which should be printed.
+/// We just store a `[]u8` instead of a full `*ErrorMsg`, because the source
+/// location is always the entire file. The `[]u8` memory is owned by the map
+/// and allocated into `gpa`.
+failed_files: std.AutoArrayHashMapUnmanaged(File.Index, ?[]u8) = .empty,
+/// AstGen is not aware of modules, and so cannot determine whether an import
+/// string makes sense. That is the job of a traversal after AstGen.
+///
+/// There are several ways in which an import can fail:
+///
+/// * It is an import of a file which does not exist. This case is not handled
+/// by this field, but with a `failed_files` entry on the *imported* file.
+/// * It is an import of a module which does not exist in the current module's
+/// dependency table. This happens at `Sema` time, so is not tracked by this
+/// field.
+/// * It is an import which reaches outside of the current module's root
+/// directory. This is tracked by this field.
+/// * It is an import which reaches into an "illegal import directory". Right now,
+/// the only such directory is 'global_cache/b/', but in general, these are
+/// directories the compiler treats specially. This is tracked by this field.
+///
+/// This is a flat array containing all of the relevant errors. It is cleared and
+/// recomputed on every update. The errors here are fatal, i.e. they block any
+/// semantic analysis this update.
+///
+/// Allocated into gpa.
+failed_imports: std.ArrayListUnmanaged(struct {
+ file_index: File.Index,
+ import_string: Zir.NullTerminatedString,
+ import_token: Ast.TokenIndex,
+ kind: enum { file_outside_module_root, illegal_zig_import },
+}) = .empty,
failed_exports: std.AutoArrayHashMapUnmanaged(Export.Index, *ErrorMsg) = .empty,
/// If analysis failed due to a cimport error, the corresponding Clang errors
/// are stored here.
@@ -235,6 +312,32 @@ generation: u32 = 0,
pub const PerThread = @import("Zcu/PerThread.zig");
+pub const ImportTableAdapter = struct {
+ zcu: *const Zcu,
+ pub fn hash(ctx: ImportTableAdapter, path: Compilation.Path) u32 {
+ _ = ctx;
+ return @truncate(std.hash.Wyhash.hash(@intFromEnum(path.root), path.sub_path));
+ }
+ pub fn eql(ctx: ImportTableAdapter, a_path: Compilation.Path, b_file: File.Index, b_index: usize) bool {
+ _ = b_index;
+ const b_path = ctx.zcu.fileByIndex(b_file).path;
+ return a_path.root == b_path.root and mem.eql(u8, a_path.sub_path, b_path.sub_path);
+ }
+};
+
+pub const EmbedTableAdapter = struct {
+ pub fn hash(ctx: EmbedTableAdapter, path: Compilation.Path) u32 {
+ _ = ctx;
+ return @truncate(std.hash.Wyhash.hash(@intFromEnum(path.root), path.sub_path));
+ }
+ pub fn eql(ctx: EmbedTableAdapter, a_path: Compilation.Path, b_file: *EmbedFile, b_index: usize) bool {
+ _ = ctx;
+ _ = b_index;
+ const b_path = b_file.path;
+ return a_path.root == b_path.root and mem.eql(u8, a_path.sub_path, b_path.sub_path);
+ }
+};
+
/// Names of declarations in `std.builtin` whose values are memoized in a `BuiltinDecl.Memoized`.
/// The name must exactly match the declaration name, as comptime logic is used to compute the namespace accesses.
/// Parent namespaces must be before their children in this enum. For instance, `.Type` must be before `.@"Type.Fn"`.
@@ -732,41 +835,61 @@ pub const Namespace = struct {
};
pub const File = struct {
- /// Relative to the owning package's root source directory.
- /// Memory is stored in gpa, owned by File.
- sub_file_path: []const u8,
-
status: enum {
/// We have not yet attempted to load this file.
/// `stat` is not populated and may be `undefined`.
never_loaded,
/// A filesystem access failed. It should be retried on the next update.
- /// There is a `failed_files` entry containing a non-`null` message.
+ /// There is guaranteed to be a `failed_files` entry with at least one message.
+ /// ZIR/ZOIR errors should not be emitted as `zir`/`zoir` is not up-to-date.
/// `stat` is not populated and may be `undefined`.
retryable_failure,
- /// Parsing/AstGen/ZonGen of this file has failed.
- /// There is an error in `zir` or `zoir`.
- /// There is a `failed_files` entry (with a `null` message).
+ /// This file has failed parsing, AstGen, or ZonGen.
+ /// There is guaranteed to be a `failed_files` entry, which may or may not have messages.
+ /// ZIR/ZOIR errors *should* be emitted as `zir`/`zoir` is up-to-date.
/// `stat` is populated.
astgen_failure,
/// Parsing and AstGen/ZonGen of this file has succeeded.
+ /// There may still be a `failed_files` entry, e.g. for non-fatal AstGen errors.
/// `stat` is populated.
success,
},
/// Whether this is populated depends on `status`.
stat: Cache.File.Stat,
+ /// Whether this file is the generated file of a "builtin" module. This matters because those
+ /// files are generated and stored in-memory rather than being read off-disk. The rest of the
+ /// pipeline generally shouldn't care about this.
+ is_builtin: bool,
+
+ /// The path of this file. It is important that this path has a "canonical form" because files
+ /// are deduplicated based on path; `Compilation.Path` guarantees this. Owned by this `File`,
+ /// allocated into `gpa`.
+ path: Compilation.Path,
+
source: ?[:0]const u8,
tree: ?Ast,
zir: ?Zir,
zoir: ?Zoir,
/// Module that this file is a part of, managed externally.
- mod: *Package.Module,
- /// Whether this file is a part of multiple packages. This is an error condition which will be reported after AstGen.
- multi_pkg: bool = false,
- /// List of references to this file, used for multi-package errors.
- references: std.ArrayListUnmanaged(File.Reference) = .empty,
+ /// This is initially `null`. After AstGen, a pass is run to determine which module each
+ /// file belongs to, at which point this field is set. It is never set to `null` again;
+ /// this is so that if the file starts belonging to a different module instead, we can
+ /// tell, and invalidate dependencies as needed (see `module_changed`).
+ /// During semantic analysis, this is always non-`null` for alive files (i.e. those which
+ /// have imports targeting them).
+ mod: ?*Package.Module,
+ /// Relative to the root directory of `mod`. If `mod == null`, this field is `undefined`.
+ /// This memory is managed externally and must not be directly freed.
+ /// Its lifetime is at least equal to that of this `File`.
+ sub_file_path: []const u8,
+
+ /// If this file's module identity changes on an incremental update, this flag is set to signal
+ /// to `Zcu.updateZirRefs` that all references to this file must be invalidated. This matters
+ /// because changing your module changes things like your optimization mode and codegen flags,
+ /// so everything needs to be re-done. `updateZirRefs` is responsible for resetting this flag.
+ module_changed: bool,
/// The ZIR for this file from the last update with no file failures. As such, this ZIR is never
/// failed (although it may have compile errors).
@@ -777,7 +900,7 @@ pub const File = struct {
///
/// In other words, if `TrackedInst`s are tied to ZIR other than what's in the `zir` field, this
/// field is populated with that old ZIR.
- prev_zir: ?*Zir = null,
+ prev_zir: ?*Zir,
/// This field serves a similar purpose to `prev_zir`, but for ZOIR. However, since we do not
/// need to map old ZOIR to new ZOIR -- instead only invalidating dependencies if the ZOIR
@@ -785,27 +908,42 @@ pub const File = struct {
///
/// When `zoir` is updated, this field is set to `true`. In `updateZirRefs`, if this is `true`,
/// we invalidate the corresponding `zon_file` dependency, and reset it to `false`.
- zoir_invalidated: bool = false,
+ zoir_invalidated: bool,
+
+ pub const Path = struct {
+ root: enum {
+ cwd,
+ fs_root,
+ local_cache,
+ global_cache,
+ lib_dir,
+ },
+ };
/// A single reference to a file.
pub const Reference = union(enum) {
- /// The file is imported directly (i.e. not as a package) with @import.
+ analysis_root: *Package.Module,
import: struct {
- file: File.Index,
- token: Ast.TokenIndex,
+ importer: Zcu.File.Index,
+ tok: Ast.TokenIndex,
+ /// If the file is imported as the root of a module, this is that module.
+ /// `null` means the file was imported directly by path.
+ module: ?*Package.Module,
},
- /// The file is the root of a module.
- root: *Package.Module,
};
pub fn getMode(self: File) Ast.Mode {
- if (std.mem.endsWith(u8, self.sub_file_path, ".zon")) {
+ // We never create a `File` whose path doesn't give a mode.
+ return modeFromPath(self.path.sub_path).?;
+ }
+
+ pub fn modeFromPath(path: []const u8) ?Ast.Mode {
+ if (std.mem.endsWith(u8, path, ".zon")) {
return .zon;
- } else if (std.mem.endsWith(u8, self.sub_file_path, ".zig")) {
+ } else if (std.mem.endsWith(u8, path, ".zig")) {
return .zig;
} else {
- // `Module.importFile` rejects all other extensions
- unreachable;
+ return null;
}
}
@@ -842,15 +980,18 @@ pub const File = struct {
stat: Cache.File.Stat,
};
- pub fn getSource(file: *File, gpa: Allocator) !Source {
+ pub fn getSource(file: *File, zcu: *const Zcu) !Source {
+ const gpa = zcu.gpa;
+
if (file.source) |source| return .{
.bytes = source,
.stat = file.stat,
};
- // Keep track of inode, file size, mtime, hash so we can detect which files
- // have been modified when an incremental update is requested.
- var f = try file.mod.root.openFile(file.sub_file_path, .{});
+ var f = f: {
+ const dir, const sub_path = file.path.openInfo(zcu.comp.dirs);
+ break :f try dir.openFile(sub_path, .{});
+ };
defer f.close();
const stat = try f.stat();
@@ -882,28 +1023,14 @@ pub const File = struct {
};
}
- pub fn getTree(file: *File, gpa: Allocator) !*const Ast {
+ pub fn getTree(file: *File, zcu: *const Zcu) !*const Ast {
if (file.tree) |*tree| return tree;
- const source = try file.getSource(gpa);
- file.tree = try .parse(gpa, source.bytes, file.getMode());
+ const source = try file.getSource(zcu);
+ file.tree = try .parse(zcu.gpa, source.bytes, file.getMode());
return &file.tree.?;
}
- pub fn getZoir(file: *File, zcu: *Zcu) !*const Zoir {
- if (file.zoir) |*zoir| return zoir;
-
- const tree = file.tree.?;
- assert(tree.mode == .zon);
-
- file.zoir = try ZonGen.generate(zcu.gpa, tree, .{});
- if (file.zoir.?.hasCompileErrors()) {
- try zcu.failed_files.putNoClobber(zcu.gpa, file, null);
- return error.AnalysisFail;
- }
- return &file.zoir.?;
- }
-
pub fn fullyQualifiedNameLen(file: File) usize {
const ext = std.fs.path.extension(file.sub_file_path);
return file.sub_file_path.len - ext.len;
@@ -937,85 +1064,49 @@ pub const File = struct {
return ip.getOrPutTrailingString(gpa, pt.tid, @intCast(slice[0].len), .no_embedded_nulls);
}
- pub fn fullPath(file: File, ally: Allocator) ![]u8 {
- return file.mod.root.joinString(ally, file.sub_file_path);
- }
-
- pub fn dumpSrc(file: *File, src: LazySrcLoc) void {
- const loc = std.zig.findLineColumn(file.source.bytes, src);
- std.debug.print("{s}:{d}:{d}\n", .{ file.sub_file_path, loc.line + 1, loc.column + 1 });
- }
-
- /// Add a reference to this file during AstGen.
- pub fn addReference(file: *File, zcu: *Zcu, ref: File.Reference) !void {
- // Don't add the same module root twice. Note that since we always add module roots at the
- // front of the references array (see below), this loop is actually O(1) on valid code.
- if (ref == .root) {
- for (file.references.items) |other| {
- switch (other) {
- .root => |r| if (ref.root == r) return,
- else => break, // reached the end of the "is-root" references
- }
- }
- }
-
- switch (ref) {
- // We put root references at the front of the list both to make the above loop fast and
- // to make multi-module errors more helpful (since "root-of" notes are generally more
- // informative than "imported-from" notes). This path is hit very rarely, so the speed
- // of the insert operation doesn't matter too much.
- .root => try file.references.insert(zcu.gpa, 0, ref),
-
- // Other references we'll just put at the end.
- else => try file.references.append(zcu.gpa, ref),
- }
+ pub const Index = InternPool.FileIndex;
- const mod = switch (ref) {
- .import => |import| zcu.fileByIndex(import.file).mod,
- .root => |mod| mod,
- };
- if (mod != file.mod) file.multi_pkg = true;
+ pub fn errorBundleWholeFileSrc(
+ file: *File,
+ zcu: *const Zcu,
+ eb: *std.zig.ErrorBundle.Wip,
+ ) !std.zig.ErrorBundle.SourceLocationIndex {
+ return eb.addSourceLocation(.{
+ .src_path = try eb.printString("{}", .{file.path.fmt(zcu.comp)}),
+ .span_start = 0,
+ .span_main = 0,
+ .span_end = 0,
+ .line = 0,
+ .column = 0,
+ .source_line = 0,
+ });
}
-
- /// Mark this file and every file referenced by it as multi_pkg and report an
- /// astgen_failure error for them. AstGen must have completed in its entirety.
- pub fn recursiveMarkMultiPkg(file: *File, pt: Zcu.PerThread) void {
- file.multi_pkg = true;
- file.status = .astgen_failure;
-
- // We can only mark children as failed if the ZIR is loaded, which may not
- // be the case if there were other astgen failures in this file
- if (file.zir == null) return;
-
- const imports_index = file.zir.?.extra[@intFromEnum(Zir.ExtraIndex.imports)];
- if (imports_index == 0) return;
- const extra = file.zir.?.extraData(Zir.Inst.Imports, imports_index);
-
- var extra_index = extra.end;
- for (0..extra.data.imports_len) |_| {
- const item = file.zir.?.extraData(Zir.Inst.Imports.Item, extra_index);
- extra_index = item.end;
-
- const import_path = file.zir.?.nullTerminatedString(item.data.name);
- if (mem.eql(u8, import_path, "builtin")) continue;
-
- const res = pt.importFile(file, import_path) catch continue;
- if (!res.is_pkg and !res.file.multi_pkg) {
- res.file.recursiveMarkMultiPkg(pt);
- }
- }
+ pub fn errorBundleTokenSrc(
+ file: *File,
+ tok: Ast.TokenIndex,
+ zcu: *const Zcu,
+ eb: *std.zig.ErrorBundle.Wip,
+ ) !std.zig.ErrorBundle.SourceLocationIndex {
+ const source = try file.getSource(zcu);
+ const tree = try file.getTree(zcu);
+ const start = tree.tokenStart(tok);
+ const end = start + tree.tokenSlice(tok).len;
+ const loc = std.zig.findLineColumn(source.bytes, start);
+ return eb.addSourceLocation(.{
+ .src_path = try eb.printString("{}", .{file.path.fmt(zcu.comp)}),
+ .span_start = start,
+ .span_main = start,
+ .span_end = @intCast(end),
+ .line = @intCast(loc.line),
+ .column = @intCast(loc.column),
+ .source_line = try eb.addString(loc.source_line),
+ });
}
-
- pub const Index = InternPool.FileIndex;
};
/// Represents the contents of a file loaded with `@embedFile`.
pub const EmbedFile = struct {
- /// Module that this file is a part of, managed externally.
- owner: *Package.Module,
- /// Relative to the owning module's root directory.
- sub_file_path: InternPool.NullTerminatedString,
-
+ path: Compilation.Path,
/// `.none` means the file was not loaded, so `stat` is undefined.
val: InternPool.Index,
/// If this is `null` and `val` is `.none`, the file has never been loaded.
@@ -1025,7 +1116,7 @@ pub const EmbedFile = struct {
pub const Index = enum(u32) {
_,
pub fn get(idx: Index, zcu: *const Zcu) *EmbedFile {
- return zcu.embed_table.values()[@intFromEnum(idx)];
+ return zcu.embed_table.keys()[@intFromEnum(idx)];
}
};
};
@@ -1103,32 +1194,31 @@ pub const SrcLoc = struct {
pub const Span = Ast.Span;
- pub fn span(src_loc: SrcLoc, gpa: Allocator) !Span {
+ pub fn span(src_loc: SrcLoc, zcu: *const Zcu) !Span {
switch (src_loc.lazy) {
.unneeded => unreachable,
- .entire_file => return Span{ .start = 0, .end = 1, .main = 0 },
.byte_abs => |byte_index| return Span{ .start = byte_index, .end = byte_index + 1, .main = byte_index },
.token_abs => |tok_index| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
.node_abs => |node| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
return tree.nodeToSpan(node);
},
.byte_offset => |byte_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const tok_index = src_loc.baseSrcToken();
const start = tree.tokenStart(tok_index) + byte_off;
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
.token_offset => |tok_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const tok_index = tok_off.toAbsolute(src_loc.baseSrcToken());
const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
@@ -1136,23 +1226,23 @@ pub const SrcLoc = struct {
},
.node_offset => |traced_off| {
const node_off = traced_off.x;
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(node);
},
.node_offset_main_token => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const main_token = tree.nodeMainToken(node);
return tree.tokensToSpan(main_token, main_token, main_token);
},
.node_offset_bin_op => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(node);
},
.node_offset_initializer => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.tokensToSpan(
tree.firstToken(node) - 3,
@@ -1161,7 +1251,7 @@ pub const SrcLoc = struct {
);
},
.node_offset_var_decl_ty => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const full = switch (tree.nodeTag(node)) {
.global_var_decl,
@@ -1183,7 +1273,7 @@ pub const SrcLoc = struct {
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_var_decl_align => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const align_node = if (tree.fullVarDecl(node)) |v|
@@ -1195,7 +1285,7 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(align_node);
},
.node_offset_var_decl_section => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const section_node = if (tree.fullVarDecl(node)) |v|
@@ -1207,7 +1297,7 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(section_node);
},
.node_offset_var_decl_addrspace => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const addrspace_node = if (tree.fullVarDecl(node)) |v|
@@ -1219,7 +1309,7 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(addrspace_node);
},
.node_offset_var_decl_init => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const init_node = switch (tree.nodeTag(node)) {
.global_var_decl,
@@ -1233,14 +1323,14 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(init_node);
},
.node_offset_builtin_call_arg => |builtin_arg| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = builtin_arg.builtin_call_node.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
const params = tree.builtinCallParams(&buf, node).?;
return tree.nodeToSpan(params[builtin_arg.arg_index]);
},
.node_offset_ptrcast_operand => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
var node = node_off.toAbsolute(src_loc.base_node);
while (true) {
@@ -1273,7 +1363,7 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(node);
},
.node_offset_array_access_index => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(tree.nodeData(node).node_and_node[1]);
},
@@ -1282,7 +1372,7 @@ pub const SrcLoc = struct {
.node_offset_slice_end,
.node_offset_slice_sentinel,
=> |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullSlice(node).?;
const part_node = switch (src_loc.lazy) {
@@ -1295,14 +1385,14 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(part_node);
},
.node_offset_call_func => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullCall(&buf, node).?;
return tree.nodeToSpan(full.ast.fn_expr);
},
.node_offset_field_name => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const tok_index = switch (tree.nodeTag(node)) {
@@ -1326,7 +1416,7 @@ pub const SrcLoc = struct {
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_field_name_init => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const tok_index = tree.firstToken(node) - 2;
const start = tree.tokenStart(tok_index);
@@ -1334,18 +1424,18 @@ pub const SrcLoc = struct {
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_deref_ptr => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(node);
},
.node_offset_asm_source => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullAsm(node).?;
return tree.nodeToSpan(full.ast.template);
},
.node_offset_asm_ret_ty => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullAsm(node).?;
const asm_output = full.outputs[0];
@@ -1353,7 +1443,7 @@ pub const SrcLoc = struct {
},
.node_offset_if_cond => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const src_node = switch (tree.nodeTag(node)) {
.if_simple,
@@ -1381,14 +1471,14 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(src_node);
},
.for_input => |for_input| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = for_input.for_node_offset.toAbsolute(src_loc.base_node);
const for_full = tree.fullFor(node).?;
const src_node = for_full.ast.inputs[for_input.input_index];
return tree.nodeToSpan(src_node);
},
.for_capture_from_input => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const input_node = node_off.toAbsolute(src_loc.base_node);
// We have to actually linear scan the whole AST to find the for loop
// that contains this input.
@@ -1429,7 +1519,7 @@ pub const SrcLoc = struct {
} else unreachable;
},
.call_arg => |call_arg| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = call_arg.call_node_offset.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
const call_full = tree.fullCall(buf[0..1], node) orelse {
@@ -1466,7 +1556,7 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(call_full.ast.params[call_arg.arg_index]);
},
.fn_proto_param, .fn_proto_param_type => |fn_proto_param| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = fn_proto_param.fn_proto_node_offset.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
@@ -1494,17 +1584,17 @@ pub const SrcLoc = struct {
unreachable;
},
.node_offset_bin_lhs => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(tree.nodeData(node).node_and_node[0]);
},
.node_offset_bin_rhs => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(tree.nodeData(node).node_and_node[1]);
},
.array_cat_lhs, .array_cat_rhs => |cat| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = cat.array_cat_offset.toAbsolute(src_loc.base_node);
const arr_node = if (src_loc.lazy == .array_cat_lhs)
tree.nodeData(node).node_and_node[0]
@@ -1530,20 +1620,20 @@ pub const SrcLoc = struct {
},
.node_offset_try_operand => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(tree.nodeData(node).node);
},
.node_offset_switch_operand => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
const condition, _ = tree.nodeData(node).node_and_extra;
return tree.nodeToSpan(condition);
},
.node_offset_switch_special_prong => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const switch_node = node_off.toAbsolute(src_loc.base_node);
_, const extra_index = tree.nodeData(switch_node).node_and_extra;
const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
@@ -1560,7 +1650,7 @@ pub const SrcLoc = struct {
},
.node_offset_switch_range => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const switch_node = node_off.toAbsolute(src_loc.base_node);
_, const extra_index = tree.nodeData(switch_node).node_and_extra;
const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
@@ -1580,28 +1670,28 @@ pub const SrcLoc = struct {
} else unreachable;
},
.node_offset_fn_type_align => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return tree.nodeToSpan(full.ast.align_expr.unwrap().?);
},
.node_offset_fn_type_addrspace => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return tree.nodeToSpan(full.ast.addrspace_expr.unwrap().?);
},
.node_offset_fn_type_section => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return tree.nodeToSpan(full.ast.section_expr.unwrap().?);
},
.node_offset_fn_type_cc => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
@@ -1609,14 +1699,14 @@ pub const SrcLoc = struct {
},
.node_offset_fn_type_ret_ty => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return tree.nodeToSpan(full.ast.return_type.unwrap().?);
},
.node_offset_param => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
var first_tok = tree.firstToken(node);
@@ -1631,7 +1721,7 @@ pub const SrcLoc = struct {
);
},
.token_offset_param => |token_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const main_token = tree.nodeMainToken(src_loc.base_node);
const tok_index = token_off.toAbsolute(main_token);
@@ -1648,14 +1738,14 @@ pub const SrcLoc = struct {
},
.node_offset_anyframe_type => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
_, const child_type = tree.nodeData(parent_node).token_and_node;
return tree.nodeToSpan(child_type);
},
.node_offset_lib_name => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, parent_node).?;
@@ -1666,75 +1756,75 @@ pub const SrcLoc = struct {
},
.node_offset_array_type_len => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullArrayType(parent_node).?;
return tree.nodeToSpan(full.ast.elem_count);
},
.node_offset_array_type_sentinel => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullArrayType(parent_node).?;
return tree.nodeToSpan(full.ast.sentinel.unwrap().?);
},
.node_offset_array_type_elem => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullArrayType(parent_node).?;
return tree.nodeToSpan(full.ast.elem_type);
},
.node_offset_un_op => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(tree.nodeData(node).node);
},
.node_offset_ptr_elem => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
return tree.nodeToSpan(full.ast.child_type);
},
.node_offset_ptr_sentinel => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
return tree.nodeToSpan(full.ast.sentinel.unwrap().?);
},
.node_offset_ptr_align => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
return tree.nodeToSpan(full.ast.align_node.unwrap().?);
},
.node_offset_ptr_addrspace => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
return tree.nodeToSpan(full.ast.addrspace_node.unwrap().?);
},
.node_offset_ptr_bitoffset => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
return tree.nodeToSpan(full.ast.bit_range_start.unwrap().?);
},
.node_offset_ptr_hostsize => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
return tree.nodeToSpan(full.ast.bit_range_end.unwrap().?);
},
.node_offset_container_tag => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
switch (tree.nodeTag(parent_node)) {
@@ -1757,7 +1847,7 @@ pub const SrcLoc = struct {
}
},
.node_offset_field_default => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
const full: Ast.full.ContainerField = switch (tree.nodeTag(parent_node)) {
@@ -1768,7 +1858,7 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(full.ast.value_expr.unwrap().?);
},
.node_offset_init_ty => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const parent_node = node_off.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
@@ -1779,7 +1869,7 @@ pub const SrcLoc = struct {
return tree.nodeToSpan(type_expr);
},
.node_offset_store_ptr => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
switch (tree.nodeTag(node)) {
@@ -1806,7 +1896,7 @@ pub const SrcLoc = struct {
}
},
.node_offset_store_operand => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
switch (tree.nodeTag(node)) {
@@ -1833,7 +1923,7 @@ pub const SrcLoc = struct {
}
},
.node_offset_return_operand => |node_off| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = node_off.toAbsolute(src_loc.base_node);
if (tree.nodeTag(node) == .@"return") {
if (tree.nodeData(node).opt_node.unwrap()) |lhs| {
@@ -1847,7 +1937,7 @@ pub const SrcLoc = struct {
.container_field_type,
.container_field_align,
=> |field_idx| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = src_loc.base_node;
var buf: [2]Ast.Node.Index = undefined;
const container_decl = tree.fullContainerDecl(&buf, node) orelse
@@ -1875,7 +1965,7 @@ pub const SrcLoc = struct {
} else unreachable;
},
.tuple_field_type, .tuple_field_init => |field_info| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = src_loc.base_node;
var buf: [2]Ast.Node.Index = undefined;
const container_decl = tree.fullContainerDecl(&buf, node) orelse
@@ -1889,7 +1979,7 @@ pub const SrcLoc = struct {
});
},
.init_elem => |init_elem| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const init_node = init_elem.init_node_offset.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
if (tree.fullArrayInit(&buf, init_node)) |full| {
@@ -1928,7 +2018,7 @@ pub const SrcLoc = struct {
.init_field_dll_import => "dll_import",
else => unreachable,
};
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const node = builtin_call_node.toAbsolute(src_loc.base_node);
var builtin_buf: [2]Ast.Node.Index = undefined;
const args = tree.builtinCallParams(&builtin_buf, node).?;
@@ -1967,7 +2057,7 @@ pub const SrcLoc = struct {
else => unreachable,
};
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
const switch_node = switch_node_offset.toAbsolute(src_loc.base_node);
_, const extra_index = tree.nodeData(switch_node).node_and_extra;
const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
@@ -2062,7 +2152,7 @@ pub const SrcLoc = struct {
}
},
.func_decl_param_comptime => |param_idx| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, src_loc.base_node).?;
var param_it = full.iterate(tree);
@@ -2071,7 +2161,7 @@ pub const SrcLoc = struct {
return tree.tokenToSpan(param.comptime_noalias.?);
},
.func_decl_param_ty => |param_idx| {
- const tree = try src_loc.file_scope.getTree(gpa);
+ const tree = try src_loc.file_scope.getTree(zcu);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, src_loc.base_node).?;
var param_it = full.iterate(tree);
@@ -2100,9 +2190,6 @@ pub const LazySrcLoc = struct {
/// value is being set to this tag.
/// `base_node_inst` is unused.
unneeded,
- /// Means the source location points to an entire file; not any particular
- /// location within the file. `file_scope` union field will be active.
- entire_file,
/// The source location points to a byte offset within a source file,
/// offset from 0. The source file is determined contextually.
byte_abs: u32,
@@ -2521,10 +2608,7 @@ pub const LazySrcLoc = struct {
/// Like `upgrade`, but returns `null` if the source location has been lost across incremental updates.
pub fn upgradeOrLost(lazy: LazySrcLoc, zcu: *Zcu) ?SrcLoc {
- const file, const base_node: Ast.Node.Index = if (lazy.offset == .entire_file) .{
- zcu.fileByIndex(lazy.base_node_inst.resolveFile(&zcu.intern_pool)),
- .root,
- } else resolveBaseNode(lazy.base_node_inst, zcu) orelse return null;
+ const file, const base_node: Ast.Node.Index = resolveBaseNode(lazy.base_node_inst, zcu) orelse return null;
return .{
.file_scope = file,
.base_node = base_node,
@@ -2544,15 +2628,16 @@ pub const LazySrcLoc = struct {
return true;
};
if (lhs_src.file_scope != rhs_src.file_scope) {
- return std.mem.order(
- u8,
- lhs_src.file_scope.sub_file_path,
- rhs_src.file_scope.sub_file_path,
- ).compare(.lt);
+ const lhs_path = lhs_src.file_scope.path;
+ const rhs_path = rhs_src.file_scope.path;
+ if (lhs_path.root != rhs_path.root) {
+ return @intFromEnum(lhs_path.root) < @intFromEnum(rhs_path.root);
+ }
+ return std.mem.order(u8, lhs_path.sub_path, rhs_path.sub_path).compare(.lt);
}
- const lhs_span = try lhs_src.span(zcu.gpa);
- const rhs_span = try rhs_src.span(zcu.gpa);
+ const lhs_span = try lhs_src.span(zcu);
+ const rhs_span = try rhs_src.span(zcu);
return lhs_span.main < rhs_span.main;
}
};
@@ -2583,16 +2668,16 @@ pub fn deinit(zcu: *Zcu) void {
if (zcu.llvm_object) |llvm_object| llvm_object.deinit();
- for (zcu.import_table.keys()) |key| {
- gpa.free(key);
- }
- for (zcu.import_table.values()) |file_index| {
+ zcu.builtin_modules.deinit(gpa);
+ zcu.module_roots.deinit(gpa);
+ for (zcu.import_table.keys()) |file_index| {
pt.destroyFile(file_index);
}
zcu.import_table.deinit(gpa);
+ zcu.alive_files.deinit(gpa);
- for (zcu.embed_table.keys(), zcu.embed_table.values()) |path, embed_file| {
- gpa.free(path);
+ for (zcu.embed_table.keys()) |embed_file| {
+ embed_file.path.deinit(gpa);
gpa.destroy(embed_file);
}
zcu.embed_table.deinit(gpa);
@@ -2610,9 +2695,10 @@ pub fn deinit(zcu: *Zcu) void {
zcu.failed_types.deinit(gpa);
for (zcu.failed_files.values()) |value| {
- if (value) |msg| msg.destroy(gpa);
+ if (value) |msg| gpa.free(msg);
}
zcu.failed_files.deinit(gpa);
+ zcu.failed_imports.deinit(gpa);
for (zcu.failed_exports.values()) |value| {
value.destroy(gpa);
@@ -3404,27 +3490,21 @@ pub fn ensureNavValAnalysisQueued(zcu: *Zcu, nav_id: InternPool.Nav.Index) !void
zcu.nav_val_analysis_queued.putAssumeCapacityNoClobber(nav_id, {});
}
-pub const ImportFileResult = struct {
- file: *File,
- file_index: File.Index,
+pub const ImportResult = struct {
+ /// Whether `file` has been newly created; in other words, whether this is the first import of
+ /// this file. This should only be `true` when importing files during AstGen. After that, all
+ /// files should have already been discovered.
is_new: bool,
- is_pkg: bool,
-};
-pub fn computePathDigest(zcu: *Zcu, mod: *Package.Module, sub_file_path: []const u8) Cache.BinDigest {
- const want_local_cache = mod == zcu.main_mod;
- var path_hash: Cache.HashHelper = .{};
- path_hash.addBytes(build_options.version);
- path_hash.add(builtin.zig_backend);
- if (!want_local_cache) {
- path_hash.addOptionalBytes(mod.root.root_dir.path);
- path_hash.addBytes(mod.root.sub_path);
- }
- path_hash.addBytes(sub_file_path);
- var bin: Cache.BinDigest = undefined;
- path_hash.hasher.final(&bin);
- return bin;
-}
+ /// `file.mod` is not populated by this function, so if `is_new`, then it is `undefined`.
+ file: *Zcu.File,
+ file_index: File.Index,
+
+ /// If this import was a simple file path, this is `null`; the imported file should exist within
+ /// the importer's module. Otherwise, it's the module which the import resolved to. This module
+ /// could match the module of `cur_file`, since a module can depend on itself.
+ module: ?*Package.Module,
+};
/// Delete all the Export objects that are caused by this `AnalUnit`. Re-analysis of
/// this `AnalUnit` will cause them to be re-created (or not).
@@ -3938,15 +4018,7 @@ fn resolveReferencesInner(zcu: *Zcu) !std.AutoHashMapUnmanaged(AnalUnit, ?Resolv
try type_queue.ensureTotalCapacity(gpa, zcu.analysis_roots.len);
for (zcu.analysis_roots.slice()) |mod| {
- // Logic ripped from `Zcu.PerThread.importPkg`.
- // TODO: this is silly, `Module` should just store a reference to its root `File`.
- const resolved_path = try std.fs.path.resolve(gpa, &.{
- mod.root.root_dir.path orelse ".",
- mod.root.sub_path,
- mod.root_src_path,
- });
- defer gpa.free(resolved_path);
- const file = zcu.import_table.get(resolved_path).?;
+ const file = zcu.module_roots.get(mod).?.unwrap() orelse continue;
const root_ty = zcu.fileRootType(file);
if (root_ty == .none) continue;
type_queue.putAssumeCapacityNoClobber(root_ty, null);
@@ -4226,8 +4298,8 @@ fn formatAnalUnit(data: struct { unit: AnalUnit, zcu: *Zcu }, comptime fmt: []co
.@"comptime" => |cu_id| {
const cu = ip.getComptimeUnit(cu_id);
if (cu.zir_index.resolveFull(ip)) |resolved| {
- const file_path = zcu.fileByIndex(resolved.file).sub_file_path;
- return writer.print("comptime(inst=('{s}', %{}) [{}])", .{ file_path, @intFromEnum(resolved.inst), @intFromEnum(cu_id) });
+ const file_path = zcu.fileByIndex(resolved.file).path;
+ return writer.print("comptime(inst=('{}', %{}) [{}])", .{ file_path.fmt(zcu.comp), @intFromEnum(resolved.inst), @intFromEnum(cu_id) });
} else {
return writer.print("comptime(inst=<lost> [{}])", .{@intFromEnum(cu_id)});
}
@@ -4251,8 +4323,8 @@ fn formatDependee(data: struct { dependee: InternPool.Dependee, zcu: *Zcu }, com
const info = ti.resolveFull(ip) orelse {
return writer.writeAll("inst(<lost>)");
};
- const file_path = zcu.fileByIndex(info.file).sub_file_path;
- return writer.print("inst('{s}', %{d})", .{ file_path, @intFromEnum(info.inst) });
+ const file_path = zcu.fileByIndex(info.file).path;
+ return writer.print("inst('{}', %{d})", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst) });
},
.nav_val => |nav| {
const fqn = ip.getNav(nav).fqn;
@@ -4268,30 +4340,26 @@ fn formatDependee(data: struct { dependee: InternPool.Dependee, zcu: *Zcu }, com
else => unreachable,
},
.zon_file => |file| {
- const file_path = zcu.fileByIndex(file).sub_file_path;
- return writer.print("zon_file('{s}')", .{file_path});
+ const file_path = zcu.fileByIndex(file).path;
+ return writer.print("zon_file('{}')", .{file_path.fmt(zcu.comp)});
},
.embed_file => |ef_idx| {
const ef = ef_idx.get(zcu);
- return writer.print("embed_file('{s}')", .{std.fs.path.fmtJoin(&.{
- ef.owner.root.root_dir.path orelse "",
- ef.owner.root.sub_path,
- ef.sub_file_path.toSlice(ip),
- })});
+ return writer.print("embed_file('{}')", .{ef.path.fmt(zcu.comp)});
},
.namespace => |ti| {
const info = ti.resolveFull(ip) orelse {
return writer.writeAll("namespace(<lost>)");
};
- const file_path = zcu.fileByIndex(info.file).sub_file_path;
- return writer.print("namespace('{s}', %{d})", .{ file_path, @intFromEnum(info.inst) });
+ const file_path = zcu.fileByIndex(info.file).path;
+ return writer.print("namespace('{}', %{d})", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst) });
},
.namespace_name => |k| {
const info = k.namespace.resolveFull(ip) orelse {
return writer.print("namespace(<lost>, '{}')", .{k.name.fmt(ip)});
};
- const file_path = zcu.fileByIndex(info.file).sub_file_path;
- return writer.print("namespace('{s}', %{d}, '{}')", .{ file_path, @intFromEnum(info.inst), k.name.fmt(ip) });
+ const file_path = zcu.fileByIndex(info.file).path;
+ return writer.print("namespace('{}', %{d}, '{}')", .{ file_path.fmt(zcu.comp), @intFromEnum(info.inst), k.name.fmt(ip) });
},
.memoized_state => return writer.writeAll("memoized_state"),
}
@@ -4508,3 +4576,114 @@ pub fn codegenFailTypeMsg(zcu: *Zcu, ty_index: InternPool.Index, msg: *ErrorMsg)
zcu.failed_types.putAssumeCapacityNoClobber(ty_index, msg);
return error.CodegenFail;
}
+
+/// Asserts that `zcu.multi_module_err != null`.
+pub fn addFileInMultipleModulesError(
+ zcu: *Zcu,
+ eb: *std.zig.ErrorBundle.Wip,
+) !void {
+ const gpa = zcu.gpa;
+
+ const info = zcu.multi_module_err.?;
+ const file = info.file;
+
+ // error: file exists in modules 'root.foo' and 'root.bar'
+ // note: files must belong to only one module
+ // note: file is imported here
+ // note: which is imported here
+ // note: which is the root of module 'root.foo' imported here
+ // note: file is the root of module 'root.bar' imported here
+
+ const file_src = try zcu.fileByIndex(file).errorBundleWholeFileSrc(zcu, eb);
+ const root_msg = try eb.printString("file exists in modules '{s}' and '{s}'", .{
+ info.modules[0].fully_qualified_name,
+ info.modules[1].fully_qualified_name,
+ });
+
+ var notes: std.ArrayListUnmanaged(std.zig.ErrorBundle.MessageIndex) = .empty;
+ defer notes.deinit(gpa);
+
+ try notes.append(gpa, try eb.addErrorMessage(.{
+ .msg = try eb.addString("files must belong to only one module"),
+ .src_loc = file_src,
+ }));
+
+ try zcu.explainWhyFileIsInModule(eb, &notes, file, info.modules[0], info.refs[0]);
+ try zcu.explainWhyFileIsInModule(eb, &notes, file, info.modules[1], info.refs[1]);
+
+ try eb.addRootErrorMessage(.{
+ .msg = root_msg,
+ .src_loc = file_src,
+ .notes_len = @intCast(notes.items.len),
+ });
+ const notes_start = try eb.reserveNotes(@intCast(notes.items.len));
+ const notes_slice: []std.zig.ErrorBundle.MessageIndex = @ptrCast(eb.extra.items[notes_start..]);
+ @memcpy(notes_slice, notes.items);
+}
+
+fn explainWhyFileIsInModule(
+ zcu: *Zcu,
+ eb: *std.zig.ErrorBundle.Wip,
+ notes_out: *std.ArrayListUnmanaged(std.zig.ErrorBundle.MessageIndex),
+ file: File.Index,
+ in_module: *Package.Module,
+ ref: File.Reference,
+) !void {
+ const gpa = zcu.gpa;
+
+ // error: file is the root of module 'foo'
+ //
+ // error: file is imported here by the root of module 'foo'
+ //
+ // error: file is imported here
+ // note: which is imported here
+ // note: which is imported here by the root of module 'foo'
+
+ var import = switch (ref) {
+ .analysis_root => |mod| {
+ assert(mod == in_module);
+ try notes_out.append(gpa, try eb.addErrorMessage(.{
+ .msg = try eb.printString("file is the root of module '{s}'", .{mod.fully_qualified_name}),
+ .src_loc = try zcu.fileByIndex(file).errorBundleWholeFileSrc(zcu, eb),
+ }));
+ return;
+ },
+ .import => |import| if (import.module) |mod| {
+ assert(mod == in_module);
+ try notes_out.append(gpa, try eb.addErrorMessage(.{
+ .msg = try eb.printString("file is the root of module '{s}'", .{mod.fully_qualified_name}),
+ .src_loc = try zcu.fileByIndex(file).errorBundleWholeFileSrc(zcu, eb),
+ }));
+ return;
+ } else import,
+ };
+
+ var is_first = true;
+ while (true) {
+ const thing: []const u8 = if (is_first) "file" else "which";
+ is_first = false;
+
+ const import_src = try zcu.fileByIndex(import.importer).errorBundleTokenSrc(import.tok, zcu, eb);
+
+ const importer_ref = zcu.alive_files.get(import.importer).?;
+ const importer_root: ?*Package.Module = switch (importer_ref) {
+ .analysis_root => |mod| mod,
+ .import => |i| i.module,
+ };
+
+ if (importer_root) |m| {
+ try notes_out.append(gpa, try eb.addErrorMessage(.{
+ .msg = try eb.printString("{s} is imported here by the root of module '{s}'", .{ thing, m.fully_qualified_name }),
+ .src_loc = import_src,
+ }));
+ return;
+ }
+
+ try notes_out.append(gpa, try eb.addErrorMessage(.{
+ .msg = try eb.printString("{s} is imported here", .{thing}),
+ .src_loc = import_src,
+ }));
+
+ import = importer_ref.import;
+ }
+}