author     Andrew Kelley <andrew@ziglang.org>    2023-02-21 11:43:31 -0500
committer  GitHub <noreply@github.com>           2023-02-21 11:43:31 -0500
commit     7f691b3fe26f623d108a6b2b2018bbc3aa999224 (patch)
tree       81814816cfde6610a4b965d9d3799de3cb3eaccd /src
parent     05da5b32a820c031001098034840940964f41a81 (diff)
parent     f94cbab3acc3b31464f45872c1f700874eecb23e (diff)
Merge pull request #14664 from mlugg/feat/new-module-cli
New module CLI
Diffstat (limited to 'src')
-rw-r--r--  src/Autodoc.zig       10
-rw-r--r--  src/Compilation.zig  222
-rw-r--r--  src/Module.zig        69
-rw-r--r--  src/Package.zig      123
-rw-r--r--  src/Sema.zig           6
-rw-r--r--  src/main.zig         208
-rw-r--r--  src/test.zig          40
7 files changed, 454 insertions, 224 deletions
diff --git a/src/Autodoc.zig b/src/Autodoc.zig
index 47dd4a28f7..3cf3fff4c0 100644
--- a/src/Autodoc.zig
+++ b/src/Autodoc.zig
@@ -860,17 +860,9 @@ fn walkInstruction(
const str_tok = data[inst_index].str_tok;
var path = str_tok.get(file.zir);
- const maybe_other_package: ?*Package = blk: {
- if (self.module.main_pkg_is_std and std.mem.eql(u8, path, "std")) {
- path = "std";
- break :blk self.module.main_pkg;
- } else {
- break :blk file.pkg.table.get(path);
- }
- };
// importFile cannot error out since all files
// are already loaded at this point
- if (maybe_other_package) |other_package| {
+ if (file.pkg.table.get(path)) |other_package| {
const result = try self.packages.getOrPut(self.arena, other_package);
// Immediately add this package to the import table of our
diff --git a/src/Compilation.zig b/src/Compilation.zig
index ebc0e9b563..717a396870 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -1596,36 +1596,53 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
const builtin_pkg = try Package.createWithDir(
gpa,
- "builtin",
zig_cache_artifact_directory,
null,
"builtin.zig",
);
errdefer builtin_pkg.destroy(gpa);
- const std_pkg = try Package.createWithDir(
- gpa,
- "std",
- options.zig_lib_directory,
- "std",
- "std.zig",
- );
- errdefer std_pkg.destroy(gpa);
+ // When you're testing std, the main module is std. In that case, we'll just set the std
+ // module to the main one, since avoiding the errors caused by duplicating it is more
+ // effort than it's worth.
+ const main_pkg_is_std = m: {
+ const std_path = try std.fs.path.resolve(arena, &[_][]const u8{
+ options.zig_lib_directory.path orelse ".",
+ "std",
+ "std.zig",
+ });
+ defer arena.free(std_path);
+ const main_path = try std.fs.path.resolve(arena, &[_][]const u8{
+ main_pkg.root_src_directory.path orelse ".",
+ main_pkg.root_src_path,
+ });
+ defer arena.free(main_path);
+ break :m mem.eql(u8, main_path, std_path);
+ };
+
+ const std_pkg = if (main_pkg_is_std)
+ main_pkg
+ else
+ try Package.createWithDir(
+ gpa,
+ options.zig_lib_directory,
+ "std",
+ "std.zig",
+ );
+
+ errdefer if (!main_pkg_is_std) std_pkg.destroy(gpa);
const root_pkg = if (options.is_test) root_pkg: {
- // TODO: we currently have two packages named 'root' here, which is weird. This
- // should be changed as part of the resolution of #12201
const test_pkg = if (options.test_runner_path) |test_runner| test_pkg: {
const test_dir = std.fs.path.dirname(test_runner);
const basename = std.fs.path.basename(test_runner);
- const pkg = try Package.create(gpa, "root", test_dir, basename);
+ const pkg = try Package.create(gpa, test_dir, basename);
// copy package table from main_pkg to root_pkg
pkg.table = try main_pkg.table.clone(gpa);
break :test_pkg pkg;
} else try Package.createWithDir(
gpa,
- "root",
options.zig_lib_directory,
null,
"test_runner.zig",
@@ -1639,7 +1656,6 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
const compiler_rt_pkg = if (include_compiler_rt and options.output_mode == .Obj) compiler_rt_pkg: {
break :compiler_rt_pkg try Package.createWithDir(
gpa,
- "compiler_rt",
options.zig_lib_directory,
null,
"compiler_rt.zig",
@@ -1647,28 +1663,14 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
} else null;
errdefer if (compiler_rt_pkg) |p| p.destroy(gpa);
- try main_pkg.addAndAdopt(gpa, builtin_pkg);
- try main_pkg.add(gpa, root_pkg);
- try main_pkg.addAndAdopt(gpa, std_pkg);
+ try main_pkg.add(gpa, "builtin", builtin_pkg);
+ try main_pkg.add(gpa, "root", root_pkg);
+ try main_pkg.add(gpa, "std", std_pkg);
if (compiler_rt_pkg) |p| {
- try main_pkg.addAndAdopt(gpa, p);
+ try main_pkg.add(gpa, "compiler_rt", p);
}
- const main_pkg_is_std = m: {
- const std_path = try std.fs.path.resolve(arena, &[_][]const u8{
- std_pkg.root_src_directory.path orelse ".",
- std_pkg.root_src_path,
- });
- defer arena.free(std_path);
- const main_path = try std.fs.path.resolve(arena, &[_][]const u8{
- main_pkg.root_src_directory.path orelse ".",
- main_pkg.root_src_path,
- });
- defer arena.free(main_path);
- break :m mem.eql(u8, main_path, std_path);
- };
-
// Pre-open the directory handles for cached ZIR code so that it does not need
// to redundantly happen for each AstGen operation.
const zir_sub_dir = "z";
@@ -1705,7 +1707,6 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
.gpa = gpa,
.comp = comp,
.main_pkg = main_pkg,
- .main_pkg_is_std = main_pkg_is_std,
.root_pkg = root_pkg,
.zig_cache_artifact_directory = zig_cache_artifact_directory,
.global_zir_cache = global_zir_cache,
@@ -2772,6 +2773,111 @@ fn emitOthers(comp: *Compilation) void {
}
}
+fn reportMultiModuleErrors(mod: *Module) !void {
+ // Some cases can give you a whole bunch of multi-module errors, which it's not helpful to
+ // print all of, so we'll cap the number of these to emit.
+ var num_errors: u32 = 0;
+ const max_errors = 5;
+ // Attach the "some omitted" note to the final error message
+ var last_err: ?*Module.ErrorMsg = null;
+
+ for (mod.import_table.values()) |file| {
+ if (!file.multi_pkg) continue;
+
+ num_errors += 1;
+ if (num_errors > max_errors) continue;
+
+ const err = err_blk: {
+ // Like with errors, let's cap the number of notes to prevent a huge error spew.
+ const max_notes = 5;
+ const omitted = file.references.items.len -| max_notes;
+ const num_notes = file.references.items.len - omitted;
+
+ const notes = try mod.gpa.alloc(Module.ErrorMsg, if (omitted > 0) num_notes + 1 else num_notes);
+ errdefer mod.gpa.free(notes);
+
+ for (notes[0..num_notes], file.references.items[0..num_notes], 0..) |*note, ref, i| {
+ errdefer for (notes[0..i]) |*n| n.deinit(mod.gpa);
+ note.* = switch (ref) {
+ .import => |loc| blk: {
+ const name = try loc.file_scope.pkg.getName(mod.gpa, mod.*);
+ defer mod.gpa.free(name);
+ break :blk try Module.ErrorMsg.init(
+ mod.gpa,
+ loc,
+ "imported from module {s}",
+ .{name},
+ );
+ },
+ .root => |pkg| blk: {
+ const name = try pkg.getName(mod.gpa, mod.*);
+ defer mod.gpa.free(name);
+ break :blk try Module.ErrorMsg.init(
+ mod.gpa,
+ .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
+ "root of module {s}",
+ .{name},
+ );
+ },
+ };
+ }
+ errdefer for (notes[0..num_notes]) |*n| n.deinit(mod.gpa);
+
+ if (omitted > 0) {
+ notes[num_notes] = try Module.ErrorMsg.init(
+ mod.gpa,
+ .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
+ "{} more references omitted",
+ .{omitted},
+ );
+ }
+ errdefer if (omitted > 0) notes[num_notes].deinit(mod.gpa);
+
+ const err = try Module.ErrorMsg.create(
+ mod.gpa,
+ .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
+ "file exists in multiple modules",
+ .{},
+ );
+ err.notes = notes;
+ break :err_blk err;
+ };
+ errdefer err.destroy(mod.gpa);
+ try mod.failed_files.putNoClobber(mod.gpa, file, err);
+ last_err = err;
+ }
+
+ // If we omitted any errors, add a note saying that
+ if (num_errors > max_errors) {
+ const err = last_err.?;
+
+ // There isn't really any meaningful place to put this note, so just attach it to the
+ // last failed file
+ var note = try Module.ErrorMsg.init(
+ mod.gpa,
+ err.src_loc,
+ "{} more errors omitted",
+ .{num_errors - max_errors},
+ );
+ errdefer note.deinit(mod.gpa);
+
+ const i = err.notes.len;
+ err.notes = try mod.gpa.realloc(err.notes, i + 1);
+ err.notes[i] = note;
+ }
+
+ // Now that we've reported the errors, we need to deal with
+ // dependencies. Any file referenced by a multi_pkg file should also be
+ // marked multi_pkg and have its status set to astgen_failure, as it's
+ // ambiguous which package they should be analyzed as a part of. We need
+ // to add this flag after reporting the errors however, as otherwise
+ // we'd get an error for every single downstream file, which wouldn't be
+ // very useful.
+ for (mod.import_table.values()) |file| {
+ if (file.multi_pkg) file.recursiveMarkMultiPkg(mod);
+ }
+}
+
/// Having the file open for writing is problematic as far as executing the
/// binary is concerned. This will remove the write flag, or close the file,
/// or whatever is needed so that it can be executed.
@@ -3098,54 +3204,7 @@ pub fn performAllTheWork(
}
if (comp.bin_file.options.module) |mod| {
- for (mod.import_table.values()) |file| {
- if (!file.multi_pkg) continue;
- const err = err_blk: {
- const notes = try mod.gpa.alloc(Module.ErrorMsg, file.references.items.len);
- errdefer mod.gpa.free(notes);
-
- for (notes, 0..) |*note, i| {
- errdefer for (notes[0..i]) |*n| n.deinit(mod.gpa);
- note.* = switch (file.references.items[i]) {
- .import => |loc| try Module.ErrorMsg.init(
- mod.gpa,
- loc,
- "imported from package {s}",
- .{loc.file_scope.pkg.name},
- ),
- .root => |pkg| try Module.ErrorMsg.init(
- mod.gpa,
- .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
- "root of package {s}",
- .{pkg.name},
- ),
- };
- }
- errdefer for (notes) |*n| n.deinit(mod.gpa);
-
- const err = try Module.ErrorMsg.create(
- mod.gpa,
- .{ .file_scope = file, .parent_decl_node = 0, .lazy = .entire_file },
- "file exists in multiple packages",
- .{},
- );
- err.notes = notes;
- break :err_blk err;
- };
- errdefer err.destroy(mod.gpa);
- try mod.failed_files.putNoClobber(mod.gpa, file, err);
- }
-
- // Now that we've reported the errors, we need to deal with
- // dependencies. Any file referenced by a multi_pkg file should also be
- // marked multi_pkg and have its status set to astgen_failure, as it's
- // ambiguous which package they should be analyzed as a part of. We need
- // to add this flag after reporting the errors however, as otherwise
- // we'd get an error for every single downstream file, which wouldn't be
- // very useful.
- for (mod.import_table.values()) |file| {
- if (file.multi_pkg) file.recursiveMarkMultiPkg(mod);
- }
+ try reportMultiModuleErrors(mod);
}
{
@@ -5408,7 +5467,6 @@ fn buildOutputFromZig(
var main_pkg: Package = .{
.root_src_directory = comp.zig_lib_directory,
.root_src_path = src_basename,
- .name = "root",
};
defer main_pkg.deinitTable(comp.gpa);
const root_name = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len];
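
The new reportMultiModuleErrors caps output at 5 errors and 5 notes per error, counting the remainder with saturating subtraction. A minimal standalone sketch of that capping arithmetic, using the same constant as max_notes above:

    const std = @import("std");

    // Saturating subtraction (`-|`) clamps at zero, so a file with few
    // references gets no "omitted" note, while a heavily referenced file
    // shows at most `max_notes` references plus one summary note.
    test "multi-module note capping" {
        const max_notes: usize = 5;

        const few: usize = 3;
        try std.testing.expectEqual(@as(usize, 0), few -| max_notes);

        const many: usize = 12;
        const omitted = many -| max_notes; // 7 references left out
        const num_notes = many - omitted; // the 5 references actually shown
        try std.testing.expectEqual(@as(usize, 7), omitted);
        try std.testing.expectEqual(@as(usize, 5), num_notes);
    }
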
diff --git a/src/Module.zig b/src/Module.zig
index 76777532ab..a2502d36d3 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -144,10 +144,6 @@ stage1_flags: packed struct {
} = .{},
job_queued_update_builtin_zig: bool = true,
-/// This makes it so that we can run `zig test` on the standard library.
-/// Otherwise, the logic for scanning test decls skips all of them because
-/// `main_pkg != std_pkg`.
-main_pkg_is_std: bool,
compile_log_text: ArrayListUnmanaged(u8) = .{},
@@ -1950,7 +1946,7 @@ pub const File = struct {
prev_zir: ?*Zir = null,
/// A single reference to a file.
- const Reference = union(enum) {
+ pub const Reference = union(enum) {
/// The file is imported directly (i.e. not as a package) with @import.
import: SrcLoc,
/// The file is the root of a package.
@@ -2113,7 +2109,27 @@ pub const File = struct {
/// Add a reference to this file during AstGen.
pub fn addReference(file: *File, mod: Module, ref: Reference) !void {
- try file.references.append(mod.gpa, ref);
+ // Don't add the same module root twice. Note that since we always add module roots at the
+ // front of the references array (see below), this loop is actually O(1) on valid code.
+ if (ref == .root) {
+ for (file.references.items) |other| {
+ switch (other) {
+ .root => |r| if (ref.root == r) return,
+ else => break, // reached the end of the "is-root" references
+ }
+ }
+ }
+
+ switch (ref) {
+ // We put root references at the front of the list both to make the above loop fast and
+ // to make multi-module errors more helpful (since "root-of" notes are generally more
+ // informative than "imported-from" notes). This path is hit very rarely, so the speed
+ // of the insert operation doesn't matter too much.
+ .root => try file.references.insert(mod.gpa, 0, ref),
+
+ // Other references we'll just put at the end.
+ else => try file.references.append(mod.gpa, ref),
+ }
const pkg = switch (ref) {
.import => |loc| loc.file_scope.pkg,
@@ -2128,7 +2144,10 @@ pub const File = struct {
file.multi_pkg = true;
file.status = .astgen_failure;
- std.debug.assert(file.zir_loaded);
+ // We can only mark children as failed if the ZIR is loaded, which may not
+ // be the case if there were other astgen failures in this file
+ if (!file.zir_loaded) return;
+
const imports_index = file.zir.extra[@enumToInt(Zir.ExtraIndex.imports)];
if (imports_index == 0) return;
const extra = file.zir.extraData(Zir.Inst.Imports, imports_index);
@@ -3323,10 +3342,19 @@ pub fn deinit(mod: *Module) void {
// The callsite of `Compilation.create` owns the `main_pkg`, however
// Module owns the builtin and std packages that it adds.
if (mod.main_pkg.table.fetchRemove("builtin")) |kv| {
+ gpa.free(kv.key);
kv.value.destroy(gpa);
}
if (mod.main_pkg.table.fetchRemove("std")) |kv| {
- kv.value.destroy(gpa);
+ gpa.free(kv.key);
+ // It's possible for main_pkg to be std when running 'zig test'! In this case, we must not
+ // destroy it, since it would lead to a double-free.
+ if (kv.value != mod.main_pkg) {
+ kv.value.destroy(gpa);
+ }
+ }
+ if (mod.main_pkg.table.fetchRemove("root")) |kv| {
+ gpa.free(kv.key);
}
if (mod.root_pkg != mod.main_pkg) {
mod.root_pkg.destroy(gpa);
@@ -4808,11 +4836,14 @@ pub fn importPkg(mod: *Module, pkg: *Package) !ImportFileResult {
const gop = try mod.import_table.getOrPut(gpa, resolved_path);
errdefer _ = mod.import_table.pop();
- if (gop.found_existing) return ImportFileResult{
- .file = gop.value_ptr.*,
- .is_new = false,
- .is_pkg = true,
- };
+ if (gop.found_existing) {
+ try gop.value_ptr.*.addReference(mod.*, .{ .root = pkg });
+ return ImportFileResult{
+ .file = gop.value_ptr.*,
+ .is_new = false,
+ .is_pkg = true,
+ };
+ }
const sub_file_path = try gpa.dupe(u8, pkg.root_src_path);
errdefer gpa.free(sub_file_path);
@@ -5208,22 +5239,14 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) Allocator.Err
// test decl with no name. Skip the part where we check against
// the test name filter.
if (!comp.bin_file.options.is_test) break :blk false;
- if (decl_pkg != mod.main_pkg) {
- if (!mod.main_pkg_is_std) break :blk false;
- const std_pkg = mod.main_pkg.table.get("std").?;
- if (std_pkg != decl_pkg) break :blk false;
- }
+ if (decl_pkg != mod.main_pkg) break :blk false;
try mod.test_functions.put(gpa, new_decl_index, {});
break :blk true;
},
else => blk: {
if (!is_named_test) break :blk false;
if (!comp.bin_file.options.is_test) break :blk false;
- if (decl_pkg != mod.main_pkg) {
- if (!mod.main_pkg_is_std) break :blk false;
- const std_pkg = mod.main_pkg.table.get("std").?;
- if (std_pkg != decl_pkg) break :blk false;
- }
+ if (decl_pkg != mod.main_pkg) break :blk false;
if (comp.test_filter) |test_filter| {
if (mem.indexOf(u8, decl_name, test_filter) == null) {
break :blk false;
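
addReference now keeps .root references deduplicated at the front of the list and appends .import references at the back, so the duplicate check can stop at the first non-root entry. A standalone sketch of that ordering invariant, with a hypothetical stand-in for Module.File.Reference:

    const std = @import("std");

    // Hypothetical stand-in for Module.File.Reference; only the tag matters.
    const Ref = union(enum) { root: u32, import: u32 };

    test "root references stay at the front" {
        var refs = std.ArrayList(Ref).init(std.testing.allocator);
        defer refs.deinit();

        try refs.append(.{ .import = 10 }); // imports are appended at the back
        try refs.insert(0, .{ .root = 1 }); // module roots go to the front
        try refs.insert(0, .{ .root = 2 });

        // The dedupe scan only has to look at the leading root entries.
        try std.testing.expect(refs.items[0] == .root);
        try std.testing.expect(refs.items[1] == .root);
        try std.testing.expect(refs.items[2] == .import);
    }
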
diff --git a/src/Package.zig b/src/Package.zig
index 5878e7bad6..68d67a6d62 100644
--- a/src/Package.zig
+++ b/src/Package.zig
@@ -22,17 +22,16 @@ pub const Table = std.StringHashMapUnmanaged(*Package);
root_src_directory: Compilation.Directory,
/// Relative to `root_src_directory`. May contain path separators.
root_src_path: []const u8,
+/// The dependency table of this module. Shared dependencies such as 'std', 'builtin', and 'root'
+/// are not specified in every dependency table, but instead only in the table of `main_pkg`.
+/// `Module.importFile` is responsible for detecting these names and using the correct package.
table: Table = .{},
-parent: ?*Package = null,
/// Whether to free `root_src_directory` on `destroy`.
root_src_directory_owned: bool = false,
-/// This information can be recovered from 'table', but it's more convenient to store on the package.
-name: []const u8,
/// Allocate a Package. No references to the slices passed are kept.
pub fn create(
gpa: Allocator,
- name: []const u8,
/// Null indicates the current working directory
root_src_dir_path: ?[]const u8,
/// Relative to root_src_dir_path
@@ -47,9 +46,6 @@ pub fn create(
const owned_src_path = try gpa.dupe(u8, root_src_path);
errdefer gpa.free(owned_src_path);
- const owned_name = try gpa.dupe(u8, name);
- errdefer gpa.free(owned_name);
-
ptr.* = .{
.root_src_directory = .{
.path = owned_dir_path,
@@ -57,7 +53,6 @@ pub fn create(
},
.root_src_path = owned_src_path,
.root_src_directory_owned = true,
- .name = owned_name,
};
return ptr;
@@ -65,7 +60,6 @@ pub fn create(
pub fn createWithDir(
gpa: Allocator,
- name: []const u8,
directory: Compilation.Directory,
/// Relative to `directory`. If null, means `directory` is the root src dir
/// and is owned externally.
@@ -79,9 +73,6 @@ pub fn createWithDir(
const owned_src_path = try gpa.dupe(u8, root_src_path);
errdefer gpa.free(owned_src_path);
- const owned_name = try gpa.dupe(u8, name);
- errdefer gpa.free(owned_name);
-
if (root_src_dir_path) |p| {
const owned_dir_path = try directory.join(gpa, &[1][]const u8{p});
errdefer gpa.free(owned_dir_path);
@@ -93,14 +84,12 @@ pub fn createWithDir(
},
.root_src_directory_owned = true,
.root_src_path = owned_src_path,
- .name = owned_name,
};
} else {
ptr.* = .{
.root_src_directory = directory,
.root_src_directory_owned = false,
.root_src_path = owned_src_path,
- .name = owned_name,
};
}
return ptr;
@@ -110,7 +99,6 @@ pub fn createWithDir(
/// inside its table; the caller is responsible for calling destroy() on them.
pub fn destroy(pkg: *Package, gpa: Allocator) void {
gpa.free(pkg.root_src_path);
- gpa.free(pkg.name);
if (pkg.root_src_directory_owned) {
// If root_src_directory.path is null then the handle is the cwd()
@@ -130,15 +118,97 @@ pub fn deinitTable(pkg: *Package, gpa: Allocator) void {
pkg.table.deinit(gpa);
}
-pub fn add(pkg: *Package, gpa: Allocator, package: *Package) !void {
+pub fn add(pkg: *Package, gpa: Allocator, name: []const u8, package: *Package) !void {
try pkg.table.ensureUnusedCapacity(gpa, 1);
- pkg.table.putAssumeCapacityNoClobber(package.name, package);
+ const name_dupe = try gpa.dupe(u8, name);
+ pkg.table.putAssumeCapacityNoClobber(name_dupe, package);
}
-pub fn addAndAdopt(parent: *Package, gpa: Allocator, child: *Package) !void {
- assert(child.parent == null); // make up your mind, who is the parent??
- child.parent = parent;
- return parent.add(gpa, child);
+/// Compute a readable name for the package. The returned name should be freed from gpa. This
+/// function is very slow, as it traverses the whole package hierarchy to find a path to this
+/// package. It should only be used for error output.
+pub fn getName(target: *const Package, gpa: Allocator, mod: Module) ![]const u8 {
+ // we'll do a breadth-first search from the root module to try and find a short name for this
+ // module, using a TailQueue of module/parent pairs. note that the "parent" there is just the
+ // first-found shortest path - a module may be children of arbitrarily many other modules.
+ // also, this path may vary between executions due to hashmap iteration order, but that doesn't
+ // matter too much.
+ var node_arena = std.heap.ArenaAllocator.init(gpa);
+ defer node_arena.deinit();
+ const Parented = struct {
+ parent: ?*const @This(),
+ mod: *const Package,
+ };
+ const Queue = std.TailQueue(Parented);
+ var to_check: Queue = .{};
+
+ {
+ const new = try node_arena.allocator().create(Queue.Node);
+ new.* = .{ .data = .{ .parent = null, .mod = mod.root_pkg } };
+ to_check.prepend(new);
+ }
+
+ if (mod.main_pkg != mod.root_pkg) {
+ const new = try node_arena.allocator().create(Queue.Node);
+ // TODO: once #12201 is resolved, we may want a way of indicating a different name for this
+ new.* = .{ .data = .{ .parent = null, .mod = mod.main_pkg } };
+ to_check.prepend(new);
+ }
+
+ // set of modules we've already checked to prevent loops
+ var checked = std.AutoHashMap(*const Package, void).init(gpa);
+ defer checked.deinit();
+
+ const linked = while (to_check.pop()) |node| {
+ const check = &node.data;
+
+ if (checked.contains(check.mod)) continue;
+ try checked.put(check.mod, {});
+
+ if (check.mod == target) break check;
+
+ var it = check.mod.table.iterator();
+ while (it.next()) |kv| {
+ var new = try node_arena.allocator().create(Queue.Node);
+ new.* = .{ .data = .{
+ .parent = check,
+ .mod = kv.value_ptr.*,
+ } };
+ to_check.prepend(new);
+ }
+ } else {
+ // this can happen for e.g. @cImport packages
+ return gpa.dupe(u8, "<unnamed>");
+ };
+
+ // we found a path to the module! unfortunately, we can only traverse *up* it, so we have to put
+ // all the names into a buffer so we can then print them in order.
+ var names = std.ArrayList([]const u8).init(gpa);
+ defer names.deinit();
+
+ var cur: *const Parented = linked;
+ while (cur.parent) |parent| : (cur = parent) {
+ // find cur's name in parent
+ var it = parent.mod.table.iterator();
+ const name = while (it.next()) |kv| {
+ if (kv.value_ptr.* == cur.mod) {
+ break kv.key_ptr.*;
+ }
+ } else unreachable;
+ try names.append(name);
+ }
+
+ // finally, print the names into a buffer!
+ var buf = std.ArrayList(u8).init(gpa);
+ defer buf.deinit();
+ try buf.writer().writeAll("root");
+ var i: usize = names.items.len;
+ while (i > 0) {
+ i -= 1;
+ try buf.writer().print(".{s}", .{names.items[i]});
+ }
+
+ return buf.toOwnedSlice();
}
pub const build_zig_basename = "build.zig";
@@ -236,7 +306,7 @@ pub fn fetchAndAddDependencies(
color,
);
- try addAndAdopt(pkg, gpa, sub_pkg);
+ try add(pkg, gpa, fqn, sub_pkg);
try dependencies_source.writer().print(" pub const {s} = @import(\"{}\");\n", .{
std.zig.fmtId(fqn), std.zig.fmtEscapes(fqn),
@@ -248,7 +318,6 @@ pub fn fetchAndAddDependencies(
pub fn createFilePkg(
gpa: Allocator,
- name: []const u8,
cache_directory: Compilation.Directory,
basename: []const u8,
contents: []const u8,
@@ -269,7 +338,7 @@ pub fn createFilePkg(
const o_dir_sub_path = "o" ++ fs.path.sep_str ++ hex_digest;
try renameTmpIntoCache(cache_directory.handle, tmp_dir_sub_path, o_dir_sub_path);
- return createWithDir(gpa, name, cache_directory, o_dir_sub_path, basename);
+ return createWithDir(gpa, cache_directory, o_dir_sub_path, basename);
}
const Report = struct {
@@ -363,9 +432,6 @@ fn fetchAndUnpack(
const owned_src_path = try gpa.dupe(u8, build_zig_basename);
errdefer gpa.free(owned_src_path);
- const owned_name = try gpa.dupe(u8, fqn);
- errdefer gpa.free(owned_name);
-
const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
errdefer gpa.free(build_root);
@@ -380,7 +446,6 @@ fn fetchAndUnpack(
},
.root_src_directory_owned = true,
.root_src_path = owned_src_path,
- .name = owned_name,
};
return ptr;
@@ -455,7 +520,7 @@ fn fetchAndUnpack(
std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root),
});
- return createWithDir(gpa, fqn, global_cache_directory, pkg_dir_sub_path, build_zig_basename);
+ return createWithDir(gpa, global_cache_directory, pkg_dir_sub_path, build_zig_basename);
}
fn unpackTarball(
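
getName walks the module graph breadth-first from the root, collects table keys child-to-parent, and prints them in reverse to build names like "root.foo.bar" for error output. A standalone sketch of that final rendering step (the names "foo" and "bar" are made up for illustration):

    const std = @import("std");

    test "dotted module path rendering" {
        // Names as collected by getName: the target module first, then its
        // parent, and so on up to (but not including) the root.
        const names = [_][]const u8{ "bar", "foo" };

        var buf = std.ArrayList(u8).init(std.testing.allocator);
        defer buf.deinit();

        try buf.writer().writeAll("root");
        var i: usize = names.len;
        while (i > 0) {
            i -= 1;
            try buf.writer().print(".{s}", .{names[i]});
        }

        try std.testing.expectEqualStrings("root.foo.bar", buf.items);
    }
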
diff --git a/src/Sema.zig b/src/Sema.zig
index 40a4a114b4..46b47cd23d 100644
--- a/src/Sema.zig
+++ b/src/Sema.zig
@@ -5311,7 +5311,6 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
}
const c_import_pkg = Package.create(
sema.gpa,
- "c_import", // TODO: should we make this unique?
null,
c_import_res.out_zig_path,
) catch |err| switch (err) {
@@ -11793,8 +11792,9 @@ fn zirImport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
return sema.fail(block, operand_src, "import of file outside package path: '{s}'", .{operand});
},
error.PackageNotFound => {
- const cur_pkg = block.getFileScope().pkg;
- return sema.fail(block, operand_src, "no package named '{s}' available within package '{s}'", .{ operand, cur_pkg.name });
+ const name = try block.getFileScope().pkg.getName(sema.gpa, mod.*);
+ defer sema.gpa.free(name);
+ return sema.fail(block, operand_src, "no package named '{s}' available within package '{s}'", .{ operand, name });
},
else => {
// TODO: these errors are file system errors; make sure an update() will
diff --git a/src/main.zig b/src/main.zig
index e42974944b..d544940779 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -403,8 +403,11 @@ const usage_build_generic =
\\ ReleaseFast Optimizations on, safety off
\\ ReleaseSafe Optimizations on, safety on
\\ ReleaseSmall Optimize for small binary, safety off
- \\ --pkg-begin [name] [path] Make pkg available to import and push current pkg
- \\ --pkg-end Pop current pkg
+ \\ --mod [name]:[deps]:[src] Make a module available for dependency under the given name
+ \\ deps: [dep],[dep],...
+ \\ dep: [[import=]name]
+ \\ --deps [dep],[dep],... Set dependency names for the root package
+ \\ dep: [[import=]name]
\\ --main-pkg-path Set the directory of the root package
\\ -fPIC Force-enable Position Independent Code
\\ -fno-PIC Force-disable Position Independent Code
@@ -858,15 +861,21 @@ fn buildOutputType(
var linker_export_symbol_names = std.ArrayList([]const u8).init(gpa);
defer linker_export_symbol_names.deinit();
- // This package only exists to clean up the code parsing --pkg-begin and
- // --pkg-end flags. Use dummy values that are safe for the destroy call.
- var pkg_tree_root: Package = .{
- .root_src_directory = .{ .path = null, .handle = fs.cwd() },
- .root_src_path = &[0]u8{},
- .name = &[0]u8{},
- };
- defer freePkgTree(gpa, &pkg_tree_root, false);
- var cur_pkg: *Package = &pkg_tree_root;
+ // Contains every module specified via --mod. The dependencies are added
+ // after argument parsing is completed. We use a StringArrayHashMap to make
+ // error output consistent.
+ var modules = std.StringArrayHashMap(struct {
+ mod: *Package,
+ deps_str: []const u8, // still in CLI arg format
+ }).init(gpa);
+ defer {
+ var it = modules.iterator();
+ while (it.next()) |kv| kv.value_ptr.mod.destroy(gpa);
+ modules.deinit();
+ }
+
+ // The dependency string for the root package
+ var root_deps_str: ?[]const u8 = null;
// before arg parsing, check for the NO_COLOR environment variable
// if it exists, default the color setting to .off
@@ -943,34 +952,44 @@ fn buildOutputType(
} else {
fatal("unexpected end-of-parameter mark: --", .{});
}
- } else if (mem.eql(u8, arg, "--pkg-begin")) {
- const opt_pkg_name = args_iter.next();
- const opt_pkg_path = args_iter.next();
- if (opt_pkg_name == null or opt_pkg_path == null)
- fatal("Expected 2 arguments after {s}", .{arg});
-
- const pkg_name = opt_pkg_name.?;
- const pkg_path = try introspect.resolvePath(arena, opt_pkg_path.?);
-
- const new_cur_pkg = Package.create(
- gpa,
- pkg_name,
- fs.path.dirname(pkg_path),
- fs.path.basename(pkg_path),
- ) catch |err| {
- fatal("Failed to add package at path {s}: {s}", .{ pkg_path, @errorName(err) });
- };
+ } else if (mem.eql(u8, arg, "--mod")) {
+ const info = args_iter.nextOrFatal();
+ var info_it = mem.split(u8, info, ":");
+ const mod_name = info_it.next() orelse fatal("expected non-empty argument after {s}", .{arg});
+ const deps_str = info_it.next() orelse fatal("expected 'name:deps:path' after {s}", .{arg});
+ const root_src_orig = info_it.rest();
+ if (root_src_orig.len == 0) fatal("expected 'name:deps:path' after {s}", .{arg});
+ if (mod_name.len == 0) fatal("empty name for module at '{s}'", .{root_src_orig});
+
+ const root_src = try introspect.resolvePath(arena, root_src_orig);
+
+ for ([_][]const u8{ "std", "root", "builtin" }) |name| {
+ if (mem.eql(u8, mod_name, name)) {
+ fatal("unable to add module '{s}' -> '{s}': conflicts with builtin module", .{ mod_name, root_src });
+ }
+ }
- if (mem.eql(u8, pkg_name, "std") or mem.eql(u8, pkg_name, "root") or mem.eql(u8, pkg_name, "builtin")) {
- fatal("unable to add package '{s}' -> '{s}': conflicts with builtin package", .{ pkg_name, pkg_path });
- } else if (cur_pkg.table.get(pkg_name)) |prev| {
- fatal("unable to add package '{s}' -> '{s}': already exists as '{s}", .{ pkg_name, pkg_path, prev.root_src_path });
+ var mod_it = modules.iterator();
+ while (mod_it.next()) |kv| {
+ if (std.mem.eql(u8, mod_name, kv.key_ptr.*)) {
+ fatal("unable to add module '{s}' -> '{s}': already exists as '{s}'", .{ mod_name, root_src, kv.value_ptr.mod.root_src_path });
+ }
+ }
+
+ try modules.ensureUnusedCapacity(1);
+ modules.put(mod_name, .{
+ .mod = try Package.create(
+ gpa,
+ fs.path.dirname(root_src),
+ fs.path.basename(root_src),
+ ),
+ .deps_str = deps_str,
+ }) catch unreachable;
+ } else if (mem.eql(u8, arg, "--deps")) {
+ if (root_deps_str != null) {
+ fatal("only one --deps argument is allowed", .{});
}
- try cur_pkg.addAndAdopt(gpa, new_cur_pkg);
- cur_pkg = new_cur_pkg;
- } else if (mem.eql(u8, arg, "--pkg-end")) {
- cur_pkg = cur_pkg.parent orelse
- fatal("encountered --pkg-end with no matching --pkg-begin", .{});
+ root_deps_str = args_iter.nextOrFatal();
} else if (mem.eql(u8, arg, "--main-pkg-path")) {
main_pkg_path = args_iter.nextOrFatal();
} else if (mem.eql(u8, arg, "-cflags")) {
@@ -2307,6 +2326,31 @@ fn buildOutputType(
},
}
+ {
+ // Resolve module dependencies
+ var it = modules.iterator();
+ while (it.next()) |kv| {
+ const deps_str = kv.value_ptr.deps_str;
+ var deps_it = ModuleDepIterator.init(deps_str);
+ while (deps_it.next()) |dep| {
+ if (dep.expose.len == 0) {
+ fatal("module '{s}' depends on '{s}' with a blank name", .{ kv.key_ptr.*, dep.name });
+ }
+
+ for ([_][]const u8{ "std", "root", "builtin" }) |name| {
+ if (mem.eql(u8, dep.expose, name)) {
+ fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{ dep.name, dep.expose });
+ }
+ }
+
+ const dep_mod = modules.get(dep.name) orelse
+ fatal("module '{s}' depends on module '{s}' which does not exist", .{ kv.key_ptr.*, dep.name });
+
+ try kv.value_ptr.mod.add(gpa, dep.expose, dep_mod.mod);
+ }
+ }
+ }
+
if (arg_mode == .build and optimize_mode == .ReleaseSmall and strip == null)
strip = true;
@@ -2886,14 +2930,14 @@ fn buildOutputType(
if (main_pkg_path) |unresolved_main_pkg_path| {
const p = try introspect.resolvePath(arena, unresolved_main_pkg_path);
if (p.len == 0) {
- break :blk try Package.create(gpa, "root", null, src_path);
+ break :blk try Package.create(gpa, null, src_path);
} else {
const rel_src_path = try fs.path.relative(arena, p, src_path);
- break :blk try Package.create(gpa, "root", p, rel_src_path);
+ break :blk try Package.create(gpa, p, rel_src_path);
}
} else {
const root_src_dir_path = fs.path.dirname(src_path);
- break :blk Package.create(gpa, "root", root_src_dir_path, fs.path.basename(src_path)) catch |err| {
+ break :blk Package.create(gpa, root_src_dir_path, fs.path.basename(src_path)) catch |err| {
if (root_src_dir_path) |p| {
fatal("unable to open '{s}': {s}", .{ p, @errorName(err) });
} else {
@@ -2904,23 +2948,24 @@ fn buildOutputType(
} else null;
defer if (main_pkg) |p| p.destroy(gpa);
- // Transfer packages added with --pkg-begin/--pkg-end to the root package
- if (main_pkg) |pkg| {
- var it = pkg_tree_root.table.valueIterator();
- while (it.next()) |p| {
- if (p.*.parent == &pkg_tree_root) {
- p.*.parent = pkg;
+ // Transfer packages added with --deps to the root package
+ if (main_pkg) |mod| {
+ var it = ModuleDepIterator.init(root_deps_str orelse "");
+ while (it.next()) |dep| {
+ if (dep.expose.len == 0) {
+ fatal("root module depends on '{s}' with a blank name", .{dep.name});
}
- }
- pkg.table = pkg_tree_root.table;
- pkg_tree_root.table = .{};
- } else {
- // Remove any dangling pointers just in case.
- var it = pkg_tree_root.table.valueIterator();
- while (it.next()) |p| {
- if (p.*.parent == &pkg_tree_root) {
- p.*.parent = null;
+
+ for ([_][]const u8{ "std", "root", "builtin" }) |name| {
+ if (mem.eql(u8, dep.expose, name)) {
+ fatal("unable to add module '{s}' under name '{s}': conflicts with builtin module", .{ dep.name, dep.expose });
+ }
}
+
+ const dep_mod = modules.get(dep.name) orelse
+ fatal("root module depends on module '{s}' which does not exist", .{dep.name});
+
+ try mod.add(gpa, dep.expose, dep_mod.mod);
}
}
@@ -3400,6 +3445,32 @@ fn buildOutputType(
return cleanExit();
}
+const ModuleDepIterator = struct {
+ split: mem.SplitIterator(u8),
+
+ fn init(deps_str: []const u8) ModuleDepIterator {
+ return .{ .split = mem.split(u8, deps_str, ",") };
+ }
+
+ const Dependency = struct {
+ expose: []const u8,
+ name: []const u8,
+ };
+
+ fn next(it: *ModuleDepIterator) ?Dependency {
+ if (it.split.buffer.len == 0) return null; // don't return "" for the first iteration on ""
+ const str = it.split.next() orelse return null;
+ if (mem.indexOfScalar(u8, str, '=')) |i| {
+ return .{
+ .expose = str[0..i],
+ .name = str[i + 1 ..],
+ };
+ } else {
+ return .{ .expose = str, .name = str };
+ }
+ }
+};
+
fn parseCrossTargetOrReportFatalError(
allocator: Allocator,
opts: std.zig.CrossTarget.ParseOptions,
@@ -3626,18 +3697,6 @@ fn updateModule(gpa: Allocator, comp: *Compilation, hook: AfterUpdateHook) !void
}
}
-fn freePkgTree(gpa: Allocator, pkg: *Package, free_parent: bool) void {
- {
- var it = pkg.table.valueIterator();
- while (it.next()) |value| {
- freePkgTree(gpa, value.*, true);
- }
- }
- if (free_parent) {
- pkg.destroy(gpa);
- }
-}
-
fn cmdTranslateC(comp: *Compilation, arena: Allocator, enable_cache: bool) !void {
if (!build_options.have_llvm)
fatal("cannot translate-c: compiler built without LLVM extensions", .{});
@@ -4141,7 +4200,6 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
var main_pkg: Package = .{
.root_src_directory = zig_lib_directory,
.root_src_path = "build_runner.zig",
- .name = "root",
};
if (!build_options.omit_pkg_fetching_code) {
@@ -4184,22 +4242,20 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
const deps_pkg = try Package.createFilePkg(
gpa,
- "@dependencies",
local_cache_directory,
"dependencies.zig",
dependencies_source.items,
);
mem.swap(Package.Table, &main_pkg.table, &deps_pkg.table);
- try main_pkg.addAndAdopt(gpa, deps_pkg);
+ try main_pkg.add(gpa, "@dependencies", deps_pkg);
}
var build_pkg: Package = .{
.root_src_directory = build_directory,
.root_src_path = build_zig_basename,
- .name = "@build",
};
- try main_pkg.addAndAdopt(gpa, &build_pkg);
+ try main_pkg.add(gpa, "@build", &build_pkg);
const comp = Compilation.create(gpa, .{
.zig_lib_directory = zig_lib_directory,
@@ -4434,7 +4490,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
.root_decl = .none,
};
- file.pkg = try Package.create(gpa, "root", null, file.sub_file_path);
+ file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.zir = try AstGen.generate(gpa, file.tree);
@@ -4645,7 +4701,7 @@ fn fmtPathFile(
.root_decl = .none,
};
- file.pkg = try Package.create(fmt.gpa, "root", null, file.sub_file_path);
+ file.pkg = try Package.create(fmt.gpa, null, file.sub_file_path);
defer file.pkg.destroy(fmt.gpa);
if (stat.size > max_src_size)
@@ -5357,7 +5413,7 @@ pub fn cmdAstCheck(
file.stat.size = source.len;
}
- file.pkg = try Package.create(gpa, "root", null, file.sub_file_path);
+ file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
file.tree = try Ast.parse(gpa, file.source, .zig);
@@ -5476,7 +5532,7 @@ pub fn cmdChangelist(
.root_decl = .none,
};
- file.pkg = try Package.create(gpa, "root", null, file.sub_file_path);
+ file.pkg = try Package.create(gpa, null, file.sub_file_path);
defer file.pkg.destroy(gpa);
const source = try arena.allocSentinel(u8, @intCast(usize, stat.size), 0);
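
Taken together, the new flags replace the --pkg-begin/--pkg-end stack with a flat list: each --mod argument is name:deps:src, dependency strings are comma-separated [[import=]name] entries, and --deps names the root module's dependencies, so a hypothetical invocation might look like: zig build-exe main.zig --mod foo::foo.zig --mod bar:foo:bar.zig --deps bar. A standalone sketch of how one dependency entry is interpreted, mirroring ModuleDepIterator above:

    const std = @import("std");

    // Each comma-separated entry is either "name" (the module is exposed to
    // the importer under its own name) or "import=name" (module `name` is
    // exposed to the importer as `import`).
    test "dependency string splitting" {
        var it = std.mem.split(u8, "foo,bar=baz", ",");

        try std.testing.expectEqualStrings("foo", it.next().?);

        const entry = it.next().?;
        const eq = std.mem.indexOfScalar(u8, entry, '=').?;
        try std.testing.expectEqualStrings("bar", entry[0..eq]); // exposed as "bar"
        try std.testing.expectEqualStrings("baz", entry[eq + 1 ..]); // module "baz"
        try std.testing.expect(it.next() == null);
    }
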
diff --git a/src/test.zig b/src/test.zig
index acc1bcdc1f..61cdb705e3 100644
--- a/src/test.zig
+++ b/src/test.zig
@@ -583,6 +583,11 @@ pub const TestContext = struct {
path: []const u8,
};
+ pub const DepModule = struct {
+ name: []const u8,
+ path: []const u8,
+ };
+
pub const Backend = enum {
stage1,
stage2,
@@ -611,6 +616,7 @@ pub const TestContext = struct {
link_libc: bool = false,
files: std.ArrayList(File),
+ deps: std.ArrayList(DepModule),
result: anyerror!void = {},
@@ -618,6 +624,13 @@ pub const TestContext = struct {
case.files.append(.{ .path = name, .src = src }) catch @panic("out of memory");
}
+ pub fn addDepModule(case: *Case, name: []const u8, path: []const u8) void {
+ case.deps.append(.{
+ .name = name,
+ .path = path,
+ }) catch @panic("out of memory");
+ }
+
/// Adds a subcase in which the module is updated with `src`, and a C
/// header is generated.
pub fn addHeader(self: *Case, src: [:0]const u8, result: [:0]const u8) void {
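
Test cases can now declare module dependencies through this helper; a hypothetical usage, assuming an already-constructed case (the name and path are illustrative):

    // "foo" is registered in the case's main module table, rooted at foo.zig
    // in the test's tmp directory, so the case's source can @import("foo").
    case.addDepModule("foo", "foo.zig");
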
@@ -767,6 +780,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.files = std.ArrayList(File).init(ctx.arena),
+ .deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -787,6 +801,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.files = std.ArrayList(File).init(ctx.arena),
+ .deps = std.ArrayList(DepModule).init(ctx.arena),
.link_libc = true,
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
@@ -801,6 +816,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.files = std.ArrayList(File).init(ctx.arena),
+ .deps = std.ArrayList(DepModule).init(ctx.arena),
.backend = .llvm,
.link_libc = true,
}) catch @panic("out of memory");
@@ -818,6 +834,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Obj,
.files = std.ArrayList(File).init(ctx.arena),
+ .deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -834,6 +851,7 @@ pub const TestContext = struct {
.output_mode = .Exe,
.is_test = true,
.files = std.ArrayList(File).init(ctx.arena),
+ .deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -858,6 +876,7 @@ pub const TestContext = struct {
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Obj,
.files = std.ArrayList(File).init(ctx.arena),
+ .deps = std.ArrayList(DepModule).init(ctx.arena),
}) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -1145,6 +1164,7 @@ pub const TestContext = struct {
.output_mode = output_mode,
.link_libc = backend == .llvm,
.files = std.ArrayList(TestContext.File).init(ctx.cases.allocator),
+ .deps = std.ArrayList(DepModule).init(ctx.cases.allocator),
});
try cases.append(next);
}
@@ -1497,9 +1517,25 @@ pub const TestContext = struct {
var main_pkg: Package = .{
.root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
.root_src_path = tmp_src_path,
- .name = "root",
};
- defer main_pkg.table.deinit(allocator);
+ defer {
+ var it = main_pkg.table.iterator();
+ while (it.next()) |kv| {
+ allocator.free(kv.key_ptr.*);
+ kv.value_ptr.*.destroy(allocator);
+ }
+ main_pkg.table.deinit(allocator);
+ }
+
+ for (case.deps.items) |dep| {
+ var pkg = try Package.create(
+ allocator,
+ tmp_dir_path,
+ dep.path,
+ );
+ errdefer pkg.destroy(allocator);
+ try main_pkg.add(allocator, dep.name, pkg);
+ }
const bin_name = try std.zig.binNameAlloc(arena, .{
.root_name = "test_case",