aboutsummaryrefslogtreecommitdiff
path: root/src/Compilation.zig
diff options
context:
space:
mode:
author    Andrew Kelley <andrew@ziglang.org>    2021-01-02 12:32:30 -0700
committer Andrew Kelley <andrew@ziglang.org>    2021-01-02 19:11:19 -0700
commit  9362f382ab7023592cc1d71044217b847b122406 (patch)
tree    3587f4c88b949673a94e995367414d80a5ef68af /src/Compilation.zig
parent  fea8659b82ea1a785f933c58ba9d65ceb05a4094 (diff)
downloadzig-9362f382ab7023592cc1d71044217b847b122406.tar.gz
zig-9362f382ab7023592cc1d71044217b847b122406.zip
stage2: implement function call inlining in the frontend
* remove the -Ddump-zir thing. that's handled through --verbose-ir
* rework Fn to have an is_inline flag without requiring any more memory on the heap per function.
* implement a rough first version of dumping typed zir (tzir) which is a lot more helpful for debugging than what we had before. We don't have a way to parse it though.
* keep track of whether the inline-ness of a function changes because if it does we have to go update callsites.
* add compile error for inline and export used together.

inline function calls and comptime function calls are implemented the same way. A block instruction is set up to capture the result, and then a scope is set up that has a flag for is_comptime and some state if the scope is being inlined. when analyzing `ret` instructions, zig looks for inlining state in the scope, and if found, treats `ret` as a `break` instruction instead, with the target block being the one set up at the inline callsite.

Follow-up items:
* Complete out the debug TZIR dumping code.
* Don't redundantly generate ZIR for each inline/comptime function call. Instead we should add a new state enum tag to Fn.
* comptime and inlining branch quotas.
* Add more test cases.
Diffstat (limited to 'src/Compilation.zig')
-rw-r--r--  src/Compilation.zig  14
1 file changed, 9 insertions(+), 5 deletions(-)
diff --git a/src/Compilation.zig b/src/Compilation.zig
index de115b9b40..a6f39a3154 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -1459,10 +1459,10 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
const module = self.bin_file.options.module.?;
if (decl.typed_value.most_recent.typed_value.val.castTag(.function)) |payload| {
const func = payload.data;
- switch (func.analysis) {
+ switch (func.bits.state) {
.queued => module.analyzeFnBody(decl, func) catch |err| switch (err) {
error.AnalysisFail => {
- assert(func.analysis != .in_progress);
+ assert(func.bits.state != .in_progress);
continue;
},
error.OutOfMemory => return error.OutOfMemory,
@@ -1471,12 +1471,16 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
.sema_failure, .dependency_failure => continue,
.success => {},
}
- // Here we tack on additional allocations to the Decl's arena. The allocations are
- // lifetime annotations in the ZIR.
+ // Here we tack on additional allocations to the Decl's arena. The allocations
+ // are lifetime annotations in the ZIR.
var decl_arena = decl.typed_value.most_recent.arena.?.promote(module.gpa);
defer decl.typed_value.most_recent.arena.?.* = decl_arena.state;
log.debug("analyze liveness of {s}\n", .{decl.name});
- try liveness.analyze(module.gpa, &decl_arena.allocator, func.analysis.success);
+ try liveness.analyze(module.gpa, &decl_arena.allocator, func.data.body);
+
+ if (self.verbose_ir) {
+ func.dump(module.*);
+ }
}
assert(decl.typed_value.most_recent.typed_value.ty.hasCodeGenBits());