author    Veikka Tuominen <git@vexu.eu>   2020-10-31 09:39:28 +0200
committer GitHub <noreply@github.com>     2020-10-31 09:39:28 +0200
commit    7c8d9cfa40ab96aede2a7fe8ec9dce6f10bc910a (patch)
tree      bac19bf24748e7d63e3bbf86ca5b34c4cfa2c754 /src/zir.zig
parent    bb6e39e274eb0a68bfb1029ab75d1791abeb2911 (diff)
parent    22ec5e085914d9fd7b17a28a8d3ad01258f3ad03 (diff)
Merge pull request #6660 from Vexu/stage2
Stage2 switch and package imports
Diffstat (limited to 'src/zir.zig')
-rw-r--r--  src/zir.zig  198
1 file changed, 195 insertions(+), 3 deletions(-)
diff --git a/src/zir.zig b/src/zir.zig
index 76ba89e9c7..a03b492bd5 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -85,8 +85,12 @@ pub const Inst = struct {
block_comptime,
/// Same as `block_flat` but additionally makes the inner instructions execute at comptime.
block_comptime_flat,
+ /// Boolean AND. See also `bitand`.
+ booland,
/// Boolean NOT. See also `bitnot`.
boolnot,
+ /// Boolean OR. See also `bitor`.
+ boolor,
/// Return a value from a `Block`.
@"break",
breakpoint,
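The new `booland` and `boolor` tags sit alongside the pre-existing `bitand`/`bitor` tags. As a rough sketch (not taken from this commit, and assuming the `and`/`or` operators in source lower to these tags), the forms they correspond to would be:

    const p = a and b; // boolean AND  -> booland
    const q = a or b;  // boolean OR   -> boolor
    const r = x & y;   // bitwise AND  -> bitand (existing tag)
    const s = x | y;   // bitwise OR   -> bitor  (existing tag)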
@@ -272,6 +276,12 @@ pub const Inst = struct {
ensure_err_payload_void,
/// Enum literal
enum_literal,
+ /// A switch expression.
+ switchbr,
+ /// A range in a switch case, `lhs...rhs`.
+ /// Only checks that `lhs >= rhs` if they are ints; everything else is
+ /// validated by the `switchbr` instruction.
+ switch_range,
pub fn Type(tag: Tag) type {
return switch (tag) {
@@ -327,6 +337,8 @@ pub const Inst = struct {
.array_type,
.bitand,
.bitor,
+ .booland,
+ .boolor,
.div,
.mod_rem,
.mul,
@@ -351,6 +363,7 @@ pub const Inst = struct {
.error_union_type,
.merge_error_sets,
.slice_start,
+ .switch_range,
=> BinOp,
.block,
@@ -389,6 +402,7 @@ pub const Inst = struct {
.enum_literal => EnumLiteral,
.error_set => ErrorSet,
.slice => Slice,
+ .switchbr => SwitchBr,
};
}
@@ -417,6 +431,8 @@ pub const Inst = struct {
.block_comptime,
.block_comptime_flat,
.boolnot,
+ .booland,
+ .boolor,
.breakpoint,
.call,
.cmp_lt,
@@ -493,6 +509,7 @@ pub const Inst = struct {
.slice,
.slice_start,
.import,
+ .switch_range,
=> false,
.@"break",
@@ -504,6 +521,7 @@ pub const Inst = struct {
.unreach_nocheck,
.@"unreachable",
.loop,
+ .switchbr,
=> true,
};
}
@@ -987,6 +1005,33 @@ pub const Inst = struct {
sentinel: ?*Inst = null,
},
};
+
+ pub const SwitchBr = struct {
+ pub const base_tag = Tag.switchbr;
+ base: Inst,
+
+ positionals: struct {
+ target_ptr: *Inst,
+ /// List of all individual items and ranges
+ items: []*Inst,
+ cases: []Case,
+ else_body: Module.Body,
+ },
+ kw_args: struct {
+ /// Pointer to the first range, if one exists.
+ range: ?*Inst = null,
+ special_prong: enum {
+ none,
+ @"else",
+ underscore,
+ } = .none,
+ },
+
+ pub const Case = struct {
+ item: *Inst,
+ body: Module.Body,
+ };
+ };
};
pub const ErrorMsg = struct {
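As a hypothetical illustration of how a source-level switch could populate the new SwitchBr fields (this mapping is an assumption, not shown in the commit):

    switch (x) {
        1 => foo(),      // one Case: .item points at the constant 1, .body holds the call
        5...9 => bar(),  // the range is lowered to a switch_range instruction;
                         // kw_args.range points at the first such range, if any
        else => baz(),   // lowered into else_body, with special_prong = .else
    }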
@@ -1218,8 +1263,8 @@ const Writer = struct {
bool => return stream.writeByte("01"[@boolToInt(param)]),
[]u8, []const u8 => return stream.print("\"{Z}\"", .{param}),
BigIntConst, usize => return stream.print("{}", .{param}),
- TypedValue => unreachable, // this is a special case
- *IrModule.Decl => unreachable, // this is a special case
+ TypedValue => return stream.print("TypedValue{{ .ty = {}, .val = {}}}", .{ param.ty, param.val }),
+ *IrModule.Decl => return stream.print("Decl({s})", .{param.name}),
*Inst.Block => {
const name = self.block_table.get(param).?;
return stream.print("\"{Z}\"", .{name});
@@ -1238,6 +1283,26 @@ const Writer = struct {
}
try stream.writeByte(']');
},
+ []Inst.SwitchBr.Case => {
+ if (param.len == 0) {
+ return stream.writeAll("{}");
+ }
+ try stream.writeAll("{\n");
+ for (param) |*case, i| {
+ if (i != 0) {
+ try stream.writeAll(",\n");
+ }
+ try stream.writeByteNTimes(' ', self.indent);
+ self.indent += 2;
+ try self.writeParamToStream(stream, &case.item);
+ try stream.writeAll(" => ");
+ try self.writeParamToStream(stream, &case.body);
+ self.indent -= 2;
+ }
+ try stream.writeByte('\n');
+ try stream.writeByteNTimes(' ', self.indent - 2);
+ try stream.writeByte('}');
+ },
else => |T| @compileError("unimplemented: rendering parameter of type " ++ @typeName(T)),
}
}
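With two cases, the rendering loop above prints the case list in roughly this shape (illustrative only; exact indentation depends on self.indent, and each body is rendered as a full instruction block):

    {
      %3 => {...},
      %4 => {...}
    }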
@@ -1650,6 +1715,26 @@ const Parser = struct {
try requireEatBytes(self, "]");
return strings.toOwnedSlice();
},
+ []Inst.SwitchBr.Case => {
+ try requireEatBytes(self, "{");
+ skipSpace(self);
+ if (eatByte(self, '}')) return &[0]Inst.SwitchBr.Case{};
+
+ var cases = std.ArrayList(Inst.SwitchBr.Case).init(&self.arena.allocator);
+ while (true) {
+ const cur = try cases.addOne();
+ skipSpace(self);
+ cur.item = try self.parseParameterGeneric(*Inst, body_ctx);
+ skipSpace(self);
+ try requireEatBytes(self, "=>");
+ cur.body = try self.parseBody(body_ctx);
+ skipSpace(self);
+ if (!eatByte(self, ',')) break;
+ }
+ skipSpace(self);
+ try requireEatBytes(self, "}");
+ return cases.toOwnedSlice();
+ },
else => @compileError("Unimplemented: ir parseParameterGeneric for type " ++ @typeName(T)),
}
return self.fail("TODO parse parameter {}", .{@typeName(T)});
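The parser mirrors that rendering: it expects a brace-delimited, comma-separated list of `item => body` pairs with no trailing comma, and `{}` for an empty case list. A sketch of accepted input (not an excerpt from a real ZIR dump):

    { %1 => {...},
      %2 => {...} }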
@@ -1747,7 +1832,7 @@ pub fn dumpFn(old_module: IrModule, module_fn: *IrModule.Fn) void {
.arena = std.heap.ArenaAllocator.init(allocator),
.old_module = &old_module,
.next_auto_name = 0,
- .names = std.StringHashMap(void).init(allocator),
+ .names = std.StringArrayHashMap(void).init(allocator),
.primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator),
.indent = 0,
.block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator),
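Switching `names` from std.StringHashMap to std.StringArrayHashMap means the collected names are kept in insertion order rather than hash order; that motivation is an assumption, since the commit only shows the field change. A minimal sketch of the difference:

    var names = std.StringArrayHashMap(void).init(allocator);
    defer names.deinit();
    try names.put("second", {});
    try names.put("first", {});
    // unlike StringHashMap, iterating `names` now visits entries in
    // insertion order: "second", then "first"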
@@ -2244,6 +2329,8 @@ const EmitZIR = struct {
.cmp_gte => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_gte).?, .cmp_gte),
.cmp_gt => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_gt).?, .cmp_gt),
.cmp_neq => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_neq).?, .cmp_neq),
+ .booland => try self.emitBinOp(inst.src, new_body, inst.castTag(.booland).?, .booland),
+ .boolor => try self.emitBinOp(inst.src, new_body, inst.castTag(.boolor).?, .boolor),
.bitcast => try self.emitCast(inst.src, new_body, inst.castTag(.bitcast).?, .bitcast),
.intcast => try self.emitCast(inst.src, new_body, inst.castTag(.intcast).?, .intcast),
@@ -2470,7 +2557,63 @@ const EmitZIR = struct {
};
break :blk &new_inst.base;
},
+ .switchbr => blk: {
+ const old_inst = inst.castTag(.switchbr).?;
+ const cases = try self.arena.allocator.alloc(Inst.SwitchBr.Case, old_inst.cases.len);
+ const new_inst = try self.arena.allocator.create(Inst.SwitchBr);
+ new_inst.* = .{
+ .base = .{
+ .src = inst.src,
+ .tag = Inst.SwitchBr.base_tag,
+ },
+ .positionals = .{
+ .target_ptr = try self.resolveInst(new_body, old_inst.target_ptr),
+ .cases = cases,
+ .items = &[_]*Inst{}, // TODO this should actually be populated
+ .else_body = undefined, // populated below
+ },
+ .kw_args = .{},
+ };
+ var body_tmp = std.ArrayList(*Inst).init(self.allocator);
+ defer body_tmp.deinit();
+
+ for (old_inst.cases) |*case, i| {
+ body_tmp.items.len = 0;
+
+ const case_deaths = try self.arena.allocator.alloc(*Inst, old_inst.caseDeaths(i).len);
+ for (old_inst.caseDeaths(i)) |death, j| {
+ case_deaths[j] = try self.resolveInst(new_body, death);
+ }
+ try self.body_metadata.put(&cases[i].body, .{ .deaths = case_deaths });
+
+ try self.emitBody(case.body, inst_table, &body_tmp);
+ const item = (try self.emitTypedValue(inst.src, .{
+ .ty = old_inst.target_ptr.ty.elemType(),
+ .val = case.item,
+ })).inst;
+
+ cases[i] = .{
+ .item = item,
+ .body = .{ .instructions = try self.arena.allocator.dupe(*Inst, body_tmp.items) },
+ };
+ }
+ { // else
+ const else_deaths = try self.arena.allocator.alloc(*Inst, old_inst.elseDeaths().len);
+ for (old_inst.elseDeaths()) |death, j| {
+ else_deaths[j] = try self.resolveInst(new_body, death);
+ }
+ try self.body_metadata.put(&new_inst.positionals.else_body, .{ .deaths = else_deaths });
+
+ body_tmp.items.len = 0;
+ try self.emitBody(old_inst.else_body, inst_table, &body_tmp);
+ new_inst.positionals.else_body = .{
+ .instructions = try self.arena.allocator.dupe(*Inst, body_tmp.items),
+ };
+ }
+
+ break :blk &new_inst.base;
+ },
.varptr => @panic("TODO"),
};
try self.metadata.put(new_inst, .{
@@ -2703,3 +2846,52 @@ const EmitZIR = struct {
return decl;
}
};
+
+/// For debugging purposes; like `dumpFn` but for unanalyzed ZIR blocks
+pub fn dumpZir(allocator: *Allocator, kind: []const u8, decl_name: [*:0]const u8, instructions: []*Inst) !void {
+ var fib = std.heap.FixedBufferAllocator.init(&[_]u8{});
+ var module = Module{
+ .decls = &[_]*Decl{},
+ .arena = std.heap.ArenaAllocator.init(&fib.allocator),
+ .metadata = std.AutoHashMap(*Inst, Module.MetaData).init(&fib.allocator),
+ .body_metadata = std.AutoHashMap(*Module.Body, Module.BodyMetaData).init(&fib.allocator),
+ };
+ var write = Writer{
+ .module = &module,
+ .inst_table = InstPtrTable.init(allocator),
+ .block_table = std.AutoHashMap(*Inst.Block, []const u8).init(allocator),
+ .loop_table = std.AutoHashMap(*Inst.Loop, []const u8).init(allocator),
+ .arena = std.heap.ArenaAllocator.init(allocator),
+ .indent = 4,
+ .next_instr_index = 0,
+ };
+ defer write.arena.deinit();
+ defer write.inst_table.deinit();
+ defer write.block_table.deinit();
+ defer write.loop_table.deinit();
+
+ try write.inst_table.ensureCapacity(@intCast(u32, instructions.len));
+
+ const stderr = std.io.getStdErr().outStream();
+ try stderr.print("{} {s} {{ // unanalyzed\n", .{ kind, decl_name });
+
+ for (instructions) |inst| {
+ const my_i = write.next_instr_index;
+ write.next_instr_index += 1;
+
+ if (inst.cast(Inst.Block)) |block| {
+ const name = try std.fmt.allocPrint(&write.arena.allocator, "label_{}", .{my_i});
+ try write.block_table.put(block, name);
+ } else if (inst.cast(Inst.Loop)) |loop| {
+ const name = try std.fmt.allocPrint(&write.arena.allocator, "loop_{}", .{my_i});
+ try write.loop_table.put(loop, name);
+ }
+
+ try write.inst_table.putNoClobber(inst, .{ .inst = inst, .index = my_i, .name = "inst" });
+ try stderr.print(" %{} ", .{my_i});
+ try write.writeInstToStream(stderr, inst);
+ try stderr.writeByte('\n');
+ }
+
+ try stderr.print("}} // {} {s}\n\n", .{ kind, decl_name });
+}
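A hypothetical call site for dumpZir (the surrounding names are illustrative, not from this commit):

    // dump the unanalyzed ZIR body of a function before analysis
    try zir.dumpZir(allocator, "fn", decl.name, fn_body.instructions);

This prints `fn <decl name> { // unanalyzed` to stderr, followed by one `%N <instruction>` line per entry and a closing `} // fn <decl name>`.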