about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorJakub Konka <kubkon@jakubkonka.com>2022-01-23 16:34:38 +0100
committerJakub Konka <kubkon@jakubkonka.com>2022-01-25 23:51:04 +0100
commit05c5bb9edd51e73c0d4a2619817fbff73b82f230 (patch)
tree2c27073cd2dbd58a0e0a4a55a877c2a81eda86ce /src
parentef7eff393912cae322fbc755536bc060c0166b94 (diff)
downloadzig-05c5bb9edd51e73c0d4a2619817fbff73b82f230.tar.gz
zig-05c5bb9edd51e73c0d4a2619817fbff73b82f230.zip
stage2: populate debug info for args passed on stack
* implement cond_br when MCValue is a stack offset
* implement passing compare flags and immediate on stack
Diffstat (limited to 'src')
-rw-r--r--src/arch/x86_64/CodeGen.zig224
-rw-r--r--src/arch/x86_64/Emit.zig27
2 files changed, 148 insertions, 103 deletions
diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig
index e53ff57144..64a91fc2bf 100644
--- a/src/arch/x86_64/CodeGen.zig
+++ b/src/arch/x86_64/CodeGen.zig
@@ -2506,6 +2506,70 @@ fn airDbgStmt(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAirBookkeeping();
}
+fn genCondBrMir(self: *Self, ty: Type, mcv: MCValue) !u32 {
+ const abi_size = ty.abiSize(self.target.*);
+ switch (mcv) {
+ .compare_flags_unsigned,
+ .compare_flags_signed,
+ => |cmp_op| {
+ // Here we map the opposites since the jump is to the false branch.
+ const flags: u2 = switch (cmp_op) {
+ .gte => 0b10,
+ .gt => 0b11,
+ .neq => 0b01,
+ .lt => 0b00,
+ .lte => 0b01,
+ .eq => 0b00,
+ };
+ const tag: Mir.Inst.Tag = if (cmp_op == .neq or cmp_op == .eq)
+ .cond_jmp_eq_ne
+ else if (mcv == .compare_flags_unsigned)
+ Mir.Inst.Tag.cond_jmp_above_below
+ else
+ Mir.Inst.Tag.cond_jmp_greater_less;
+ return self.addInst(.{
+ .tag = tag,
+ .ops = (Mir.Ops{
+ .flags = flags,
+ }).encode(),
+ .data = .{ .inst = undefined },
+ });
+ },
+ .register => |reg| {
+ _ = try self.addInst(.{
+ .tag = .@"test",
+ .ops = (Mir.Ops{
+ .reg1 = reg,
+ .flags = 0b00,
+ }).encode(),
+ .data = .{ .imm = 1 },
+ });
+ return self.addInst(.{
+ .tag = .cond_jmp_eq_ne,
+ .ops = (Mir.Ops{
+ .flags = 0b01,
+ }).encode(),
+ .data = .{ .inst = undefined },
+ });
+ },
+ .immediate => {
+ if (abi_size <= 8) {
+ const reg = try self.copyToTmpRegister(ty, mcv);
+ return self.genCondBrMir(ty, .{ .register = reg });
+ }
+ return self.fail("TODO implement condbr when condition is immediate larger than 4 bytes", .{});
+ },
+ .stack_offset => {
+ if (abi_size <= 8) {
+ const reg = try self.copyToTmpRegister(ty, mcv);
+ return self.genCondBrMir(ty, .{ .register = reg });
+ }
+ return self.fail("TODO implement condbr when condition is stack offset with abi larger than 8 bytes", .{});
+ },
+ else => return self.fail("TODO implement condbr when condition is {s}", .{@tagName(mcv)}),
+ }
+}
+
fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
const pl_op = self.air.instructions.items(.data)[inst].pl_op;
const cond = try self.resolveInst(pl_op.operand);
@@ -2515,97 +2579,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len];
const liveness_condbr = self.liveness.getCondBr(inst);
- const reloc: Mir.Inst.Index = reloc: {
- switch (cond) {
- .compare_flags_signed => |cmp_op| {
- // Here we map the opposites since the jump is to the false branch.
- const flags: u2 = switch (cmp_op) {
- .gte => 0b10,
- .gt => 0b11,
- .neq => 0b01,
- .lt => 0b00,
- .lte => 0b01,
- .eq => 0b00,
- };
- const tag: Mir.Inst.Tag = if (cmp_op == .neq or cmp_op == .eq)
- .cond_jmp_eq_ne
- else
- .cond_jmp_greater_less;
- const reloc = try self.addInst(.{
- .tag = tag,
- .ops = (Mir.Ops{
- .flags = flags,
- }).encode(),
- .data = .{ .inst = undefined },
- });
- break :reloc reloc;
- },
- .compare_flags_unsigned => |cmp_op| {
- // Here we map the opposites since the jump is to the false branch.
- const flags: u2 = switch (cmp_op) {
- .gte => 0b10,
- .gt => 0b11,
- .neq => 0b01,
- .lt => 0b00,
- .lte => 0b01,
- .eq => 0b00,
- };
- const tag: Mir.Inst.Tag = if (cmp_op == .neq or cmp_op == .eq)
- .cond_jmp_eq_ne
- else
- .cond_jmp_above_below;
- const reloc = try self.addInst(.{
- .tag = tag,
- .ops = (Mir.Ops{
- .flags = flags,
- }).encode(),
- .data = .{ .inst = undefined },
- });
- break :reloc reloc;
- },
- .register => |reg| {
- _ = try self.addInst(.{
- .tag = .@"test",
- .ops = (Mir.Ops{
- .reg1 = reg,
- .flags = 0b00,
- }).encode(),
- .data = .{ .imm = 1 },
- });
- const reloc = try self.addInst(.{
- .tag = .cond_jmp_eq_ne,
- .ops = (Mir.Ops{
- .flags = 0b01,
- }).encode(),
- .data = .{ .inst = undefined },
- });
- break :reloc reloc;
- },
- .immediate => |imm| {
- if (cond_ty.abiSize(self.target.*) <= 4) {
- const reg = try self.copyToTmpRegister(cond_ty, .{ .immediate = imm });
- _ = try self.addInst(.{
- .tag = .@"test",
- .ops = (Mir.Ops{
- .reg1 = reg,
- .flags = 0b00,
- }).encode(),
- .data = .{ .imm = 1 },
- });
- const reloc = try self.addInst(.{
- .tag = .cond_jmp_eq_ne,
- .ops = (Mir.Ops{
- .flags = 0b01,
- }).encode(),
- .data = .{ .inst = undefined },
- });
- break :reloc reloc;
- }
- return self.fail("TODO implement condbr when condition is immediate larger than 4 bytes", .{});
- },
- else => return self.fail("TODO implement condbr when condition is {s}", .{@tagName(cond)}),
- }
- };
+ const reloc = try self.genCondBrMir(cond_ty, cond);
// Capture the state of register and stack allocation state so that we can revert to it.
const parent_next_stack_offset = self.next_stack_offset;
@@ -3158,6 +3132,53 @@ fn genSetStackArg(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue) InnerE
.dead => unreachable,
.ptr_embedded_in_code => unreachable,
.unreach, .none => return,
+ .compare_flags_unsigned,
+ .compare_flags_signed,
+ => {
+ const reg = try self.copyToTmpRegister(ty, mcv);
+ return self.genSetStackArg(ty, stack_offset, .{ .register = reg });
+ },
+ .immediate => |imm| {
+ const off = stack_offset + @intCast(i32, abi_size);
+ switch (abi_size) {
+ 1, 2, 4 => {
+ // We have a positive stack offset value but we want a twos complement negative
+ // offset from rbp, which is at the top of the stack frame.
+ // mov [rbp+offset], immediate
+ const payload = try self.addExtra(Mir.ImmPair{
+ .dest_off = @bitCast(u32, -off),
+ .operand = @truncate(u32, imm),
+ });
+ _ = try self.addInst(.{
+ .tag = .mov_mem_imm,
+ .ops = (Mir.Ops{
+ .reg1 = .rsp,
+ .flags = switch (abi_size) {
+ 1 => 0b00,
+ 2 => 0b01,
+ 4 => 0b10,
+ else => unreachable,
+ },
+ }).encode(),
+ .data = .{ .payload = payload },
+ });
+ },
+ 8 => {
+ const reg = try self.copyToTmpRegister(ty, mcv);
+ return self.genSetStackArg(ty, stack_offset, MCValue{ .register = reg });
+ },
+ else => return self.fail("TODO implement args on stack for {} with abi size > 8", .{mcv}),
+ }
+ },
+ .memory,
+ .embedded_in_code,
+ => {
+ if (abi_size <= 8) {
+ const reg = try self.copyToTmpRegister(ty, mcv);
+ return self.genSetStackArg(ty, stack_offset, MCValue{ .register = reg });
+ }
+ return self.fail("TODO implement memcpy for setting args on stack from {}", .{mcv});
+ },
.register => |reg| {
_ = try self.addInst(.{
.tag = .mov,
@@ -3227,13 +3248,11 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue) InnerErro
else => return self.genInlineMemset(ty, stack_offset, .{ .immediate = 0xaa }),
}
},
- .compare_flags_unsigned => |op| {
- _ = op;
- return self.fail("TODO implement set stack variable with compare flags value (unsigned)", .{});
- },
- .compare_flags_signed => |op| {
- _ = op;
- return self.fail("TODO implement set stack variable with compare flags value (signed)", .{});
+ .compare_flags_unsigned,
+ .compare_flags_signed,
+ => {
+ const reg = try self.copyToTmpRegister(ty, mcv);
+ return self.genSetStack(ty, stack_offset, .{ .register = reg });
},
.immediate => |x_big| {
const abi_size = ty.abiSize(self.target.*);
@@ -3321,7 +3340,9 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue) InnerErro
.data = .{ .imm = @bitCast(u32, -adj_off) },
});
},
- .memory, .embedded_in_code => {
+ .memory,
+ .embedded_in_code,
+ => {
if (ty.abiSize(self.target.*) <= 8) {
const reg = try self.copyToTmpRegister(ty, mcv);
return self.genSetStack(ty, stack_offset, MCValue{ .register = reg });
@@ -3605,7 +3626,10 @@ fn genSetReg(self: *Self, ty: Type, reg: Register, mcv: MCValue) InnerError!void
.compare_flags_signed,
=> |op| {
const tag: Mir.Inst.Tag = switch (op) {
- .gte, .gt, .lt, .lte => .cond_set_byte_above_below,
+ .gte, .gt, .lt, .lte => if (mcv == .compare_flags_unsigned)
+ Mir.Inst.Tag.cond_set_byte_above_below
+ else
+ Mir.Inst.Tag.cond_set_byte_greater_less,
.eq, .neq => .cond_set_byte_eq_ne,
};
const flags: u2 = switch (op) {
diff --git a/src/arch/x86_64/Emit.zig b/src/arch/x86_64/Emit.zig
index ba19a6ba86..e18807846d 100644
--- a/src/arch/x86_64/Emit.zig
+++ b/src/arch/x86_64/Emit.zig
@@ -841,15 +841,16 @@ fn mirArgDbgInfo(emit: *Emit, inst: Mir.Inst.Index) InnerError!void {
const payload = emit.mir.instructions.items(.data)[inst].payload;
const arg_dbg_info = emit.mir.extraData(Mir.ArgDbgInfo, payload).data;
const mcv = emit.mir.function.args[arg_dbg_info.arg_index];
- try emit.genArgDbgInfo(arg_dbg_info.air_inst, mcv);
+ try emit.genArgDbgInfo(arg_dbg_info.air_inst, mcv, arg_dbg_info.arg_index);
}
-fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue) !void {
+fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue, arg_index: u32) !void {
const ty_str = emit.mir.function.air.instructions.items(.data)[inst].ty_str;
const zir = &emit.mir.function.mod_fn.owner_decl.getFileScope().zir;
const name = zir.nullTerminatedString(ty_str.str);
const name_with_null = name.ptr[0 .. name.len + 1];
const ty = emit.mir.function.air.getRefType(ty_str.ty);
+ const abi_size = ty.abiSize(emit.bin_file.options.target);
switch (mcv) {
.register => |reg| {
@@ -871,7 +872,27 @@ fn genArgDbgInfo(emit: *Emit, inst: Air.Inst.Index, mcv: MCValue) !void {
},
.stack_offset => {
switch (emit.debug_output) {
- .dwarf => {},
+ .dwarf => |dbg_out| {
+ // we add here +16 like we do in airArg in CodeGen since we refer directly to
+ // rbp as the start of function frame minus 8 bytes for caller's rbp preserved in the
+ // prologue, and 8 bytes for return address.
+ // TODO we need to make this more generic if we don't use rbp as the frame pointer
+ // for example when -fomit-frame-pointer is set.
+ const disp = @intCast(i32, arg_index * abi_size + 16);
+ try dbg_out.dbg_info.ensureUnusedCapacity(8);
+ dbg_out.dbg_info.appendAssumeCapacity(link.File.Elf.abbrev_parameter);
+ const fixup = dbg_out.dbg_info.items.len;
+ dbg_out.dbg_info.appendSliceAssumeCapacity(&[2]u8{ // DW.AT.location, DW.FORM.exprloc
+ 1, // we will backpatch it after we encode the displacement in LEB128
+ DW.OP.breg6, // .rbp TODO handle -fomit-frame-pointer
+ });
+ leb128.writeILEB128(dbg_out.dbg_info.writer(), disp) catch unreachable;
+ dbg_out.dbg_info.items[fixup] += @intCast(u8, dbg_out.dbg_info.items.len - fixup - 2);
+ try dbg_out.dbg_info.ensureUnusedCapacity(5 + name_with_null.len);
+ try emit.addDbgInfoTypeReloc(ty); // DW.AT.type, DW.FORM.ref4
+ dbg_out.dbg_info.appendSliceAssumeCapacity(name_with_null); // DW.AT.name, DW.FORM.string
+
+ },
.plan9 => {},
.none => {},
}