Diffstat (limited to 'src/codegen')
-rw-r--r--  src/codegen/c.zig        3258
-rw-r--r--  src/codegen/c/type.zig   1896
-rw-r--r--  src/codegen/llvm.zig        4
3 files changed, 3491 insertions, 1667 deletions
diff --git a/src/codegen/c.zig b/src/codegen/c.zig
index 0beb00b236..cf428d4bd6 100644
--- a/src/codegen/c.zig
+++ b/src/codegen/c.zig
@@ -23,12 +23,14 @@ const libcFloatSuffix = target_util.libcFloatSuffix;
const compilerRtFloatAbbrev = target_util.compilerRtFloatAbbrev;
const compilerRtIntAbbrev = target_util.compilerRtIntAbbrev;
-const Mutability = enum { Const, ConstArgument, Mut };
const BigIntLimb = std.math.big.Limb;
const BigInt = std.math.big.int;
+pub const CType = @import("c/type.zig").CType;
+
pub const CValue = union(enum) {
none: void,
+ new_local: LocalIndex,
local: LocalIndex,
/// Address of a local.
local_ref: LocalIndex,
@@ -36,6 +38,8 @@ pub const CValue = union(enum) {
constant: Air.Inst.Ref,
/// Index into the parameters
arg: usize,
+ /// The array field of a parameter
+ arg_array: usize,
/// Index into a tuple's fields
field: usize,
/// By-value
@@ -45,6 +49,8 @@ pub const CValue = union(enum) {
undef: Type,
/// Render the slice as an identifier (using fmtIdent)
identifier: []const u8,
+ /// Render the slice as a payload.identifier (using fmtIdent)
+ payload_identifier: []const u8,
/// Render these bytes literally.
/// TODO make this a [*:0]const u8 to save memory
bytes: []const u8,
@@ -55,37 +61,43 @@ const BlockData = struct {
result: CValue,
};
-const TypedefKind = enum {
- Forward,
- Complete,
-};
-
pub const CValueMap = std.AutoHashMap(Air.Inst.Ref, CValue);
-pub const TypedefMap = std.ArrayHashMap(
- Type,
- struct { name: []const u8, rendered: []u8 },
- Type.HashContext32,
- true,
-);
+
+pub const LazyFnKey = union(enum) {
+ tag_name: Decl.Index,
+ never_tail: Decl.Index,
+ never_inline: Decl.Index,
+};
+pub const LazyFnValue = struct {
+ fn_name: []const u8,
+ data: Data,
+
+ pub const Data = union {
+ tag_name: Type,
+ never_tail: void,
+ never_inline: void,
+ };
+};
+pub const LazyFnMap = std.AutoArrayHashMapUnmanaged(LazyFnKey, LazyFnValue);
const LoopDepth = u16;
const Local = struct {
- ty: Type,
- alignment: u32,
+ cty_idx: CType.Index,
/// How many loops the last definition was nested in.
loop_depth: LoopDepth,
+ alignas: CType.AlignAs,
+
+ pub fn getType(local: Local) LocalType {
+ return .{ .cty_idx = local.cty_idx, .alignas = local.alignas };
+ }
};
const LocalIndex = u16;
-const LocalsList = std.ArrayListUnmanaged(LocalIndex);
-const LocalsMap = std.ArrayHashMapUnmanaged(Type, LocalsList, Type.HashContext32, true);
+const LocalType = struct { cty_idx: CType.Index, alignas: CType.AlignAs };
+const LocalsList = std.AutoArrayHashMapUnmanaged(LocalIndex, void);
+const LocalsMap = std.AutoArrayHashMapUnmanaged(LocalType, LocalsList);
const LocalsStack = std.ArrayListUnmanaged(LocalsMap);
-const FormatTypeAsCIdentContext = struct {
- ty: Type,
- mod: *Module,
-};
-
const ValueRenderLocation = enum {
FunctionArgument,
Initializer,
@@ -106,26 +118,6 @@ const BuiltinInfo = enum {
Bits,
};
-fn formatTypeAsCIdentifier(
- data: FormatTypeAsCIdentContext,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- var stack = std.heap.stackFallback(128, data.mod.gpa);
- const allocator = stack.get();
- const str = std.fmt.allocPrint(allocator, "{}", .{data.ty.fmt(data.mod)}) catch "";
- defer allocator.free(str);
- return formatIdent(str, fmt, options, writer);
-}
-
-pub fn typeToCIdentifier(ty: Type, mod: *Module) std.fmt.Formatter(formatTypeAsCIdentifier) {
- return .{ .data = .{
- .ty = ty,
- .mod = mod,
- } };
-}
-
const reserved_idents = std.ComptimeStringMap(void, .{
// C language
.{ "alignas", {
@@ -224,6 +216,15 @@ const reserved_idents = std.ComptimeStringMap(void, .{
.{ "volatile", {} },
.{ "while ", {} },
+ // stdarg.h
+ .{ "va_start", {} },
+ .{ "va_arg", {} },
+ .{ "va_end", {} },
+ .{ "va_copy", {} },
+
+ // stddef.h
+ .{ "offsetof", {} },
+
// windows.h
.{ "max", {} },
.{ "min", {} },
@@ -281,6 +282,7 @@ pub const Function = struct {
next_arg_index: usize = 0,
next_block_index: usize = 0,
object: Object,
+ lazy_fns: LazyFnMap,
func: *Module.Fn,
/// All the locals, to be emitted at the top of the function.
locals: std.ArrayListUnmanaged(Local) = .{},
@@ -299,10 +301,6 @@ pub const Function = struct {
/// Needed for memory used by the keys of free_locals_stack entries.
arena: std.heap.ArenaAllocator,
- fn tyHashCtx(f: Function) Type.HashContext32 {
- return .{ .mod = f.object.dg.module };
- }
-
fn resolveInst(f: *Function, inst: Air.Inst.Ref) !CValue {
const gop = try f.value_map.getOrPut(inst);
if (gop.found_existing) return gop.value_ptr.*;
@@ -310,19 +308,19 @@ pub const Function = struct {
const val = f.air.value(inst).?;
const ty = f.air.typeOf(inst);
- const result = if (lowersToArray(ty, f.object.dg.module.getTarget())) result: {
+ const result: CValue = if (lowersToArray(ty, f.object.dg.module.getTarget())) result: {
const writer = f.object.code_header.writer();
const alignment = 0;
const decl_c_value = try f.allocLocalValue(ty, alignment);
const gpa = f.object.dg.gpa;
- try f.allocs.put(gpa, decl_c_value.local, true);
+ try f.allocs.put(gpa, decl_c_value.new_local, true);
try writer.writeAll("static ");
- try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, .Const, alignment, .Complete);
+ try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, Const, alignment, .complete);
try writer.writeAll(" = ");
try f.object.dg.renderValue(writer, ty, val, .StaticInitializer);
try writer.writeAll(";\n ");
break :result decl_c_value;
- } else CValue{ .constant = inst };
+ } else .{ .constant = inst };
gop.value_ptr.* = result;
return result;
@@ -342,32 +340,32 @@ pub const Function = struct {
/// Skips the reuse logic.
fn allocLocalValue(f: *Function, ty: Type, alignment: u32) !CValue {
const gpa = f.object.dg.gpa;
+ const target = f.object.dg.module.getTarget();
try f.locals.append(gpa, .{
- .ty = ty,
- .alignment = alignment,
+ .cty_idx = try f.typeToIndex(ty, .complete),
.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1),
+ .alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target)),
});
- return CValue{ .local = @intCast(LocalIndex, f.locals.items.len - 1) };
+ return .{ .new_local = @intCast(LocalIndex, f.locals.items.len - 1) };
}
fn allocLocal(f: *Function, inst: Air.Inst.Index, ty: Type) !CValue {
- const result = try f.allocAlignedLocal(ty, .Mut, 0);
- log.debug("%{d}: allocating t{d}", .{ inst, result.local });
+ const result = try f.allocAlignedLocal(ty, .{}, 0);
+ log.debug("%{d}: allocating t{d}", .{ inst, result.new_local });
return result;
}
/// Only allocates the local; does not print anything.
- fn allocAlignedLocal(f: *Function, ty: Type, mutability: Mutability, alignment: u32) !CValue {
- _ = mutability;
-
- if (f.getFreeLocals().getPtrContext(ty, f.tyHashCtx())) |locals_list| {
- for (locals_list.items, 0..) |local_index, i| {
- const local = &f.locals.items[local_index];
- if (local.alignment >= alignment) {
- local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
- _ = locals_list.swapRemove(i);
- return CValue{ .local = local_index };
- }
+ fn allocAlignedLocal(f: *Function, ty: Type, _: CQualifiers, alignment: u32) !CValue {
+ const target = f.object.dg.module.getTarget();
+ if (f.getFreeLocals().getPtr(.{
+ .cty_idx = try f.typeToIndex(ty, .complete),
+ .alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target)),
+ })) |locals_list| {
+ if (locals_list.popOrNull()) |local_entry| {
+ const local = &f.locals.items[local_entry.key];
+ local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
+ return .{ .new_local = local_entry.key };
}
}
@@ -430,12 +428,20 @@ pub const Function = struct {
return f.object.dg.fail(format, args);
}
- fn renderType(f: *Function, w: anytype, t: Type) !void {
- return f.object.dg.renderType(w, t, .Complete);
+ fn indexToCType(f: *Function, idx: CType.Index) CType {
+ return f.object.dg.indexToCType(idx);
+ }
+
+ fn typeToIndex(f: *Function, ty: Type, kind: CType.Kind) !CType.Index {
+ return f.object.dg.typeToIndex(ty, kind);
}
- fn renderTypecast(f: *Function, w: anytype, t: Type) !void {
- return f.object.dg.renderTypecast(w, t);
+ fn typeToCType(f: *Function, ty: Type, kind: CType.Kind) !CType {
+ return f.object.dg.typeToCType(ty, kind);
+ }
+
+ fn renderType(f: *Function, w: anytype, t: Type) !void {
+ return f.object.dg.renderType(w, t);
}
fn renderIntCast(f: *Function, w: anytype, dest_ty: Type, src: CValue, src_ty: Type, location: ValueRenderLocation) !void {
@@ -446,7 +452,39 @@ pub const Function = struct {
return f.object.dg.fmtIntLiteral(ty, val);
}
- pub fn deinit(f: *Function, gpa: mem.Allocator) void {
+ fn getLazyFnName(f: *Function, key: LazyFnKey, data: LazyFnValue.Data) ![]const u8 {
+ const gpa = f.object.dg.gpa;
+ const gop = try f.lazy_fns.getOrPut(gpa, key);
+ if (!gop.found_existing) {
+ errdefer _ = f.lazy_fns.pop();
+
+ var promoted = f.object.dg.ctypes.promote(gpa);
+ defer f.object.dg.ctypes.demote(promoted);
+ const arena = promoted.arena.allocator();
+
+ gop.value_ptr.* = .{
+ .fn_name = switch (key) {
+ .tag_name,
+ .never_tail,
+ .never_inline,
+ => |owner_decl| try std.fmt.allocPrint(arena, "zig_{s}_{}__{d}", .{
+ @tagName(key),
+ fmtIdent(mem.span(f.object.dg.module.declPtr(owner_decl).name)),
+ @enumToInt(owner_decl),
+ }),
+ },
+ .data = switch (key) {
+ .tag_name => .{ .tag_name = try data.tag_name.copy(arena) },
+ .never_tail => .{ .never_tail = data.never_tail },
+ .never_inline => .{ .never_inline = data.never_inline },
+ },
+ };
+ }
+ return gop.value_ptr.fn_name;
+ }
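A minimal sketch of the name this produces, assuming a hypothetical .tag_name key whose owner decl is named Color and has Decl.Index 42; the name follows the "zig_{s}_{}__{d}" format above, while the declaration shape shown is only indicative (the real return type is the generated slice-of-u8 struct):

    /* lazily emitted helper as it would appear in the generated C (sketch only): */
    static char const *zig_tag_name_Color__42(uint8_t tag);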
+
+ pub fn deinit(f: *Function) void {
+ const gpa = f.object.dg.gpa;
f.allocs.deinit(gpa);
f.locals.deinit(gpa);
for (f.free_locals_stack.items) |*free_locals| {
@@ -455,11 +493,9 @@ pub const Function = struct {
f.free_locals_stack.deinit(gpa);
f.blocks.deinit(gpa);
f.value_map.deinit();
+ f.lazy_fns.deinit(gpa);
f.object.code.deinit();
- for (f.object.dg.typedefs.values()) |typedef| {
- gpa.free(typedef.rendered);
- }
- f.object.dg.typedefs.deinit();
+ f.object.dg.ctypes.deinit(gpa);
f.object.dg.fwd_decl.deinit();
f.arena.deinit();
}
@@ -483,30 +519,20 @@ pub const Object = struct {
pub const DeclGen = struct {
gpa: std.mem.Allocator,
module: *Module,
- decl: *Decl,
- decl_index: Decl.Index,
+ decl: ?*Decl,
+ decl_index: Decl.OptionalIndex,
fwd_decl: std.ArrayList(u8),
error_msg: ?*Module.ErrorMsg,
- /// The key of this map is Type which has references to typedefs_arena.
- typedefs: TypedefMap,
- typedefs_arena: std.mem.Allocator,
+ ctypes: CType.Store,
fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
@setCold(true);
const src = LazySrcLoc.nodeOffset(0);
- const src_loc = src.toSrcLoc(dg.decl);
+ const src_loc = src.toSrcLoc(dg.decl.?);
dg.error_msg = try Module.ErrorMsg.create(dg.gpa, src_loc, format, args);
return error.AnalysisFail;
}
- fn getTypedefName(dg: *DeclGen, t: Type) ?[]const u8 {
- if (dg.typedefs.get(t)) |typedef| {
- return typedef.name;
- } else {
- return null;
- }
- }
-
fn renderDeclValue(
dg: *DeclGen,
writer: anytype,
@@ -520,7 +546,7 @@ pub const DeclGen = struct {
// Render an undefined pointer if we have a pointer to a zero-bit or comptime type.
if (ty.isPtrAtRuntime() and !decl.ty.isFnOrHasRuntimeBits()) {
- return dg.writeCValue(writer, CValue{ .undef = ty });
+ return dg.writeCValue(writer, .{ .undef = ty });
}
// Chase function values in order to be able to reference the original function.
@@ -534,7 +560,7 @@ pub const DeclGen = struct {
try writer.writeByte('{');
} else {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeAll("){ .ptr = ");
}
@@ -561,7 +587,7 @@ pub const DeclGen = struct {
const need_typecast = if (ty.castPtrToFn()) |_| false else !ty.eql(decl.ty, dg.module);
if (need_typecast) {
try writer.writeAll("((");
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
try writer.writeByte('&');
@@ -576,7 +602,7 @@ pub const DeclGen = struct {
fn renderParentPtr(dg: *DeclGen, writer: anytype, ptr_val: Value, ptr_ty: Type, location: ValueRenderLocation) error{ OutOfMemory, AnalysisFail }!void {
if (!ptr_ty.isSlice()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ptr_ty);
+ try dg.renderType(writer, ptr_ty);
try writer.writeByte(')');
}
switch (ptr_val.tag()) {
@@ -591,90 +617,71 @@ pub const DeclGen = struct {
try dg.renderDeclValue(writer, ptr_ty, ptr_val, decl_index, location);
},
.field_ptr => {
- const ptr_info = ptr_ty.ptrInfo();
+ const target = dg.module.getTarget();
const field_ptr = ptr_val.castTag(.field_ptr).?.data;
- const container_ty = field_ptr.container_ty;
- const index = field_ptr.field_index;
-
- var container_ptr_ty_pl: Type.Payload.ElemType = .{
- .base = .{ .tag = .c_mut_pointer },
- .data = field_ptr.container_ty,
- };
- const container_ptr_ty = Type.initPayload(&container_ptr_ty_pl.base);
- const FieldInfo = struct { name: []const u8, ty: Type };
- const field_info: FieldInfo = switch (container_ty.zigTypeTag()) {
- .Struct => switch (container_ty.containerLayout()) {
- .Auto, .Extern => FieldInfo{
- .name = container_ty.structFields().keys()[index],
- .ty = container_ty.structFields().values()[index].ty,
- },
- .Packed => if (ptr_info.data.host_size == 0) {
- const target = dg.module.getTarget();
-
- const byte_offset = container_ty.packedStructFieldByteOffset(index, target);
- var byte_offset_pl = Value.Payload.U64{
- .base = .{ .tag = .int_u64 },
- .data = byte_offset,
- };
- const byte_offset_val = Value.initPayload(&byte_offset_pl.base);
-
- var u8_ptr_pl = ptr_info;
- u8_ptr_pl.data.pointee_type = Type.u8;
- const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
-
- try writer.writeAll("&((");
- try dg.renderTypecast(writer, u8_ptr_ty);
- try writer.writeByte(')');
- try dg.renderParentPtr(writer, field_ptr.container_ptr, container_ptr_ty, location);
- return writer.print(")[{}]", .{try dg.fmtIntLiteral(Type.usize, byte_offset_val)});
- } else {
- var host_pl = Type.Payload.Bits{
- .base = .{ .tag = .int_unsigned },
- .data = ptr_info.data.host_size * 8,
- };
- const host_ty = Type.initPayload(&host_pl.base);
-
- try writer.writeByte('(');
- try dg.renderTypecast(writer, ptr_ty);
- try writer.writeByte(')');
- return dg.renderParentPtr(writer, field_ptr.container_ptr, host_ty, location);
- },
- },
- .Union => switch (container_ty.containerLayout()) {
- .Auto, .Extern => FieldInfo{
- .name = container_ty.unionFields().keys()[index],
- .ty = container_ty.unionFields().values()[index].ty,
- },
- .Packed => {
- return dg.renderParentPtr(writer, field_ptr.container_ptr, ptr_ty, location);
- },
+ // Ensure complete type definition is visible before accessing fields.
+ _ = try dg.typeToIndex(field_ptr.container_ty, .complete);
+
+ var container_ptr_pl = ptr_ty.ptrInfo();
+ container_ptr_pl.data.pointee_type = field_ptr.container_ty;
+ const container_ptr_ty = Type.initPayload(&container_ptr_pl.base);
+
+ switch (fieldLocation(
+ field_ptr.container_ty,
+ ptr_ty,
+ @intCast(u32, field_ptr.field_index),
+ target,
+ )) {
+ .begin => try dg.renderParentPtr(
+ writer,
+ field_ptr.container_ptr,
+ container_ptr_ty,
+ location,
+ ),
+ .field => |field| {
+ try writer.writeAll("&(");
+ try dg.renderParentPtr(
+ writer,
+ field_ptr.container_ptr,
+ container_ptr_ty,
+ location,
+ );
+ try writer.writeAll(")->");
+ try dg.writeCValue(writer, field);
},
- .Pointer => field_info: {
- assert(container_ty.isSlice());
- break :field_info switch (index) {
- 0 => FieldInfo{ .name = "ptr", .ty = container_ty.childType() },
- 1 => FieldInfo{ .name = "len", .ty = Type.usize },
- else => unreachable,
+ .byte_offset => |byte_offset| {
+ var u8_ptr_pl = ptr_ty.ptrInfo();
+ u8_ptr_pl.data.pointee_type = Type.u8;
+ const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
+
+ var byte_offset_pl = Value.Payload.U64{
+ .base = .{ .tag = .int_u64 },
+ .data = byte_offset,
};
- },
- else => unreachable,
- };
-
- if (field_info.ty.hasRuntimeBitsIgnoreComptime()) {
- // Ensure complete type definition is visible before accessing fields.
- try dg.renderType(std.io.null_writer, field_ptr.container_ty, .Complete);
+ const byte_offset_val = Value.initPayload(&byte_offset_pl.base);
- try writer.writeAll("&(");
- try dg.renderParentPtr(writer, field_ptr.container_ptr, container_ptr_ty, location);
- try writer.writeAll(")->");
- switch (field_ptr.container_ty.tag()) {
- .union_tagged, .union_safety_tagged => try writer.writeAll("payload."),
- else => {},
- }
- try writer.print("{ }", .{fmtIdent(field_info.name)});
- } else {
- try dg.renderParentPtr(writer, field_ptr.container_ptr, container_ptr_ty, location);
+ try writer.writeAll("((");
+ try dg.renderType(writer, u8_ptr_ty);
+ try writer.writeByte(')');
+ try dg.renderParentPtr(
+ writer,
+ field_ptr.container_ptr,
+ container_ptr_ty,
+ location,
+ );
+ try writer.print(" + {})", .{try dg.fmtIntLiteral(Type.usize, byte_offset_val)});
+ },
+ .end => {
+ try writer.writeAll("((");
+ try dg.renderParentPtr(
+ writer,
+ field_ptr.container_ptr,
+ container_ptr_ty,
+ location,
+ );
+ try writer.print(") + {})", .{try dg.fmtIntLiteral(Type.usize, Value.one)});
+ },
}
},
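A minimal sketch of the C expressions the four fieldLocation cases above emit, assuming a hypothetical struct S { uint32_t a; uint16_t b; } and a parent pointer s of type struct S * (the enclosing renderParentPtr cast is omitted):

    struct S { uint32_t a; uint16_t b; };
    struct S *s = 0;                              /* stand-in for the rendered parent pointer */
    struct S *via_begin  = s;                     /* .begin: field lives at offset 0          */
    uint16_t *via_field  = &(s)->b;               /* .field: named member access              */
    uint8_t  *via_offset = ((uint8_t *)s + 4);    /* .byte_offset: byte arithmetic (packed)   */
    struct S *via_end    = ((s) + 1);             /* .end: one past the parent object         */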
.elem_ptr => {
@@ -698,7 +705,7 @@ pub const DeclGen = struct {
const container_ptr_ty = Type.initPayload(&container_ptr_ty_pl.base);
// Ensure complete type definition is visible before accessing fields.
- try dg.renderType(std.io.null_writer, payload_ptr.container_ty, .Complete);
+ _ = try dg.typeToIndex(payload_ptr.container_ty, .complete);
try writer.writeAll("&(");
try dg.renderParentPtr(writer, payload_ptr.container_ptr, container_ptr_ty, location);
@@ -747,7 +754,7 @@ pub const DeclGen = struct {
try writer.writeAll("zig_cast_");
try dg.renderTypeForBuiltinFnName(writer, ty);
- try writer.writeAll(" zig_as_");
+ try writer.writeAll(" zig_make_");
try dg.renderTypeForBuiltinFnName(writer, ty);
try writer.writeByte('(');
switch (bits) {
@@ -765,18 +772,18 @@ pub const DeclGen = struct {
.Pointer => if (ty.isSlice()) {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
try writer.writeAll("{(");
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const ptr_ty = ty.slicePtrFieldType(&buf);
- try dg.renderTypecast(writer, ptr_ty);
+ try dg.renderType(writer, ptr_ty);
return writer.print("){x}, {0x}}}", .{try dg.fmtIntLiteral(Type.usize, val)});
} else {
try writer.writeAll("((");
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
return writer.print("){x})", .{try dg.fmtIntLiteral(Type.usize, val)});
},
.Optional => {
@@ -793,7 +800,7 @@ pub const DeclGen = struct {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -807,7 +814,7 @@ pub const DeclGen = struct {
.Auto, .Extern => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -821,7 +828,7 @@ pub const DeclGen = struct {
empty = false;
}
- if (empty) try writer.print("{x}", .{try dg.fmtIntLiteral(Type.u8, Value.undef)});
+
return writer.writeByte('}');
},
.Packed => return writer.print("{x}", .{try dg.fmtIntLiteral(ty, Value.undef)}),
@@ -829,7 +836,7 @@ pub const DeclGen = struct {
.Union => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -854,7 +861,7 @@ pub const DeclGen = struct {
.ErrorUnion => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -867,7 +874,7 @@ pub const DeclGen = struct {
.Array, .Vector => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -876,14 +883,14 @@ pub const DeclGen = struct {
var literal = stringLiteral(writer);
try literal.start();
const c_len = ty.arrayLenIncludingSentinel();
- var index: usize = 0;
+ var index: u64 = 0;
while (index < c_len) : (index += 1)
try literal.writeChar(0xaa);
return literal.end();
} else {
try writer.writeByte('{');
const c_len = ty.arrayLenIncludingSentinel();
- var index: usize = 0;
+ var index: u64 = 0;
while (index < c_len) : (index += 1) {
if (index > 0) try writer.writeAll(", ");
try dg.renderValue(writer, ty.childType(), val, initializer_type);
@@ -957,7 +964,7 @@ pub const DeclGen = struct {
try writer.writeByte(' ');
var empty = true;
if (std.math.isFinite(f128_val)) {
- try writer.writeAll("zig_as_");
+ try writer.writeAll("zig_make_");
try dg.renderTypeForBuiltinFnName(writer, ty);
try writer.writeByte('(');
switch (bits) {
@@ -992,7 +999,7 @@ pub const DeclGen = struct {
// return dg.fail("Only quiet nans are supported in global variable initializers", .{});
}
- try writer.writeAll("zig_as_special_");
+ try writer.writeAll("zig_make_special_");
if (location == .StaticInitializer) try writer.writeAll("constant_");
try dg.renderTypeForBuiltinFnName(writer, ty);
try writer.writeByte('(');
@@ -1028,7 +1035,7 @@ pub const DeclGen = struct {
return dg.renderValue(writer, ty, slice_val, location);
} else {
try writer.writeAll("((");
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeAll(")NULL)");
},
.variable => {
@@ -1038,7 +1045,7 @@ pub const DeclGen = struct {
.slice => {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -1061,7 +1068,7 @@ pub const DeclGen = struct {
},
.int_u64, .one => {
try writer.writeAll("((");
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
return writer.print("){x})", .{try dg.fmtIntLiteral(Type.usize, val)});
},
.field_ptr,
@@ -1076,15 +1083,15 @@ pub const DeclGen = struct {
.Array, .Vector => {
if (location == .FunctionArgument) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
// First try specific tag representations for more efficiency.
switch (val.tag()) {
.undef, .empty_struct_value, .empty_array => {
- try writer.writeByte('{');
const ai = ty.arrayInfo();
+ try writer.writeByte('{');
if (ai.sentinel) |s| {
try dg.renderValue(writer, ai.elem_type, s, initializer_type);
} else {
@@ -1092,13 +1099,19 @@ pub const DeclGen = struct {
}
try writer.writeByte('}');
},
- .bytes => {
- try writer.print("{s}", .{fmtStringLiteral(val.castTag(.bytes).?.data)});
- },
- .str_lit => {
- const str_lit = val.castTag(.str_lit).?.data;
- const bytes = dg.module.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
- try writer.print("{s}", .{fmtStringLiteral(bytes)});
+ .bytes, .str_lit => |t| {
+ const bytes = switch (t) {
+ .bytes => val.castTag(.bytes).?.data,
+ .str_lit => bytes: {
+ const str_lit = val.castTag(.str_lit).?.data;
+ break :bytes dg.module.string_literal_bytes.items[str_lit.index..][0..str_lit.len];
+ },
+ else => unreachable,
+ };
+ const sentinel = if (ty.sentinel()) |sentinel| @intCast(u8, sentinel.toUnsignedInt(target)) else null;
+ try writer.print("{s}", .{
+ fmtStringLiteral(bytes[0..@intCast(usize, ty.arrayLen())], sentinel),
+ });
},
else => {
// Fall back to generic implementation.
@@ -1122,7 +1135,7 @@ pub const DeclGen = struct {
}
if (ai.sentinel) |s| {
const s_u8 = @intCast(u8, s.toUnsignedInt(target));
- try literal.writeChar(s_u8);
+ if (s_u8 != 0) try literal.writeChar(s_u8);
}
try literal.end();
} else {
@@ -1179,7 +1192,7 @@ pub const DeclGen = struct {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -1213,7 +1226,7 @@ pub const DeclGen = struct {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -1277,7 +1290,7 @@ pub const DeclGen = struct {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -1292,7 +1305,6 @@ pub const DeclGen = struct {
empty = false;
}
- if (empty) try writer.print("{}", .{try dg.fmtIntLiteral(Type.u8, Value.zero)});
try writer.writeByte('}');
},
.Packed => {
@@ -1309,7 +1321,7 @@ pub const DeclGen = struct {
const bit_offset_val = Value.initPayload(&bit_offset_val_pl.base);
var eff_num_fields: usize = 0;
- for (field_vals, 0..) |_, index| {
+ for (0..field_vals.len) |index| {
const field_ty = ty.structFieldType(index);
if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -1365,7 +1377,7 @@ pub const DeclGen = struct {
if (!empty) try writer.writeAll(" | ");
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
if (bit_offset_val_pl.data != 0) {
@@ -1388,7 +1400,7 @@ pub const DeclGen = struct {
if (!location.isInitializer()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
@@ -1399,11 +1411,11 @@ pub const DeclGen = struct {
if (field_ty.hasRuntimeBits()) {
if (field_ty.isPtrAtRuntime()) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
} else if (field_ty.zigTypeTag() == .Float) {
try writer.writeByte('(');
- try dg.renderTypecast(writer, ty);
+ try dg.renderType(writer, ty);
try writer.writeByte(')');
}
try dg.renderValue(writer, field_ty, union_obj.val, initializer_type);
@@ -1413,6 +1425,7 @@ pub const DeclGen = struct {
return;
}
+ var has_payload_init = false;
try writer.writeByte('{');
if (ty.unionTagTypeSafety()) |tag_ty| {
const layout = ty.unionGetLayout(target);
@@ -1421,7 +1434,10 @@ pub const DeclGen = struct {
try dg.renderValue(writer, tag_ty, union_obj.tag, initializer_type);
try writer.writeAll(", ");
}
- try writer.writeAll(".payload = {");
+ if (!ty.unionHasAllZeroBitFieldTypes()) {
+ try writer.writeAll(".payload = {");
+ has_payload_init = true;
+ }
}
var it = ty.unionFields().iterator();
@@ -1433,8 +1449,8 @@ pub const DeclGen = struct {
try writer.print(".{ } = ", .{fmtIdent(field.key_ptr.*)});
try dg.renderValue(writer, field.value_ptr.ty, Value.undef, initializer_type);
break;
- } else try writer.writeAll(".empty_union = 0");
- if (ty.unionTagTypeSafety()) |_| try writer.writeByte('}');
+ }
+ if (has_payload_init) try writer.writeByte('}');
try writer.writeByte('}');
},
@@ -1456,497 +1472,93 @@ pub const DeclGen = struct {
}
}
- fn renderFunctionSignature(dg: *DeclGen, w: anytype, kind: TypedefKind, export_index: u32) !void {
- const fn_info = dg.decl.ty.fnInfo();
+ fn renderFunctionSignature(
+ dg: *DeclGen,
+ w: anytype,
+ fn_decl_index: Decl.Index,
+ kind: CType.Kind,
+ name: union(enum) {
+ export_index: u32,
+ string: []const u8,
+ },
+ ) !void {
+ const store = &dg.ctypes.set;
+ const module = dg.module;
+
+ const fn_decl = module.declPtr(fn_decl_index);
+ const fn_cty_idx = try dg.typeToIndex(fn_decl.ty, kind);
+
+ const fn_info = fn_decl.ty.fnInfo();
if (fn_info.cc == .Naked) {
switch (kind) {
- .Forward => try w.writeAll("zig_naked_decl "),
- .Complete => try w.writeAll("zig_naked "),
+ .forward => try w.writeAll("zig_naked_decl "),
+ .complete => try w.writeAll("zig_naked "),
+ else => unreachable,
}
}
- if (dg.decl.val.castTag(.function)) |func_payload|
+ if (fn_decl.val.castTag(.function)) |func_payload|
if (func_payload.data.is_cold) try w.writeAll("zig_cold ");
-
- const target = dg.module.getTarget();
- var ret_buf: LowerFnRetTyBuffer = undefined;
- const ret_ty = lowerFnRetTy(fn_info.return_type, &ret_buf, target);
-
- try dg.renderType(w, ret_ty, kind);
- try w.writeByte(' ');
+ if (fn_info.return_type.tag() == .noreturn) try w.writeAll("zig_noreturn ");
+
+ const trailing = try renderTypePrefix(
+ dg.decl_index,
+ store.*,
+ module,
+ w,
+ fn_cty_idx,
+ .suffix,
+ .{},
+ );
+ try w.print("{}", .{trailing});
if (toCallingConvention(fn_info.cc)) |call_conv| {
try w.print("zig_callconv({s}) ", .{call_conv});
}
- if (fn_info.alignment > 0 and kind == .Complete) try w.print(" zig_align_fn({})", .{fn_info.alignment});
-
- try dg.renderDeclName(w, dg.decl_index, export_index);
- try w.writeByte('(');
-
- var index: usize = 0;
- for (fn_info.param_types) |param_type| {
- if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
- if (index > 0) try w.writeAll(", ");
- const name = CValue{ .arg = index };
- try dg.renderTypeAndName(w, param_type, name, .ConstArgument, 0, kind);
- index += 1;
- }
-
- if (fn_info.is_var_args) {
- if (index > 0) try w.writeAll(", ");
- try w.writeAll("...");
- } else if (index == 0) {
- try dg.renderType(w, Type.void, kind);
- }
- try w.writeByte(')');
- if (fn_info.alignment > 0 and kind == .Forward) try w.print(" zig_align_fn({})", .{fn_info.alignment});
- }
-
- fn renderPtrToFnTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
- const bw = buffer.writer();
-
- const fn_info = t.fnInfo();
-
- const target = dg.module.getTarget();
- var ret_buf: LowerFnRetTyBuffer = undefined;
- const ret_ty = lowerFnRetTy(fn_info.return_type, &ret_buf, target);
-
- try bw.writeAll("typedef ");
- try dg.renderType(bw, ret_ty, .Forward);
- try bw.writeAll(" (*");
- const name_begin = buffer.items.len;
- try bw.print("zig_F_{}", .{typeToCIdentifier(t, dg.module)});
- const name_end = buffer.items.len;
- try bw.writeAll(")(");
-
- const param_len = fn_info.param_types.len;
-
- var params_written: usize = 0;
- var index: usize = 0;
- while (index < param_len) : (index += 1) {
- const param_ty = fn_info.param_types[index];
- if (!param_ty.hasRuntimeBitsIgnoreComptime()) continue;
- if (params_written > 0) {
- try bw.writeAll(", ");
- }
- try dg.renderTypeAndName(bw, param_ty, .{ .bytes = "" }, .Mut, 0, .Forward);
- params_written += 1;
- }
-
- if (fn_info.is_var_args) {
- if (params_written != 0) try bw.writeAll(", ");
- try bw.writeAll("...");
- } else if (params_written == 0) {
- try dg.renderType(bw, Type.void, .Forward);
- }
- try bw.writeAll(");\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
- const name = rendered[name_begin..name_end];
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
- }
-
- fn renderSliceTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- std.debug.assert(t.sentinel() == null); // expected canonical type
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
- const bw = buffer.writer();
-
- var ptr_ty_buf: Type.SlicePtrFieldTypeBuffer = undefined;
- const ptr_ty = t.slicePtrFieldType(&ptr_ty_buf);
- const ptr_name = CValue{ .identifier = "ptr" };
- const len_ty = Type.usize;
- const len_name = CValue{ .identifier = "len" };
-
- try bw.writeAll("typedef struct {\n ");
- try dg.renderTypeAndName(bw, ptr_ty, ptr_name, .Mut, 0, .Complete);
- try bw.writeAll(";\n ");
- try dg.renderTypeAndName(bw, len_ty, len_name, .Mut, 0, .Complete);
-
- try bw.writeAll(";\n} ");
- const name_begin = buffer.items.len;
- try bw.print("zig_{c}_{}", .{
- @as(u8, if (t.isConstPtr()) 'L' else 'M'),
- typeToCIdentifier(t.childType(), dg.module),
- });
- const name_end = buffer.items.len;
- try bw.writeAll(";\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
- const name = rendered[name_begin..name_end];
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
- }
-
- fn renderFwdTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- // The forward declaration for T is stored with a key of *const T.
- const child_ty = t.childType();
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
- const bw = buffer.writer();
-
- const tag = switch (child_ty.zigTypeTag()) {
- .Struct, .ErrorUnion, .Optional => "struct",
- .Union => if (child_ty.unionTagTypeSafety()) |_| "struct" else "union",
- else => unreachable,
- };
- try bw.writeAll("typedef ");
- try bw.writeAll(tag);
- const name_begin = buffer.items.len + " ".len;
- try bw.writeAll(" zig_");
- switch (child_ty.zigTypeTag()) {
- .Struct, .Union => {
- var fqn_buf = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer fqn_buf.deinit();
-
- const owner_decl_index = child_ty.getOwnerDecl();
- const owner_decl = dg.module.declPtr(owner_decl_index);
- try owner_decl.renderFullyQualifiedName(dg.module, fqn_buf.writer());
-
- try bw.print("S_{}__{d}", .{ fmtIdent(fqn_buf.items), @enumToInt(owner_decl_index) });
- },
- .ErrorUnion => {
- try bw.print("E_{}", .{typeToCIdentifier(child_ty.errorUnionPayload(), dg.module)});
- },
- .Optional => {
- var opt_buf: Type.Payload.ElemType = undefined;
- try bw.print("Q_{}", .{typeToCIdentifier(child_ty.optionalChild(&opt_buf), dg.module)});
- },
+ switch (kind) {
+ .forward => {},
+ .complete => if (fn_info.alignment > 0)
+ try w.print(" zig_align_fn({})", .{fn_info.alignment}),
else => unreachable,
}
- const name_end = buffer.items.len;
- try buffer.ensureUnusedCapacity(" ".len + (name_end - name_begin) + ";\n".len);
- buffer.appendAssumeCapacity(' ');
- buffer.appendSliceAssumeCapacity(buffer.items[name_begin..name_end]);
- buffer.appendSliceAssumeCapacity(";\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
- const name = rendered[name_begin..name_end];
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
- }
-
- fn renderStructTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- var ptr_pl = Type.Payload.ElemType{ .base = .{ .tag = .single_const_pointer }, .data = t };
- const ptr_ty = Type.initPayload(&ptr_pl.base);
- const name = dg.getTypedefName(ptr_ty) orelse
- try dg.renderFwdTypedef(ptr_ty);
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
-
- try buffer.appendSlice("struct ");
-
- var needs_pack_attr = false;
- {
- var it = t.structFields().iterator();
- while (it.next()) |field| {
- const field_ty = field.value_ptr.ty;
- if (!field_ty.hasRuntimeBits()) continue;
- const alignment = field.value_ptr.abi_align;
- if (alignment != 0 and alignment < field_ty.abiAlignment(dg.module.getTarget())) {
- needs_pack_attr = true;
- try buffer.appendSlice("zig_packed(");
- break;
- }
- }
- }
-
- try buffer.appendSlice(name);
- try buffer.appendSlice(" {\n");
- {
- var it = t.structFields().iterator();
- var empty = true;
- while (it.next()) |field| {
- const field_ty = field.value_ptr.ty;
- if (!field_ty.hasRuntimeBits()) continue;
-
- const alignment = field.value_ptr.alignment(dg.module.getTarget(), t.containerLayout());
- const field_name = CValue{ .identifier = field.key_ptr.* };
- try buffer.append(' ');
- try dg.renderTypeAndName(buffer.writer(), field_ty, field_name, .Mut, alignment, .Complete);
- try buffer.appendSlice(";\n");
-
- empty = false;
- }
- if (empty) try buffer.appendSlice(" char empty_struct;\n");
- }
- if (needs_pack_attr) try buffer.appendSlice("});\n") else try buffer.appendSlice("};\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
- }
-
- fn renderTupleTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
-
- try buffer.appendSlice("typedef struct {\n");
- {
- const fields = t.tupleFields();
- var field_id: usize = 0;
- for (fields.types, 0..) |field_ty, i| {
- if (!field_ty.hasRuntimeBits() or fields.values[i].tag() != .unreachable_value) continue;
-
- try buffer.append(' ');
- try dg.renderTypeAndName(buffer.writer(), field_ty, .{ .field = field_id }, .Mut, 0, .Complete);
- try buffer.appendSlice(";\n");
-
- field_id += 1;
- }
- if (field_id == 0) try buffer.appendSlice(" char empty_tuple;\n");
- }
- const name_begin = buffer.items.len + "} ".len;
- try buffer.writer().print("}} zig_T_{}_{d};\n", .{ typeToCIdentifier(t, dg.module), @truncate(u16, t.hash(dg.module)) });
- const name_end = buffer.items.len - ";\n".len;
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
- const name = rendered[name_begin..name_end];
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
- }
-
- fn renderUnionTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- var ptr_pl = Type.Payload.ElemType{ .base = .{ .tag = .single_const_pointer }, .data = t };
- const ptr_ty = Type.initPayload(&ptr_pl.base);
- const name = dg.getTypedefName(ptr_ty) orelse
- try dg.renderFwdTypedef(ptr_ty);
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
-
- try buffer.appendSlice(if (t.unionTagTypeSafety()) |_| "struct " else "union ");
- try buffer.appendSlice(name);
- try buffer.appendSlice(" {\n");
-
- const indent = if (t.unionTagTypeSafety()) |tag_ty| indent: {
- const target = dg.module.getTarget();
- const layout = t.unionGetLayout(target);
- if (layout.tag_size != 0) {
- try buffer.append(' ');
- try dg.renderTypeAndName(buffer.writer(), tag_ty, .{ .identifier = "tag" }, .Mut, 0, .Complete);
- try buffer.appendSlice(";\n");
- }
- try buffer.appendSlice(" union {\n");
- break :indent " ";
- } else " ";
- {
- var it = t.unionFields().iterator();
- var empty = true;
- while (it.next()) |field| {
- const field_ty = field.value_ptr.ty;
- if (!field_ty.hasRuntimeBits()) continue;
-
- const alignment = field.value_ptr.abi_align;
- const field_name = CValue{ .identifier = field.key_ptr.* };
- try buffer.appendSlice(indent);
- try dg.renderTypeAndName(buffer.writer(), field_ty, field_name, .Mut, alignment, .Complete);
- try buffer.appendSlice(";\n");
-
- empty = false;
- }
- if (empty) {
- try buffer.appendSlice(indent);
- try buffer.appendSlice("char empty_union;\n");
- }
+ switch (name) {
+ .export_index => |export_index| try dg.renderDeclName(w, fn_decl_index, export_index),
+ .string => |string| try w.writeAll(string),
}
- if (t.unionTagTypeSafety()) |_| try buffer.appendSlice(" } payload;\n");
- try buffer.appendSlice("};\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
+ try renderTypeSuffix(
+ dg.decl_index,
+ store.*,
+ module,
+ w,
+ fn_cty_idx,
+ .suffix,
+ CQualifiers.init(.{ .@"const" = switch (kind) {
+ .forward => false,
+ .complete => true,
+ else => unreachable,
+ } }),
);
- return name;
- }
-
- fn renderErrorUnionTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- assert(t.errorUnionSet().tag() == .anyerror);
-
- var ptr_pl = Type.Payload.ElemType{ .base = .{ .tag = .single_const_pointer }, .data = t };
- const ptr_ty = Type.initPayload(&ptr_pl.base);
- const name = dg.getTypedefName(ptr_ty) orelse
- try dg.renderFwdTypedef(ptr_ty);
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
- const bw = buffer.writer();
-
- const payload_ty = t.errorUnionPayload();
- const payload_name = CValue{ .identifier = "payload" };
- const error_ty = t.errorUnionSet();
- const error_name = CValue{ .identifier = "error" };
-
- const target = dg.module.getTarget();
- const payload_align = payload_ty.abiAlignment(target);
- const error_align = error_ty.abiAlignment(target);
- try bw.writeAll("struct ");
- try bw.writeAll(name);
- try bw.writeAll(" {\n ");
- if (error_align > payload_align) {
- try dg.renderTypeAndName(bw, payload_ty, payload_name, .Mut, 0, .Complete);
- try bw.writeAll(";\n ");
- try dg.renderTypeAndName(bw, error_ty, error_name, .Mut, 0, .Complete);
- } else {
- try dg.renderTypeAndName(bw, error_ty, error_name, .Mut, 0, .Complete);
- try bw.writeAll(";\n ");
- try dg.renderTypeAndName(bw, payload_ty, payload_name, .Mut, 0, .Complete);
+ switch (kind) {
+ .forward => if (fn_info.alignment > 0)
+ try w.print(" zig_align_fn({})", .{fn_info.alignment}),
+ .complete => {},
+ else => unreachable,
}
- try bw.writeAll(";\n};\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
}
- fn renderArrayTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- const info = t.arrayInfo();
- std.debug.assert(info.sentinel == null); // expected canonical type
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
- const bw = buffer.writer();
-
- try bw.writeAll("typedef ");
- try dg.renderType(bw, info.elem_type, .Complete);
-
- const name_begin = buffer.items.len + " ".len;
- try bw.print(" zig_A_{}_{d}", .{ typeToCIdentifier(info.elem_type, dg.module), info.len });
- const name_end = buffer.items.len;
-
- const c_len = if (info.len > 0) info.len else 1;
- var c_len_pl: Value.Payload.U64 = .{ .base = .{ .tag = .int_u64 }, .data = c_len };
- const c_len_val = Value.initPayload(&c_len_pl.base);
- try bw.print("[{}];\n", .{try dg.fmtIntLiteral(Type.usize, c_len_val)});
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
- const name = rendered[name_begin..name_end];
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
+ fn indexToCType(dg: *DeclGen, idx: CType.Index) CType {
+ return dg.ctypes.indexToCType(idx);
}
- fn renderOptionalTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- var ptr_pl = Type.Payload.ElemType{ .base = .{ .tag = .single_const_pointer }, .data = t };
- const ptr_ty = Type.initPayload(&ptr_pl.base);
- const name = dg.getTypedefName(ptr_ty) orelse
- try dg.renderFwdTypedef(ptr_ty);
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
- const bw = buffer.writer();
-
- var opt_buf: Type.Payload.ElemType = undefined;
- const child_ty = t.optionalChild(&opt_buf);
-
- try bw.writeAll("struct ");
- try bw.writeAll(name);
- try bw.writeAll(" {\n");
- try dg.renderTypeAndName(bw, child_ty, .{ .identifier = "payload" }, .Mut, 0, .Complete);
- try bw.writeAll(";\n ");
- try dg.renderTypeAndName(bw, Type.bool, .{ .identifier = "is_null" }, .Mut, 0, .Complete);
- try bw.writeAll(";\n};\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
+ fn typeToIndex(dg: *DeclGen, ty: Type, kind: CType.Kind) !CType.Index {
+ return dg.ctypes.typeToIndex(dg.gpa, ty, dg.module, kind);
}
- fn renderOpaqueTypedef(dg: *DeclGen, t: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- const opaque_ty = t.cast(Type.Payload.Opaque).?.data;
- const unqualified_name = dg.module.declPtr(opaque_ty.owner_decl).name;
- const fqn = try opaque_ty.getFullyQualifiedName(dg.module);
- defer dg.typedefs.allocator.free(fqn);
-
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
-
- try buffer.writer().print("typedef struct { } ", .{fmtIdent(std.mem.span(unqualified_name))});
-
- const name_begin = buffer.items.len;
- try buffer.writer().print("zig_O_{}", .{fmtIdent(fqn)});
- const name_end = buffer.items.len;
- try buffer.appendSlice(";\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
- const name = rendered[name_begin..name_end];
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try t.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
+ fn typeToCType(dg: *DeclGen, ty: Type, kind: CType.Kind) !CType {
+ return dg.ctypes.typeToCType(dg.gpa, ty, dg.module, kind);
}
/// Renders a type as a single identifier, generating intermediate typedefs
@@ -1957,277 +1569,15 @@ pub const DeclGen = struct {
/// There are three type formats in total that we support rendering:
/// | Function | Example 1 (*u8) | Example 2 ([10]*u8) |
/// |---------------------|-----------------|---------------------|
- /// | `renderTypecast` | "uint8_t *" | "uint8_t *[10]" |
/// | `renderTypeAndName` | "uint8_t *name" | "uint8_t *name[10]" |
- /// | `renderType` | "uint8_t *" | "zig_A_uint8_t_10" |
+ /// | `renderType` | "uint8_t *" | "uint8_t *[10]" |
///
- fn renderType(
- dg: *DeclGen,
- w: anytype,
- t: Type,
- kind: TypedefKind,
- ) error{ OutOfMemory, AnalysisFail }!void {
- const target = dg.module.getTarget();
-
- switch (t.zigTypeTag()) {
- .Void => try w.writeAll("void"),
- .Bool => try w.writeAll("bool"),
- .NoReturn, .Float => {
- try w.writeAll("zig_");
- try t.print(w, dg.module);
- },
- .Int => {
- if (t.isNamedInt()) {
- try w.writeAll("zig_");
- try t.print(w, dg.module);
- } else {
- return renderTypeUnnamed(dg, w, t, kind);
- }
- },
- .ErrorSet => {
- return renderTypeUnnamed(dg, w, t, kind);
- },
- .Pointer => {
- const ptr_info = t.ptrInfo().data;
- if (ptr_info.size == .Slice) {
- var slice_pl = Type.Payload.ElemType{
- .base = .{ .tag = if (t.ptrIsMutable()) .mut_slice else .const_slice },
- .data = ptr_info.pointee_type,
- };
- const slice_ty = Type.initPayload(&slice_pl.base);
-
- const name = dg.getTypedefName(slice_ty) orelse
- try dg.renderSliceTypedef(slice_ty);
-
- return w.writeAll(name);
- }
-
- if (ptr_info.pointee_type.zigTypeTag() == .Fn) {
- const name = dg.getTypedefName(ptr_info.pointee_type) orelse
- try dg.renderPtrToFnTypedef(ptr_info.pointee_type);
-
- return w.writeAll(name);
- }
-
- if (ptr_info.host_size != 0) {
- var host_pl = Type.Payload.Bits{
- .base = .{ .tag = .int_unsigned },
- .data = ptr_info.host_size * 8,
- };
- const host_ty = Type.initPayload(&host_pl.base);
-
- try dg.renderType(w, host_ty, .Forward);
- } else if (t.isCPtr() and ptr_info.pointee_type.eql(Type.u8, dg.module) and
- (dg.decl.val.tag() == .extern_fn or
- std.mem.eql(u8, std.mem.span(dg.decl.name), "main")))
- {
- // This is a hack, since the c compiler expects a lot of external
- // library functions to have char pointers in their signatures, but
- // u8 and i8 produce unsigned char and signed char respectively,
- // which in C are (not very usefully) different than char.
- try w.writeAll("char");
- } else try dg.renderType(w, switch (ptr_info.pointee_type.tag()) {
- .anyopaque => Type.void,
- else => ptr_info.pointee_type,
- }, .Forward);
- if (t.isConstPtr()) try w.writeAll(" const");
- if (t.isVolatilePtr()) try w.writeAll(" volatile");
- return w.writeAll(" *");
- },
- .Array, .Vector => {
- var array_pl = Type.Payload.Array{ .base = .{ .tag = .array }, .data = .{
- .len = t.arrayLenIncludingSentinel(),
- .elem_type = t.childType(),
- } };
- const array_ty = Type.initPayload(&array_pl.base);
-
- const name = dg.getTypedefName(array_ty) orelse
- try dg.renderArrayTypedef(array_ty);
-
- return w.writeAll(name);
- },
- .Optional => {
- var opt_buf: Type.Payload.ElemType = undefined;
- const child_ty = t.optionalChild(&opt_buf);
-
- if (!child_ty.hasRuntimeBitsIgnoreComptime())
- return dg.renderType(w, Type.bool, kind);
-
- if (t.optionalReprIsPayload())
- return dg.renderType(w, child_ty, kind);
-
- switch (kind) {
- .Complete => {
- const name = dg.getTypedefName(t) orelse
- try dg.renderOptionalTypedef(t);
-
- try w.writeAll(name);
- },
- .Forward => {
- var ptr_pl = Type.Payload.ElemType{
- .base = .{ .tag = .single_const_pointer },
- .data = t,
- };
- const ptr_ty = Type.initPayload(&ptr_pl.base);
-
- const name = dg.getTypedefName(ptr_ty) orelse
- try dg.renderFwdTypedef(ptr_ty);
-
- try w.writeAll(name);
- },
- }
- },
- .ErrorUnion => {
- const payload_ty = t.errorUnionPayload();
-
- if (!payload_ty.hasRuntimeBitsIgnoreComptime())
- return dg.renderType(w, Type.anyerror, kind);
-
- var error_union_pl = Type.Payload.ErrorUnion{
- .data = .{ .error_set = Type.anyerror, .payload = payload_ty },
- };
- const error_union_ty = Type.initPayload(&error_union_pl.base);
-
- switch (kind) {
- .Complete => {
- const name = dg.getTypedefName(error_union_ty) orelse
- try dg.renderErrorUnionTypedef(error_union_ty);
-
- try w.writeAll(name);
- },
- .Forward => {
- var ptr_pl = Type.Payload.ElemType{
- .base = .{ .tag = .single_const_pointer },
- .data = error_union_ty,
- };
- const ptr_ty = Type.initPayload(&ptr_pl.base);
-
- const name = dg.getTypedefName(ptr_ty) orelse
- try dg.renderFwdTypedef(ptr_ty);
-
- try w.writeAll(name);
- },
- }
- },
- .Struct, .Union => |tag| if (t.containerLayout() == .Packed) {
- if (t.castTag(.@"struct")) |struct_obj| {
- try dg.renderType(w, struct_obj.data.backing_int_ty, kind);
- } else {
- var buf: Type.Payload.Bits = .{
- .base = .{ .tag = .int_unsigned },
- .data = @intCast(u16, t.bitSize(target)),
- };
- try dg.renderType(w, Type.initPayload(&buf.base), kind);
- }
- } else if (t.isSimpleTupleOrAnonStruct()) {
- const ExpectedContents = struct { types: [8]Type, values: [8]Value };
- var stack align(@alignOf(ExpectedContents)) =
- std.heap.stackFallback(@sizeOf(ExpectedContents), dg.gpa);
- const allocator = stack.get();
-
- var tuple_storage = std.MultiArrayList(struct { type: Type, value: Value }){};
- defer tuple_storage.deinit(allocator);
- try tuple_storage.ensureTotalCapacity(allocator, t.structFieldCount());
-
- const fields = t.tupleFields();
- for (fields.values, 0..) |value, index|
- if (value.tag() == .unreachable_value)
- tuple_storage.appendAssumeCapacity(.{
- .type = fields.types[index],
- .value = value,
- });
-
- const tuple_slice = tuple_storage.slice();
- var tuple_pl = Type.Payload.Tuple{ .data = .{
- .types = tuple_slice.items(.type),
- .values = tuple_slice.items(.value),
- } };
- const tuple_ty = Type.initPayload(&tuple_pl.base);
-
- const name = dg.getTypedefName(tuple_ty) orelse
- try dg.renderTupleTypedef(tuple_ty);
-
- try w.writeAll(name);
- } else switch (kind) {
- .Complete => {
- const name = dg.getTypedefName(t) orelse switch (tag) {
- .Struct => try dg.renderStructTypedef(t),
- .Union => try dg.renderUnionTypedef(t),
- else => unreachable,
- };
-
- try w.writeAll(name);
- },
- .Forward => {
- var ptr_pl = Type.Payload.ElemType{
- .base = .{ .tag = .single_const_pointer },
- .data = t,
- };
- const ptr_ty = Type.initPayload(&ptr_pl.base);
-
- const name = dg.getTypedefName(ptr_ty) orelse
- try dg.renderFwdTypedef(ptr_ty);
-
- try w.writeAll(name);
- },
- },
- .Enum => {
- // For enums, we simply use the integer tag type.
- var int_tag_buf: Type.Payload.Bits = undefined;
- const int_tag_ty = t.intTagType(&int_tag_buf);
-
- try dg.renderType(w, int_tag_ty, kind);
- },
- .Opaque => switch (t.tag()) {
- .@"opaque" => {
- const name = dg.getTypedefName(t) orelse
- try dg.renderOpaqueTypedef(t);
-
- try w.writeAll(name);
- },
- else => unreachable,
- },
-
- .Frame,
- .AnyFrame,
- => |tag| return dg.fail("TODO: C backend: implement value of type {s}", .{
- @tagName(tag),
- }),
-
- .Fn => unreachable, // This is a function body, not a function pointer.
-
- .Null,
- .Undefined,
- .EnumLiteral,
- .ComptimeFloat,
- .ComptimeInt,
- .Type,
- => unreachable, // must be const or comptime
- }
- }
-
- fn renderTypeUnnamed(
- dg: *DeclGen,
- w: anytype,
- t: Type,
- kind: TypedefKind,
- ) error{ OutOfMemory, AnalysisFail }!void {
- const target = dg.module.getTarget();
- const int_info = t.intInfo(target);
- if (toCIntBits(int_info.bits)) |c_bits|
- return w.print("zig_{c}{d}", .{ signAbbrev(int_info.signedness), c_bits })
- else if (loweredArrayInfo(t, target)) |array_info| {
- assert(array_info.sentinel == null);
- var array_pl = Type.Payload.Array{
- .base = .{ .tag = .array },
- .data = .{ .len = array_info.len, .elem_type = array_info.elem_type },
- };
- const array_ty = Type.initPayload(&array_pl.base);
-
- return dg.renderType(w, array_ty, kind);
- } else return dg.fail("C backend: Unable to lower unnamed integer type {}", .{
- t.fmt(dg.module),
- });
+ fn renderType(dg: *DeclGen, w: anytype, t: Type) error{ OutOfMemory, AnalysisFail }!void {
+ const store = &dg.ctypes.set;
+ const module = dg.module;
+ const idx = try dg.typeToIndex(t, .complete);
+ _ = try renderTypePrefix(dg.decl_index, store.*, module, w, idx, .suffix, .{});
+ try renderTypeSuffix(dg.decl_index, store.*, module, w, idx, .suffix, .{});
}
const IntCastContext = union(enum) {
@@ -2254,16 +1604,16 @@ pub const DeclGen = struct {
/// Renders a cast to an int type, from either an int or a pointer.
///
/// Some platforms don't have 128 bit integers, so we need to use
- /// the zig_as_ and zig_lo_ macros in those cases.
+ /// the zig_make_ and zig_lo_ macros in those cases.
///
/// | Dest type bits | Src type | Result
/// |------------------|------------------|---------------------------|
/// | < 64 bit integer | pointer | (zig_<dest_ty>)(zig_<u|i>size)src
/// | < 64 bit integer | < 64 bit integer | (zig_<dest_ty>)src
/// | < 64 bit integer | > 64 bit integer | zig_lo(src)
- /// | > 64 bit integer | pointer | zig_as_<dest_ty>(0, (zig_<u|i>size)src)
- /// | > 64 bit integer | < 64 bit integer | zig_as_<dest_ty>(0, src)
- /// | > 64 bit integer | > 64 bit integer | zig_as_<dest_ty>(zig_hi_<src_ty>(src), zig_lo_<src_ty>(src))
+ /// | > 64 bit integer | pointer | zig_make_<dest_ty>(0, (zig_<u|i>size)src)
+ /// | > 64 bit integer | < 64 bit integer | zig_make_<dest_ty>(0, src)
+ /// | > 64 bit integer | > 64 bit integer | zig_make_<dest_ty>(zig_hi_<src_ty>(src), zig_lo_<src_ty>(src))
fn renderIntCast(dg: *DeclGen, w: anytype, dest_ty: Type, context: IntCastContext, src_ty: Type, location: ValueRenderLocation) !void {
const target = dg.module.getTarget();
const dest_bits = dest_ty.bitSize(target);
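A minimal sketch in C of the table above, assuming zig.h's 128-bit helpers under the zig_make_/zig_hi_/zig_lo_ names introduced in this diff, with hypothetical variables src32 (uint32_t) and src128 (zig_u128):

    zig_u128 wide   = zig_make_u128(0, src32);                   /* > 64-bit dest, <= 64-bit src */
    uint32_t narrow = (uint32_t)zig_lo_u128(src128);             /* <= 64-bit dest, > 64-bit src */
    zig_u128 same   = zig_make_u128(zig_hi_u128(src128),
                                    zig_lo_u128(src128));        /* both > 64 bits               */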
@@ -2284,36 +1634,41 @@ pub const DeclGen = struct {
if (needs_cast) {
try w.writeByte('(');
- try dg.renderTypecast(w, dest_ty);
+ try dg.renderType(w, dest_ty);
try w.writeByte(')');
}
if (src_is_ptr) {
try w.writeByte('(');
- try dg.renderTypecast(w, src_eff_ty);
+ try dg.renderType(w, src_eff_ty);
try w.writeByte(')');
}
try context.writeValue(dg, w, src_ty, location);
} else if (dest_bits <= 64 and src_bits > 64) {
assert(!src_is_ptr);
+ if (dest_bits < 64) {
+ try w.writeByte('(');
+ try dg.renderType(w, dest_ty);
+ try w.writeByte(')');
+ }
try w.writeAll("zig_lo_");
try dg.renderTypeForBuiltinFnName(w, src_eff_ty);
try w.writeByte('(');
try context.writeValue(dg, w, src_ty, .FunctionArgument);
try w.writeByte(')');
} else if (dest_bits > 64 and src_bits <= 64) {
- try w.writeAll("zig_as_");
+ try w.writeAll("zig_make_");
try dg.renderTypeForBuiltinFnName(w, dest_ty);
try w.writeAll("(0, "); // TODO: Should the 0 go through fmtIntLiteral?
if (src_is_ptr) {
try w.writeByte('(');
- try dg.renderTypecast(w, src_eff_ty);
+ try dg.renderType(w, src_eff_ty);
try w.writeByte(')');
}
try context.writeValue(dg, w, src_ty, .FunctionArgument);
try w.writeByte(')');
} else {
assert(!src_is_ptr);
- try w.writeAll("zig_as_");
+ try w.writeAll("zig_make_");
try dg.renderTypeForBuiltinFnName(w, dest_ty);
try w.writeAll("(zig_hi_");
try dg.renderTypeForBuiltinFnName(w, src_eff_ty);
@@ -2327,151 +1682,50 @@ pub const DeclGen = struct {
}
}
- /// Renders a type in C typecast format.
- ///
- /// This is guaranteed to be valid in a typecast expression, but not
- /// necessarily in a variable/field declaration.
- ///
- /// There are three type formats in total that we support rendering:
- /// | Function | Example 1 (*u8) | Example 2 ([10]*u8) |
- /// |---------------------|-----------------|---------------------|
- /// | `renderTypecast` | "uint8_t *" | "uint8_t *[10]" |
- /// | `renderTypeAndName` | "uint8_t *name" | "uint8_t *name[10]" |
- /// | `renderType` | "uint8_t *" | "zig_A_uint8_t_10" |
- ///
- fn renderTypecast(dg: *DeclGen, w: anytype, ty: Type) error{ OutOfMemory, AnalysisFail }!void {
- return renderTypeAndName(dg, w, ty, .{ .bytes = "" }, .Mut, 0, .Complete);
- }
-
/// Renders a type and name in field declaration/definition format.
///
- /// There are three type formats in total that we support rendering:
+ /// There are two type formats in total that we support rendering:
/// | Function | Example 1 (*u8) | Example 2 ([10]*u8) |
/// |---------------------|-----------------|---------------------|
- /// | `renderTypecast` | "uint8_t *" | "uint8_t *[10]" |
/// | `renderTypeAndName` | "uint8_t *name" | "uint8_t *name[10]" |
- /// | `renderType` | "uint8_t *" | "zig_A_uint8_t_10" |
+ /// | `renderType` | "uint8_t *" | "uint8_t *[10]" |
///
fn renderTypeAndName(
dg: *DeclGen,
w: anytype,
ty: Type,
name: CValue,
- mutability: Mutability,
+ qualifiers: CQualifiers,
alignment: u32,
- kind: TypedefKind,
+ kind: CType.Kind,
) error{ OutOfMemory, AnalysisFail }!void {
- var suffix = std.ArrayList(u8).init(dg.gpa);
- defer suffix.deinit();
- const suffix_writer = suffix.writer();
-
- // Any top-level array types are rendered here as a suffix, which
- // avoids creating typedefs for every array type
const target = dg.module.getTarget();
- var render_ty = ty;
- var depth: u32 = 0;
- while (loweredArrayInfo(render_ty, target)) |array_info| {
- const c_len = array_info.len + @boolToInt(array_info.sentinel != null);
- var c_len_pl: Value.Payload.U64 = .{ .base = .{ .tag = .int_u64 }, .data = c_len };
- const c_len_val = Value.initPayload(&c_len_pl.base);
-
- try suffix_writer.writeByte('[');
- if (mutability == .ConstArgument and depth == 0) try suffix_writer.writeAll("zig_const_arr ");
- try suffix.writer().print("{}]", .{try dg.fmtIntLiteral(Type.usize, c_len_val)});
- render_ty = array_info.elem_type;
- depth += 1;
- }
-
- if (alignment != 0) {
- const abi_alignment = ty.abiAlignment(target);
- if (alignment < abi_alignment) {
- try w.print("zig_under_align({}) ", .{alignment});
- } else if (alignment > abi_alignment) {
- try w.print("zig_align({}) ", .{alignment});
- }
- }
- try dg.renderType(w, render_ty, kind);
-
- const const_prefix = switch (mutability) {
- .Const, .ConstArgument => "const ",
- .Mut => "",
- };
- try w.print(" {s}", .{const_prefix});
- try dg.writeCValue(w, name);
- try w.writeAll(suffix.items);
+ const alignas = CType.AlignAs.init(alignment, ty.abiAlignment(target));
+ try dg.renderCTypeAndName(w, try dg.typeToIndex(ty, kind), name, qualifiers, alignas);
}
- fn renderTagNameFn(dg: *DeclGen, enum_ty: Type) error{ OutOfMemory, AnalysisFail }![]const u8 {
- var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
- defer buffer.deinit();
- const bw = buffer.writer();
-
- const name_slice_ty = Type.initTag(.const_slice_u8_sentinel_0);
-
- try buffer.appendSlice("static ");
- try dg.renderType(bw, name_slice_ty, .Complete);
- const name_begin = buffer.items.len + " ".len;
- try bw.print(" zig_tagName_{}_{d}(", .{ typeToCIdentifier(enum_ty, dg.module), @enumToInt(enum_ty.getOwnerDecl()) });
- const name_end = buffer.items.len - "(".len;
- try dg.renderTypeAndName(bw, enum_ty, .{ .identifier = "tag" }, .Const, 0, .Complete);
- try buffer.appendSlice(") {\n switch (tag) {\n");
- for (enum_ty.enumFields().keys(), 0..) |name, index| {
- const name_z = try dg.typedefs.allocator.dupeZ(u8, name);
- defer dg.typedefs.allocator.free(name_z);
- const name_bytes = name_z[0 .. name_z.len + 1];
-
- var tag_pl: Value.Payload.U32 = .{
- .base = .{ .tag = .enum_field_index },
- .data = @intCast(u32, index),
- };
- const tag_val = Value.initPayload(&tag_pl.base);
-
- var int_pl: Value.Payload.U64 = undefined;
- const int_val = tag_val.enumToInt(enum_ty, &int_pl);
-
- var name_ty_pl = Type.Payload.Len{ .base = .{ .tag = .array_u8_sentinel_0 }, .data = name.len };
- const name_ty = Type.initPayload(&name_ty_pl.base);
-
- var name_pl = Value.Payload.Bytes{ .base = .{ .tag = .bytes }, .data = name_bytes };
- const name_val = Value.initPayload(&name_pl.base);
-
- var len_pl = Value.Payload.U64{ .base = .{ .tag = .int_u64 }, .data = name.len };
- const len_val = Value.initPayload(&len_pl.base);
-
- try bw.print(" case {}: {{\n static ", .{try dg.fmtIntLiteral(enum_ty, int_val)});
- try dg.renderTypeAndName(bw, name_ty, .{ .identifier = "name" }, .Const, 0, .Complete);
- try buffer.appendSlice(" = ");
- try dg.renderValue(bw, name_ty, name_val, .Initializer);
- try buffer.appendSlice(";\n return (");
- try dg.renderTypecast(bw, name_slice_ty);
- try bw.print("){{{}, {}}};\n", .{
- fmtIdent("name"), try dg.fmtIntLiteral(Type.usize, len_val),
- });
+ fn renderCTypeAndName(
+ dg: *DeclGen,
+ w: anytype,
+ cty_idx: CType.Index,
+ name: CValue,
+ qualifiers: CQualifiers,
+ alignas: CType.AlignAs,
+ ) error{ OutOfMemory, AnalysisFail }!void {
+ const store = &dg.ctypes.set;
+ const module = dg.module;
- try buffer.appendSlice(" }\n");
+ switch (std.math.order(alignas.@"align", alignas.abi)) {
+ .lt => try w.print("zig_under_align({}) ", .{alignas.getAlign()}),
+ .eq => {},
+ .gt => try w.print("zig_align({}) ", .{alignas.getAlign()}),
}
- try buffer.appendSlice(" }\n while (");
- try dg.renderValue(bw, Type.bool, Value.true, .Other);
- try buffer.appendSlice(") ");
- _ = try airBreakpoint(bw);
- try buffer.appendSlice("}\n");
-
- const rendered = try buffer.toOwnedSlice();
- errdefer dg.typedefs.allocator.free(rendered);
- const name = rendered[name_begin..name_end];
-
- try dg.typedefs.ensureUnusedCapacity(1);
- dg.typedefs.putAssumeCapacityNoClobber(
- try enum_ty.copy(dg.typedefs_arena),
- .{ .name = name, .rendered = rendered },
- );
-
- return name;
- }
- fn getTagNameFn(dg: *DeclGen, enum_ty: Type) ![]const u8 {
- return dg.getTypedefName(enum_ty) orelse
- try dg.renderTagNameFn(enum_ty);
+ const trailing =
+ try renderTypePrefix(dg.decl_index, store.*, module, w, cty_idx, .suffix, qualifiers);
+ try w.print("{}", .{trailing});
+ try dg.writeCValue(w, name);
+ try renderTypeSuffix(dg.decl_index, store.*, module, w, cty_idx, .suffix, .{});
}
fn declIsGlobal(dg: *DeclGen, tv: TypedValue) bool {
@@ -2492,10 +1746,11 @@ pub const DeclGen = struct {
fn writeCValue(dg: *DeclGen, w: anytype, c_value: CValue) !void {
switch (c_value) {
.none => unreachable,
- .local => |i| return w.print("t{d}", .{i}),
+ .local, .new_local => |i| return w.print("t{d}", .{i}),
.local_ref => |i| return w.print("&t{d}", .{i}),
.constant => unreachable,
.arg => |i| return w.print("a{d}", .{i}),
+ .arg_array => |i| return dg.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" }),
.field => |i| return w.print("f{d}", .{i}),
.decl => |decl| return dg.renderDeclName(w, decl, 0),
.decl_ref => |decl| {
@@ -2504,6 +1759,10 @@ pub const DeclGen = struct {
},
.undef => |ty| return dg.renderValue(w, ty, Value.undef, .Other),
.identifier => |ident| return w.print("{ }", .{fmtIdent(ident)}),
+ .payload_identifier => |ident| return w.print("{ }.{ }", .{
+ fmtIdent("payload"),
+ fmtIdent(ident),
+ }),
.bytes => |bytes| return w.writeAll(bytes),
}
}
@@ -2511,10 +1770,15 @@ pub const DeclGen = struct {
fn writeCValueDeref(dg: *DeclGen, w: anytype, c_value: CValue) !void {
switch (c_value) {
.none => unreachable,
- .local => |i| return w.print("(*t{d})", .{i}),
+ .local, .new_local => |i| return w.print("(*t{d})", .{i}),
.local_ref => |i| return w.print("t{d}", .{i}),
.constant => unreachable,
.arg => |i| return w.print("(*a{d})", .{i}),
+ .arg_array => |i| {
+ try w.writeAll("(*");
+ try dg.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" });
+ return w.writeByte(')');
+ },
.field => |i| return w.print("f{d}", .{i}),
.decl => |decl| {
try w.writeAll("(*");
@@ -2524,6 +1788,10 @@ pub const DeclGen = struct {
.decl_ref => |decl| return dg.renderDeclName(w, decl, 0),
.undef => unreachable,
.identifier => |ident| return w.print("(*{ })", .{fmtIdent(ident)}),
+ .payload_identifier => |ident| return w.print("(*{ }.{ })", .{
+ fmtIdent("payload"),
+ fmtIdent(ident),
+ }),
.bytes => |bytes| {
try w.writeAll("(*");
try w.writeAll(bytes);
@@ -2541,7 +1809,7 @@ pub const DeclGen = struct {
fn writeCValueDerefMember(dg: *DeclGen, writer: anytype, c_value: CValue, member: CValue) !void {
switch (c_value) {
.none, .constant, .field, .undef => unreachable,
- .local, .arg, .decl, .identifier, .bytes => {
+ .new_local, .local, .arg, .arg_array, .decl, .identifier, .payload_identifier, .bytes => {
try dg.writeCValue(writer, c_value);
try writer.writeAll("->");
},
@@ -2668,12 +1936,465 @@ pub const DeclGen = struct {
}
};
-pub fn genGlobalAsm(mod: *Module, code: *std.ArrayList(u8)) !void {
- var it = mod.global_assembly.valueIterator();
- while (it.next()) |asm_source| {
- try code.writer().print("__asm({s});\n", .{fmtStringLiteral(asm_source.*)});
+const CTypeFix = enum { prefix, suffix };
+const CQualifiers = std.enums.EnumSet(enum { @"const", @"volatile", restrict });
+const Const = CQualifiers.init(.{ .@"const" = true });
+const RenderCTypeTrailing = enum {
+ no_space,
+ maybe_space,
+
+ pub fn format(
+ self: @This(),
+ comptime fmt: []const u8,
+ _: std.fmt.FormatOptions,
+ w: anytype,
+ ) @TypeOf(w).Error!void {
+ if (fmt.len != 0)
+ @compileError("invalid format string '" ++ fmt ++ "' for type '" ++
+ @typeName(@This()) ++ "'");
+ comptime assert(fmt.len == 0);
+ switch (self) {
+ .no_space => {},
+ .maybe_space => try w.writeByte(' '),
+ }
+ }
+};
+fn renderTypeName(
+ mod: *Module,
+ w: anytype,
+ idx: CType.Index,
+ cty: CType,
+ attributes: []const u8,
+) !void {
+ switch (cty.tag()) {
+ else => unreachable,
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => |tag| try w.print("{s} {s}anon__lazy_{d}", .{
+ @tagName(tag)["fwd_anon_".len..],
+ attributes,
+ idx,
+ }),
+
+ .fwd_struct,
+ .fwd_union,
+ => |tag| {
+ const owner_decl = cty.cast(CType.Payload.FwdDecl).?.data;
+ try w.print("{s} {s}{}__{d}", .{
+ @tagName(tag)["fwd_".len..],
+ attributes,
+ fmtIdent(mem.span(mod.declPtr(owner_decl).name)),
+ @enumToInt(owner_decl),
+ });
+ },
}
}
+fn renderTypePrefix(
+ decl: Decl.OptionalIndex,
+ store: CType.Store.Set,
+ mod: *Module,
+ w: anytype,
+ idx: CType.Index,
+ parent_fix: CTypeFix,
+ qualifiers: CQualifiers,
+) @TypeOf(w).Error!RenderCTypeTrailing {
+ var trailing = RenderCTypeTrailing.maybe_space;
+
+ const cty = store.indexToCType(idx);
+ switch (cty.tag()) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ .zig_c_longdouble,
+ => |tag| try w.writeAll(@tagName(tag)),
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => |tag| {
+ const child_idx = cty.cast(CType.Payload.Child).?.data;
+ const child_trailing = try renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ child_idx,
+ .prefix,
+ CQualifiers.init(.{ .@"const" = switch (tag) {
+ .pointer, .pointer_volatile => false,
+ .pointer_const, .pointer_const_volatile => true,
+ else => unreachable,
+ }, .@"volatile" = switch (tag) {
+ .pointer, .pointer_const => false,
+ .pointer_volatile, .pointer_const_volatile => true,
+ else => unreachable,
+ } }),
+ );
+ try w.print("{}*", .{child_trailing});
+ trailing = .no_space;
+ },
+
+ .array,
+ .vector,
+ => {
+ const child_idx = cty.cast(CType.Payload.Sequence).?.data.elem_type;
+ const child_trailing = try renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ child_idx,
+ .suffix,
+ qualifiers,
+ );
+ switch (parent_fix) {
+ .prefix => {
+ try w.print("{}(", .{child_trailing});
+ return .no_space;
+ },
+ .suffix => return child_trailing,
+ }
+ },
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => if (decl.unwrap()) |decl_index|
+ try w.print("anon__{d}_{d}", .{ @enumToInt(decl_index), idx })
+ else
+ try renderTypeName(mod, w, idx, cty, ""),
+
+ .fwd_struct,
+ .fwd_union,
+ => try renderTypeName(mod, w, idx, cty, ""),
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => |tag| {
+ try w.print("{s} {s}", .{
+ @tagName(tag)["unnamed_".len..],
+ if (cty.isPacked()) "zig_packed(" else "",
+ });
+ try renderAggregateFields(mod, w, store, cty, 1);
+ if (cty.isPacked()) try w.writeByte(')');
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => return renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ cty.cast(CType.Payload.Aggregate).?.data.fwd_decl,
+ parent_fix,
+ qualifiers,
+ ),
+
+ .function,
+ .varargs_function,
+ => {
+ const child_trailing = try renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ cty.cast(CType.Payload.Function).?.data.return_type,
+ .suffix,
+ .{},
+ );
+ switch (parent_fix) {
+ .prefix => {
+ try w.print("{}(", .{child_trailing});
+ return .no_space;
+ },
+ .suffix => return child_trailing,
+ }
+ },
+ }
+
+ var qualifier_it = qualifiers.iterator();
+ while (qualifier_it.next()) |qualifier| {
+ try w.print("{}{s}", .{ trailing, @tagName(qualifier) });
+ trailing = .maybe_space;
+ }
+
+ return trailing;
+}
+fn renderTypeSuffix(
+ decl: Decl.OptionalIndex,
+ store: CType.Store.Set,
+ mod: *Module,
+ w: anytype,
+ idx: CType.Index,
+ parent_fix: CTypeFix,
+ qualifiers: CQualifiers,
+) @TypeOf(w).Error!void {
+ const cty = store.indexToCType(idx);
+ switch (cty.tag()) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ .zig_c_longdouble,
+ => {},
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => try renderTypeSuffix(
+ decl,
+ store,
+ mod,
+ w,
+ cty.cast(CType.Payload.Child).?.data,
+ .prefix,
+ .{},
+ ),
+
+ .array,
+ .vector,
+ => {
+ switch (parent_fix) {
+ .prefix => try w.writeByte(')'),
+ .suffix => {},
+ }
+
+ try w.print("[{}]", .{cty.cast(CType.Payload.Sequence).?.data.len});
+ try renderTypeSuffix(
+ decl,
+ store,
+ mod,
+ w,
+ cty.cast(CType.Payload.Sequence).?.data.elem_type,
+ .suffix,
+ .{},
+ );
+ },
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ .fwd_struct,
+ .fwd_union,
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {},
+
+ .function,
+ .varargs_function,
+ => |tag| {
+ switch (parent_fix) {
+ .prefix => try w.writeByte(')'),
+ .suffix => {},
+ }
+
+ const data = cty.cast(CType.Payload.Function).?.data;
+
+ try w.writeByte('(');
+ var need_comma = false;
+ for (data.param_types, 0..) |param_type, param_i| {
+ if (need_comma) try w.writeAll(", ");
+ need_comma = true;
+ const trailing =
+ try renderTypePrefix(decl, store, mod, w, param_type, .suffix, qualifiers);
+ if (qualifiers.contains(.@"const")) try w.print("{}a{d}", .{ trailing, param_i });
+ try renderTypeSuffix(decl, store, mod, w, param_type, .suffix, .{});
+ }
+ switch (tag) {
+ .function => {},
+ .varargs_function => {
+ if (need_comma) try w.writeAll(", ");
+ need_comma = true;
+ try w.writeAll("...");
+ },
+ else => unreachable,
+ }
+ if (!need_comma) try w.writeAll("void");
+ try w.writeByte(')');
+
+ try renderTypeSuffix(decl, store, mod, w, data.return_type, .suffix, .{});
+ },
+ }
+}
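
Together these two functions implement C's inside-out declarator syntax: renderTypePrefix emits everything to the left of the declared name, renderTypeSuffix everything to the right, and a parenthesis pair is inserted whenever an array or function type sits underneath a pointer (the .prefix parent_fix case). A few declaration shapes this produces, with hypothetical names:

#include <stdint.h>
#include <stddef.h>

uint8_t *p;                      /* *u8     : prefix "uint8_t *", empty suffix           */
uint8_t *a[10];                  /* [10]*u8 : suffix appends "[10]" after the name        */
uint8_t (*pa)[10];               /* *[10]u8 : array under a pointer, so the prefix ends in
                                    "(" and the suffix supplies ")[10]"                   */
uint8_t (*fp)(uint32_t, size_t); /* fn ptr  : return type in the prefix, parameter list
                                    rendered by the suffix                                */
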
+fn renderAggregateFields(
+ mod: *Module,
+ writer: anytype,
+ store: CType.Store.Set,
+ cty: CType,
+ indent: usize,
+) !void {
+ try writer.writeAll("{\n");
+ const fields = cty.fields();
+ for (fields) |field| {
+ try writer.writeByteNTimes(' ', indent + 1);
+ switch (std.math.order(field.alignas.@"align", field.alignas.abi)) {
+ .lt => try writer.print("zig_under_align({}) ", .{field.alignas.getAlign()}),
+ .eq => {},
+ .gt => try writer.print("zig_align({}) ", .{field.alignas.getAlign()}),
+ }
+ const trailing = try renderTypePrefix(.none, store, mod, writer, field.type, .suffix, .{});
+ try writer.print("{}{ }", .{ trailing, fmtIdent(mem.span(field.name)) });
+ try renderTypeSuffix(.none, store, mod, writer, field.type, .suffix, .{});
+ try writer.writeAll(";\n");
+ }
+ try writer.writeByteNTimes(' ', indent);
+ try writer.writeByte('}');
+}
+
+pub fn genTypeDecl(
+ mod: *Module,
+ writer: anytype,
+ global_store: CType.Store.Set,
+ global_idx: CType.Index,
+ decl: Decl.OptionalIndex,
+ decl_store: CType.Store.Set,
+ decl_idx: CType.Index,
+ found_existing: bool,
+) !void {
+ const global_cty = global_store.indexToCType(global_idx);
+ switch (global_cty.tag()) {
+ .fwd_anon_struct => if (decl != .none) {
+ try writer.writeAll("typedef ");
+ _ = try renderTypePrefix(.none, global_store, mod, writer, global_idx, .suffix, .{});
+ try writer.writeByte(' ');
+ _ = try renderTypePrefix(decl, decl_store, mod, writer, decl_idx, .suffix, .{});
+ try writer.writeAll(";\n");
+ },
+
+ .fwd_struct,
+ .fwd_union,
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => |tag| if (!found_existing) {
+ switch (tag) {
+ .fwd_struct,
+ .fwd_union,
+ => {
+ const owner_decl = global_cty.cast(CType.Payload.FwdDecl).?.data;
+ _ = try renderTypePrefix(.none, global_store, mod, writer, global_idx, .suffix, .{});
+ try writer.writeAll("; // ");
+ try mod.declPtr(owner_decl).renderFullyQualifiedName(mod, writer);
+ try writer.writeByte('\n');
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {
+ const fwd_idx = global_cty.cast(CType.Payload.Aggregate).?.data.fwd_decl;
+ try renderTypeName(
+ mod,
+ writer,
+ fwd_idx,
+ global_store.indexToCType(fwd_idx),
+ if (global_cty.isPacked()) "zig_packed(" else "",
+ );
+ try writer.writeByte(' ');
+ try renderAggregateFields(mod, writer, global_store, global_cty, 0);
+ if (global_cty.isPacked()) try writer.writeByte(')');
+ try writer.writeAll(";\n");
+ },
+
+ else => unreachable,
+ }
+ },
+
+ else => {},
+ }
+}
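
Roughly what this contributes to the generated header, sketched for one named struct and one per-decl anonymous struct. The identifier scheme (Name__<decl index>, anon__lazy_<ctype index>, anon__<decl index>_<ctype index>) follows renderTypeName/renderTypePrefix above, but the concrete names and fields here are invented, and zig_align gets a simplified stand-in (the real macro comes from zig.h).

#include <stdint.h>
#define zig_align(alignment) _Alignas(alignment) /* stand-in for the zig.h macro */

struct Foo__123;                        /* .fwd_struct: forward declaration (followed by a
                                           comment with the fully qualified Zig name)     */
struct Foo__123 {                       /* full definition via renderAggregateFields      */
    uint32_t len;
    zig_align(16) uint8_t data[16];     /* field with non-ABI alignment                   */
};
typedef struct anon__lazy_7 anon__42_7; /* .fwd_anon_struct: per-decl alias for an
                                           anonymous struct type                          */
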
+
+pub fn genGlobalAsm(mod: *Module, writer: anytype) !void {
+ var it = mod.global_assembly.valueIterator();
+ while (it.next()) |asm_source| try writer.print("__asm({s});\n", .{fmtStringLiteral(asm_source.*, null)});
+}
pub fn genErrDecls(o: *Object) !void {
const writer = o.writer();
@@ -2690,26 +2411,24 @@ pub fn genErrDecls(o: *Object) !void {
o.indent_writer.popIndent();
try writer.writeAll("};\n");
- const name_prefix = "zig_errorName";
- const name_buf = try o.dg.gpa.alloc(u8, name_prefix.len + "_".len + max_name_len + 1);
+ const array_identifier = "zig_errorName";
+ const name_prefix = array_identifier ++ "_";
+ const name_buf = try o.dg.gpa.alloc(u8, name_prefix.len + max_name_len);
defer o.dg.gpa.free(name_buf);
- std.mem.copy(u8, name_buf, name_prefix ++ "_");
+ std.mem.copy(u8, name_buf, name_prefix);
for (o.dg.module.error_name_list.items) |name| {
- std.mem.copy(u8, name_buf[name_prefix.len + "_".len ..], name);
- name_buf[name_prefix.len + "_".len + name.len] = 0;
-
- const identifier = name_buf[0 .. name_prefix.len + "_".len + name.len :0];
- const name_z = identifier[name_prefix.len + "_".len ..];
+ std.mem.copy(u8, name_buf[name_prefix.len..], name);
+ const identifier = name_buf[0 .. name_prefix.len + name.len];
var name_ty_pl = Type.Payload.Len{ .base = .{ .tag = .array_u8_sentinel_0 }, .data = name.len };
const name_ty = Type.initPayload(&name_ty_pl.base);
- var name_pl = Value.Payload.Bytes{ .base = .{ .tag = .bytes }, .data = name_z };
+ var name_pl = Value.Payload.Bytes{ .base = .{ .tag = .bytes }, .data = name };
const name_val = Value.initPayload(&name_pl.base);
try writer.writeAll("static ");
- try o.dg.renderTypeAndName(writer, name_ty, .{ .identifier = identifier }, .Const, 0, .Complete);
+ try o.dg.renderTypeAndName(writer, name_ty, .{ .identifier = identifier }, Const, 0, .complete);
try writer.writeAll(" = ");
try o.dg.renderValue(writer, name_ty, name_val, .StaticInitializer);
try writer.writeAll(";\n");
@@ -2722,7 +2441,7 @@ pub fn genErrDecls(o: *Object) !void {
const name_array_ty = Type.initPayload(&name_array_ty_pl.base);
try writer.writeAll("static ");
- try o.dg.renderTypeAndName(writer, name_array_ty, .{ .identifier = name_prefix }, .Const, 0, .Complete);
+ try o.dg.renderTypeAndName(writer, name_array_ty, .{ .identifier = array_identifier }, Const, 0, .complete);
try writer.writeAll(" = {");
for (o.dg.module.error_name_list.items, 0..) |name, value| {
if (value != 0) try writer.writeByte(',');
@@ -2730,7 +2449,7 @@ pub fn genErrDecls(o: *Object) !void {
var len_pl = Value.Payload.U64{ .base = .{ .tag = .int_u64 }, .data = name.len };
const len_val = Value.initPayload(&len_pl.base);
- try writer.print("{{" ++ name_prefix ++ "_{}, {}}}", .{
+ try writer.print("{{" ++ name_prefix ++ "{}, {}}}", .{
fmtIdent(name), try o.dg.fmtIntLiteral(Type.usize, len_val),
});
}
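
The result, sketched with invented error names and a stand-in slice type (the real element type is the generated CType for the []const u8 slice): one static array per error name, then a single zig_errorName table of {ptr, len} pairs indexed by error value.

#include <stdint.h>
#include <stddef.h>

typedef struct { const uint8_t *ptr; size_t len; } u8_slice; /* stand-in slice type */

static const uint8_t zig_errorName_OutOfMemory[12] = "OutOfMemory"; /* 11 bytes + sentinel */
static const uint8_t zig_errorName_Unexpected[11]  = "Unexpected";

static const u8_slice zig_errorName[] = {
    { zig_errorName_OutOfMemory, 11 },
    { zig_errorName_Unexpected,  10 },
};
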
@@ -2742,14 +2461,95 @@ fn genExports(o: *Object) !void {
defer tracy.end();
const fwd_decl_writer = o.dg.fwd_decl.writer();
- if (o.dg.module.decl_exports.get(o.dg.decl_index)) |exports| for (exports.items[1..], 0..) |@"export", i| {
- try fwd_decl_writer.writeAll("zig_export(");
- try o.dg.renderFunctionSignature(fwd_decl_writer, .Forward, @intCast(u32, 1 + i));
- try fwd_decl_writer.print(", {s}, {s});\n", .{
- fmtStringLiteral(exports.items[0].options.name),
- fmtStringLiteral(@"export".options.name),
- });
- };
+ if (o.dg.module.decl_exports.get(o.dg.decl_index.unwrap().?)) |exports| {
+ for (exports.items[1..], 1..) |@"export", i| {
+ try fwd_decl_writer.writeAll("zig_export(");
+ try o.dg.renderFunctionSignature(fwd_decl_writer, o.dg.decl_index.unwrap().?, .forward, .{ .export_index = @intCast(u32, i) });
+ try fwd_decl_writer.print(", {s}, {s});\n", .{
+ fmtStringLiteral(exports.items[0].options.name, null),
+ fmtStringLiteral(@"export".options.name, null),
+ });
+ }
+ }
+}
+
+pub fn genLazyFn(o: *Object, lazy_fn: LazyFnMap.Entry) !void {
+ const w = o.writer();
+ const key = lazy_fn.key_ptr.*;
+ const val = lazy_fn.value_ptr;
+ const fn_name = val.fn_name;
+ switch (key) {
+ .tag_name => {
+ const enum_ty = val.data.tag_name;
+
+ const name_slice_ty = Type.initTag(.const_slice_u8_sentinel_0);
+
+ try w.writeAll("static ");
+ try o.dg.renderType(w, name_slice_ty);
+ try w.writeByte(' ');
+ try w.writeAll(fn_name);
+ try w.writeByte('(');
+ try o.dg.renderTypeAndName(w, enum_ty, .{ .identifier = "tag" }, Const, 0, .complete);
+ try w.writeAll(") {\n switch (tag) {\n");
+ for (enum_ty.enumFields().keys(), 0..) |name, index| {
+ var tag_pl: Value.Payload.U32 = .{
+ .base = .{ .tag = .enum_field_index },
+ .data = @intCast(u32, index),
+ };
+ const tag_val = Value.initPayload(&tag_pl.base);
+
+ var int_pl: Value.Payload.U64 = undefined;
+ const int_val = tag_val.enumToInt(enum_ty, &int_pl);
+
+ var name_ty_pl = Type.Payload.Len{ .base = .{ .tag = .array_u8_sentinel_0 }, .data = name.len };
+ const name_ty = Type.initPayload(&name_ty_pl.base);
+
+ var name_pl = Value.Payload.Bytes{ .base = .{ .tag = .bytes }, .data = name };
+ const name_val = Value.initPayload(&name_pl.base);
+
+ var len_pl = Value.Payload.U64{ .base = .{ .tag = .int_u64 }, .data = name.len };
+ const len_val = Value.initPayload(&len_pl.base);
+
+ try w.print(" case {}: {{\n static ", .{try o.dg.fmtIntLiteral(enum_ty, int_val)});
+ try o.dg.renderTypeAndName(w, name_ty, .{ .identifier = "name" }, Const, 0, .complete);
+ try w.writeAll(" = ");
+ try o.dg.renderValue(w, name_ty, name_val, .Initializer);
+ try w.writeAll(";\n return (");
+ try o.dg.renderType(w, name_slice_ty);
+ try w.print("){{{}, {}}};\n", .{
+ fmtIdent("name"), try o.dg.fmtIntLiteral(Type.usize, len_val),
+ });
+
+ try w.writeAll(" }\n");
+ }
+ try w.writeAll(" }\n while (");
+ try o.dg.renderValue(w, Type.bool, Value.true, .Other);
+ try w.writeAll(") ");
+ _ = try airBreakpoint(w);
+ try w.writeAll("}\n");
+ },
+ .never_tail, .never_inline => |fn_decl_index| {
+ const fn_decl = o.dg.module.declPtr(fn_decl_index);
+ const fn_cty = try o.dg.typeToCType(fn_decl.ty, .complete);
+ const fn_info = fn_cty.cast(CType.Payload.Function).?.data;
+
+ const fwd_decl_writer = o.dg.fwd_decl.writer();
+ try fwd_decl_writer.print("static zig_{s} ", .{@tagName(key)});
+ try o.dg.renderFunctionSignature(fwd_decl_writer, fn_decl_index, .forward, .{ .string = fn_name });
+ try fwd_decl_writer.writeAll(";\n");
+
+ try w.print("static zig_{s} ", .{@tagName(key)});
+ try o.dg.renderFunctionSignature(w, fn_decl_index, .complete, .{ .string = fn_name });
+ try w.writeAll(" {\n return ");
+ try o.dg.renderDeclName(w, fn_decl_index, 0);
+ try w.writeByte('(');
+ for (0..fn_info.param_types.len) |arg| {
+ if (arg > 0) try w.writeAll(", ");
+ try o.dg.writeCValue(w, .{ .arg = arg });
+ }
+ try w.writeAll(");\n}\n");
+ },
+ }
}
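
A sketch of the two kinds of lazy functions emitted here. Every identifier, the enum tag type, and the zig_breakpoint/zig_never_tail definitions are illustrative stand-ins; the real names come from getLazyFnName and renderDeclName, and the real macros from zig.h.

#include <stdint.h>
#include <stddef.h>

typedef struct { const uint8_t *ptr; size_t len; } u8_slice; /* stand-in slice type      */
typedef uint8_t Color__7;                                    /* stand-in enum tag type   */
#define zig_breakpoint() __builtin_trap()                    /* stand-in                 */
#define zig_never_tail                                       /* stand-in attribute macro */

/* .tag_name: a switch mapping each tag value to a {ptr, len} name slice. */
static u8_slice zig_tagName_Color__7(Color__7 const tag) {
    switch (tag) {
        case 0: {
            static const uint8_t name[4] = "red";
            return (u8_slice){ name, 3 };
        }
        case 1: {
            static const uint8_t name[5] = "blue";
            return (u8_slice){ name, 4 };
        }
    }
    while (1) zig_breakpoint(); /* invalid tag: spin on a breakpoint */
}

/* .never_tail / .never_inline: a thin wrapper that forwards every argument to
   the real function under the corresponding zig_never_* attribute. */
static int add__42(int a0, int a1) { return a0 + a1; } /* the callee, illustrative */
static zig_never_tail int add__42__never_tail(int a0, int a1) {
    return add__42(a0, a1);
}
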
pub fn genFunc(f: *Function) !void {
@@ -2758,9 +2558,10 @@ pub fn genFunc(f: *Function) !void {
const o = &f.object;
const gpa = o.dg.gpa;
+ const decl_index = o.dg.decl_index.unwrap().?;
const tv: TypedValue = .{
- .ty = o.dg.decl.ty,
- .val = o.dg.decl.val,
+ .ty = o.dg.decl.?.ty,
+ .val = o.dg.decl.?.val,
};
o.code_header = std.ArrayList(u8).init(gpa);
@@ -2769,13 +2570,13 @@ pub fn genFunc(f: *Function) !void {
const is_global = o.dg.declIsGlobal(tv);
const fwd_decl_writer = o.dg.fwd_decl.writer();
try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
- try o.dg.renderFunctionSignature(fwd_decl_writer, .Forward, 0);
+ try o.dg.renderFunctionSignature(fwd_decl_writer, decl_index, .forward, .{ .export_index = 0 });
try fwd_decl_writer.writeAll(";\n");
try genExports(o);
try o.indent_writer.insertNewline();
if (!is_global) try o.writer().writeAll("static ");
- try o.dg.renderFunctionSignature(o.writer(), .Complete, 0);
+ try o.dg.renderFunctionSignature(o.writer(), decl_index, .complete, .{ .export_index = 0 });
try o.writer().writeByte(' ');
// In case we need to use the header, populate it with a copy of the function
@@ -2799,41 +2600,31 @@ pub fn genFunc(f: *Function) !void {
// missing. These are added now to complete the map. Then we can sort by
// alignment, descending.
const free_locals = f.getFreeLocals();
- const values = f.allocs.values();
- for (f.allocs.keys(), 0..) |local_index, i| {
- if (values[i]) continue; // static
+ for (f.allocs.keys(), f.allocs.values()) |local_index, value| {
+ if (value) continue; // static
const local = f.locals.items[local_index];
log.debug("inserting local {d} into free_locals", .{local_index});
- const gop = try free_locals.getOrPutContext(gpa, local.ty, f.tyHashCtx());
+ const gop = try free_locals.getOrPut(gpa, local.getType());
if (!gop.found_existing) gop.value_ptr.* = .{};
- try gop.value_ptr.append(gpa, local_index);
+ try gop.value_ptr.putNoClobber(gpa, local_index, {});
}
const SortContext = struct {
- target: std.Target,
- keys: []const Type,
+ keys: []const LocalType,
- pub fn lessThan(ctx: @This(), a_index: usize, b_index: usize) bool {
- const a_ty = ctx.keys[a_index];
- const b_ty = ctx.keys[b_index];
- return b_ty.abiAlignment(ctx.target) < a_ty.abiAlignment(ctx.target);
+ pub fn lessThan(ctx: @This(), lhs_index: usize, rhs_index: usize) bool {
+ const lhs_ty = ctx.keys[lhs_index];
+ const rhs_ty = ctx.keys[rhs_index];
+ return lhs_ty.alignas.getAlign() > rhs_ty.alignas.getAlign();
}
};
- const target = o.dg.module.getTarget();
- free_locals.sort(SortContext{ .target = target, .keys = free_locals.keys() });
+ free_locals.sort(SortContext{ .keys = free_locals.keys() });
const w = o.code_header.writer();
for (free_locals.values()) |list| {
- for (list.items) |local_index| {
+ for (list.keys()) |local_index| {
const local = f.locals.items[local_index];
- try o.dg.renderTypeAndName(
- w,
- local.ty,
- .{ .local = local_index },
- .Mut,
- local.alignment,
- .Complete,
- );
+ try o.dg.renderCTypeAndName(w, local.cty_idx, .{ .local = local_index }, .{}, local.alignas);
try w.writeAll(";\n ");
}
}
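
The effect on the emitted function, with invented types: the code header declares one t<N> slot per pooled (C type, alignment) bucket, highest alignment first, and the lowered body reuses those names as temporaries are freed. zig_align is again a stand-in for the zig.h macro.

#include <stdint.h>
#define zig_align(alignment) _Alignas(alignment) /* stand-in */

static uint32_t f__42(uint32_t a0) {
    zig_align(16) uint8_t t0[16]; /* highest-alignment bucket first */
    uint64_t t1;
    uint32_t t2;
    /* ...lowered instructions use and reuse t0..t2 below... */
    t1 = (uint64_t)a0 * 2;
    t2 = (uint32_t)t1;
    (void)t0;
    return t2;
}
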
@@ -2850,15 +2641,15 @@ pub fn genDecl(o: *Object) !void {
const tracy = trace(@src());
defer tracy.end();
- const tv: TypedValue = .{
- .ty = o.dg.decl.ty,
- .val = o.dg.decl.val,
- };
+ const decl = o.dg.decl.?;
+ const decl_c_value = .{ .decl = o.dg.decl_index.unwrap().? };
+ const tv: TypedValue = .{ .ty = decl.ty, .val = decl.val };
+
if (!tv.ty.isFnOrHasRuntimeBitsIgnoreComptime()) return;
if (tv.val.tag() == .extern_fn) {
const fwd_decl_writer = o.dg.fwd_decl.writer();
try fwd_decl_writer.writeAll("zig_extern ");
- try o.dg.renderFunctionSignature(fwd_decl_writer, .Forward, 0);
+ try o.dg.renderFunctionSignature(fwd_decl_writer, decl_c_value.decl, .forward, .{ .export_index = 0 });
try fwd_decl_writer.writeAll(";\n");
try genExports(o);
} else if (tv.val.castTag(.variable)) |var_payload| {
@@ -2867,11 +2658,9 @@ pub fn genDecl(o: *Object) !void {
const is_global = o.dg.declIsGlobal(tv) or variable.is_extern;
const fwd_decl_writer = o.dg.fwd_decl.writer();
- const decl_c_value = CValue{ .decl = o.dg.decl_index };
-
try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
if (variable.is_threadlocal) try fwd_decl_writer.writeAll("zig_threadlocal ");
- try o.dg.renderTypeAndName(fwd_decl_writer, o.dg.decl.ty, decl_c_value, .Mut, o.dg.decl.@"align", .Complete);
+ try o.dg.renderTypeAndName(fwd_decl_writer, decl.ty, decl_c_value, .{}, decl.@"align", .complete);
try fwd_decl_writer.writeAll(";\n");
try genExports(o);
@@ -2880,27 +2669,26 @@ pub fn genDecl(o: *Object) !void {
const w = o.writer();
if (!is_global) try w.writeAll("static ");
if (variable.is_threadlocal) try w.writeAll("zig_threadlocal ");
- if (o.dg.decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
- try o.dg.renderTypeAndName(w, o.dg.decl.ty, decl_c_value, .Mut, o.dg.decl.@"align", .Complete);
- if (o.dg.decl.@"linksection" != null) try w.writeAll(", read, write)");
+ if (decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
+ try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, .{}, decl.@"align", .complete);
+ if (decl.@"linksection" != null) try w.writeAll(", read, write)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, variable.init, .StaticInitializer);
try w.writeByte(';');
try o.indent_writer.insertNewline();
} else {
- const is_global = o.dg.module.decl_exports.contains(o.dg.decl_index);
+ const is_global = o.dg.module.decl_exports.contains(decl_c_value.decl);
const fwd_decl_writer = o.dg.fwd_decl.writer();
- const decl_c_value: CValue = .{ .decl = o.dg.decl_index };
try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
- try o.dg.renderTypeAndName(fwd_decl_writer, tv.ty, decl_c_value, .Const, o.dg.decl.@"align", .Complete);
+ try o.dg.renderTypeAndName(fwd_decl_writer, tv.ty, decl_c_value, Const, decl.@"align", .complete);
try fwd_decl_writer.writeAll(";\n");
const w = o.writer();
if (!is_global) try w.writeAll("static ");
- if (o.dg.decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
- try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, .Const, o.dg.decl.@"align", .Complete);
- if (o.dg.decl.@"linksection" != null) try w.writeAll(", read)");
+ if (decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
+ try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, Const, decl.@"align", .complete);
+ if (decl.@"linksection" != null) try w.writeAll(", read)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, tv.val, .StaticInitializer);
try w.writeAll(";\n");
@@ -2912,8 +2700,8 @@ pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void {
defer tracy.end();
const tv: TypedValue = .{
- .ty = dg.decl.ty,
- .val = dg.decl.val,
+ .ty = dg.decl.?.ty,
+ .val = dg.decl.?.val,
};
const writer = dg.fwd_decl.writer();
@@ -2922,7 +2710,7 @@ pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void {
const is_global = dg.declIsGlobal(tv);
if (is_global) {
try writer.writeAll("zig_extern ");
- try dg.renderFunctionSignature(writer, .Complete, 0);
+ try dg.renderFunctionSignature(writer, dg.decl_index.unwrap().?, .complete, .{ .export_index = 0 });
try dg.fwd_decl.appendSlice(";\n");
}
},
@@ -2951,7 +2739,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
// zig fmt: off
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
- .arg => airArg(f),
+ .arg => try airArg(f, inst),
.breakpoint => try airBreakpoint(f.object.writer()),
.ret_addr => try airRetAddr(f, inst),
@@ -3112,10 +2900,10 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
.dbg_block_begin,
.dbg_block_end,
- => CValue{ .none = {} },
+ => .none,
.call => try airCall(f, inst, .auto),
- .call_always_tail => try airCall(f, inst, .always_tail),
+ .call_always_tail => .none,
.call_never_tail => try airCall(f, inst, .never_tail),
.call_never_inline => try airCall(f, inst, .never_inline),
@@ -3194,19 +2982,20 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
.error_set_has_value => return f.fail("TODO: C backend: implement error_set_has_value", .{}),
.vector_store_elem => return f.fail("TODO: C backend: implement vector_store_elem", .{}),
- .c_va_arg => return f.fail("TODO implement c_va_arg", .{}),
- .c_va_copy => return f.fail("TODO implement c_va_copy", .{}),
- .c_va_end => return f.fail("TODO implement c_va_end", .{}),
- .c_va_start => return f.fail("TODO implement c_va_start", .{}),
+ .c_va_start => try airCVaStart(f, inst),
+ .c_va_arg => try airCVaArg(f, inst),
+ .c_va_end => try airCVaEnd(f, inst),
+ .c_va_copy => try airCVaCopy(f, inst),
// zig fmt: on
};
- if (result_value == .local) {
- log.debug("map %{d} to t{d}", .{ inst, result_value.local });
- }
- switch (result_value) {
- .none => {},
- else => try f.value_map.putNoClobber(Air.indexToRef(inst), result_value),
+ if (result_value == .new_local) {
+ log.debug("map %{d} to t{d}", .{ inst, result_value.new_local });
}
+ try f.value_map.putNoClobber(Air.indexToRef(inst), switch (result_value) {
+ .none => continue,
+ .new_local => |i| .{ .local = i },
+ else => result_value,
+ });
}
}
@@ -3215,7 +3004,7 @@ fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: [
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -3241,7 +3030,7 @@ fn airPtrElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
!inst_ty.hasRuntimeBitsIgnoreComptime())
{
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const ptr = try f.resolveInst(bin_op.lhs);
@@ -3267,7 +3056,7 @@ fn airPtrElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeByte(']');
if (is_array) {
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeAll("))");
}
try writer.writeAll(";\n");
@@ -3280,9 +3069,10 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
+ const inst_ty = f.air.typeOfIndex(inst);
const ptr_ty = f.air.typeOf(bin_op.lhs);
const child_ty = ptr_ty.childType();
@@ -3297,7 +3087,9 @@ fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, f.air.typeOfIndex(inst));
try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = &(");
+ try writer.writeAll(" = (");
+ try f.renderType(writer, inst_ty);
+ try writer.writeAll(")&(");
if (ptr_ty.ptrSize() == .One) {
// It's a pointer to an array, so we need to de-reference.
try f.writeCValueDeref(writer, ptr);
@@ -3318,7 +3110,7 @@ fn airSliceElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
!inst_ty.hasRuntimeBitsIgnoreComptime())
{
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const slice = try f.resolveInst(bin_op.lhs);
@@ -3344,7 +3136,7 @@ fn airSliceElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeByte(']');
if (is_array) {
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeAll("))");
}
try writer.writeAll(";\n");
@@ -3357,7 +3149,7 @@ fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const slice_ty = f.air.typeOf(bin_op.lhs);
@@ -3387,7 +3179,7 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
const inst_ty = f.air.typeOfIndex(inst);
if (f.liveness.isUnused(inst) or !inst_ty.hasRuntimeBitsIgnoreComptime()) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const array = try f.resolveInst(bin_op.lhs);
@@ -3413,7 +3205,7 @@ fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeByte(']');
if (is_array) {
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeAll("))");
}
try writer.writeAll(";\n");
@@ -3425,16 +3217,19 @@ fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
const elem_type = inst_ty.elemType();
if (!elem_type.isFnOrHasRuntimeBitsIgnoreComptime()) {
- return CValue{ .undef = inst_ty };
+ return .{ .undef = inst_ty };
}
- const mutability: Mutability = if (inst_ty.isConstPtr()) .Const else .Mut;
const target = f.object.dg.module.getTarget();
- const local = try f.allocAlignedLocal(elem_type, mutability, inst_ty.ptrAlignment(target));
- log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.local });
+ const local = try f.allocAlignedLocal(
+ elem_type,
+ CQualifiers.init(.{ .@"const" = inst_ty.isConstPtr() }),
+ inst_ty.ptrAlignment(target),
+ );
+ log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local });
const gpa = f.object.dg.module.gpa;
- try f.allocs.put(gpa, local.local, false);
- return CValue{ .local_ref = local.local };
+ try f.allocs.put(gpa, local.new_local, false);
+ return .{ .local_ref = local.new_local };
}
fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -3442,22 +3237,31 @@ fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const elem_ty = inst_ty.elemType();
if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime()) {
- return CValue{ .undef = inst_ty };
+ return .{ .undef = inst_ty };
}
- const mutability: Mutability = if (inst_ty.isConstPtr()) .Const else .Mut;
const target = f.object.dg.module.getTarget();
- const local = try f.allocAlignedLocal(elem_ty, mutability, inst_ty.ptrAlignment(target));
- log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.local });
+ const local = try f.allocAlignedLocal(
+ elem_ty,
+ CQualifiers.init(.{ .@"const" = inst_ty.isConstPtr() }),
+ inst_ty.ptrAlignment(target),
+ );
+ log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local });
const gpa = f.object.dg.module.gpa;
- try f.allocs.put(gpa, local.local, false);
- return CValue{ .local_ref = local.local };
+ try f.allocs.put(gpa, local.new_local, false);
+ return .{ .local_ref = local.new_local };
}
-fn airArg(f: *Function) CValue {
+fn airArg(f: *Function, inst: Air.Inst.Index) !CValue {
+ const inst_ty = f.air.typeOfIndex(inst);
+ const inst_cty = try f.typeToIndex(inst_ty, .parameter);
+
const i = f.next_arg_index;
f.next_arg_index += 1;
- return .{ .arg = i };
+ return if (inst_cty != try f.typeToIndex(inst_ty, .complete))
+ .{ .arg_array = i }
+ else
+ .{ .arg = i };
}
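
At the C level this means, with an invented wrapper type: parameters whose Zig type lowers to a C array are passed inside a one-field struct so they keep by-value semantics, and the .arg_array CValue renders as a<i>.array rather than a<i>.

#include <stdint.h>
#include <stddef.h>

typedef struct { uint32_t array[4]; } array_u32_4; /* stand-in for the generated wrapper */

static uint32_t sum__7(array_u32_4 a0) {
    uint32_t t0 = 0;
    for (size_t i = 0; i < 4; i++) t0 += a0.array[i]; /* .arg_array renders "a0.array" */
    return t0;
}
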
fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -3469,7 +3273,7 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
(!ptr_info.@"volatile" and f.liveness.isUnused(inst)))
{
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -3491,7 +3295,7 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(", (const char *)");
try f.writeCValue(writer, operand, .Other);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, src_ty);
+ try f.renderType(writer, src_ty);
try writer.writeAll("))");
} else if (ptr_info.host_size != 0) {
var host_pl = Type.Payload.Bits{
@@ -3520,11 +3324,11 @@ fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, src_ty);
+ try f.renderType(writer, src_ty);
try writer.writeAll(")zig_wrap_");
try f.object.dg.renderTypeForBuiltinFnName(writer, field_ty);
try writer.writeAll("((");
- try f.renderTypecast(writer, field_ty);
+ try f.renderType(writer, field_ty);
try writer.writeByte(')');
const cant_cast = host_ty.isInt() and host_ty.bitSize(target) > 64;
if (cant_cast) {
@@ -3554,20 +3358,24 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
const writer = f.object.writer();
const target = f.object.dg.module.getTarget();
+ const op_inst = Air.refToIndex(un_op);
const op_ty = f.air.typeOf(un_op);
const ret_ty = if (is_ptr) op_ty.childType() else op_ty;
var lowered_ret_buf: LowerFnRetTyBuffer = undefined;
const lowered_ret_ty = lowerFnRetTy(ret_ty, &lowered_ret_buf, target);
- if (lowered_ret_ty.hasRuntimeBitsIgnoreComptime()) {
- var deref = is_ptr;
+ if (op_inst != null and f.air.instructions.items(.tag)[op_inst.?] == .call_always_tail) {
+ try reap(f, inst, &.{un_op});
+ _ = try airCall(f, op_inst.?, .always_tail);
+ } else if (lowered_ret_ty.hasRuntimeBitsIgnoreComptime()) {
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
+ var deref = is_ptr;
const is_array = lowersToArray(ret_ty, target);
const ret_val = if (is_array) ret_val: {
const array_local = try f.allocLocal(inst, try lowered_ret_ty.copy(f.arena.allocator()));
try writer.writeAll("memcpy(");
- try f.writeCValueMember(writer, array_local, .{ .field = 0 });
+ try f.writeCValueMember(writer, array_local, .{ .identifier = "array" });
try writer.writeAll(", ");
if (deref)
try f.writeCValueDeref(writer, operand)
@@ -3575,7 +3383,7 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
try f.writeCValue(writer, operand, .FunctionArgument);
deref = false;
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, ret_ty);
+ try f.renderType(writer, ret_ty);
try writer.writeAll("));\n");
break :ret_val array_local;
} else operand;
@@ -3587,16 +3395,15 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
try f.writeCValue(writer, ret_val, .Other);
try writer.writeAll(";\n");
if (is_array) {
- try freeLocal(f, inst, ret_val.local, 0);
+ try freeLocal(f, inst, ret_val.new_local, 0);
}
} else {
try reap(f, inst, &.{un_op});
- if (f.object.dg.decl.ty.fnCallingConvention() != .Naked) {
- // Not even allowed to return void in a naked function.
+ // Not even allowed to return void in a naked function.
+ if (if (f.object.dg.decl) |decl| decl.ty.fnCallingConvention() != .Naked else true)
try writer.writeAll("return;\n");
- }
}
- return CValue.none;
+ return .none;
}
fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -3604,7 +3411,7 @@ fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -3625,7 +3432,7 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -3644,15 +3451,17 @@ fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = ");
+ if (dest_c_bits < 64) {
+ try writer.writeByte('(');
+ try f.renderType(writer, inst_ty);
+ try writer.writeByte(')');
+ }
+
const needs_lo = operand_int_info.bits > 64 and dest_bits <= 64;
if (needs_lo) {
try writer.writeAll("zig_lo_");
try f.object.dg.renderTypeForBuiltinFnName(writer, operand_ty);
try writer.writeByte('(');
- } else if (dest_c_bits <= 64) {
- try writer.writeByte('(');
- try f.renderTypecast(writer, inst_ty);
- try writer.writeByte(')');
}
if (dest_bits >= 8 and std.math.isPowerOfTwo(dest_bits)) {
@@ -3712,7 +3521,7 @@ fn airBoolToInt(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
@@ -3732,10 +3541,10 @@ fn storeUndefined(f: *Function, lhs_child_ty: Type, dest_ptr: CValue) !CValue {
try writer.writeAll("memset(");
try f.writeCValue(writer, dest_ptr, .FunctionArgument);
try writer.print(", {x}, sizeof(", .{try f.fmtIntLiteral(Type.u8, Value.undef)});
- try f.renderTypecast(writer, lhs_child_ty);
+ try f.renderType(writer, lhs_child_ty);
try writer.writeAll("));\n");
}
- return CValue.none;
+ return .none;
}
fn airStore(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -3744,7 +3553,7 @@ fn airStore(f: *Function, inst: Air.Inst.Index) !CValue {
const ptr_info = f.air.typeOf(bin_op.lhs).ptrInfo().data;
if (!ptr_info.pointee_type.hasRuntimeBitsIgnoreComptime()) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const ptr_val = try f.resolveInst(bin_op.lhs);
@@ -3793,10 +3602,10 @@ fn airStore(f: *Function, inst: Air.Inst.Index) !CValue {
if (!is_array) try writer.writeByte('&');
try f.writeCValue(writer, array_src, .FunctionArgument);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, src_ty);
+ try f.renderType(writer, src_ty);
try writer.writeAll("))");
if (src_val == .constant) {
- try freeLocal(f, inst, array_src.local, 0);
+ try freeLocal(f, inst, array_src.new_local, 0);
}
} else if (ptr_info.host_size != 0) {
const host_bits = ptr_info.host_size * 8;
@@ -3847,18 +3656,18 @@ fn airStore(f: *Function, inst: Air.Inst.Index) !CValue {
const cant_cast = host_ty.isInt() and host_ty.bitSize(target) > 64;
if (cant_cast) {
if (src_ty.bitSize(target) > 64) return f.fail("TODO: C backend: implement casting between types > 64 bits", .{});
- try writer.writeAll("zig_as_");
+ try writer.writeAll("zig_make_");
try f.object.dg.renderTypeForBuiltinFnName(writer, host_ty);
try writer.writeAll("(0, ");
} else {
try writer.writeByte('(');
- try f.renderTypecast(writer, host_ty);
+ try f.renderType(writer, host_ty);
try writer.writeByte(')');
}
if (src_ty.isPtrAtRuntime()) {
try writer.writeByte('(');
- try f.renderTypecast(writer, Type.usize);
+ try f.renderType(writer, Type.usize);
try writer.writeByte(')');
}
try f.writeCValue(writer, src_val, .Other);
@@ -3870,7 +3679,7 @@ fn airStore(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, src_val, .Other);
}
try writer.writeAll(";\n");
- return CValue.none;
+ return .none;
}
fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info: BuiltinInfo) !CValue {
@@ -3879,7 +3688,7 @@ fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info:
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const lhs = try f.resolveInst(bin_op.lhs);
@@ -3935,7 +3744,7 @@ fn airNot(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const op = try f.resolveInst(ty_op.operand);
@@ -3970,7 +3779,7 @@ fn airBinOp(
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
const inst_ty = f.air.typeOfIndex(inst);
@@ -3993,7 +3802,7 @@ fn airCmpOp(f: *Function, inst: Air.Inst.Index, operator: []const u8, operation:
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const operand_ty = f.air.typeOf(bin_op.lhs);
@@ -4033,7 +3842,7 @@ fn airEquality(
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const operand_ty = f.air.typeOf(bin_op.lhs);
@@ -4089,7 +3898,7 @@ fn airCmpLtErrorsLen(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -4110,7 +3919,7 @@ fn airPtrAddSub(f: *Function, inst: Air.Inst.Index, operator: u8) !CValue {
const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const lhs = try f.resolveInst(bin_op.lhs);
@@ -4118,32 +3927,31 @@ fn airPtrAddSub(f: *Function, inst: Air.Inst.Index, operator: u8) !CValue {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
const inst_ty = f.air.typeOfIndex(inst);
- const elem_ty = switch (inst_ty.ptrSize()) {
- .One => blk: {
- const array_ty = inst_ty.childType();
- break :blk array_ty.childType();
- },
- else => inst_ty.childType(),
- };
+ const elem_ty = inst_ty.elemType2();
- // We must convert to and from integer types to prevent UB if the operation
- // results in a NULL pointer, or if LHS is NULL. The operation is only UB
- // if the result is NULL and then dereferenced.
const local = try f.allocLocal(inst, inst_ty);
const writer = f.object.writer();
try f.writeCValue(writer, local, .Other);
- try writer.writeAll(" = (");
- try f.renderTypecast(writer, inst_ty);
- try writer.writeAll(")(((uintptr_t)");
- try f.writeCValue(writer, lhs, .Other);
- try writer.writeAll(") ");
- try writer.writeByte(operator);
- try writer.writeAll(" (");
- try f.writeCValue(writer, rhs, .Other);
- try writer.writeAll("*sizeof(");
- try f.renderTypecast(writer, elem_ty);
- try writer.writeAll(")));\n");
+ try writer.writeAll(" = ");
+
+ if (elem_ty.hasRuntimeBitsIgnoreComptime()) {
+ // We must convert to and from integer types to prevent UB if the operation
+ // results in a NULL pointer, or if LHS is NULL. The operation is only UB
+ // if the result is NULL and then dereferenced.
+ try writer.writeByte('(');
+ try f.renderType(writer, inst_ty);
+ try writer.writeAll(")(((uintptr_t)");
+ try f.writeCValue(writer, lhs, .Other);
+ try writer.writeAll(") ");
+ try writer.writeByte(operator);
+ try writer.writeAll(" (");
+ try f.writeCValue(writer, rhs, .Other);
+ try writer.writeAll("*sizeof(");
+ try f.renderType(writer, elem_ty);
+ try writer.writeAll(")))");
+ } else try f.writeCValue(writer, lhs, .Initializer);
+ try writer.writeAll(";\n");
return local;
}
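
The runtime case keeps emitting the same uintptr_t form as before, sketched below; the new branch simply copies the base pointer through when the element type has no runtime bits, since stepping by zero-sized elements changes nothing.

#include <stdint.h>
#include <stddef.h>

static uint32_t *ptr_add_example(uint32_t *lhs, size_t rhs) {
    /* arithmetic on uintptr_t so a NULL base or NULL result is not itself UB;
       only dereferencing a bad result would be */
    uint32_t *t0 = (uint32_t *)(((uintptr_t)lhs) + (rhs * sizeof(uint32_t)));
    return t0;
}
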
@@ -4152,7 +3960,7 @@ fn airMinMax(f: *Function, inst: Air.Inst.Index, operator: u8, operation: []cons
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -4191,7 +3999,7 @@ fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const ptr = try f.resolveInst(bin_op.lhs);
@@ -4204,7 +4012,7 @@ fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(".ptr = (");
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
- try f.renderTypecast(writer, inst_ty.slicePtrFieldType(&buf));
+ try f.renderType(writer, inst_ty.slicePtrFieldType(&buf));
try writer.writeByte(')');
try f.writeCValue(writer, ptr, .Other);
try writer.writeAll("; ");
@@ -4222,24 +4030,41 @@ fn airCall(
modifier: std.builtin.CallModifier,
) !CValue {
// Not even allowed to call panic in a naked function.
- if (f.object.dg.decl.ty.fnCallingConvention() == .Naked) return .none;
+ if (f.object.dg.decl) |decl| if (decl.ty.fnCallingConvention() == .Naked) return .none;
+
const gpa = f.object.dg.gpa;
+ const module = f.object.dg.module;
+ const target = module.getTarget();
+ const writer = f.object.writer();
- switch (modifier) {
- .auto => {},
- .always_tail => return f.fail("TODO: C backend: call with always_tail attribute", .{}),
- .never_tail => return f.fail("TODO: C backend: call with never_tail attribute", .{}),
- .never_inline => return f.fail("TODO: C backend: call with never_inline attribute", .{}),
- else => unreachable,
- }
const pl_op = f.air.instructions.items(.data)[inst].pl_op;
const extra = f.air.extraData(Air.Call, pl_op.payload);
const args = @ptrCast([]const Air.Inst.Ref, f.air.extra[extra.end..][0..extra.data.args_len]);
const resolved_args = try gpa.alloc(CValue, args.len);
defer gpa.free(resolved_args);
- for (args, 0..) |arg, i| {
- resolved_args[i] = try f.resolveInst(arg);
+ for (resolved_args, args) |*resolved_arg, arg| {
+ const arg_ty = f.air.typeOf(arg);
+ const arg_cty = try f.typeToIndex(arg_ty, .parameter);
+ if (f.indexToCType(arg_cty).tag() == .void) {
+ resolved_arg.* = .none;
+ continue;
+ }
+ resolved_arg.* = try f.resolveInst(arg);
+ if (arg_cty != try f.typeToIndex(arg_ty, .complete)) {
+ var lowered_arg_buf: LowerFnRetTyBuffer = undefined;
+ const lowered_arg_ty = lowerFnRetTy(arg_ty, &lowered_arg_buf, target);
+
+ const array_local = try f.allocLocal(inst, try lowered_arg_ty.copy(f.arena.allocator()));
+ try writer.writeAll("memcpy(");
+ try f.writeCValueMember(writer, array_local, .{ .identifier = "array" });
+ try writer.writeAll(", ");
+ try f.writeCValue(writer, resolved_arg.*, .FunctionArgument);
+ try writer.writeAll(", sizeof(");
+ try f.renderType(writer, lowered_arg_ty);
+ try writer.writeAll("));\n");
+ resolved_arg.* = array_local;
+ }
}
const callee = try f.resolveInst(pl_op.operand);
@@ -4256,18 +4081,19 @@ fn airCall(
.Pointer => callee_ty.childType(),
else => unreachable,
};
- const writer = f.object.writer();
- const target = f.object.dg.module.getTarget();
const ret_ty = fn_ty.fnReturnType();
var lowered_ret_buf: LowerFnRetTyBuffer = undefined;
const lowered_ret_ty = lowerFnRetTy(ret_ty, &lowered_ret_buf, target);
- const result_local: CValue = if (!lowered_ret_ty.hasRuntimeBitsIgnoreComptime())
+ const result_local = if (modifier == .always_tail) r: {
+ try writer.writeAll("zig_always_tail return ");
+ break :r .none;
+ } else if (!lowered_ret_ty.hasRuntimeBitsIgnoreComptime())
.none
else if (f.liveness.isUnused(inst)) r: {
try writer.writeByte('(');
- try f.renderTypecast(writer, Type.void);
+ try f.renderType(writer, Type.void);
try writer.writeByte(')');
break :r .none;
} else r: {
@@ -4277,48 +4103,44 @@ fn airCall(
break :r local;
};
- var is_extern = false;
- var name: [*:0]const u8 = "";
callee: {
known: {
const fn_decl = fn_decl: {
const callee_val = f.air.value(pl_op.operand) orelse break :known;
break :fn_decl switch (callee_val.tag()) {
- .extern_fn => blk: {
- is_extern = true;
- break :blk callee_val.castTag(.extern_fn).?.data.owner_decl;
- },
+ .extern_fn => callee_val.castTag(.extern_fn).?.data.owner_decl,
.function => callee_val.castTag(.function).?.data.owner_decl,
.decl_ref => callee_val.castTag(.decl_ref).?.data,
else => break :known,
};
};
- name = f.object.dg.module.declPtr(fn_decl).name;
- try f.object.dg.renderDeclName(writer, fn_decl, 0);
+ switch (modifier) {
+ .auto, .always_tail => try f.object.dg.renderDeclName(writer, fn_decl, 0),
+ inline .never_tail, .never_inline => |mod| try writer.writeAll(try f.getLazyFnName(
+ @unionInit(LazyFnKey, @tagName(mod), fn_decl),
+ @unionInit(LazyFnValue.Data, @tagName(mod), {}),
+ )),
+ else => unreachable,
+ }
break :callee;
}
+ switch (modifier) {
+ .auto, .always_tail => {},
+ .never_tail => return f.fail("CBE: runtime callee with never_tail attribute unsupported", .{}),
+ .never_inline => return f.fail("CBE: runtime callee with never_inline attribute unsupported", .{}),
+ else => unreachable,
+ }
// Fall back to function pointer call.
try f.writeCValue(writer, callee, .Other);
}
try writer.writeByte('(');
var args_written: usize = 0;
- for (args, 0..) |arg, arg_i| {
- const ty = f.air.typeOf(arg);
- if (!ty.hasRuntimeBitsIgnoreComptime()) continue;
- if (args_written != 0) {
- try writer.writeAll(", ");
- }
- if ((is_extern or std.mem.eql(u8, std.mem.span(name), "main")) and
- ty.isCPtr() and ty.childType().tag() == .u8)
- {
- // Corresponds with hack in renderType .Pointer case.
- try writer.writeAll("(char");
- if (ty.isConstPtr()) try writer.writeAll(" const");
- if (ty.isVolatilePtr()) try writer.writeAll(" volatile");
- try writer.writeAll(" *)");
- }
- try f.writeCValue(writer, resolved_args[arg_i], .FunctionArgument);
+ for (resolved_args) |resolved_arg| {
+ if (resolved_arg == .none) continue;
+ if (args_written != 0) try writer.writeAll(", ");
+ try f.writeCValue(writer, resolved_arg, .FunctionArgument);
+ if (resolved_arg == .new_local) try freeLocal(f, inst, resolved_arg.new_local, 0);
args_written += 1;
}
try writer.writeAll(");\n");
@@ -4331,11 +4153,11 @@ fn airCall(
try writer.writeAll("memcpy(");
try f.writeCValue(writer, array_local, .FunctionArgument);
try writer.writeAll(", ");
- try f.writeCValueMember(writer, result_local, .{ .field = 0 });
+ try f.writeCValueMember(writer, result_local, .{ .identifier = "array" });
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, ret_ty);
+ try f.renderType(writer, ret_ty);
try writer.writeAll("));\n");
- try freeLocal(f, inst, result_local.local, 0);
+ try freeLocal(f, inst, result_local.new_local, 0);
break :r array_local;
};
@@ -4354,7 +4176,7 @@ fn airDbgStmt(f: *Function, inst: Air.Inst.Index) !CValue {
// Perhaps an additional compilation option is in order?
//try writer.print("#line {d}\n", .{dbg_stmt.line + 1});
try writer.print("/* file:{d}:{d} */\n", .{ dbg_stmt.line + 1, dbg_stmt.column + 1 });
- return CValue.none;
+ return .none;
}
fn airDbgInline(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -4363,7 +4185,7 @@ fn airDbgInline(f: *Function, inst: Air.Inst.Index) !CValue {
const function = f.air.values[ty_pl.payload].castTag(.function).?.data;
const mod = f.object.dg.module;
try writer.print("/* dbg func:{s} */\n", .{mod.declPtr(function.owner_decl).name});
- return CValue.none;
+ return .none;
}
fn airDbgVar(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -4375,7 +4197,7 @@ fn airDbgVar(f: *Function, inst: Air.Inst.Index) !CValue {
try reap(f, inst, &.{pl_op.operand});
const writer = f.object.writer();
try writer.print("/* var:{s} */\n", .{name});
- return CValue.none;
+ return .none;
}
fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -4391,7 +4213,7 @@ fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue {
const result = if (inst_ty.tag() != .void and !f.liveness.isUnused(inst))
try f.allocLocal(inst, inst_ty)
else
- CValue{ .none = {} };
+ .none;
try f.blocks.putNoClobber(f.object.dg.gpa, inst, .{
.block_id = block_id,
@@ -4400,8 +4222,9 @@ fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue {
try genBodyInner(f, body);
try f.object.indent_writer.insertNewline();
+ // The label might otherwise go unused; emit a dummy goto so it is always referenced.
// A label must be followed by a statement, so add an empty one.
- try writer.print("zig_block_{d}:;\n", .{block_id});
+ try writer.print("goto zig_block_{d};\nzig_block_{d}: (void)0;\n", .{ block_id, block_id });
return result;
}
@@ -4460,7 +4283,7 @@ fn lowerTry(
if (!payload_has_bits) {
if (!operand_is_ptr) {
- return CValue.none;
+ return .none;
} else {
return err_union;
}
@@ -4469,7 +4292,7 @@ fn lowerTry(
try reap(f, inst, &.{operand});
if (f.liveness.isUnused(inst)) {
- return CValue.none;
+ return .none;
}
const target = f.object.dg.module.getTarget();
@@ -4481,7 +4304,7 @@ fn lowerTry(
try writer.writeAll(", ");
try f.writeCValueMember(writer, err_union, .{ .identifier = "payload" });
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, payload_ty);
+ try f.renderType(writer, payload_ty);
try writer.writeAll("));\n");
} else {
try f.writeCValue(writer, local, .Other);
@@ -4514,7 +4337,7 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(", ");
try f.writeCValue(writer, operand, .FunctionArgument);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, operand_ty);
+ try f.renderType(writer, operand_ty);
try writer.writeAll("))");
} else {
try f.writeCValue(writer, result, .Other);
@@ -4525,7 +4348,7 @@ fn airBr(f: *Function, inst: Air.Inst.Index) !CValue {
}
try writer.print("goto zig_block_{d};\n", .{block.block_id});
- return CValue.none;
+ return .none;
}
fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -4535,7 +4358,7 @@ fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
// https://github.com/ziglang/zig/issues/13410
if (f.liveness.isUnused(inst) or !dest_ty.hasRuntimeBits()) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -4563,7 +4386,7 @@ fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
if (dest_ty.isPtrAtRuntime() and operand_ty.isPtrAtRuntime()) {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, dest_ty);
+ try f.renderType(writer, dest_ty);
try writer.writeByte(')');
try f.writeCValue(writer, operand, .Other);
try writer.writeAll(";\n");
@@ -4584,7 +4407,7 @@ fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(", &");
try f.writeCValue(writer, operand_lval, .Other);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, dest_ty);
+ try f.renderType(writer, dest_ty);
try writer.writeAll("));\n");
// Ensure padding bits have the expected value.
@@ -4599,7 +4422,7 @@ fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
}
if (operand == .constant) {
- try freeLocal(f, inst, operand_lval.local, 0);
+ try freeLocal(f, inst, operand_lval.new_local, 0);
}
return local;
@@ -4607,27 +4430,27 @@ fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
fn airBreakpoint(writer: anytype) !CValue {
try writer.writeAll("zig_breakpoint();\n");
- return CValue.none;
+ return .none;
}
fn airRetAddr(f: *Function, inst: Air.Inst.Index) !CValue {
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
const writer = f.object.writer();
const local = try f.allocLocal(inst, Type.usize);
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, Type.usize);
+ try f.renderType(writer, Type.usize);
try writer.writeAll(")zig_return_address();\n");
return local;
}
fn airFrameAddress(f: *Function, inst: Air.Inst.Index) !CValue {
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
const writer = f.object.writer();
const local = try f.allocLocal(inst, Type.usize);
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, Type.usize);
+ try f.renderType(writer, Type.usize);
try writer.writeAll(")zig_frame_address();\n");
return local;
}
@@ -4640,15 +4463,15 @@ fn airFence(f: *Function, inst: Air.Inst.Index) !CValue {
try writeMemoryOrder(writer, atomic_order);
try writer.writeAll(");\n");
- return CValue.none;
+ return .none;
}
fn airUnreach(f: *Function) !CValue {
// Not even allowed to call unreachable in a naked function.
- if (f.object.dg.decl.ty.fnCallingConvention() == .Naked) return .none;
+ if (f.object.dg.decl) |decl| if (decl.ty.fnCallingConvention() == .Naked) return .none;
try f.object.writer().writeAll("zig_unreachable();\n");
- return CValue.none;
+ return .none;
}
fn airLoop(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -4669,18 +4492,18 @@ fn airLoop(f: *Function, inst: Air.Inst.Index) !CValue {
const new_free_locals = f.getFreeLocals();
var it = new_free_locals.iterator();
while (it.next()) |entry| {
- const gop = try old_free_locals.getOrPutContext(gpa, entry.key_ptr.*, f.tyHashCtx());
+ const gop = try old_free_locals.getOrPut(gpa, entry.key_ptr.*);
if (gop.found_existing) {
- try gop.value_ptr.appendSlice(gpa, entry.value_ptr.items);
- } else {
- gop.value_ptr.* = entry.value_ptr.*;
- entry.value_ptr.* = .{};
- }
+ try gop.value_ptr.ensureUnusedCapacity(gpa, entry.value_ptr.count());
+ for (entry.value_ptr.keys()) |local_index| {
+ gop.value_ptr.putAssumeCapacityNoClobber(local_index, {});
+ }
+ } else gop.value_ptr.* = entry.value_ptr.move();
}
deinitFreeLocalsMap(gpa, new_free_locals);
new_free_locals.* = old_free_locals.move();
- return CValue.none;
+ return .none;
}
fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -4705,6 +4528,10 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
// that we can notice and use them in the else branch. Any new locals must
// necessarily be free already after the then branch is complete.
const pre_locals_len = @intCast(LocalIndex, f.locals.items.len);
+ // Remember how many allocs there were before entering the then branch so
+ // that we can notice and make sure not to use them in the else branch.
+ // Any new allocs must be removed from the free list.
+ const pre_allocs_len = @intCast(LocalIndex, f.allocs.count());
const pre_clone_depth = f.free_locals_clone_depth;
f.free_locals_clone_depth = @intCast(LoopDepth, f.free_locals_stack.items.len);
@@ -4735,7 +4562,7 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
try die(f, inst, Air.indexToRef(operand));
}
- try noticeBranchFrees(f, pre_locals_len, inst);
+ try noticeBranchFrees(f, pre_locals_len, pre_allocs_len, inst);
if (needs_else) {
try genBody(f, else_body);
@@ -4745,7 +4572,7 @@ fn airCondBr(f: *Function, inst: Air.Inst.Index) !CValue {
try f.object.indent_writer.insertNewline();
- return CValue.none;
+ return .none;
}
fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -4759,11 +4586,11 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll("switch (");
if (condition_ty.zigTypeTag() == .Bool) {
try writer.writeByte('(');
- try f.renderTypecast(writer, Type.u1);
+ try f.renderType(writer, Type.u1);
try writer.writeByte(')');
} else if (condition_ty.isPtrAtRuntime()) {
try writer.writeByte('(');
- try f.renderTypecast(writer, Type.usize);
+ try f.renderType(writer, Type.usize);
try writer.writeByte(')');
}
try f.writeCValue(writer, condition, .Other);
@@ -4780,8 +4607,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
const last_case_i = switch_br.data.cases_len - @boolToInt(switch_br.data.else_body_len == 0);
var extra_index: usize = switch_br.end;
- var case_i: u32 = 0;
- while (case_i < switch_br.data.cases_len) : (case_i += 1) {
+ for (0..switch_br.data.cases_len) |case_i| {
const case = f.air.extraData(Air.SwitchBr.Case, extra_index);
const items = @ptrCast([]const Air.Inst.Ref, f.air.extra[case.end..][0..case.data.items_len]);
const case_body = f.air.extra[case.end + items.len ..][0..case.data.body_len];
@@ -4792,7 +4618,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll("case ");
if (condition_ty.isPtrAtRuntime()) {
try writer.writeByte('(');
- try f.renderTypecast(writer, Type.usize);
+ try f.renderType(writer, Type.usize);
try writer.writeByte(')');
}
try f.object.dg.renderValue(writer, condition_ty, f.air.value(item).?, .Other);
@@ -4811,6 +4637,10 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
// we can notice and use them in subsequent branches. Any new locals must
// necessarily be free already after the previous branch is complete.
const pre_locals_len = @intCast(LocalIndex, f.locals.items.len);
+ // Remember how many allocs there were before entering each branch so that
+ // we can notice and make sure not to use them in subsequent branches.
+ // Any new allocs must be removed from the free list.
+ const pre_allocs_len = @intCast(LocalIndex, f.allocs.count());
const pre_clone_depth = f.free_locals_clone_depth;
f.free_locals_clone_depth = @intCast(LoopDepth, f.free_locals_stack.items.len);
@@ -4831,7 +4661,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
try genBody(f, case_body);
}
- try noticeBranchFrees(f, pre_locals_len, inst);
+ try noticeBranchFrees(f, pre_locals_len, pre_allocs_len, inst);
} else {
for (liveness.deaths[case_i]) |operand| {
try die(f, inst, Air.indexToRef(operand));
@@ -4858,7 +4688,7 @@ fn airSwitchBr(f: *Function, inst: Air.Inst.Index) !CValue {
f.object.indent_writer.popIndent();
try writer.writeAll("}\n");
- return CValue.none;
+ return .none;
}
fn asmInputNeedsLocal(constraint: []const u8, value: CValue) bool {
@@ -4880,8 +4710,8 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const inputs = @ptrCast([]const Air.Inst.Ref, f.air.extra[extra_i..][0..extra.data.inputs_len]);
extra_i += inputs.len;
- const result: CValue = r: {
- if (!is_volatile and f.liveness.isUnused(inst)) break :r CValue.none;
+ const result = r: {
+ if (!is_volatile and f.liveness.isUnused(inst)) break :r .none;
const writer = f.object.writer();
const inst_ty = f.air.typeOfIndex(inst);
@@ -4918,14 +4748,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll("register ");
const alignment = 0;
const local_value = try f.allocLocalValue(output_ty, alignment);
- try f.object.dg.renderTypeAndName(
- writer,
- output_ty,
- local_value,
- .Mut,
- alignment,
- .Complete,
- );
+ try f.object.dg.renderTypeAndName(writer, output_ty, local_value, .{}, alignment, .complete);
try writer.writeAll(" __asm(\"");
try writer.writeAll(constraint["={".len .. constraint.len - "}".len]);
try writer.writeAll("\")");
@@ -4957,14 +4780,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
if (is_reg) try writer.writeAll("register ");
const alignment = 0;
const local_value = try f.allocLocalValue(input_ty, alignment);
- try f.object.dg.renderTypeAndName(
- writer,
- input_ty,
- local_value,
- .Const,
- alignment,
- .Complete,
- );
+ try f.object.dg.renderTypeAndName(writer, input_ty, local_value, Const, alignment, .complete);
if (is_reg) {
try writer.writeAll(" __asm(\"");
try writer.writeAll(constraint["{".len .. constraint.len - "}".len]);
@@ -4975,14 +4791,11 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
}
}
- {
- var clobber_i: u32 = 0;
- while (clobber_i < clobbers_len) : (clobber_i += 1) {
- const clobber = std.mem.sliceTo(std.mem.sliceAsBytes(f.air.extra[extra_i..]), 0);
- // This equation accounts for the fact that even if we have exactly 4 bytes
- // for the string, we still use the next u32 for the null terminator.
- extra_i += clobber.len / 4 + 1;
- }
+ for (0..clobbers_len) |_| {
+ const clobber = std.mem.sliceTo(std.mem.sliceAsBytes(f.air.extra[extra_i..]), 0);
+ // This equation accounts for the fact that even if we have exactly 4 bytes
+ // for the string, we still use the next u32 for the null terminator.
+ extra_i += clobber.len / 4 + 1;
}
{
@@ -5037,7 +4850,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll("__asm");
if (is_volatile) try writer.writeAll(" volatile");
- try writer.print("({s}", .{fmtStringLiteral(fixed_asm_source[0..dst_i])});
+ try writer.print("({s}", .{fmtStringLiteral(fixed_asm_source[0..dst_i], null)});
}
extra_i = constraints_extra_begin;
@@ -5055,7 +4868,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeByte(' ');
if (!std.mem.eql(u8, name, "_")) try writer.print("[{s}]", .{name});
const is_reg = constraint[1] == '{';
- try writer.print("{s}(", .{fmtStringLiteral(if (is_reg) "=r" else constraint)});
+ try writer.print("{s}(", .{fmtStringLiteral(if (is_reg) "=r" else constraint, null)});
if (is_reg) {
try f.writeCValue(writer, .{ .local = locals_index }, .Other);
locals_index += 1;
@@ -5081,28 +4894,25 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const is_reg = constraint[0] == '{';
const input_val = try f.resolveInst(input);
- try writer.print("{s}(", .{fmtStringLiteral(if (is_reg) "r" else constraint)});
+ try writer.print("{s}(", .{fmtStringLiteral(if (is_reg) "r" else constraint, null)});
try f.writeCValue(writer, if (asmInputNeedsLocal(constraint, input_val)) local: {
- const input_local = CValue{ .local = locals_index };
+ const input_local = .{ .local = locals_index };
locals_index += 1;
break :local input_local;
} else input_val, .Other);
try writer.writeByte(')');
}
try writer.writeByte(':');
- {
- var clobber_i: u32 = 0;
- while (clobber_i < clobbers_len) : (clobber_i += 1) {
- const clobber = std.mem.sliceTo(std.mem.sliceAsBytes(f.air.extra[extra_i..]), 0);
- // This equation accounts for the fact that even if we have exactly 4 bytes
- // for the string, we still use the next u32 for the null terminator.
- extra_i += clobber.len / 4 + 1;
+ for (0..clobbers_len) |clobber_i| {
+ const clobber = std.mem.sliceTo(std.mem.sliceAsBytes(f.air.extra[extra_i..]), 0);
+ // This equation accounts for the fact that even if we have exactly 4 bytes
+ // for the string, we still use the next u32 for the null terminator.
+ extra_i += clobber.len / 4 + 1;
- if (clobber.len == 0) continue;
+ if (clobber.len == 0) continue;
- if (clobber_i > 0) try writer.writeByte(',');
- try writer.print(" {s}", .{fmtStringLiteral(clobber)});
- }
+ if (clobber_i > 0) try writer.writeByte(',');
+ try writer.print(" {s}", .{fmtStringLiteral(clobber, null)});
}
try writer.writeAll(");\n");
@@ -5119,7 +4929,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const is_reg = constraint[1] == '{';
if (is_reg) {
try f.writeCValueDeref(writer, if (output == .none)
- CValue{ .local_ref = local.local }
+ .{ .local_ref = local.new_local }
else
try f.resolveInst(output));
try writer.writeAll(" = ");
@@ -5154,7 +4964,7 @@ fn airIsNull(
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const writer = f.object.writer();
@@ -5204,7 +5014,7 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -5215,7 +5025,7 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
const payload_ty = opt_ty.optionalChild(&buf);
if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -5244,7 +5054,7 @@ fn airOptionalPayload(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValueMember(writer, operand, .{ .identifier = "payload" });
if (is_array) {
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeAll("))");
}
try writer.writeAll(";\n");
@@ -5256,7 +5066,7 @@ fn airOptionalPayloadPtr(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const writer = f.object.writer();
@@ -5267,7 +5077,7 @@ fn airOptionalPayloadPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const inst_ty = f.air.typeOfIndex(inst);
if (!inst_ty.childType().hasRuntimeBitsIgnoreComptime()) {
- return CValue{ .undef = inst_ty };
+ return .{ .undef = inst_ty };
}
const local = try f.allocLocal(inst, inst_ty);
@@ -5299,7 +5109,7 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
if (opt_ty.optionalReprIsPayload()) {
if (f.liveness.isUnused(inst)) {
- return CValue.none;
+ return .none;
}
const local = try f.allocLocal(inst, inst_ty);
// The payload and the optional are the same value.
@@ -5316,7 +5126,7 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
if (f.liveness.isUnused(inst)) {
- return CValue.none;
+ return .none;
}
const local = try f.allocLocal(inst, inst_ty);
@@ -5328,6 +5138,62 @@ fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
}
}
+fn fieldLocation(
+ container_ty: Type,
+ field_ptr_ty: Type,
+ field_index: u32,
+ target: std.Target,
+) union(enum) {
+ begin: void,
+ field: CValue,
+ byte_offset: u32,
+ end: void,
+} {
+ return switch (container_ty.zigTypeTag()) {
+ .Struct => switch (container_ty.containerLayout()) {
+ .Auto, .Extern => for (field_index..container_ty.structFieldCount()) |next_field_index| {
+ if (container_ty.structFieldIsComptime(next_field_index)) continue;
+ const field_ty = container_ty.structFieldType(next_field_index);
+ if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+ break .{ .field = if (container_ty.isSimpleTuple())
+ .{ .field = next_field_index }
+ else
+ .{ .identifier = container_ty.structFieldName(next_field_index) } };
+ } else if (container_ty.hasRuntimeBitsIgnoreComptime()) .end else .begin,
+ .Packed => if (field_ptr_ty.ptrInfo().data.host_size == 0)
+ .{ .byte_offset = container_ty.packedStructFieldByteOffset(field_index, target) }
+ else
+ .begin,
+ },
+ .Union => switch (container_ty.containerLayout()) {
+ .Auto, .Extern => {
+ const field_ty = container_ty.structFieldType(field_index);
+ if (!field_ty.hasRuntimeBitsIgnoreComptime())
+ return if (container_ty.unionTagTypeSafety() != null and
+ !container_ty.unionHasAllZeroBitFieldTypes())
+ .{ .field = .{ .identifier = "payload" } }
+ else
+ .begin;
+ const field_name = container_ty.unionFields().keys()[field_index];
+ return .{ .field = if (container_ty.unionTagTypeSafety()) |_|
+ .{ .payload_identifier = field_name }
+ else
+ .{ .identifier = field_name } };
+ },
+ .Packed => .begin,
+ },
+ .Pointer => switch (container_ty.ptrSize()) {
+ .Slice => switch (field_index) {
+ 0 => .{ .field = .{ .identifier = "ptr" } },
+ 1 => .{ .field = .{ .identifier = "len" } },
+ else => unreachable,
+ },
+ .One, .Many, .C => unreachable,
+ },
+ else => unreachable,
+ };
+}
+
fn airStructFieldPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_pl = f.air.instructions.items(.data)[inst].ty_pl;
const extra = f.air.extraData(Air.StructField, ty_pl.payload).data;
@@ -5337,10 +5203,10 @@ fn airStructFieldPtr(f: *Function, inst: Air.Inst.Index) !CValue {
return .none;
}
- const struct_ptr = try f.resolveInst(extra.struct_operand);
+ const container_ptr_val = try f.resolveInst(extra.struct_operand);
try reap(f, inst, &.{extra.struct_operand});
- const struct_ptr_ty = f.air.typeOf(extra.struct_operand);
- return structFieldPtr(f, inst, struct_ptr_ty, struct_ptr, extra.field_index);
+ const container_ptr_ty = f.air.typeOf(extra.struct_operand);
+ return fieldPtr(f, inst, container_ptr_ty, container_ptr_val, extra.field_index);
}
fn airStructFieldPtrIndex(f: *Function, inst: Air.Inst.Index, index: u8) !CValue {
@@ -5351,10 +5217,10 @@ fn airStructFieldPtrIndex(f: *Function, inst: Air.Inst.Index, index: u8) !CValue
return .none;
}
- const struct_ptr = try f.resolveInst(ty_op.operand);
+ const container_ptr_val = try f.resolveInst(ty_op.operand);
try reap(f, inst, &.{ty_op.operand});
- const struct_ptr_ty = f.air.typeOf(ty_op.operand);
- return structFieldPtr(f, inst, struct_ptr_ty, struct_ptr, index);
+ const container_ptr_ty = f.air.typeOf(ty_op.operand);
+ return fieldPtr(f, inst, container_ptr_ty, container_ptr_val, index);
}
fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -5363,137 +5229,119 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{extra.field_ptr});
- return CValue.none;
+ return .none;
}
- const struct_ptr_ty = f.air.typeOfIndex(inst);
+ const target = f.object.dg.module.getTarget();
+ const container_ptr_ty = f.air.typeOfIndex(inst);
+ const container_ty = container_ptr_ty.childType();
+
const field_ptr_ty = f.air.typeOf(extra.field_ptr);
const field_ptr_val = try f.resolveInst(extra.field_ptr);
try reap(f, inst, &.{extra.field_ptr});
- const target = f.object.dg.module.getTarget();
- const struct_ty = struct_ptr_ty.childType();
- const field_offset = struct_ty.structFieldOffset(extra.field_index, target);
-
- var field_offset_pl = Value.Payload.I64{
- .base = .{ .tag = .int_i64 },
- .data = -@intCast(i64, field_offset),
- };
- const field_offset_val = Value.initPayload(&field_offset_pl.base);
-
- var u8_ptr_pl = field_ptr_ty.ptrInfo();
- u8_ptr_pl.data.pointee_type = Type.u8;
- const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
-
const writer = f.object.writer();
- const local = try f.allocLocal(inst, struct_ptr_ty);
+ const local = try f.allocLocal(inst, container_ptr_ty);
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, struct_ptr_ty);
- try writer.writeAll(")&((");
- try f.renderTypecast(writer, u8_ptr_ty);
+ try f.renderType(writer, container_ptr_ty);
try writer.writeByte(')');
- try f.writeCValue(writer, field_ptr_val, .Other);
- try writer.print(")[{}];\n", .{try f.fmtIntLiteral(Type.isize, field_offset_val)});
+
+ switch (fieldLocation(container_ty, field_ptr_ty, extra.field_index, target)) {
+ .begin => try f.writeCValue(writer, field_ptr_val, .Initializer),
+ .field => |field| {
+ var u8_ptr_pl = field_ptr_ty.ptrInfo();
+ u8_ptr_pl.data.pointee_type = Type.u8;
+ const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
+
+ try writer.writeAll("((");
+ try f.renderType(writer, u8_ptr_ty);
+ try writer.writeByte(')');
+ try f.writeCValue(writer, field_ptr_val, .Other);
+ try writer.writeAll(" - offsetof(");
+ try f.renderType(writer, container_ty);
+ try writer.writeAll(", ");
+ try f.writeCValue(writer, field, .Other);
+ try writer.writeAll("))");
+ },
+ .byte_offset => |byte_offset| {
+ var u8_ptr_pl = field_ptr_ty.ptrInfo();
+ u8_ptr_pl.data.pointee_type = Type.u8;
+ const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
+
+ var byte_offset_pl = Value.Payload.U64{
+ .base = .{ .tag = .int_u64 },
+ .data = byte_offset,
+ };
+ const byte_offset_val = Value.initPayload(&byte_offset_pl.base);
+
+ try writer.writeAll("((");
+ try f.renderType(writer, u8_ptr_ty);
+ try writer.writeByte(')');
+ try f.writeCValue(writer, field_ptr_val, .Other);
+ try writer.print(" - {})", .{try f.fmtIntLiteral(Type.usize, byte_offset_val)});
+ },
+ .end => {
+ try f.writeCValue(writer, field_ptr_val, .Other);
+ try writer.print(" - {}", .{try f.fmtIntLiteral(Type.usize, Value.one)});
+ },
+ }
+
+ try writer.writeAll(";\n");
return local;
}
-fn structFieldPtr(f: *Function, inst: Air.Inst.Index, struct_ptr_ty: Type, struct_ptr: CValue, index: u32) !CValue {
- const writer = f.object.writer();
+fn fieldPtr(
+ f: *Function,
+ inst: Air.Inst.Index,
+ container_ptr_ty: Type,
+ container_ptr_val: CValue,
+ field_index: u32,
+) !CValue {
+ const target = f.object.dg.module.getTarget();
+ const container_ty = container_ptr_ty.elemType();
const field_ptr_ty = f.air.typeOfIndex(inst);
- const field_ptr_info = field_ptr_ty.ptrInfo();
- const struct_ty = struct_ptr_ty.elemType();
- const field_ty = struct_ty.structFieldType(index);
// Ensure complete type definition is visible before accessing fields.
- try f.renderType(std.io.null_writer, struct_ty);
+ _ = try f.typeToIndex(container_ty, .complete);
+ const writer = f.object.writer();
const local = try f.allocLocal(inst, field_ptr_ty);
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, field_ptr_ty);
+ try f.renderType(writer, field_ptr_ty);
try writer.writeByte(')');
- const extra_name: CValue = switch (struct_ty.tag()) {
- .union_tagged, .union_safety_tagged => .{ .identifier = "payload" },
- else => .none,
- };
-
- const FieldLoc = union(enum) {
- begin: void,
- field: CValue,
- end: void,
- };
- const field_loc = switch (struct_ty.tag()) {
- .@"struct" => switch (struct_ty.containerLayout()) {
- .Auto, .Extern => for (struct_ty.structFields().values()[index..], 0..) |field, offset| {
- if (field.ty.hasRuntimeBitsIgnoreComptime()) break FieldLoc{ .field = .{
- .identifier = struct_ty.structFieldName(index + offset),
- } };
- } else @as(FieldLoc, .end),
- .Packed => if (field_ptr_info.data.host_size == 0) {
- const target = f.object.dg.module.getTarget();
-
- const byte_offset = struct_ty.packedStructFieldByteOffset(index, target);
- var byte_offset_pl = Value.Payload.U64{
- .base = .{ .tag = .int_u64 },
- .data = byte_offset,
- };
- const byte_offset_val = Value.initPayload(&byte_offset_pl.base);
-
- var u8_ptr_pl = field_ptr_info;
- u8_ptr_pl.data.pointee_type = Type.u8;
- const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
+ switch (fieldLocation(container_ty, field_ptr_ty, field_index, target)) {
+ .begin => try f.writeCValue(writer, container_ptr_val, .Initializer),
+ .field => |field| {
+ try writer.writeByte('&');
+ try f.writeCValueDerefMember(writer, container_ptr_val, field);
+ },
+ .byte_offset => |byte_offset| {
+ var u8_ptr_pl = field_ptr_ty.ptrInfo();
+ u8_ptr_pl.data.pointee_type = Type.u8;
+ const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base);
- if (!std.mem.isAligned(byte_offset, field_ptr_ty.ptrAlignment(target))) {
- return f.fail("TODO: CBE: unaligned packed struct field pointer", .{});
- }
+ var byte_offset_pl = Value.Payload.U64{
+ .base = .{ .tag = .int_u64 },
+ .data = byte_offset,
+ };
+ const byte_offset_val = Value.initPayload(&byte_offset_pl.base);
- try writer.writeAll("&((");
- try f.renderTypecast(writer, u8_ptr_ty);
- try writer.writeByte(')');
- try f.writeCValue(writer, struct_ptr, .Other);
- try writer.print(")[{}];\n", .{try f.fmtIntLiteral(Type.usize, byte_offset_val)});
- return local;
- } else @as(FieldLoc, .begin),
- },
- .@"union", .union_safety_tagged, .union_tagged => if (struct_ty.containerLayout() == .Packed) {
- try f.writeCValue(writer, struct_ptr, .Other);
- try writer.writeAll(";\n");
- return local;
- } else if (field_ty.hasRuntimeBitsIgnoreComptime()) FieldLoc{ .field = .{
- .identifier = struct_ty.unionFields().keys()[index],
- } } else @as(FieldLoc, .end),
- .tuple, .anon_struct => field_name: {
- const tuple = struct_ty.tupleFields();
- if (tuple.values[index].tag() != .unreachable_value) return CValue.none;
-
- var id: usize = 0;
- break :field_name for (tuple.values, 0..) |value, i| {
- if (value.tag() != .unreachable_value) continue;
- if (!tuple.types[i].hasRuntimeBitsIgnoreComptime()) continue;
- if (i >= index) break FieldLoc{ .field = .{ .field = id } };
- id += 1;
- } else @as(FieldLoc, .end);
+ try writer.writeAll("((");
+ try f.renderType(writer, u8_ptr_ty);
+ try writer.writeByte(')');
+ try f.writeCValue(writer, container_ptr_val, .Other);
+ try writer.print(" + {})", .{try f.fmtIntLiteral(Type.usize, byte_offset_val)});
},
- else => unreachable,
- };
-
- try writer.writeByte('&');
- switch (field_loc) {
- .begin, .end => {
+ .end => {
try writer.writeByte('(');
- try f.writeCValue(writer, struct_ptr, .Other);
- try writer.print(")[{}]", .{
- @boolToInt(field_loc == .end and struct_ty.hasRuntimeBitsIgnoreComptime()),
- });
+ try f.writeCValue(writer, container_ptr_val, .Other);
+ try writer.print(" + {})", .{try f.fmtIntLiteral(Type.usize, Value.one)});
},
- .field => |field| if (extra_name != .none) {
- try f.writeCValueDerefMember(writer, struct_ptr, extra_name);
- try writer.writeByte('.');
- try f.writeCValue(writer, field, .Other);
- } else try f.writeCValueDerefMember(writer, struct_ptr, field),
}
+
try writer.writeAll(";\n");
return local;
}
@@ -5504,13 +5352,13 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{extra.struct_operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
if (!inst_ty.hasRuntimeBitsIgnoreComptime()) {
try reap(f, inst, &.{extra.struct_operand});
- return CValue.none;
+ return .none;
}
const target = f.object.dg.module.getTarget();
@@ -5520,16 +5368,14 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
// Ensure complete type definition is visible before accessing fields.
- try f.renderType(std.io.null_writer, struct_ty);
-
- const extra_name: CValue = switch (struct_ty.tag()) {
- .union_tagged, .union_safety_tagged => .{ .identifier = "payload" },
- else => .none,
- };
+ _ = try f.typeToIndex(struct_ty, .complete);
const field_name: CValue = switch (struct_ty.tag()) {
- .@"struct" => switch (struct_ty.containerLayout()) {
- .Auto, .Extern => .{ .identifier = struct_ty.structFieldName(extra.field_index) },
+ .tuple, .anon_struct, .@"struct" => switch (struct_ty.containerLayout()) {
+ .Auto, .Extern => if (struct_ty.isSimpleTuple())
+ .{ .field = extra.field_index }
+ else
+ .{ .identifier = struct_ty.structFieldName(extra.field_index) },
.Packed => {
const struct_obj = struct_ty.castTag(.@"struct").?.data;
const int_info = struct_ty.intInfo(target);
@@ -5564,7 +5410,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(" = zig_wrap_");
try f.object.dg.renderTypeForBuiltinFnName(writer, field_int_ty);
try writer.writeAll("((");
- try f.renderTypecast(writer, field_int_ty);
+ try f.renderType(writer, field_int_ty);
try writer.writeByte(')');
const cant_cast = int_info.bits > 64;
if (cant_cast) {
@@ -5587,13 +5433,13 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const local = try f.allocLocal(inst, inst_ty);
try writer.writeAll("memcpy(");
- try f.writeCValue(writer, .{ .local_ref = local.local }, .FunctionArgument);
+ try f.writeCValue(writer, .{ .local_ref = local.new_local }, .FunctionArgument);
try writer.writeAll(", ");
- try f.writeCValue(writer, .{ .local_ref = temp_local.local }, .FunctionArgument);
+ try f.writeCValue(writer, .{ .local_ref = temp_local.new_local }, .FunctionArgument);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeAll("));\n");
- try freeLocal(f, inst, temp_local.local, 0);
+ try freeLocal(f, inst, temp_local.new_local, 0);
return local;
},
},
@@ -5613,48 +5459,37 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(", &");
try f.writeCValue(writer, operand_lval, .FunctionArgument);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeAll("));\n");
if (struct_byval == .constant) {
- try freeLocal(f, inst, operand_lval.local, 0);
+ try freeLocal(f, inst, operand_lval.new_local, 0);
}
return local;
- } else .{
- .identifier = struct_ty.unionFields().keys()[extra.field_index],
- },
- .tuple, .anon_struct => blk: {
- const tuple = struct_ty.tupleFields();
- if (tuple.values[extra.field_index].tag() != .unreachable_value) return CValue.none;
-
- var id: usize = 0;
- for (tuple.values[0..extra.field_index]) |value|
- id += @boolToInt(value.tag() == .unreachable_value);
- break :blk .{ .field = id };
+ } else field_name: {
+ const name = struct_ty.unionFields().keys()[extra.field_index];
+ break :field_name if (struct_ty.unionTagTypeSafety()) |_|
+ .{ .payload_identifier = name }
+ else
+ .{ .identifier = name };
},
else => unreachable,
};
- const is_array = lowersToArray(inst_ty, target);
const local = try f.allocLocal(inst, inst_ty);
- if (is_array) {
+ if (lowersToArray(inst_ty, target)) {
try writer.writeAll("memcpy(");
try f.writeCValue(writer, local, .FunctionArgument);
try writer.writeAll(", ");
+ try f.writeCValueMember(writer, struct_byval, field_name);
+ try writer.writeAll(", sizeof(");
+ try f.renderType(writer, inst_ty);
+ try writer.writeAll("))");
} else {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = ");
- }
- if (extra_name != .none) {
- try f.writeCValueMember(writer, struct_byval, extra_name);
- try writer.writeByte('.');
- try f.writeCValue(writer, field_name, .Other);
- } else try f.writeCValueMember(writer, struct_byval, field_name);
- if (is_array) {
- try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, inst_ty);
- try writer.writeAll("))");
+ try f.writeCValueMember(writer, struct_byval, field_name);
}
try writer.writeAll(";\n");
return local;
@@ -5667,7 +5502,7 @@ fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -5704,7 +5539,7 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -5715,13 +5550,13 @@ fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValu
const error_union_ty = if (operand_is_ptr) operand_ty.childType() else operand_ty;
if (!error_union_ty.errorUnionPayload().hasRuntimeBits()) {
- if (!is_ptr) return CValue.none;
+ if (!is_ptr) return .none;
const w = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
try f.writeCValue(w, local, .Other);
try w.writeAll(" = (");
- try f.renderTypecast(w, inst_ty);
+ try f.renderType(w, inst_ty);
try w.writeByte(')');
try f.writeCValue(w, operand, .Initializer);
try w.writeAll(";\n");
@@ -5746,7 +5581,7 @@ fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -5782,7 +5617,7 @@ fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(", ");
try f.writeCValue(writer, payload, .FunctionArgument);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, payload_ty);
+ try f.renderType(writer, payload_ty);
try writer.writeAll("));\n");
}
return local;
@@ -5792,7 +5627,7 @@ fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const writer = f.object.writer();
@@ -5846,7 +5681,7 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
// Then return the payload pointer (only if it is used)
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
const local = try f.allocLocal(inst, f.air.typeOfIndex(inst));
try f.writeCValue(writer, local, .Other);
@@ -5857,7 +5692,7 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
}
fn airErrReturnTrace(f: *Function, inst: Air.Inst.Index) !CValue {
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
return f.fail("TODO: C backend: implement airErrReturnTrace", .{});
}
@@ -5875,7 +5710,7 @@ fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -5902,7 +5737,7 @@ fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(", ");
try f.writeCValue(writer, payload, .FunctionArgument);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, payload_ty);
+ try f.renderType(writer, payload_ty);
try writer.writeAll("));\n");
}
return local;
@@ -5913,7 +5748,7 @@ fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const writer = f.object.writer();
@@ -5951,7 +5786,7 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -5959,26 +5794,28 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
const inst_ty = f.air.typeOfIndex(inst);
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
- try f.writeCValue(writer, local, .Other);
- const array_len = f.air.typeOf(ty_op.operand).elemType().arrayLen();
+ const array_ty = f.air.typeOf(ty_op.operand).childType();
- try writer.writeAll(".ptr = ");
+ try f.writeCValueMember(writer, local, .{ .identifier = "ptr" });
+ try writer.writeAll(" = ");
+ // Unfortunately, C does not support any equivalent to
+ // &(*(void *)p)[0], although LLVM does via GetElementPtr
if (operand == .undef) {
- // Unfortunately, C does not support any equivalent to
- // &(*(void *)p)[0], although LLVM does via GetElementPtr
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
- try f.writeCValue(writer, CValue{ .undef = inst_ty.slicePtrFieldType(&buf) }, .Initializer);
- } else {
+ try f.writeCValue(writer, .{ .undef = inst_ty.slicePtrFieldType(&buf) }, .Initializer);
+ } else if (array_ty.hasRuntimeBitsIgnoreComptime()) {
try writer.writeAll("&(");
try f.writeCValueDeref(writer, operand);
try writer.print(")[{}]", .{try f.fmtIntLiteral(Type.usize, Value.zero)});
- }
+ } else try f.writeCValue(writer, operand, .Initializer);
+ try writer.writeAll("; ");
+ const array_len = array_ty.arrayLen();
var len_pl: Value.Payload.U64 = .{ .base = .{ .tag = .int_u64 }, .data = array_len };
const len_val = Value.initPayload(&len_pl.base);
- try writer.writeAll("; ");
- try f.writeCValue(writer, local, .Other);
- try writer.print(".len = {};\n", .{try f.fmtIntLiteral(Type.usize, len_val)});
+ try f.writeCValueMember(writer, local, .{ .identifier = "len" });
+ try writer.print(" = {};\n", .{try f.fmtIntLiteral(Type.usize, len_val)});
+
return local;
}
@@ -5987,7 +5824,7 @@ fn airFloatCast(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -6035,7 +5872,7 @@ fn airPtrToInt(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(un_op);
@@ -6046,7 +5883,7 @@ fn airPtrToInt(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeByte(')');
try f.writeCValue(writer, operand, .Other);
try writer.writeAll(";\n");
@@ -6063,7 +5900,7 @@ fn airUnBuiltinCall(
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -6095,7 +5932,7 @@ fn airBinBuiltinCall(
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const lhs = try f.resolveInst(bin_op.lhs);
@@ -6168,7 +6005,7 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
try writer.writeAll(";\n");
try writer.writeAll("if (");
try writer.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor});
- try f.renderTypecast(writer, ptr_ty.childType());
+ try f.renderType(writer, ptr_ty.childType());
try writer.writeByte(')');
if (ptr_ty.isVolatilePtr()) try writer.writeAll(" volatile");
try writer.writeAll(" *)");
@@ -6197,7 +6034,7 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
try writer.writeAll(";\n");
try f.writeCValue(writer, local, .Other);
try writer.print(".is_null = zig_cmpxchg_{s}((zig_atomic(", .{flavor});
- try f.renderTypecast(writer, ptr_ty.childType());
+ try f.renderType(writer, ptr_ty.childType());
try writer.writeByte(')');
if (ptr_ty.isVolatilePtr()) try writer.writeAll(" volatile");
try writer.writeAll(" *)");
@@ -6217,8 +6054,8 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
}
if (f.liveness.isUnused(inst)) {
- try freeLocal(f, inst, local.local, 0);
- return CValue.none;
+ try freeLocal(f, inst, local.new_local, 0);
+ return .none;
}
return local;
@@ -6240,12 +6077,12 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue {
switch (extra.op()) {
else => {
try writer.writeAll("zig_atomic(");
- try f.renderTypecast(writer, ptr_ty.elemType());
+ try f.renderType(writer, ptr_ty.elemType());
try writer.writeByte(')');
},
.Nand, .Min, .Max => {
// These are missing from stdatomic.h, so no atomic types for now.
- try f.renderTypecast(writer, ptr_ty.elemType());
+ try f.renderType(writer, ptr_ty.elemType());
},
}
if (ptr_ty.isVolatilePtr()) try writer.writeAll(" volatile");
@@ -6260,8 +6097,8 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(");\n");
if (f.liveness.isUnused(inst)) {
- try freeLocal(f, inst, local.local, 0);
- return CValue.none;
+ try freeLocal(f, inst, local.new_local, 0);
+ return .none;
}
return local;
@@ -6273,7 +6110,7 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue {
try reap(f, inst, &.{atomic_load.ptr});
const ptr_ty = f.air.typeOf(atomic_load.ptr);
if (!ptr_ty.isVolatilePtr() and f.liveness.isUnused(inst)) {
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -6282,7 +6119,7 @@ fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = zig_atomic_load((zig_atomic(");
- try f.renderTypecast(writer, ptr_ty.elemType());
+ try f.renderType(writer, ptr_ty.elemType());
try writer.writeByte(')');
if (ptr_ty.isVolatilePtr()) try writer.writeAll(" volatile");
try writer.writeAll(" *)");
@@ -6305,7 +6142,7 @@ fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CVa
const writer = f.object.writer();
try writer.writeAll("zig_atomic_store((zig_atomic(");
- try f.renderTypecast(writer, ptr_ty.elemType());
+ try f.renderType(writer, ptr_ty.elemType());
try writer.writeByte(')');
if (ptr_ty.isVolatilePtr()) try writer.writeAll(" volatile");
try writer.writeAll(" *)");
@@ -6316,7 +6153,7 @@ fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CVa
try f.object.dg.renderTypeForBuiltinFnName(writer, ptr_ty.childType());
try writer.writeAll(");\n");
- return CValue.none;
+ return .none;
}
fn airMemset(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -6347,7 +6184,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(" += ");
try f.object.dg.renderValue(writer, Type.usize, Value.one, .Other);
try writer.writeAll(") ((");
- try f.renderTypecast(writer, u8_ptr_ty);
+ try f.renderType(writer, u8_ptr_ty);
try writer.writeByte(')');
try f.writeCValue(writer, dest_ptr, .FunctionArgument);
try writer.writeAll(")[");
@@ -6357,9 +6194,9 @@ fn airMemset(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
try reap(f, inst, &.{ pl_op.operand, extra.lhs, extra.rhs });
- try freeLocal(f, inst, index.local, 0);
+ try freeLocal(f, inst, index.new_local, 0);
- return CValue.none;
+ return .none;
}
try reap(f, inst, &.{ pl_op.operand, extra.lhs, extra.rhs });
@@ -6371,7 +6208,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, len, .FunctionArgument);
try writer.writeAll(");\n");
- return CValue.none;
+ return .none;
}
fn airMemcpy(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -6391,7 +6228,7 @@ fn airMemcpy(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, len, .FunctionArgument);
try writer.writeAll(");\n");
- return CValue.none;
+ return .none;
}
fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -6404,7 +6241,7 @@ fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
const union_ty = f.air.typeOf(bin_op.lhs).childType();
const target = f.object.dg.module.getTarget();
const layout = union_ty.unionGetLayout(target);
- if (layout.tag_size == 0) return CValue.none;
+ if (layout.tag_size == 0) return .none;
try writer.writeByte('(');
try f.writeCValue(writer, union_ptr, .Other);
@@ -6412,7 +6249,7 @@ fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, new_tag, .Other);
try writer.writeAll(";\n");
- return CValue.none;
+ return .none;
}
fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -6420,7 +6257,7 @@ fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(ty_op.operand);
@@ -6430,7 +6267,7 @@ fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
const target = f.object.dg.module.getTarget();
const layout = un_ty.unionGetLayout(target);
- if (layout.tag_size == 0) return CValue.none;
+ if (layout.tag_size == 0) return .none;
const inst_ty = f.air.typeOfIndex(inst);
const writer = f.object.writer();
@@ -6448,7 +6285,7 @@ fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -6459,7 +6296,9 @@ fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue {
const writer = f.object.writer();
const local = try f.allocLocal(inst, inst_ty);
try f.writeCValue(writer, local, .Other);
- try writer.print(" = {s}(", .{try f.object.dg.getTagNameFn(enum_ty)});
+ try writer.print(" = {s}(", .{
+ try f.getLazyFnName(.{ .tag_name = enum_ty.getOwnerDecl() }, .{ .tag_name = enum_ty }),
+ });
try f.writeCValue(writer, operand, .Other);
try writer.writeAll(");\n");
@@ -6471,7 +6310,7 @@ fn airErrorName(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const writer = f.object.writer();
@@ -6491,7 +6330,7 @@ fn airSplat(f: *Function, inst: Air.Inst.Index) !CValue {
const ty_op = f.air.instructions.items(.data)[inst].ty_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ty_op.operand});
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
@@ -6507,13 +6346,13 @@ fn airSplat(f: *Function, inst: Air.Inst.Index) !CValue {
}
fn airSelect(f: *Function, inst: Air.Inst.Index) !CValue {
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
return f.fail("TODO: C backend: implement airSelect", .{});
}
fn airShuffle(f: *Function, inst: Air.Inst.Index) !CValue {
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
return f.fail("TODO: C backend: implement airShuffle", .{});
}
@@ -6523,7 +6362,7 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{reduce.operand});
- return CValue.none;
+ return .none;
}
const target = f.object.dg.module.getTarget();
@@ -6599,10 +6438,9 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
//
// Equivalent to:
// reduce: {
- // var i: usize = 0;
// var accum: T = init;
- // while (i < vec.len) : (i += 1) {
- // accum = func(accum, vec[i]);
+ //   for (vec) |elem| {
+ // accum = func(accum, elem);
// }
// break :reduce accum;
// }
@@ -6674,7 +6512,7 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
- try freeLocal(f, inst, it.local, 0);
+ try freeLocal(f, inst, it.new_local, 0);
return accum;
}
@@ -6687,8 +6525,8 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const gpa = f.object.dg.gpa;
const resolved_elements = try gpa.alloc(CValue, elements.len);
defer gpa.free(resolved_elements);
- for (elements, 0..) |element, i| {
- resolved_elements[i] = try f.resolveInst(element);
+ for (resolved_elements, elements) |*resolved_element, element| {
+ resolved_element.* = try f.resolveInst(element);
}
{
var bt = iterateBigTomb(f, inst);
@@ -6697,7 +6535,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
}
}
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
const target = f.object.dg.module.getTarget();
@@ -6723,50 +6561,51 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
.Auto, .Extern => {
try f.writeCValue(writer, local, .Other);
try writer.writeAll(" = (");
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeAll(")");
try writer.writeByte('{');
var empty = true;
- for (elements, 0..) |element, index| {
- if (inst_ty.structFieldValueComptime(index)) |_| continue;
+ for (elements, resolved_elements, 0..) |element, resolved_element, field_i| {
+ if (inst_ty.structFieldValueComptime(field_i)) |_| continue;
if (!empty) try writer.writeAll(", ");
- if (!inst_ty.isTupleOrAnonStruct()) {
- try writer.print(".{ } = ", .{fmtIdent(inst_ty.structFieldName(index))});
- }
+
+ const field_name: CValue = if (inst_ty.isSimpleTuple())
+ .{ .field = field_i }
+ else
+ .{ .identifier = inst_ty.structFieldName(field_i) };
+ try writer.writeByte('.');
+ try f.object.dg.writeCValue(writer, field_name);
+ try writer.writeAll(" = ");
const element_ty = f.air.typeOf(element);
try f.writeCValue(writer, switch (element_ty.zigTypeTag()) {
- .Array => CValue{ .undef = element_ty },
- else => resolved_elements[index],
+ .Array => .{ .undef = element_ty },
+ else => resolved_element,
}, .Initializer);
empty = false;
}
- if (empty) try writer.print("{}", .{try f.fmtIntLiteral(Type.u8, Value.zero)});
try writer.writeAll("};\n");
- var field_id: usize = 0;
- for (elements, 0..) |element, index| {
- if (inst_ty.structFieldValueComptime(index)) |_| continue;
+ for (elements, resolved_elements, 0..) |element, resolved_element, field_i| {
+ if (inst_ty.structFieldValueComptime(field_i)) |_| continue;
const element_ty = f.air.typeOf(element);
if (element_ty.zigTypeTag() != .Array) continue;
- const field_name = if (inst_ty.isTupleOrAnonStruct())
- CValue{ .field = field_id }
+ const field_name: CValue = if (inst_ty.isSimpleTuple())
+ .{ .field = field_i }
else
- CValue{ .identifier = inst_ty.structFieldName(index) };
+ .{ .identifier = inst_ty.structFieldName(field_i) };
try writer.writeAll(";\n");
try writer.writeAll("memcpy(");
try f.writeCValueMember(writer, local, field_name);
try writer.writeAll(", ");
- try f.writeCValue(writer, resolved_elements[index], .FunctionArgument);
+ try f.writeCValue(writer, resolved_element, .FunctionArgument);
try writer.writeAll(", sizeof(");
- try f.renderTypecast(writer, element_ty);
+ try f.renderType(writer, element_ty);
try writer.writeAll("));\n");
-
- field_id += 1;
}
},
.Packed => {
@@ -6784,7 +6623,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const bit_offset_val = Value.initPayload(&bit_offset_val_pl.base);
var empty = true;
- for (elements, 0..) |_, index| {
+ for (0..elements.len) |index| {
const field_ty = inst_ty.structFieldType(index);
if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -6810,11 +6649,11 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
try f.renderIntCast(writer, inst_ty, element, field_ty, .FunctionArgument);
} else {
try writer.writeByte('(');
- try f.renderTypecast(writer, inst_ty);
+ try f.renderType(writer, inst_ty);
try writer.writeByte(')');
if (field_ty.isPtrAtRuntime()) {
try writer.writeByte('(');
- try f.renderTypecast(writer, switch (int_info.signedness) {
+ try f.renderType(writer, switch (int_info.signedness) {
.unsigned => Type.usize,
.signed => Type.isize,
});
@@ -6833,13 +6672,6 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
empty = false;
}
- if (empty) {
- try writer.writeByte('(');
- try f.renderTypecast(writer, inst_ty);
- try writer.writeByte(')');
- try f.writeCValue(writer, .{ .undef = inst_ty }, .Initializer);
- }
-
try writer.writeAll(";\n");
},
},
@@ -6855,7 +6687,7 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{extra.init});
- return CValue.none;
+ return .none;
}
const union_ty = f.air.typeOfIndex(inst);
@@ -6875,7 +6707,7 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
return local;
}
- if (union_ty.unionTagTypeSafety()) |tag_ty| {
+ const field: CValue = if (union_ty.unionTagTypeSafety()) |tag_ty| field: {
const layout = union_ty.unionGetLayout(target);
if (layout.tag_size != 0) {
const field_index = tag_ty.enumFieldIndex(field_name).?;
@@ -6892,18 +6724,13 @@ fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
try f.writeCValue(writer, local, .Other);
try writer.print(".tag = {}; ", .{try f.fmtIntLiteral(tag_ty, int_val)});
}
- try f.writeCValue(writer, local, .Other);
- try writer.print(".payload.{ } = ", .{fmtIdent(field_name)});
- try f.writeCValue(writer, payload, .Other);
- try writer.writeAll(";\n");
- return local;
- }
+ break :field .{ .payload_identifier = field_name };
+ } else .{ .identifier = field_name };
- try f.writeCValue(writer, local, .Other);
- try writer.print(".{ } = ", .{fmtIdent(field_name)});
+ try f.writeCValueMember(writer, local, field);
+ try writer.writeAll(" = ");
try f.writeCValue(writer, payload, .Other);
try writer.writeAll(";\n");
-
return local;
}
@@ -6914,7 +6741,7 @@ fn airPrefetch(f: *Function, inst: Air.Inst.Index) !CValue {
// The available prefetch intrinsics do not accept a cache argument; only
// address, rw, and locality. So unless the cache is data, we do not lower
// this instruction.
- .instruction => return CValue.none,
+ .instruction => return .none,
}
const ptr = try f.resolveInst(prefetch.ptr);
try reap(f, inst, &.{prefetch.ptr});
@@ -6924,11 +6751,11 @@ fn airPrefetch(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.print(", {d}, {d});\n", .{
@enumToInt(prefetch.rw), prefetch.locality,
});
- return CValue.none;
+ return .none;
}
fn airWasmMemorySize(f: *Function, inst: Air.Inst.Index) !CValue {
- if (f.liveness.isUnused(inst)) return CValue.none;
+ if (f.liveness.isUnused(inst)) return .none;
const pl_op = f.air.instructions.items(.data)[inst].pl_op;
@@ -6965,7 +6792,7 @@ fn airFloatNeg(f: *Function, inst: Air.Inst.Index) !CValue {
const un_op = f.air.instructions.items(.data)[inst].un_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(un_op);
@@ -6987,7 +6814,7 @@ fn airUnFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CVal
const un_op = f.air.instructions.items(.data)[inst].un_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{un_op});
- return CValue.none;
+ return .none;
}
const operand = try f.resolveInst(un_op);
try reap(f, inst, &.{un_op});
@@ -7009,7 +6836,7 @@ fn airBinFloatOp(f: *Function, inst: Air.Inst.Index, operation: []const u8) !CVa
const bin_op = f.air.instructions.items(.data)[inst].bin_op;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
- return CValue.none;
+ return .none;
}
const lhs = try f.resolveInst(bin_op.lhs);
const rhs = try f.resolveInst(bin_op.rhs);
@@ -7036,7 +6863,7 @@ fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue {
const bin_op = f.air.extraData(Air.Bin, pl_op.payload).data;
if (f.liveness.isUnused(inst)) {
try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs, pl_op.operand });
- return CValue.none;
+ return .none;
}
const inst_ty = f.air.typeOfIndex(inst);
const mulend1 = try f.resolveInst(bin_op.lhs);
@@ -7058,6 +6885,81 @@ fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue {
return local;
}
+fn airCVaStart(f: *Function, inst: Air.Inst.Index) !CValue {
+ if (f.liveness.isUnused(inst)) return .none;
+
+ const inst_ty = f.air.typeOfIndex(inst);
+ const fn_cty = try f.typeToCType(f.object.dg.decl.?.ty, .complete);
+ const param_len = fn_cty.castTag(.varargs_function).?.data.param_types.len;
+
+ const writer = f.object.writer();
+ const local = try f.allocLocal(inst, inst_ty);
+ try writer.writeAll("va_start(*(va_list *)&");
+ try f.writeCValue(writer, local, .Other);
+ if (param_len > 0) {
+ try writer.writeAll(", ");
+ try f.writeCValue(writer, .{ .arg = param_len - 1 }, .FunctionArgument);
+ }
+ try writer.writeAll(");\n");
+ return local;
+}
+
+fn airCVaArg(f: *Function, inst: Air.Inst.Index) !CValue {
+ const ty_op = f.air.instructions.items(.data)[inst].ty_op;
+ if (f.liveness.isUnused(inst)) {
+ try reap(f, inst, &.{ty_op.operand});
+ return .none;
+ }
+
+ const inst_ty = f.air.typeOfIndex(inst);
+ const va_list = try f.resolveInst(ty_op.operand);
+ try reap(f, inst, &.{ty_op.operand});
+
+ const writer = f.object.writer();
+ const local = try f.allocLocal(inst, inst_ty);
+ try f.writeCValue(writer, local, .Other);
+ try writer.writeAll(" = va_arg(*(va_list *)");
+ try f.writeCValue(writer, va_list, .Other);
+ try writer.writeAll(", ");
+ try f.renderType(writer, f.air.getRefType(ty_op.ty));
+ try writer.writeAll(");\n");
+ return local;
+}
+
+fn airCVaEnd(f: *Function, inst: Air.Inst.Index) !CValue {
+ const un_op = f.air.instructions.items(.data)[inst].un_op;
+
+ const va_list = try f.resolveInst(un_op);
+ try reap(f, inst, &.{un_op});
+
+ const writer = f.object.writer();
+ try writer.writeAll("va_end(*(va_list *)");
+ try f.writeCValue(writer, va_list, .Other);
+ try writer.writeAll(");\n");
+ return .none;
+}
+
+fn airCVaCopy(f: *Function, inst: Air.Inst.Index) !CValue {
+ const ty_op = f.air.instructions.items(.data)[inst].ty_op;
+ if (f.liveness.isUnused(inst)) {
+ try reap(f, inst, &.{ty_op.operand});
+ return .none;
+ }
+
+ const inst_ty = f.air.typeOfIndex(inst);
+ const va_list = try f.resolveInst(ty_op.operand);
+ try reap(f, inst, &.{ty_op.operand});
+
+ const writer = f.object.writer();
+ const local = try f.allocLocal(inst, inst_ty);
+ try writer.writeAll("va_copy(*(va_list *)&");
+ try f.writeCValue(writer, local, .Other);
+ try writer.writeAll(", *(va_list *)");
+ try f.writeCValue(writer, va_list, .Other);
+ try writer.writeAll(");\n");
+ return local;
+}
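+
+// A rough sketch of the C that the va_list lowerings above emit (identifiers here
+// are illustrative placeholders, not the exact names the backend prints):
+//
+//     va_start(*(va_list *)&t0, a1);           /* airCVaStart: a1 is the last named parameter */
+//     t2 = va_arg(*(va_list *)p, T);           /* airCVaArg: p points at the va_list */
+//     va_copy(*(va_list *)&t3, *(va_list *)p); /* airCVaCopy */
+//     va_end(*(va_list *)p);                   /* airCVaEnd */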
+
fn toMemoryOrder(order: std.builtin.AtomicOrder) [:0]const u8 {
return switch (order) {
// Note: unordered is actually even less atomic than relaxed
@@ -7243,8 +7145,9 @@ fn stringLiteral(child_stream: anytype) StringLiteral(@TypeOf(child_stream)) {
return .{ .counting_writer = std.io.countingWriter(child_stream) };
}
+const FormatStringContext = struct { str: []const u8, sentinel: ?u8 };
fn formatStringLiteral(
- str: []const u8,
+ data: FormatStringContext,
comptime fmt: []const u8,
_: std.fmt.FormatOptions,
writer: anytype,
@@ -7253,13 +7156,13 @@ fn formatStringLiteral(
var literal = stringLiteral(writer);
try literal.start();
- for (str) |c|
- try literal.writeChar(c);
+ for (data.str) |c| try literal.writeChar(c);
+ if (data.sentinel) |sentinel| if (sentinel != 0) try literal.writeChar(sentinel);
try literal.end();
}
-fn fmtStringLiteral(str: []const u8) std.fmt.Formatter(formatStringLiteral) {
- return .{ .data = str };
+fn fmtStringLiteral(str: []const u8, sentinel: ?u8) std.fmt.Formatter(formatStringLiteral) {
+ return .{ .data = .{ .str = str, .sentinel = sentinel } };
}
fn undefPattern(comptime IntType: type) IntType {
@@ -7344,7 +7247,7 @@ fn formatIntLiteral(
use_twos_comp = true;
} else {
// TODO: Use fmtIntLiteral for 0?
- try writer.print("zig_sub_{c}{d}(zig_as_{c}{d}(0, 0), ", .{ signAbbrev(int_info.signedness), c_bits, signAbbrev(int_info.signedness), c_bits });
+ try writer.print("zig_sub_{c}{d}(zig_make_{c}{d}(0, 0), ", .{ signAbbrev(int_info.signedness), c_bits, signAbbrev(int_info.signedness), c_bits });
}
} else {
try writer.writeByte('-');
@@ -7354,11 +7257,16 @@ fn formatIntLiteral(
switch (data.ty.tag()) {
.c_short, .c_ushort, .c_int, .c_uint, .c_long, .c_ulong, .c_longlong, .c_ulonglong => {},
else => {
- if (int_info.bits > 64 and data.location != null and data.location.? == .StaticInitializer) {
+ if (int_info.bits <= 64) {
+ try writer.print("{s}INT{d}_C(", .{ switch (int_info.signedness) {
+ .signed => "",
+ .unsigned => "U",
+ }, c_bits });
+ } else if (data.location != null and data.location.? == .StaticInitializer) {
// MSVC treats casting the struct initializer as not constant (C2099), so an alternate form is used in global initializers
- try writer.print("zig_as_constant_{c}{d}(", .{ signAbbrev(int_info.signedness), c_bits });
+ try writer.print("zig_make_constant_{c}{d}(", .{ signAbbrev(int_info.signedness), c_bits });
} else {
- try writer.print("zig_as_{c}{d}(", .{ signAbbrev(int_info.signedness), c_bits });
+ try writer.print("zig_make_{c}{d}(", .{ signAbbrev(int_info.signedness), c_bits });
}
},
}
@@ -7467,17 +7375,20 @@ fn isByRef(ty: Type) bool {
}
const LowerFnRetTyBuffer = struct {
+ names: [1][]const u8,
types: [1]Type,
values: [1]Value,
- payload: Type.Payload.Tuple,
+ payload: Type.Payload.AnonStruct,
};
fn lowerFnRetTy(ret_ty: Type, buffer: *LowerFnRetTyBuffer, target: std.Target) Type {
if (ret_ty.zigTypeTag() == .NoReturn) return Type.initTag(.noreturn);
if (lowersToArray(ret_ty, target)) {
+ buffer.names = [1][]const u8{"array"};
buffer.types = [1]Type{ret_ty};
buffer.values = [1]Value{Value.initTag(.unreachable_value)};
buffer.payload = .{ .data = .{
+ .names = &buffer.names,
.types = &buffer.types,
.values = &buffer.values,
} };
@@ -7533,7 +7444,7 @@ fn die(f: *Function, inst: Air.Inst.Index, ref: Air.Inst.Ref) !void {
if (f.air.instructions.items(.tag)[ref_inst] == .constant) return;
const c_value = (f.value_map.fetchRemove(ref) orelse return).value;
const local_index = switch (c_value) {
- .local => |l| l,
+ .local, .new_local => |l| l,
else => return,
};
try freeLocal(f, inst, local_index, ref_inst);
@@ -7544,21 +7455,16 @@ fn freeLocal(f: *Function, inst: Air.Inst.Index, local_index: LocalIndex, ref_in
const local = &f.locals.items[local_index];
log.debug("%{d}: freeing t{d} (operand %{d})", .{ inst, local_index, ref_inst });
if (local.loop_depth < f.free_locals_clone_depth) return;
- const gop = try f.free_locals_stack.items[local.loop_depth].getOrPutContext(
- gpa,
- local.ty,
- f.tyHashCtx(),
- );
+ const gop = try f.free_locals_stack.items[local.loop_depth].getOrPut(gpa, local.getType());
if (!gop.found_existing) gop.value_ptr.* = .{};
if (std.debug.runtime_safety) {
- // If this trips, it means a local is being inserted into the
- // free_locals map while it already exists in the map, which is not
- // allowed.
- assert(mem.indexOfScalar(LocalIndex, gop.value_ptr.items, local_index) == null);
// If this trips, an attempt was made to free an unfreeable allocation.
assert(!f.allocs.contains(local_index));
}
- try gop.value_ptr.append(gpa, local_index);
+ // If this trips, it means a local is being inserted into the
+ // free_locals map while it already exists in the map, which is not
+ // allowed.
+ try gop.value_ptr.putNoClobber(gpa, local_index, {});
}
const BigTomb = struct {
@@ -7607,14 +7513,36 @@ fn deinitFreeLocalsMap(gpa: mem.Allocator, map: *LocalsMap) void {
map.deinit(gpa);
}
-fn noticeBranchFrees(f: *Function, pre_locals_len: LocalIndex, inst: Air.Inst.Index) !void {
- for (f.locals.items[pre_locals_len..], 0..) |*local, local_offset| {
- const local_index = pre_locals_len + @intCast(LocalIndex, local_offset);
- if (f.allocs.contains(local_index)) continue; // allocs are not freeable
+fn noticeBranchFrees(
+ f: *Function,
+ pre_locals_len: LocalIndex,
+ pre_allocs_len: LocalIndex,
+ inst: Air.Inst.Index,
+) !void {
+ const free_locals = f.getFreeLocals();
+
+ for (f.locals.items[pre_locals_len..], pre_locals_len..) |*local, local_i| {
+ const local_index = @intCast(LocalIndex, local_i);
+ if (f.allocs.contains(local_index)) {
+ if (std.debug.runtime_safety) {
+ // new allocs are no longer freeable, so make sure they aren't in the free list
+ if (free_locals.getPtr(local.getType())) |locals_list| {
+ assert(!locals_list.contains(local_index));
+ }
+ }
+ continue;
+ }
// free more deeply nested locals from other branches at current depth
assert(local.loop_depth >= f.free_locals_stack.items.len - 1);
local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
try freeLocal(f, inst, local_index, 0);
}
+
+ for (f.allocs.keys()[pre_allocs_len..]) |local_i| {
+ const local_index = @intCast(LocalIndex, local_i);
+ const local = &f.locals.items[local_index];
+ // new allocs are no longer freeable, so remove them from the free list
+ if (free_locals.getPtr(local.getType())) |locals_list| _ = locals_list.swapRemove(local_index);
+ }
}
diff --git a/src/codegen/c/type.zig b/src/codegen/c/type.zig
new file mode 100644
index 0000000000..1f1a220cd2
--- /dev/null
+++ b/src/codegen/c/type.zig
@@ -0,0 +1,1896 @@
+const std = @import("std");
+const cstr = std.cstr;
+const mem = std.mem;
+const Allocator = mem.Allocator;
+const assert = std.debug.assert;
+const autoHash = std.hash.autoHash;
+const Target = std.Target;
+
+const Module = @import("../../Module.zig");
+const Type = @import("../../type.zig").Type;
+
+pub const CType = extern union {
+ /// If the tag value is less than Tag.no_payload_count, then no pointer
+ /// dereference is needed.
+ tag_if_small_enough: Tag,
+ ptr_otherwise: *const Payload,
+
+ pub fn initTag(small_tag: Tag) CType {
+ assert(!small_tag.hasPayload());
+ return .{ .tag_if_small_enough = small_tag };
+ }
+
+ pub fn initPayload(pl: anytype) CType {
+ const T = @typeInfo(@TypeOf(pl)).Pointer.child;
+ return switch (pl.base.tag) {
+ inline else => |t| if (comptime t.hasPayload() and t.Type() == T) .{
+ .ptr_otherwise = &pl.base,
+ } else unreachable,
+ };
+ }
+
+ pub fn hasPayload(self: CType) bool {
+ return self.tag_if_small_enough.hasPayload();
+ }
+
+ pub fn tag(self: CType) Tag {
+ return if (self.hasPayload()) self.ptr_otherwise.tag else self.tag_if_small_enough;
+ }
+
+ pub fn cast(self: CType, comptime T: type) ?*const T {
+ if (!self.hasPayload()) return null;
+ const pl = self.ptr_otherwise;
+ return switch (pl.tag) {
+ inline else => |t| if (comptime t.hasPayload() and t.Type() == T)
+ @fieldParentPtr(T, "base", pl)
+ else
+ null,
+ };
+ }
+
+ pub fn castTag(self: CType, comptime t: Tag) ?*const t.Type() {
+ return if (self.tag() == t) @fieldParentPtr(t.Type(), "base", self.ptr_otherwise) else null;
+ }
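+
+ // Illustrative usage (names and values chosen for exposition, not taken from real
+ // call sites): a payload-free type like `initTag(.uint8_t)` lives entirely in
+ // `tag_if_small_enough`, so `tag()` needs no dereference, while a derived type is
+ // built from a payload and reached through `ptr_otherwise`:
+ //
+ //     var child_pl = Payload.Child{ .base = .{ .tag = .pointer }, .data = Tag.uint8_t.toIndex() };
+ //     const ptr_cty = initPayload(&child_pl);
+ //     assert(ptr_cty.castTag(.pointer).?.data == Tag.uint8_t.toIndex());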
+
+ pub const Tag = enum(usize) {
+ // The first section of this enum consists of tags that require no payload.
+ void,
+
+ // C basic types
+ char,
+
+ @"signed char",
+ short,
+ int,
+ long,
+ @"long long",
+
+ _Bool,
+ @"unsigned char",
+ @"unsigned short",
+ @"unsigned int",
+ @"unsigned long",
+ @"unsigned long long",
+
+ float,
+ double,
+ @"long double",
+
+ // C header types
+ // - stdbool.h
+ bool,
+ // - stddef.h
+ size_t,
+ ptrdiff_t,
+ // - stdint.h
+ uint8_t,
+ int8_t,
+ uint16_t,
+ int16_t,
+ uint32_t,
+ int32_t,
+ uint64_t,
+ int64_t,
+ uintptr_t,
+ intptr_t,
+
+ // zig.h types
+ zig_u128,
+ zig_i128,
+ zig_f16,
+ zig_f32,
+ zig_f64,
+ zig_f80,
+ zig_f128,
+ zig_c_longdouble, // Keep last_no_payload_tag updated!
+
+ // After this, the tag requires a payload.
+ pointer,
+ pointer_const,
+ pointer_volatile,
+ pointer_const_volatile,
+ array,
+ vector,
+ fwd_anon_struct,
+ fwd_anon_union,
+ fwd_struct,
+ fwd_union,
+ unnamed_struct,
+ unnamed_union,
+ packed_unnamed_struct,
+ packed_unnamed_union,
+ anon_struct,
+ anon_union,
+ @"struct",
+ @"union",
+ packed_struct,
+ packed_union,
+ function,
+ varargs_function,
+
+ pub const last_no_payload_tag = Tag.zig_c_longdouble;
+ pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1;
+
+ pub fn hasPayload(self: Tag) bool {
+ return @enumToInt(self) >= no_payload_count;
+ }
+
+ pub fn toIndex(self: Tag) Index {
+ assert(!self.hasPayload());
+ return @intCast(Index, @enumToInt(self));
+ }
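+
+ // For instance (illustrative): `Tag.uint8_t.toIndex()` is just
+ // `@enumToInt(Tag.uint8_t)`, which is below `no_payload_count`, so
+ // `Store.Set.indexToCType` can rebuild it with `initTag` alone; indices at or
+ // above `no_payload_count` refer to interned payload types, offset into the
+ // store's map by `no_payload_count`.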
+
+ pub fn Type(comptime self: Tag) type {
+ return switch (self) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ .zig_c_longdouble,
+ => @compileError("Type Tag " ++ @tagName(self) ++ " has no payload"),
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => Payload.Child,
+
+ .array,
+ .vector,
+ => Payload.Sequence,
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => Payload.Fields,
+
+ .fwd_struct,
+ .fwd_union,
+ => Payload.FwdDecl,
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => Payload.Unnamed,
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => Payload.Aggregate,
+
+ .function,
+ .varargs_function,
+ => Payload.Function,
+ };
+ }
+ };
+
+ pub const Payload = struct {
+ tag: Tag,
+
+ pub const Child = struct {
+ base: Payload,
+ data: Index,
+ };
+
+ pub const Sequence = struct {
+ base: Payload,
+ data: struct {
+ len: u64,
+ elem_type: Index,
+ },
+ };
+
+ pub const FwdDecl = struct {
+ base: Payload,
+ data: Module.Decl.Index,
+ };
+
+ pub const Fields = struct {
+ base: Payload,
+ data: Data,
+
+ pub const Data = []const Field;
+ pub const Field = struct {
+ name: [*:0]const u8,
+ type: Index,
+ alignas: AlignAs,
+ };
+ };
+
+ pub const Unnamed = struct {
+ base: Payload,
+ data: struct {
+ fields: Fields.Data,
+ owner_decl: Module.Decl.Index,
+ id: u32,
+ },
+ };
+
+ pub const Aggregate = struct {
+ base: Payload,
+ data: struct {
+ fields: Fields.Data,
+ fwd_decl: Index,
+ },
+ };
+
+ pub const Function = struct {
+ base: Payload,
+ data: struct {
+ return_type: Index,
+ param_types: []const Index,
+ },
+ };
+ };
+
+ pub const AlignAs = struct {
+ @"align": std.math.Log2Int(u32),
+ abi: std.math.Log2Int(u32),
+
+ pub fn init(alignment: u32, abi_alignment: u32) AlignAs {
+ const actual_align = if (alignment != 0) alignment else abi_alignment;
+ assert(std.math.isPowerOfTwo(actual_align));
+ assert(std.math.isPowerOfTwo(abi_alignment));
+ return .{
+ .@"align" = std.math.log2_int(u32, actual_align),
+ .abi = std.math.log2_int(u32, abi_alignment),
+ };
+ }
+ pub fn abiAlign(ty: Type, target: Target) AlignAs {
+ const abi_align = ty.abiAlignment(target);
+ return init(abi_align, abi_align);
+ }
+ pub fn fieldAlign(struct_ty: Type, field_i: usize, target: Target) AlignAs {
+ return init(
+ struct_ty.structFieldAlign(field_i, target),
+ struct_ty.structFieldType(field_i).abiAlignment(target),
+ );
+ }
+ pub fn unionPayloadAlign(union_ty: Type, target: Target) AlignAs {
+ const union_obj = union_ty.cast(Type.Payload.Union).?.data;
+ const union_payload_align = union_obj.abiAlignment(target, false);
+ return init(union_payload_align, union_payload_align);
+ }
+
+ pub fn getAlign(self: AlignAs) u32 {
+ return @as(u32, 1) << self.@"align";
+ }
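+
+ // Example values (illustrative): `init(0, 4)` falls back to the ABI alignment and
+ // stores .@"align" = 2 and .abi = 2, so `getAlign()` recovers 4, while
+ // `init(1, 4)` records an under-aligned field (.@"align" = 0 < .abi = 2), which is
+ // the condition the packed-layout check in `Convert.initType` looks for.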
+ };
+
+ pub const Index = u32;
+ pub const Store = struct {
+ arena: std.heap.ArenaAllocator.State = .{},
+ set: Set = .{},
+
+ pub const Set = struct {
+ pub const Map = std.ArrayHashMapUnmanaged(CType, void, HashContext, true);
+ const HashContext = struct {
+ store: *const Set,
+
+ pub fn hash(self: @This(), cty: CType) Map.Hash {
+ return @truncate(Map.Hash, cty.hash(self.store.*));
+ }
+ pub fn eql(_: @This(), lhs: CType, rhs: CType, _: usize) bool {
+ return lhs.eql(rhs);
+ }
+ };
+
+ map: Map = .{},
+
+ pub fn indexToCType(self: Set, index: Index) CType {
+ if (index < Tag.no_payload_count) return initTag(@intToEnum(Tag, index));
+ return self.map.keys()[index - Tag.no_payload_count];
+ }
+
+ pub fn indexToHash(self: Set, index: Index) Map.Hash {
+ if (index < Tag.no_payload_count)
+ return (HashContext{ .store = &self }).hash(self.indexToCType(index));
+ return self.map.entries.items(.hash)[index - Tag.no_payload_count];
+ }
+
+ pub fn typeToIndex(self: Set, ty: Type, target: Target, kind: Kind) ?Index {
+ const lookup = Convert.Lookup{ .imm = .{ .set = &self, .target = target } };
+
+ var convert: Convert = undefined;
+ convert.initType(ty, kind, lookup) catch unreachable;
+
+ const t = convert.tag();
+ if (!t.hasPayload()) return t.toIndex();
+
+ return if (self.map.getIndexAdapted(
+ ty,
+ TypeAdapter32{ .kind = kind, .lookup = lookup, .convert = &convert },
+ )) |idx| @intCast(Index, Tag.no_payload_count + idx) else null;
+ }
+ };
+
+ pub const Promoted = struct {
+ arena: std.heap.ArenaAllocator,
+ set: Set,
+
+ pub fn gpa(self: *Promoted) Allocator {
+ return self.arena.child_allocator;
+ }
+
+ pub fn cTypeToIndex(self: *Promoted, cty: CType) Allocator.Error!Index {
+ const t = cty.tag();
+ if (@enumToInt(t) < Tag.no_payload_count) return @intCast(Index, @enumToInt(t));
+
+ const gop = try self.set.map.getOrPutContext(self.gpa(), cty, .{ .store = &self.set });
+ if (!gop.found_existing) gop.key_ptr.* = cty;
+ if (std.debug.runtime_safety) {
+ const key = &self.set.map.entries.items(.key)[gop.index];
+ assert(key == gop.key_ptr);
+ assert(cty.eql(key.*));
+ assert(cty.hash(self.set) == key.hash(self.set));
+ }
+ return @intCast(Index, Tag.no_payload_count + gop.index);
+ }
+
+ pub fn typeToIndex(
+ self: *Promoted,
+ ty: Type,
+ mod: *Module,
+ kind: Kind,
+ ) Allocator.Error!Index {
+ const lookup = Convert.Lookup{ .mut = .{ .promoted = self, .mod = mod } };
+
+ var convert: Convert = undefined;
+ try convert.initType(ty, kind, lookup);
+
+ const t = convert.tag();
+ if (!t.hasPayload()) return t.toIndex();
+
+ const gop = try self.set.map.getOrPutContextAdapted(
+ self.gpa(),
+ ty,
+ TypeAdapter32{ .kind = kind, .lookup = lookup.freeze(), .convert = &convert },
+ .{ .store = &self.set },
+ );
+ if (!gop.found_existing) {
+ errdefer _ = self.set.map.pop();
+ gop.key_ptr.* = try createFromConvert(self, ty, lookup.getTarget(), kind, convert);
+ }
+ if (std.debug.runtime_safety) {
+ const adapter = TypeAdapter64{
+ .kind = kind,
+ .lookup = lookup.freeze(),
+ .convert = &convert,
+ };
+ const cty = &self.set.map.entries.items(.key)[gop.index];
+ assert(cty == gop.key_ptr);
+ assert(adapter.eql(ty, cty.*));
+ assert(adapter.hash(ty) == cty.hash(self.set));
+ }
+ return @intCast(Index, Tag.no_payload_count + gop.index);
+ }
+ };
+
+ pub fn promote(self: Store, gpa: Allocator) Promoted {
+ return .{ .arena = self.arena.promote(gpa), .set = self.set };
+ }
+
+ pub fn demote(self: *Store, promoted: Promoted) void {
+ self.arena = promoted.arena.state;
+ self.set = promoted.set;
+ }
+
+ pub fn indexToCType(self: Store, index: Index) CType {
+ return self.set.indexToCType(index);
+ }
+
+ pub fn indexToHash(self: Store, index: Index) Set.Map.Hash {
+ return self.set.indexToHash(index);
+ }
+
+ pub fn cTypeToIndex(self: *Store, gpa: Allocator, cty: CType) !Index {
+ var promoted = self.promote(gpa);
+ defer self.demote(promoted);
+ return promoted.cTypeToIndex(cty);
+ }
+
+ pub fn typeToCType(self: *Store, gpa: Allocator, ty: Type, mod: *Module, kind: Kind) !CType {
+ const idx = try self.typeToIndex(gpa, ty, mod, kind);
+ return self.indexToCType(idx);
+ }
+
+ pub fn typeToIndex(self: *Store, gpa: Allocator, ty: Type, mod: *Module, kind: Kind) !Index {
+ var promoted = self.promote(gpa);
+ defer self.demote(promoted);
+ return promoted.typeToIndex(ty, mod, kind);
+ }
+
+ pub fn clearRetainingCapacity(self: *Store, gpa: Allocator) void {
+ var promoted = self.promote(gpa);
+ defer self.demote(promoted);
+ promoted.set.map.clearRetainingCapacity();
+ _ = promoted.arena.reset(.retain_capacity);
+ }
+
+ pub fn clearAndFree(self: *Store, gpa: Allocator) void {
+ var promoted = self.promote(gpa);
+ defer self.demote(promoted);
+ promoted.set.map.clearAndFree(gpa);
+ _ = promoted.arena.reset(.free_all);
+ }
+
+ pub fn shrinkRetainingCapacity(self: *Store, gpa: Allocator, new_len: usize) void {
+ self.set.map.shrinkRetainingCapacity(gpa, new_len);
+ }
+
+ pub fn shrinkAndFree(self: *Store, gpa: Allocator, new_len: usize) void {
+ self.set.map.shrinkAndFree(gpa, new_len);
+ }
+
+ pub fn count(self: Store) usize {
+ return self.set.map.count();
+ }
+
+ pub fn move(self: *Store) Store {
+ const moved = self.*;
+ self.* = .{};
+ return moved;
+ }
+
+ pub fn deinit(self: *Store, gpa: Allocator) void {
+ var promoted = self.promote(gpa);
+ promoted.set.map.deinit(gpa);
+ _ = promoted.arena.deinit();
+ self.* = undefined;
+ }
+ };
+
+ pub fn isPacked(self: CType) bool {
+ return switch (self.tag()) {
+ else => false,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ .packed_struct,
+ .packed_union,
+ => true,
+ };
+ }
+
+ pub fn fields(self: CType) Payload.Fields.Data {
+ return if (self.cast(Payload.Aggregate)) |pl|
+ pl.data.fields
+ else if (self.cast(Payload.Unnamed)) |pl|
+ pl.data.fields
+ else if (self.cast(Payload.Fields)) |pl|
+ pl.data
+ else
+ unreachable;
+ }
+
+ pub fn eql(lhs: CType, rhs: CType) bool {
+ return lhs.eqlContext(rhs, struct {
+ pub fn eqlIndex(_: @This(), lhs_idx: Index, rhs_idx: Index) bool {
+ return lhs_idx == rhs_idx;
+ }
+ }{});
+ }
+
+ pub fn eqlContext(lhs: CType, rhs: CType, ctx: anytype) bool {
+ // As a shortcut, if the small tags / addresses match, we're done.
+ if (lhs.tag_if_small_enough == rhs.tag_if_small_enough) return true;
+
+ const lhs_tag = lhs.tag();
+ const rhs_tag = rhs.tag();
+ if (lhs_tag != rhs_tag) return false;
+
+ return switch (lhs_tag) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ .zig_c_longdouble,
+ => false,
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => ctx.eqlIndex(lhs.cast(Payload.Child).?.data, rhs.cast(Payload.Child).?.data),
+
+ .array,
+ .vector,
+ => {
+ const lhs_data = lhs.cast(Payload.Sequence).?.data;
+ const rhs_data = rhs.cast(Payload.Sequence).?.data;
+ return lhs_data.len == rhs_data.len and
+ ctx.eqlIndex(lhs_data.elem_type, rhs_data.elem_type);
+ },
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => {
+ const lhs_data = lhs.cast(Payload.Fields).?.data;
+ const rhs_data = rhs.cast(Payload.Fields).?.data;
+ if (lhs_data.len != rhs_data.len) return false;
+ for (lhs_data, rhs_data) |lhs_field, rhs_field| {
+ if (!ctx.eqlIndex(lhs_field.type, rhs_field.type)) return false;
+ if (lhs_field.alignas.@"align" != rhs_field.alignas.@"align") return false;
+ if (cstr.cmp(lhs_field.name, rhs_field.name) != 0) return false;
+ }
+ return true;
+ },
+
+ .fwd_struct,
+ .fwd_union,
+ => lhs.cast(Payload.FwdDecl).?.data == rhs.cast(Payload.FwdDecl).?.data,
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => {
+ const lhs_data = lhs.cast(Payload.Unnamed).?.data;
+ const rhs_data = rhs.cast(Payload.Unnamed).?.data;
+ return lhs_data.owner_decl == rhs_data.owner_decl and lhs_data.id == rhs_data.id;
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => ctx.eqlIndex(
+ lhs.cast(Payload.Aggregate).?.data.fwd_decl,
+ rhs.cast(Payload.Aggregate).?.data.fwd_decl,
+ ),
+
+ .function,
+ .varargs_function,
+ => {
+ const lhs_data = lhs.cast(Payload.Function).?.data;
+ const rhs_data = rhs.cast(Payload.Function).?.data;
+ if (lhs_data.param_types.len != rhs_data.param_types.len) return false;
+ if (!ctx.eqlIndex(lhs_data.return_type, rhs_data.return_type)) return false;
+ for (lhs_data.param_types, rhs_data.param_types) |lhs_param_idx, rhs_param_idx| {
+ if (!ctx.eqlIndex(lhs_param_idx, rhs_param_idx)) return false;
+ }
+ return true;
+ },
+ };
+ }
+
+ pub fn hash(self: CType, store: Store.Set) u64 {
+ var hasher = std.hash.Wyhash.init(0);
+ self.updateHasher(&hasher, store);
+ return hasher.final();
+ }
+
+ pub fn updateHasher(self: CType, hasher: anytype, store: Store.Set) void {
+ const t = self.tag();
+ autoHash(hasher, t);
+ switch (t) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ .zig_c_longdouble,
+ => {},
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => store.indexToCType(self.cast(Payload.Child).?.data).updateHasher(hasher, store),
+
+ .array,
+ .vector,
+ => {
+ const data = self.cast(Payload.Sequence).?.data;
+ autoHash(hasher, data.len);
+ store.indexToCType(data.elem_type).updateHasher(hasher, store);
+ },
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => for (self.cast(Payload.Fields).?.data) |field| {
+ store.indexToCType(field.type).updateHasher(hasher, store);
+ hasher.update(mem.span(field.name));
+ autoHash(hasher, field.alignas.@"align");
+ },
+
+ .fwd_struct,
+ .fwd_union,
+ => autoHash(hasher, self.cast(Payload.FwdDecl).?.data),
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => {
+ const data = self.cast(Payload.Unnamed).?.data;
+ autoHash(hasher, data.owner_decl);
+ autoHash(hasher, data.id);
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => store.indexToCType(self.cast(Payload.Aggregate).?.data.fwd_decl)
+ .updateHasher(hasher, store),
+
+ .function,
+ .varargs_function,
+ => {
+ const data = self.cast(Payload.Function).?.data;
+ store.indexToCType(data.return_type).updateHasher(hasher, store);
+ for (data.param_types) |param_ty| {
+ store.indexToCType(param_ty).updateHasher(hasher, store);
+ }
+ },
+ }
+ }
+
+ pub const Kind = enum { forward, forward_parameter, complete, global, parameter, payload };
+
+ const Convert = struct {
+ storage: union {
+ none: void,
+ child: Payload.Child,
+ seq: Payload.Sequence,
+ fwd: Payload.FwdDecl,
+ anon: struct {
+ fields: [2]Payload.Fields.Field,
+ pl: union {
+ forward: Payload.Fields,
+ complete: Payload.Aggregate,
+ },
+ },
+ },
+ value: union(enum) {
+ tag: Tag,
+ cty: CType,
+ },
+
+ pub fn init(self: *@This(), t: Tag) void {
+ self.* = if (t.hasPayload()) .{
+ .storage = .{ .none = {} },
+ .value = .{ .tag = t },
+ } else .{
+ .storage = .{ .none = {} },
+ .value = .{ .cty = initTag(t) },
+ };
+ }
+
+ pub fn tag(self: @This()) Tag {
+ return switch (self.value) {
+ .tag => |t| t,
+ .cty => |c| c.tag(),
+ };
+ }
+
+ fn tagFromIntInfo(signedness: std.builtin.Signedness, bits: u16) Tag {
+ return switch (bits) {
+ 0 => .void,
+ 1...8 => switch (signedness) {
+ .unsigned => .uint8_t,
+ .signed => .int8_t,
+ },
+ 9...16 => switch (signedness) {
+ .unsigned => .uint16_t,
+ .signed => .int16_t,
+ },
+ 17...32 => switch (signedness) {
+ .unsigned => .uint32_t,
+ .signed => .int32_t,
+ },
+ 33...64 => switch (signedness) {
+ .unsigned => .uint64_t,
+ .signed => .int64_t,
+ },
+ 65...128 => switch (signedness) {
+ .unsigned => .zig_u128,
+ .signed => .zig_i128,
+ },
+ else => .array,
+ };
+ }
+
+ pub const Lookup = union(enum) {
+ fail: Target,
+ imm: struct {
+ set: *const Store.Set,
+ target: Target,
+ },
+ mut: struct {
+ promoted: *Store.Promoted,
+ mod: *Module,
+ },
+
+ pub fn isMutable(self: @This()) bool {
+ return switch (self) {
+ .fail, .imm => false,
+ .mut => true,
+ };
+ }
+
+ pub fn getTarget(self: @This()) Target {
+ return switch (self) {
+ .fail => |target| target,
+ .imm => |imm| imm.target,
+ .mut => |mut| mut.mod.getTarget(),
+ };
+ }
+
+ pub fn getSet(self: @This()) ?*const Store.Set {
+ return switch (self) {
+ .fail => null,
+ .imm => |imm| imm.set,
+ .mut => |mut| &mut.promoted.set,
+ };
+ }
+
+ pub fn typeToIndex(self: @This(), ty: Type, kind: Kind) !?Index {
+ return switch (self) {
+ .fail => null,
+ .imm => |imm| imm.set.typeToIndex(ty, imm.target, kind),
+ .mut => |mut| try mut.promoted.typeToIndex(ty, mut.mod, kind),
+ };
+ }
+
+ pub fn indexToCType(self: @This(), index: Index) ?CType {
+ return if (self.getSet()) |set| set.indexToCType(index) else null;
+ }
+
+ pub fn freeze(self: @This()) @This() {
+ return switch (self) {
+ .fail, .imm => self,
+ .mut => |mut| .{ .imm = .{ .set = &mut.promoted.set, .target = self.getTarget() } },
+ };
+ }
+ };
+
+ fn sortFields(self: *@This(), fields_len: usize) []Payload.Fields.Field {
+ const Field = Payload.Fields.Field;
+ const slice = self.storage.anon.fields[0..fields_len];
+ std.sort.sort(Field, slice, {}, struct {
+ fn before(_: void, lhs: Field, rhs: Field) bool {
+ return lhs.alignas.@"align" > rhs.alignas.@"align";
+ }
+ }.before);
+ return slice;
+ }
+
+ fn initAnon(self: *@This(), kind: Kind, fwd_idx: Index, fields_len: usize) void {
+ switch (kind) {
+ .forward, .forward_parameter => {
+ self.storage.anon.pl = .{ .forward = .{
+ .base = .{ .tag = .fwd_anon_struct },
+ .data = self.sortFields(fields_len),
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.anon.pl.forward) };
+ },
+ .complete, .parameter, .global => {
+ self.storage.anon.pl = .{ .complete = .{
+ .base = .{ .tag = .anon_struct },
+ .data = .{
+ .fields = self.sortFields(fields_len),
+ .fwd_decl = fwd_idx,
+ },
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.anon.pl.complete) };
+ },
+ .payload => unreachable,
+ }
+ }
+
+ fn initArrayParameter(self: *@This(), ty: Type, kind: Kind, lookup: Lookup) !void {
+ if (switch (kind) {
+ .forward_parameter => @as(Index, undefined),
+ .parameter => try lookup.typeToIndex(ty, .forward_parameter),
+ .forward, .complete, .global, .payload => unreachable,
+ }) |fwd_idx| {
+ if (try lookup.typeToIndex(ty, switch (kind) {
+ .forward_parameter => .forward,
+ .parameter => .complete,
+ .forward, .complete, .global, .payload => unreachable,
+ })) |array_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
+ .name = "array",
+ .type = array_idx,
+ .alignas = AlignAs.abiAlign(ty, lookup.getTarget()),
+ };
+ self.initAnon(kind, fwd_idx, 1);
+ } else self.init(switch (kind) {
+ .forward_parameter => .fwd_anon_struct,
+ .parameter => .anon_struct,
+ .forward, .complete, .global, .payload => unreachable,
+ });
+ } else self.init(.anon_struct);
+ }
+
+ pub fn initType(self: *@This(), ty: Type, kind: Kind, lookup: Lookup) !void {
+ const target = lookup.getTarget();
+
+ self.* = undefined;
+ if (!ty.isFnOrHasRuntimeBitsIgnoreComptime())
+ self.init(.void)
+ else if (ty.isAbiInt()) switch (ty.tag()) {
+ .usize => self.init(.uintptr_t),
+ .isize => self.init(.intptr_t),
+ .c_short => self.init(.short),
+ .c_ushort => self.init(.@"unsigned short"),
+ .c_int => self.init(.int),
+ .c_uint => self.init(.@"unsigned int"),
+ .c_long => self.init(.long),
+ .c_ulong => self.init(.@"unsigned long"),
+ .c_longlong => self.init(.@"long long"),
+ .c_ulonglong => self.init(.@"unsigned long long"),
+ else => {
+ const info = ty.intInfo(target);
+ const t = tagFromIntInfo(info.signedness, info.bits);
+ switch (t) {
+ .void => unreachable,
+ else => self.init(t),
+ .array => switch (kind) {
+ .forward, .complete, .global => {
+ const abi_size = ty.abiSize(target);
+ const abi_align = ty.abiAlignment(target);
+ self.storage = .{ .seq = .{ .base = .{ .tag = .array }, .data = .{
+ .len = @divExact(abi_size, abi_align),
+ .elem_type = tagFromIntInfo(
+ .unsigned,
+ @intCast(u16, abi_align * 8),
+ ).toIndex(),
+ } } };
+ self.value = .{ .cty = initPayload(&self.storage.seq) };
+ },
+ .forward_parameter,
+ .parameter,
+ => try self.initArrayParameter(ty, kind, lookup),
+ .payload => unreachable,
+ },
+ }
+ },
+ } else switch (ty.zigTypeTag()) {
+ .Frame => unreachable,
+ .AnyFrame => unreachable,
+
+ .Int,
+ .Enum,
+ .ErrorSet,
+ .Type,
+ .Void,
+ .NoReturn,
+ .ComptimeFloat,
+ .ComptimeInt,
+ .Undefined,
+ .Null,
+ .EnumLiteral,
+ => unreachable,
+
+ .Bool => self.init(.bool),
+
+ .Float => self.init(switch (ty.tag()) {
+ .f16 => .zig_f16,
+ .f32 => .zig_f32,
+ .f64 => .zig_f64,
+ .f80 => .zig_f80,
+ .f128 => .zig_f128,
+ .c_longdouble => .zig_c_longdouble,
+ else => unreachable,
+ }),
+
+ .Pointer => {
+ const info = ty.ptrInfo().data;
+ switch (info.size) {
+ .Slice => {
+ if (switch (kind) {
+ .forward, .forward_parameter => @as(Index, undefined),
+ .complete, .parameter, .global => try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }) |fwd_idx| {
+ var buf: Type.SlicePtrFieldTypeBuffer = undefined;
+ const ptr_ty = ty.slicePtrFieldType(&buf);
+ if (try lookup.typeToIndex(ptr_ty, kind)) |ptr_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
+ .name = "ptr",
+ .type = ptr_idx,
+ .alignas = AlignAs.abiAlign(ptr_ty, target),
+ };
+ self.storage.anon.fields[1] = .{
+ .name = "len",
+ .type = Tag.uintptr_t.toIndex(),
+ .alignas = AlignAs.abiAlign(Type.usize, target),
+ };
+ self.initAnon(kind, fwd_idx, 2);
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
+ } else self.init(.anon_struct);
+ },
+
+ .One, .Many, .C => {
+ const t: Tag = switch (info.@"volatile") {
+ false => switch (info.mutable) {
+ true => .pointer,
+ false => .pointer_const,
+ },
+ true => switch (info.mutable) {
+ true => .pointer_volatile,
+ false => .pointer_const_volatile,
+ },
+ };
+
+ var host_int_pl = Type.Payload.Bits{
+ .base = .{ .tag = .int_unsigned },
+ .data = info.host_size * 8,
+ };
+ const pointee_ty = if (info.host_size > 0)
+ Type.initPayload(&host_int_pl.base)
+ else
+ info.pointee_type;
+
+ if (if (info.size == .C and pointee_ty.tag() == .u8)
+ Tag.char.toIndex()
+ else
+ try lookup.typeToIndex(pointee_ty, .forward)) |child_idx|
+ {
+ self.storage = .{ .child = .{
+ .base = .{ .tag = t },
+ .data = child_idx,
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.child) };
+ } else self.init(t);
+ },
+ }
+ },
+
+ .Struct, .Union => |zig_ty_tag| if (ty.containerLayout() == .Packed) {
+ if (ty.castTag(.@"struct")) |struct_obj| {
+ try self.initType(struct_obj.data.backing_int_ty, kind, lookup);
+ } else {
+ var buf: Type.Payload.Bits = .{
+ .base = .{ .tag = .int_unsigned },
+ .data = @intCast(u16, ty.bitSize(target)),
+ };
+ try self.initType(Type.initPayload(&buf.base), kind, lookup);
+ }
+ } else if (ty.isTupleOrAnonStruct()) {
+ if (lookup.isMutable()) {
+ for (0..switch (zig_ty_tag) {
+ .Struct => ty.structFieldCount(),
+ .Union => ty.unionFields().count(),
+ else => unreachable,
+ }) |field_i| {
+ const field_ty = ty.structFieldType(field_i);
+ if ((zig_ty_tag == .Struct and ty.structFieldIsComptime(field_i)) or
+ !field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+ _ = try lookup.typeToIndex(field_ty, switch (kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ });
+ }
+ switch (kind) {
+ .forward, .forward_parameter => {},
+ .complete, .parameter, .global => _ = try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }
+ }
+ self.init(switch (kind) {
+ .forward, .forward_parameter => switch (zig_ty_tag) {
+ .Struct => .fwd_anon_struct,
+ .Union => .fwd_anon_union,
+ else => unreachable,
+ },
+ .complete, .parameter, .global => switch (zig_ty_tag) {
+ .Struct => .anon_struct,
+ .Union => .anon_union,
+ else => unreachable,
+ },
+ .payload => unreachable,
+ });
+ } else {
+ const tag_ty = ty.unionTagTypeSafety();
+ const is_tagged_union_wrapper = kind != .payload and tag_ty != null;
+ const is_struct = zig_ty_tag == .Struct or is_tagged_union_wrapper;
+ switch (kind) {
+ .forward, .forward_parameter => {
+ self.storage = .{ .fwd = .{
+ .base = .{ .tag = if (is_struct) .fwd_struct else .fwd_union },
+ .data = ty.getOwnerDecl(),
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.fwd) };
+ },
+ .complete, .parameter, .global, .payload => if (is_tagged_union_wrapper) {
+ const fwd_idx = try lookup.typeToIndex(ty, .forward);
+ const payload_idx = try lookup.typeToIndex(ty, .payload);
+ const tag_idx = try lookup.typeToIndex(tag_ty.?, kind);
+ if (fwd_idx != null and payload_idx != null and tag_idx != null) {
+ self.storage = .{ .anon = undefined };
+ var field_count: usize = 0;
+ if (payload_idx != Tag.void.toIndex()) {
+ self.storage.anon.fields[field_count] = .{
+ .name = "payload",
+ .type = payload_idx.?,
+ .alignas = AlignAs.unionPayloadAlign(ty, target),
+ };
+ field_count += 1;
+ }
+ if (tag_idx != Tag.void.toIndex()) {
+ self.storage.anon.fields[field_count] = .{
+ .name = "tag",
+ .type = tag_idx.?,
+ .alignas = AlignAs.abiAlign(tag_ty.?, target),
+ };
+ field_count += 1;
+ }
+ self.storage.anon.pl = .{ .complete = .{
+ .base = .{ .tag = .@"struct" },
+ .data = .{
+ .fields = self.sortFields(field_count),
+ .fwd_decl = fwd_idx.?,
+ },
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.anon.pl.complete) };
+ } else self.init(.@"struct");
+ } else if (kind == .payload and ty.unionHasAllZeroBitFieldTypes()) {
+ self.init(.void);
+ } else {
+ var is_packed = false;
+ for (0..switch (zig_ty_tag) {
+ .Struct => ty.structFieldCount(),
+ .Union => ty.unionFields().count(),
+ else => unreachable,
+ }) |field_i| {
+ const field_ty = ty.structFieldType(field_i);
+ if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+
+ const field_align = AlignAs.fieldAlign(ty, field_i, target);
+ if (field_align.@"align" < field_align.abi) {
+ is_packed = true;
+ if (!lookup.isMutable()) break;
+ }
+
+ if (lookup.isMutable()) {
+ _ = try lookup.typeToIndex(field_ty, switch (kind) {
+ .forward, .forward_parameter => unreachable,
+ .complete, .parameter, .payload => .complete,
+ .global => .global,
+ });
+ }
+ }
+ switch (kind) {
+ .forward, .forward_parameter => unreachable,
+ .complete, .parameter, .global => {
+ _ = try lookup.typeToIndex(ty, .forward);
+ self.init(if (is_struct)
+ if (is_packed) .packed_struct else .@"struct"
+ else if (is_packed) .packed_union else .@"union");
+ },
+ .payload => self.init(if (is_packed)
+ .packed_unnamed_union
+ else
+ .unnamed_union),
+ }
+ },
+ }
+ },
+
+ .Array, .Vector => |zig_ty_tag| {
+ switch (kind) {
+ .forward, .complete, .global => {
+ const t: Tag = switch (zig_ty_tag) {
+ .Array => .array,
+ .Vector => .vector,
+ else => unreachable,
+ };
+ if (try lookup.typeToIndex(ty.childType(), kind)) |child_idx| {
+ self.storage = .{ .seq = .{ .base = .{ .tag = t }, .data = .{
+ .len = ty.arrayLenIncludingSentinel(),
+ .elem_type = child_idx,
+ } } };
+ self.value = .{ .cty = initPayload(&self.storage.seq) };
+ } else self.init(t);
+ },
+ .forward_parameter, .parameter => try self.initArrayParameter(ty, kind, lookup),
+ .payload => unreachable,
+ }
+ },
+
+ .Optional => {
+ var buf: Type.Payload.ElemType = undefined;
+ const payload_ty = ty.optionalChild(&buf);
+ if (payload_ty.hasRuntimeBitsIgnoreComptime()) {
+ if (ty.optionalReprIsPayload()) {
+ try self.initType(payload_ty, kind, lookup);
+ } else if (switch (kind) {
+ .forward, .forward_parameter => @as(Index, undefined),
+ .complete, .parameter, .global => try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }) |fwd_idx| {
+ if (try lookup.typeToIndex(payload_ty, switch (kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ })) |payload_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
+ .name = "payload",
+ .type = payload_idx,
+ .alignas = AlignAs.abiAlign(payload_ty, target),
+ };
+ self.storage.anon.fields[1] = .{
+ .name = "is_null",
+ .type = Tag.bool.toIndex(),
+ .alignas = AlignAs.abiAlign(Type.bool, target),
+ };
+ self.initAnon(kind, fwd_idx, 2);
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
+ } else self.init(.anon_struct);
+ } else self.init(.bool);
+ },
+
+ .ErrorUnion => {
+ if (switch (kind) {
+ .forward, .forward_parameter => @as(Index, undefined),
+ .complete, .parameter, .global => try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }) |fwd_idx| {
+ const payload_ty = ty.errorUnionPayload();
+ if (try lookup.typeToIndex(payload_ty, switch (kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ })) |payload_idx| {
+ const error_ty = ty.errorUnionSet();
+ if (payload_idx == Tag.void.toIndex()) {
+ try self.initType(error_ty, kind, lookup);
+ } else if (try lookup.typeToIndex(error_ty, kind)) |error_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
+ .name = "payload",
+ .type = payload_idx,
+ .alignas = AlignAs.abiAlign(payload_ty, target),
+ };
+ self.storage.anon.fields[1] = .{
+ .name = "error",
+ .type = error_idx,
+ .alignas = AlignAs.abiAlign(error_ty, target),
+ };
+ self.initAnon(kind, fwd_idx, 2);
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
+ } else self.init(.anon_struct);
+ },
+
+ .Opaque => switch (ty.tag()) {
+ .anyopaque => self.init(.void),
+ .@"opaque" => {
+ self.storage = .{ .fwd = .{
+ .base = .{ .tag = .fwd_struct },
+ .data = ty.getOwnerDecl(),
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.fwd) };
+ },
+ else => unreachable,
+ },
+
+ .Fn => {
+ const info = ty.fnInfo();
+ if (!info.is_generic) {
+ if (lookup.isMutable()) {
+ const param_kind: Kind = switch (kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
+ };
+ _ = try lookup.typeToIndex(info.return_type, param_kind);
+ for (info.param_types) |param_type| {
+ if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
+ _ = try lookup.typeToIndex(param_type, param_kind);
+ }
+ }
+ self.init(if (info.is_var_args) .varargs_function else .function);
+ } else self.init(.void);
+ },
+ }
+ }
+ };
+
+ pub fn copy(self: CType, arena: Allocator) !CType {
+ return self.copyContext(struct {
+ arena: Allocator,
+ pub fn copyIndex(_: @This(), idx: Index) Index {
+ return idx;
+ }
+ }{ .arena = arena });
+ }
+
+ fn copyFields(ctx: anytype, old_fields: Payload.Fields.Data) !Payload.Fields.Data {
+ const new_fields = try ctx.arena.alloc(Payload.Fields.Field, old_fields.len);
+ for (new_fields, old_fields) |*new_field, old_field| {
+ new_field.name = try ctx.arena.dupeZ(u8, mem.span(old_field.name));
+ new_field.type = ctx.copyIndex(old_field.type);
+ new_field.alignas = old_field.alignas;
+ }
+ return new_fields;
+ }
+
+ fn copyParams(ctx: anytype, old_param_types: []const Index) ![]const Index {
+ const new_param_types = try ctx.arena.alloc(Index, old_param_types.len);
+ for (new_param_types, old_param_types) |*new_param_type, old_param_type|
+ new_param_type.* = ctx.copyIndex(old_param_type);
+ return new_param_types;
+ }
+
+ pub fn copyContext(self: CType, ctx: anytype) !CType {
+ switch (self.tag()) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ .zig_c_longdouble,
+ => return self,
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => {
+ const pl = self.cast(Payload.Child).?;
+ const new_pl = try ctx.arena.create(Payload.Child);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = ctx.copyIndex(pl.data) };
+ return initPayload(new_pl);
+ },
+
+ .array,
+ .vector,
+ => {
+ const pl = self.cast(Payload.Sequence).?;
+ const new_pl = try ctx.arena.create(Payload.Sequence);
+ new_pl.* = .{
+ .base = .{ .tag = pl.base.tag },
+ .data = .{ .len = pl.data.len, .elem_type = ctx.copyIndex(pl.data.elem_type) },
+ };
+ return initPayload(new_pl);
+ },
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => {
+ const pl = self.cast(Payload.Fields).?;
+ const new_pl = try ctx.arena.create(Payload.Fields);
+ new_pl.* = .{
+ .base = .{ .tag = pl.base.tag },
+ .data = try copyFields(ctx, pl.data),
+ };
+ return initPayload(new_pl);
+ },
+
+ .fwd_struct,
+ .fwd_union,
+ => {
+ const pl = self.cast(Payload.FwdDecl).?;
+ const new_pl = try ctx.arena.create(Payload.FwdDecl);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = pl.data };
+ return initPayload(new_pl);
+ },
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => {
+ const pl = self.cast(Payload.Unnamed).?;
+ const new_pl = try ctx.arena.create(Payload.Unnamed);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = .{
+ .fields = try copyFields(ctx, pl.data.fields),
+ .owner_decl = pl.data.owner_decl,
+ .id = pl.data.id,
+ } };
+ return initPayload(new_pl);
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {
+ const pl = self.cast(Payload.Aggregate).?;
+ const new_pl = try ctx.arena.create(Payload.Aggregate);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = .{
+ .fields = try copyFields(ctx, pl.data.fields),
+ .fwd_decl = ctx.copyIndex(pl.data.fwd_decl),
+ } };
+ return initPayload(new_pl);
+ },
+
+ .function,
+ .varargs_function,
+ => {
+ const pl = self.cast(Payload.Function).?;
+ const new_pl = try ctx.arena.create(Payload.Function);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = .{
+ .return_type = ctx.copyIndex(pl.data.return_type),
+ .param_types = try copyParams(ctx, pl.data.param_types),
+ } };
+ return initPayload(new_pl);
+ },
+ }
+ }
+
+ fn createFromType(store: *Store.Promoted, ty: Type, target: Target, kind: Kind) !CType {
+ var convert: Convert = undefined;
+ try convert.initType(ty, kind, .{ .imm = .{ .set = &store.set, .target = target } });
+ return createFromConvert(store, ty, target, kind, &convert);
+ }
+
+ fn createFromConvert(
+ store: *Store.Promoted,
+ ty: Type,
+ target: Target,
+ kind: Kind,
+ convert: Convert,
+ ) !CType {
+ const arena = store.arena.allocator();
+ switch (convert.value) {
+ .cty => |c| return c.copy(arena),
+ .tag => |t| switch (t) {
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {
+ const zig_ty_tag = ty.zigTypeTag();
+ const fields_len = switch (zig_ty_tag) {
+ .Struct => ty.structFieldCount(),
+ .Union => ty.unionFields().count(),
+ else => unreachable,
+ };
+
+ var c_fields_len: usize = 0;
+ for (0..fields_len) |field_i| {
+ const field_ty = ty.structFieldType(field_i);
+ if ((zig_ty_tag == .Struct and ty.structFieldIsComptime(field_i)) or
+ !field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+ c_fields_len += 1;
+ }
+
+ const fields_pl = try arena.alloc(Payload.Fields.Field, c_fields_len);
+ var c_field_i: usize = 0;
+ for (0..fields_len) |field_i| {
+ const field_ty = ty.structFieldType(field_i);
+ if ((zig_ty_tag == .Struct and ty.structFieldIsComptime(field_i)) or
+ !field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+
+ defer c_field_i += 1;
+ fields_pl[c_field_i] = .{
+ .name = try if (ty.isSimpleTuple())
+ std.fmt.allocPrintZ(arena, "f{}", .{field_i})
+ else
+ arena.dupeZ(u8, switch (zig_ty_tag) {
+ .Struct => ty.structFieldName(field_i),
+ .Union => ty.unionFields().keys()[field_i],
+ else => unreachable,
+ }),
+ .type = store.set.typeToIndex(field_ty, target, switch (kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter, .payload => .complete,
+ .global => .global,
+ }).?,
+ .alignas = AlignAs.fieldAlign(ty, field_i, target),
+ };
+ }
+
+ switch (t) {
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => {
+ const anon_pl = try arena.create(Payload.Fields);
+ anon_pl.* = .{ .base = .{ .tag = t }, .data = fields_pl };
+ return initPayload(anon_pl);
+ },
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => {
+ const unnamed_pl = try arena.create(Payload.Unnamed);
+ unnamed_pl.* = .{ .base = .{ .tag = t }, .data = .{
+ .fields = fields_pl,
+ .owner_decl = ty.getOwnerDecl(),
+ .id = if (ty.unionTagTypeSafety()) |_| 0 else unreachable,
+ } };
+ return initPayload(unnamed_pl);
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {
+ const struct_pl = try arena.create(Payload.Aggregate);
+ struct_pl.* = .{ .base = .{ .tag = t }, .data = .{
+ .fields = fields_pl,
+ .fwd_decl = store.set.typeToIndex(ty, target, .forward).?,
+ } };
+ return initPayload(struct_pl);
+ },
+
+ else => unreachable,
+ }
+ },
+
+ .function,
+ .varargs_function,
+ => {
+ const info = ty.fnInfo();
+ assert(!info.is_generic);
+ const param_kind: Kind = switch (kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
+ };
+
+ var c_params_len: usize = 0;
+ for (info.param_types) |param_type| {
+ if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
+ c_params_len += 1;
+ }
+
+ const params_pl = try arena.alloc(Index, c_params_len);
+ var c_param_i: usize = 0;
+ for (info.param_types) |param_type| {
+ if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
+ params_pl[c_param_i] = store.set.typeToIndex(param_type, target, param_kind).?;
+ c_param_i += 1;
+ }
+
+ const fn_pl = try arena.create(Payload.Function);
+ fn_pl.* = .{ .base = .{ .tag = t }, .data = .{
+ .return_type = store.set.typeToIndex(info.return_type, target, param_kind).?,
+ .param_types = params_pl,
+ } };
+ return initPayload(fn_pl);
+ },
+
+ else => unreachable,
+ },
+ }
+ }
+
+ pub const TypeAdapter64 = struct {
+ kind: Kind,
+ lookup: Convert.Lookup,
+ convert: *const Convert,
+
+ fn eqlRecurse(self: @This(), ty: Type, cty: Index, kind: Kind) bool {
+ assert(!self.lookup.isMutable());
+
+ var convert: Convert = undefined;
+ convert.initType(ty, kind, self.lookup) catch unreachable;
+
+ const self_recurse = @This(){ .kind = kind, .lookup = self.lookup, .convert = &convert };
+ return self_recurse.eql(ty, self.lookup.indexToCType(cty).?);
+ }
+
+ pub fn eql(self: @This(), ty: Type, cty: CType) bool {
+ switch (self.convert.value) {
+ .cty => |c| return c.eql(cty),
+ .tag => |t| {
+ if (t != cty.tag()) return false;
+
+ const target = self.lookup.getTarget();
+ switch (t) {
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => {
+ if (!ty.isTupleOrAnonStruct()) return false;
+
+ var name_buf: [
+ std.fmt.count("f{}", .{std.math.maxInt(usize)})
+ ]u8 = undefined;
+ const c_fields = cty.cast(Payload.Fields).?.data;
+
+ const zig_ty_tag = ty.zigTypeTag();
+ var c_field_i: usize = 0;
+ for (0..switch (zig_ty_tag) {
+ .Struct => ty.structFieldCount(),
+ .Union => ty.unionFields().count(),
+ else => unreachable,
+ }) |field_i| {
+ const field_ty = ty.structFieldType(field_i);
+ if ((zig_ty_tag == .Struct and ty.structFieldIsComptime(field_i)) or
+ !field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+
+ defer c_field_i += 1;
+ const c_field = &c_fields[c_field_i];
+
+ if (!self.eqlRecurse(field_ty, c_field.type, switch (self.kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ }) or !mem.eql(
+ u8,
+ if (ty.isSimpleTuple())
+ std.fmt.bufPrint(&name_buf, "f{}", .{field_i}) catch unreachable
+ else switch (zig_ty_tag) {
+ .Struct => ty.structFieldName(field_i),
+ .Union => ty.unionFields().keys()[field_i],
+ else => unreachable,
+ },
+ mem.span(c_field.name),
+ ) or AlignAs.fieldAlign(ty, field_i, target).@"align" !=
+ c_field.alignas.@"align") return false;
+ }
+ return true;
+ },
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => switch (self.kind) {
+ .forward, .forward_parameter, .complete, .parameter, .global => unreachable,
+ .payload => if (ty.unionTagTypeSafety()) |_| {
+ const data = cty.cast(Payload.Unnamed).?.data;
+ return ty.getOwnerDecl() == data.owner_decl and data.id == 0;
+ } else unreachable,
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => return self.eqlRecurse(
+ ty,
+ cty.cast(Payload.Aggregate).?.data.fwd_decl,
+ .forward,
+ ),
+
+ .function,
+ .varargs_function,
+ => {
+ if (ty.zigTypeTag() != .Fn) return false;
+
+ const info = ty.fnInfo();
+ assert(!info.is_generic);
+ const data = cty.cast(Payload.Function).?.data;
+ const param_kind: Kind = switch (self.kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
+ };
+
+ if (!self.eqlRecurse(info.return_type, data.return_type, param_kind))
+ return false;
+
+ var c_param_i: usize = 0;
+ for (info.param_types) |param_type| {
+ if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
+
+ if (c_param_i >= data.param_types.len) return false;
+ const param_cty = data.param_types[c_param_i];
+ c_param_i += 1;
+
+ if (!self.eqlRecurse(param_type, param_cty, param_kind))
+ return false;
+ }
+ return c_param_i == data.param_types.len;
+ },
+
+ else => unreachable,
+ }
+ },
+ }
+ }
+
+ pub fn hash(self: @This(), ty: Type) u64 {
+ var hasher = std.hash.Wyhash.init(0);
+ self.updateHasher(&hasher, ty);
+ return hasher.final();
+ }
+
+ fn updateHasherRecurse(self: @This(), hasher: anytype, ty: Type, kind: Kind) void {
+ assert(!self.lookup.isMutable());
+
+ var convert: Convert = undefined;
+ convert.initType(ty, kind, self.lookup) catch unreachable;
+
+ const self_recurse = @This(){ .kind = kind, .lookup = self.lookup, .convert = &convert };
+ self_recurse.updateHasher(hasher, ty);
+ }
+
+ pub fn updateHasher(self: @This(), hasher: anytype, ty: Type) void {
+ switch (self.convert.value) {
+ .cty => |c| return c.updateHasher(hasher, self.lookup.getSet().?.*),
+ .tag => |t| {
+ autoHash(hasher, t);
+
+ const target = self.lookup.getTarget();
+ switch (t) {
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => {
+ var name_buf: [
+ std.fmt.count("f{}", .{std.math.maxInt(usize)})
+ ]u8 = undefined;
+
+ const zig_ty_tag = ty.zigTypeTag();
+ for (0..switch (ty.zigTypeTag()) {
+ .Struct => ty.structFieldCount(),
+ .Union => ty.unionFields().count(),
+ else => unreachable,
+ }) |field_i| {
+ const field_ty = ty.structFieldType(field_i);
+ if ((zig_ty_tag == .Struct and ty.structFieldIsComptime(field_i)) or
+ !field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+
+ self.updateHasherRecurse(hasher, field_ty, switch (self.kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ });
+ hasher.update(if (ty.isSimpleTuple())
+ std.fmt.bufPrint(&name_buf, "f{}", .{field_i}) catch unreachable
+ else switch (zig_ty_tag) {
+ .Struct => ty.structFieldName(field_i),
+ .Union => ty.unionFields().keys()[field_i],
+ else => unreachable,
+ });
+ autoHash(hasher, AlignAs.fieldAlign(ty, field_i, target).@"align");
+ }
+ },
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => switch (self.kind) {
+ .forward, .forward_parameter, .complete, .parameter, .global => unreachable,
+ .payload => if (ty.unionTagTypeSafety()) |_| {
+ autoHash(hasher, ty.getOwnerDecl());
+ autoHash(hasher, @as(u32, 0));
+ } else unreachable,
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => self.updateHasherRecurse(hasher, ty, .forward),
+
+ .function,
+ .varargs_function,
+ => {
+ const info = ty.fnInfo();
+ assert(!info.is_generic);
+ const param_kind: Kind = switch (self.kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
+ };
+
+ self.updateHasherRecurse(hasher, info.return_type, param_kind);
+ for (info.param_types) |param_type| {
+ if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
+ self.updateHasherRecurse(hasher, param_type, param_kind);
+ }
+ },
+
+ else => unreachable,
+ }
+ },
+ }
+ }
+ };
+
+ pub const TypeAdapter32 = struct {
+ kind: Kind,
+ lookup: Convert.Lookup,
+ convert: *const Convert,
+
+ fn to64(self: @This()) TypeAdapter64 {
+ return .{ .kind = self.kind, .lookup = self.lookup, .convert = self.convert };
+ }
+
+ pub fn eql(self: @This(), ty: Type, cty: CType, cty_index: usize) bool {
+ _ = cty_index;
+ return self.to64().eql(ty, cty);
+ }
+
+ pub fn hash(self: @This(), ty: Type) u32 {
+ return @truncate(u32, self.to64().hash(ty));
+ }
+ };
+};
diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig
index 21c57b5e13..f0d5097f90 100644
--- a/src/codegen/llvm.zig
+++ b/src/codegen/llvm.zig
@@ -6025,8 +6025,8 @@ pub const FuncGen = struct {
const field_ptr = try self.resolveInst(extra.field_ptr);
const target = self.dg.module.getTarget();
- const struct_ty = self.air.getRefType(ty_pl.ty).childType();
- const field_offset = struct_ty.structFieldOffset(extra.field_index, target);
+ const parent_ty = self.air.getRefType(ty_pl.ty).childType();
+ const field_offset = parent_ty.structFieldOffset(extra.field_index, target);
const res_ty = try self.dg.lowerType(self.air.getRefType(ty_pl.ty));
if (field_offset == 0) {