| author | Andrew Kelley <andrew@ziglang.org> | 2023-05-18 22:02:55 -0700 |
|---|---|---|
| committer | Andrew Kelley <andrew@ziglang.org> | 2023-06-10 20:47:53 -0700 |
| commit | 7bf91fc79ac9e4eae575baf3a2ca9549bc3bf6c2 (patch) | |
| tree | f07c76f10c294cdfa7cc302097278ac4ff720c65 /src | |
| parent | 607737d841bc2279cbe5fee68a0a546b9a5a802e (diff) | |
| download | zig-7bf91fc79ac9e4eae575baf3a2ca9549bc3bf6c2.tar.gz zig-7bf91fc79ac9e4eae575baf3a2ca9549bc3bf6c2.zip | |
compiler: eliminate legacy Type.Tag.pointer
Now pointer types are stored only in InternPool.
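As a rough before/after sketch of what this migration means at a call site: the `Type.Tag.pointer.create` and `mod.ptrType` shapes are lifted from the hunks below, while `arena`, `mod`, and `elem_ty` are placeholder names used only for illustration.

```zig
// Before: a pointer type was a heap-allocated legacy Type payload, so two
// structurally identical pointer types could be distinct objects.
const legacy_ptr_ty = try Type.Tag.pointer.create(arena, .{
    .pointee_type = elem_ty,
    .mutable = false,
    .@"addrspace" = .generic,
});

// After: the pointer type is interned. Structurally equal keys map to the
// same InternPool.Index, and alignment is an InternPool.Alignment where
// `.none` means "use the pointee's ABI alignment".
const interned_ptr_ty = try mod.ptrType(.{
    .elem_type = elem_ty.toIntern(),
    .is_const = true,
    .alignment = .none,
});
```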
Diffstat (limited to 'src')
| -rw-r--r-- | src/InternPool.zig | 55 |
| -rw-r--r-- | src/Module.zig | 36 |
| -rw-r--r-- | src/Sema.zig | 119 |
| -rw-r--r-- | src/arch/aarch64/CodeGen.zig | 3 |
| -rw-r--r-- | src/arch/arm/CodeGen.zig | 3 |
| -rw-r--r-- | src/arch/sparc64/CodeGen.zig | 3 |
| -rw-r--r-- | src/arch/x86_64/CodeGen.zig | 21 |
| -rw-r--r-- | src/codegen.zig | 9 |
| -rw-r--r-- | src/codegen/c.zig | 56 |
| -rw-r--r-- | src/codegen/c/type.zig | 3 |
| -rw-r--r-- | src/codegen/llvm.zig | 119 |
| -rw-r--r-- | src/codegen/spirv.zig | 6 |
| -rw-r--r-- | src/link/Dwarf.zig | 3 |
| -rw-r--r-- | src/type.zig | 520 |
| -rw-r--r-- | src/value.zig | 12 |
15 files changed, 295 insertions, 673 deletions
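Much of the churn in the diff below is a mechanical swap from raw `u64` byte counts to the `InternPool.Alignment` enum, which stores the log2 of the alignment and reserves `.none` for "use the default". Here is a minimal, self-contained mirror of two of the helpers added to `src/InternPool.zig`, assuming the 0.11-dev builtin names this commit uses (`@enumToInt`, `@intToEnum`); the test at the end is illustrative only.

```zig
const std = @import("std");
const assert = std.debug.assert;

/// Illustrative mirror of InternPool.Alignment: the enum value is log2 of the
/// alignment in bytes, and `.none` means "use the default" (the pointee's ABI
/// alignment for pointers, the target default for function code).
const Alignment = enum(u6) {
    none = std.math.maxInt(u6),
    _,

    fn toByteUnitsOptional(a: Alignment) ?u64 {
        return switch (a) {
            .none => null,
            _ => @as(u64, 1) << @enumToInt(a),
        };
    }

    fn fromByteUnits(n: u64) Alignment {
        if (n == 0) return .none;
        assert(std.math.isPowerOfTwo(n));
        return @intToEnum(Alignment, @ctz(n));
    }
};

test "alignment round-trips through the log2 representation" {
    try std.testing.expectEqual(@as(?u64, 16), Alignment.fromByteUnits(16).toByteUnitsOptional());
    try std.testing.expectEqual(@as(?u64, null), Alignment.fromByteUnits(0).toByteUnitsOptional());
}
```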
diff --git a/src/InternPool.zig b/src/InternPool.zig index bf48aeda84..81035bffc5 100644 --- a/src/InternPool.zig +++ b/src/InternPool.zig @@ -186,17 +186,11 @@ pub const Key = union(enum) { pub const PtrType = struct { elem_type: Index, sentinel: Index = .none, - /// If zero use pointee_type.abiAlignment() - /// When creating pointer types, if alignment is equal to pointee type - /// abi alignment, this value should be set to 0 instead. - /// - /// Please don't change this to u32 or u29. If you want to save bits, - /// migrate the rest of the codebase to use the `Alignment` type rather - /// than using byte units. The LLVM backend can only handle `c_uint` - /// byte units; we can emit a semantic analysis error if alignment that - /// overflows that amount is attempted to be used, but it shouldn't - /// affect the other backends. - alignment: u64 = 0, + /// `none` indicates the ABI alignment of the pointee_type. In this + /// case, this field *must* be set to `none`, otherwise the + /// `InternPool` equality and hashing functions will return incorrect + /// results. + alignment: Alignment = .none, /// If this is non-zero it means the pointer points to a sub-byte /// range of data, which is backed by a "host integer" with this /// number of bytes. @@ -378,15 +372,11 @@ pub const Key = union(enum) { /// Tells whether a parameter is noalias. See `paramIsNoalias` helper /// method for accessing this. noalias_bits: u32, - /// If zero use default target function code alignment. - /// - /// Please don't change this to u32 or u29. If you want to save bits, - /// migrate the rest of the codebase to use the `Alignment` type rather - /// than using byte units. The LLVM backend can only handle `c_uint` - /// byte units; we can emit a semantic analysis error if alignment that - /// overflows that amount is attempted to be used, but it shouldn't - /// affect the other backends. - alignment: u64, + /// `none` indicates the function has the default alignment for + /// function code on the target. In this case, this field *must* be set + /// to `none`, otherwise the `InternPool` equality and hashing + /// functions will return incorrect results. 
+ alignment: Alignment, cc: std.builtin.CallingConvention, is_var_args: bool, is_generic: bool, @@ -1500,6 +1490,13 @@ pub const Alignment = enum(u6) { none = std.math.maxInt(u6), _, + pub fn toByteUnitsOptional(a: Alignment) ?u64 { + return switch (a) { + .none => null, + _ => @as(u64, 1) << @enumToInt(a), + }; + } + pub fn toByteUnits(a: Alignment, default: u64) u64 { return switch (a) { .none => default, @@ -1509,8 +1506,14 @@ pub const Alignment = enum(u6) { pub fn fromByteUnits(n: u64) Alignment { if (n == 0) return .none; + assert(std.math.isPowerOfTwo(n)); return @intToEnum(Alignment, @ctz(n)); } + + pub fn fromNonzeroByteUnits(n: u64) Alignment { + assert(n != 0); + return fromByteUnits(n); + } }; /// Used for non-sentineled arrays that have length fitting in u32, as well as @@ -1773,7 +1776,7 @@ pub fn indexToKey(ip: InternPool, index: Index) Key { return .{ .ptr_type = .{ .elem_type = ptr_info.child, .sentinel = ptr_info.sentinel, - .alignment = ptr_info.flags.alignment.toByteUnits(0), + .alignment = ptr_info.flags.alignment, .size = ptr_info.flags.size, .is_const = ptr_info.flags.is_const, .is_volatile = ptr_info.flags.is_volatile, @@ -2013,7 +2016,7 @@ fn indexToKeyFuncType(ip: InternPool, data: u32) Key.FuncType { .return_type = type_function.data.return_type, .comptime_bits = type_function.data.comptime_bits, .noalias_bits = type_function.data.noalias_bits, - .alignment = type_function.data.flags.alignment.toByteUnits(0), + .alignment = type_function.data.flags.alignment, .cc = type_function.data.flags.cc, .is_var_args = type_function.data.flags.is_var_args, .is_generic = type_function.data.flags.is_generic, @@ -2100,16 +2103,18 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index { return @intToEnum(Index, ip.items.len - 1); } + const is_allowzero = ptr_type.is_allowzero or ptr_type.size == .C; + ip.items.appendAssumeCapacity(.{ .tag = .type_pointer, .data = try ip.addExtra(gpa, Pointer{ .child = ptr_type.elem_type, .sentinel = ptr_type.sentinel, .flags = .{ - .alignment = Alignment.fromByteUnits(ptr_type.alignment), + .alignment = ptr_type.alignment, .is_const = ptr_type.is_const, .is_volatile = ptr_type.is_volatile, - .is_allowzero = ptr_type.is_allowzero, + .is_allowzero = is_allowzero, .size = ptr_type.size, .address_space = ptr_type.address_space, .vector_index = ptr_type.vector_index, @@ -2316,7 +2321,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index { .comptime_bits = func_type.comptime_bits, .noalias_bits = func_type.noalias_bits, .flags = .{ - .alignment = Alignment.fromByteUnits(func_type.alignment), + .alignment = func_type.alignment, .cc = func_type.cc, .is_var_args = func_type.is_var_args, .is_generic = func_type.is_generic, diff --git a/src/Module.zig b/src/Module.zig index 0a063a8ddc..5cd0d237b4 100644 --- a/src/Module.zig +++ b/src/Module.zig @@ -6532,8 +6532,7 @@ pub fn populateTestFunctions( try mod.ensureDeclAnalyzed(decl_index); } const decl = mod.declPtr(decl_index); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const tmp_test_fn_ty = decl.ty.slicePtrFieldType(&buf, mod).childType(mod); + const tmp_test_fn_ty = decl.ty.slicePtrFieldType(mod).childType(mod); const array_decl_index = d: { // Add mod.test_functions to an array decl then make the test_functions @@ -6843,28 +6842,31 @@ pub fn ptrType(mod: *Module, info: InternPool.Key.PtrType) Allocator.Error!Type } pub fn singleMutPtrType(mod: *Module, child_type: Type) Allocator.Error!Type { - if (child_type.ip_index == .none) { - // TODO 
remove this after all types can be represented via the InternPool - return Type.Tag.pointer.create(mod.tmp_hack_arena.allocator(), .{ - .pointee_type = child_type, - .@"addrspace" = .generic, - }); - } return ptrType(mod, .{ .elem_type = child_type.ip_index }); } pub fn singleConstPtrType(mod: *Module, child_type: Type) Allocator.Error!Type { - if (child_type.ip_index == .none) { - // TODO remove this after all types can be represented via the InternPool - return Type.Tag.pointer.create(mod.tmp_hack_arena.allocator(), .{ - .pointee_type = child_type, - .mutable = false, - .@"addrspace" = .generic, - }); - } return ptrType(mod, .{ .elem_type = child_type.ip_index, .is_const = true }); } +pub fn adjustPtrTypeChild(mod: *Module, ptr_ty: Type, new_child: Type) Allocator.Error!Type { + const info = ptr_ty.ptrInfoIp(mod.intern_pool); + return mod.ptrType(.{ + .elem_type = new_child.toIntern(), + + .sentinel = info.sentinel, + .alignment = info.alignment, + .host_size = info.host_size, + .bit_offset = info.bit_offset, + .vector_index = info.vector_index, + .size = info.size, + .is_const = info.is_const, + .is_volatile = info.is_volatile, + .is_allowzero = info.is_allowzero, + .address_space = info.address_space, + }); +} + pub fn funcType(mod: *Module, info: InternPool.Key.FuncType) Allocator.Error!Type { return (try intern(mod, .{ .func_type = info })).toType(); } diff --git a/src/Sema.zig b/src/Sema.zig index 8492fd441f..74efe9d141 100644 --- a/src/Sema.zig +++ b/src/Sema.zig @@ -9163,7 +9163,7 @@ fn funcCommon( .return_type = return_type.toIntern(), .cc = cc_resolved, .cc_is_generic = cc == null, - .alignment = alignment orelse 0, + .alignment = if (alignment) |a| InternPool.Alignment.fromByteUnits(a) else .none, .align_is_generic = alignment == null, .section_is_generic = section == .generic, .addrspace_is_generic = address_space == null, @@ -17740,10 +17740,10 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air extra_i += 1; const coerced = try sema.coerce(block, elem_ty, try sema.resolveInst(ref), sentinel_src); const val = try sema.resolveConstValue(block, sentinel_src, coerced, "pointer sentinel value must be comptime-known"); - break :blk val; - } else null; + break :blk val.toIntern(); + } else .none; - const abi_align: u32 = if (inst_data.flags.has_align) blk: { + const abi_align: InternPool.Alignment = if (inst_data.flags.has_align) blk: { const ref = @intToEnum(Zir.Inst.Ref, sema.code.extra[extra_i]); extra_i += 1; const coerced = try sema.coerce(block, Type.u32, try sema.resolveInst(ref), align_src); @@ -17752,13 +17752,13 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air // which case we can make this 0 without resolving it. 
if (val.castTag(.lazy_align)) |payload| { if (payload.data.eql(elem_ty, sema.mod)) { - break :blk 0; + break :blk .none; } } const abi_align = @intCast(u32, (try val.getUnsignedIntAdvanced(mod, sema)).?); try sema.validateAlign(block, align_src, abi_align); - break :blk abi_align; - } else 0; + break :blk InternPool.Alignment.fromByteUnits(abi_align); + } else .none; const address_space: std.builtin.AddressSpace = if (inst_data.flags.has_addrspace) blk: { const ref = @intToEnum(Zir.Inst.Ref, sema.code.extra[extra_i]); @@ -17789,7 +17789,7 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air return sema.fail(block, elem_ty_src, "function pointers must be single pointers", .{}); } const fn_align = mod.typeToFunc(elem_ty).?.alignment; - if (inst_data.flags.has_align and abi_align != 0 and fn_align != 0 and + if (inst_data.flags.has_align and abi_align != .none and fn_align != .none and abi_align != fn_align) { return sema.fail(block, align_src, "function pointer alignment disagrees with function alignment", .{}); @@ -17815,16 +17815,16 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air } } - const ty = try Type.ptr(sema.arena, sema.mod, .{ - .pointee_type = elem_ty, + const ty = try mod.ptrType(.{ + .elem_type = elem_ty.toIntern(), .sentinel = sentinel, - .@"align" = abi_align, - .@"addrspace" = address_space, + .alignment = abi_align, + .address_space = address_space, .bit_offset = bit_offset, .host_size = host_size, - .mutable = inst_data.flags.is_mutable, - .@"allowzero" = inst_data.flags.is_allowzero, - .@"volatile" = inst_data.flags.is_volatile, + .is_const = !inst_data.flags.is_mutable, + .is_allowzero = inst_data.flags.is_allowzero, + .is_volatile = inst_data.flags.is_volatile, .size = inst_data.size, }); return sema.addType(ty); @@ -18905,10 +18905,13 @@ fn zirReify( if (!try sema.intFitsInType(alignment_val, Type.u32, null)) { return sema.fail(block, src, "alignment must fit in 'u32'", .{}); } - const abi_align = @intCast(u29, (try alignment_val.getUnsignedIntAdvanced(mod, sema)).?); + + const abi_align = InternPool.Alignment.fromByteUnits( + (try alignment_val.getUnsignedIntAdvanced(mod, sema)).?, + ); const unresolved_elem_ty = child_val.toType(); - const elem_ty = if (abi_align == 0) + const elem_ty = if (abi_align == .none) unresolved_elem_ty else t: { const elem_ty = try sema.resolveTypeFields(unresolved_elem_ty); @@ -18918,18 +18921,21 @@ fn zirReify( const ptr_size = mod.toEnum(std.builtin.Type.Pointer.Size, size_val); - var actual_sentinel: ?Value = null; - if (!sentinel_val.isNull(mod)) { - if (ptr_size == .One or ptr_size == .C) { - return sema.fail(block, src, "sentinels are only allowed on slices and unknown-length pointers", .{}); + const actual_sentinel: InternPool.Index = s: { + if (!sentinel_val.isNull(mod)) { + if (ptr_size == .One or ptr_size == .C) { + return sema.fail(block, src, "sentinels are only allowed on slices and unknown-length pointers", .{}); + } + const sentinel_ptr_val = sentinel_val.castTag(.opt_payload).?.data; + const ptr_ty = try Type.ptr(sema.arena, mod, .{ + .@"addrspace" = .generic, + .pointee_type = try elem_ty.copy(sema.arena), + }); + const sent_val = (try sema.pointerDeref(block, src, sentinel_ptr_val, ptr_ty)).?; + break :s sent_val.toIntern(); } - const sentinel_ptr_val = sentinel_val.castTag(.opt_payload).?.data; - const ptr_ty = try Type.ptr(sema.arena, mod, .{ - .@"addrspace" = .generic, - .pointee_type = try elem_ty.copy(sema.arena), - }); - actual_sentinel = (try 
sema.pointerDeref(block, src, sentinel_ptr_val, ptr_ty)).?; - } + break :s .none; + }; if (elem_ty.zigTypeTag(mod) == .NoReturn) { return sema.fail(block, src, "pointer to noreturn not allowed", .{}); @@ -18938,7 +18944,7 @@ fn zirReify( return sema.fail(block, src, "function pointers must be single pointers", .{}); } const fn_align = mod.typeToFunc(elem_ty).?.alignment; - if (abi_align != 0 and fn_align != 0 and + if (abi_align != .none and fn_align != .none and abi_align != fn_align) { return sema.fail(block, src, "function pointer alignment disagrees with function alignment", .{}); @@ -18964,14 +18970,14 @@ fn zirReify( } } - const ty = try Type.ptr(sema.arena, mod, .{ + const ty = try mod.ptrType(.{ .size = ptr_size, - .mutable = !is_const_val.toBool(mod), - .@"volatile" = is_volatile_val.toBool(mod), - .@"align" = abi_align, - .@"addrspace" = mod.toEnum(std.builtin.AddressSpace, address_space_val), - .pointee_type = try elem_ty.copy(sema.arena), - .@"allowzero" = is_allowzero_val.toBool(mod), + .is_const = is_const_val.toBool(mod), + .is_volatile = is_volatile_val.toBool(mod), + .alignment = abi_align, + .address_space = mod.toEnum(std.builtin.AddressSpace, address_space_val), + .elem_type = elem_ty.toIntern(), + .is_allowzero = is_allowzero_val.toBool(mod), .sentinel = actual_sentinel, }); return sema.addType(ty); @@ -19470,9 +19476,9 @@ fn zirReify( } const alignment = @intCast(u29, alignment_val.toUnsignedInt(mod)); if (alignment == target_util.defaultFunctionAlignment(target)) { - break :alignment 0; + break :alignment .none; } else { - break :alignment alignment; + break :alignment InternPool.Alignment.fromByteUnits(alignment); } }; const return_type = return_type_val.optionalValue(mod) orelse @@ -24291,8 +24297,7 @@ fn fieldPtr( const attr_ptr_ty = if (is_pointer_to) object_ty else object_ptr_ty; if (mem.eql(u8, field_name, "ptr")) { - const buf = try sema.arena.create(Type.SlicePtrFieldTypeBuffer); - const slice_ptr_ty = inner_ty.slicePtrFieldType(buf, mod); + const slice_ptr_ty = inner_ty.slicePtrFieldType(mod); const result_ty = try Type.ptr(sema.arena, mod, .{ .pointee_type = slice_ptr_ty, @@ -27914,7 +27919,7 @@ fn beginComptimePtrMutation( sema, block, src, - parent.ty.slicePtrFieldType(try sema.arena.create(Type.SlicePtrFieldTypeBuffer), mod), + parent.ty.slicePtrFieldType(mod), &val_ptr.castTag(.slice).?.data.ptr, ptr_elem_ty, parent.decl_ref_mut, @@ -27981,7 +27986,7 @@ fn beginComptimePtrMutation( sema, block, src, - parent.ty.slicePtrFieldType(try sema.arena.create(Type.SlicePtrFieldTypeBuffer), mod), + parent.ty.slicePtrFieldType(mod), &val_ptr.castTag(.slice).?.data.ptr, ptr_elem_ty, parent.decl_ref_mut, @@ -28363,7 +28368,7 @@ fn beginComptimePtrLoad( const slice_val = tv.val.castTag(.slice).?.data; deref.pointee = switch (field_index) { Value.Payload.Slice.ptr_index => TypedValue{ - .ty = field_ptr.container_ty.slicePtrFieldType(try sema.arena.create(Type.SlicePtrFieldTypeBuffer), mod), + .ty = field_ptr.container_ty.slicePtrFieldType(mod), .val = slice_val.ptr, }, Value.Payload.Slice.len_index => TypedValue{ @@ -29454,8 +29459,7 @@ fn analyzeSlicePtr( slice_ty: Type, ) CompileError!Air.Inst.Ref { const mod = sema.mod; - const buf = try sema.arena.create(Type.SlicePtrFieldTypeBuffer); - const result_ty = slice_ty.slicePtrFieldType(buf, mod); + const result_ty = slice_ty.slicePtrFieldType(mod); if (try sema.resolveMaybeUndefVal(slice)) |val| { if (val.isUndef(mod)) return sema.addConstUndef(result_ty); return sema.addConstant(result_ty, val.slicePtr()); @@ 
-31611,15 +31615,6 @@ pub fn resolveTypeRequiresComptime(sema: *Sema, ty: Type) CompileError!bool { .inferred_alloc_mut => unreachable, .inferred_alloc_const => unreachable, - .pointer => { - const child_ty = ty.childType(mod); - if (child_ty.zigTypeTag(mod) == .Fn) { - return mod.typeToFunc(child_ty).?.is_generic; - } else { - return sema.resolveTypeRequiresComptime(child_ty); - } - }, - .error_union => return sema.resolveTypeRequiresComptime(ty.errorUnionPayload()), }, else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { @@ -33048,7 +33043,6 @@ pub fn typeHasOnePossibleValue(sema: *Sema, ty: Type) CompileError!?Value { .error_set_merged, .error_union, .error_set_inferred, - .pointer, => return null, .inferred_alloc_const => unreachable, @@ -33604,12 +33598,6 @@ fn typePtrOrOptionalPtrTy(sema: *Sema, ty: Type) !?Type { }; switch (ty.tag()) { - .pointer => switch (ty.ptrSize(mod)) { - .Slice => return null, - .C => return ty.optionalChild(mod), - else => return ty, - }, - .inferred_alloc_const => unreachable, .inferred_alloc_mut => unreachable, @@ -33638,15 +33626,6 @@ pub fn typeRequiresComptime(sema: *Sema, ty: Type) CompileError!bool { .inferred_alloc_mut => unreachable, .inferred_alloc_const => unreachable, - .pointer => { - const child_ty = ty.childType(mod); - if (child_ty.zigTypeTag(mod) == .Fn) { - return mod.typeToFunc(child_ty).?.is_generic; - } else { - return sema.typeRequiresComptime(child_ty); - } - }, - .error_union => return sema.typeRequiresComptime(ty.errorUnionPayload()), }, else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { diff --git a/src/arch/aarch64/CodeGen.zig b/src/arch/aarch64/CodeGen.zig index dea5b63129..8b84189e18 100644 --- a/src/arch/aarch64/CodeGen.zig +++ b/src/arch/aarch64/CodeGen.zig @@ -3434,8 +3434,7 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void { const bin_op = self.air.instructions.items(.data)[inst].bin_op; const slice_ty = self.typeOf(bin_op.lhs); const result: MCValue = if (!slice_ty.isVolatilePtr(mod) and self.liveness.isUnused(inst)) .dead else result: { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = slice_ty.slicePtrFieldType(&buf, mod); + const ptr_ty = slice_ty.slicePtrFieldType(mod); const slice_mcv = try self.resolveInst(bin_op.lhs); const base_mcv = slicePtr(slice_mcv); diff --git a/src/arch/arm/CodeGen.zig b/src/arch/arm/CodeGen.zig index e84c4de981..a6a715c75d 100644 --- a/src/arch/arm/CodeGen.zig +++ b/src/arch/arm/CodeGen.zig @@ -2432,8 +2432,7 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void { const bin_op = self.air.instructions.items(.data)[inst].bin_op; const slice_ty = self.typeOf(bin_op.lhs); const result: MCValue = if (!slice_ty.isVolatilePtr(mod) and self.liveness.isUnused(inst)) .dead else result: { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = slice_ty.slicePtrFieldType(&buf, mod); + const ptr_ty = slice_ty.slicePtrFieldType(mod); const slice_mcv = try self.resolveInst(bin_op.lhs); const base_mcv = slicePtr(slice_mcv); diff --git a/src/arch/sparc64/CodeGen.zig b/src/arch/sparc64/CodeGen.zig index 9d58dd9f29..072d3ed098 100644 --- a/src/arch/sparc64/CodeGen.zig +++ b/src/arch/sparc64/CodeGen.zig @@ -2462,8 +2462,7 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void { const elem_ty = slice_ty.childType(mod); const elem_size = elem_ty.abiSize(mod); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf, mod); + const slice_ptr_field_type = 
slice_ty.slicePtrFieldType(mod); const index_lock: ?RegisterLock = if (index_mcv == .register) self.register_manager.lockRegAssumeUnused(index_mcv.register) diff --git a/src/arch/x86_64/CodeGen.zig b/src/arch/x86_64/CodeGen.zig index 149f872c9a..e83644269f 100644 --- a/src/arch/x86_64/CodeGen.zig +++ b/src/arch/x86_64/CodeGen.zig @@ -4052,8 +4052,7 @@ fn genSliceElemPtr(self: *Self, lhs: Air.Inst.Ref, rhs: Air.Inst.Ref) !MCValue { const elem_ty = slice_ty.childType(mod); const elem_size = elem_ty.abiSize(mod); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf, mod); + const slice_ptr_field_type = slice_ty.slicePtrFieldType(mod); const index_ty = self.typeOf(rhs); const index_mcv = try self.resolveInst(rhs); @@ -4082,8 +4081,7 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void { const bin_op = self.air.instructions.items(.data)[inst].bin_op; const slice_ty = self.typeOf(bin_op.lhs); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf, mod); + const slice_ptr_field_type = slice_ty.slicePtrFieldType(mod); const elem_ptr = try self.genSliceElemPtr(bin_op.lhs, bin_op.rhs); const dst_mcv = try self.allocRegOrMem(inst, false); try self.load(dst_mcv, slice_ptr_field_type, elem_ptr); @@ -4281,11 +4279,7 @@ fn airSetUnionTag(self: *Self, inst: Air.Inst.Index) !void { break :blk MCValue{ .register = reg }; } else ptr; - var ptr_tag_pl: Type.Payload.Pointer = .{ - .data = ptr_union_ty.ptrInfo(mod), - }; - ptr_tag_pl.data.pointee_type = tag_ty; - const ptr_tag_ty = Type.initPayload(&ptr_tag_pl.base); + const ptr_tag_ty = try mod.adjustPtrTypeChild(ptr_union_ty, tag_ty); try self.store(ptr_tag_ty, adjusted_ptr, tag); return self.finishAir(inst, .none, .{ bin_op.lhs, bin_op.rhs, .none }); @@ -8671,9 +8665,8 @@ fn isNull(self: *Self, inst: Air.Inst.Index, opt_ty: Type, opt_mcv: MCValue) !MC const pl_ty = opt_ty.optionalChild(mod); - var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined; const some_info: struct { off: i32, ty: Type } = if (opt_ty.optionalReprIsPayload(mod)) - .{ .off = 0, .ty = if (pl_ty.isSlice(mod)) pl_ty.slicePtrFieldType(&ptr_buf, mod) else pl_ty } + .{ .off = 0, .ty = if (pl_ty.isSlice(mod)) pl_ty.slicePtrFieldType(mod) else pl_ty } else .{ .off = @intCast(i32, pl_ty.abiSize(mod)), .ty = Type.bool }; @@ -8763,9 +8756,8 @@ fn isNullPtr(self: *Self, inst: Air.Inst.Index, ptr_ty: Type, ptr_mcv: MCValue) const opt_ty = ptr_ty.childType(mod); const pl_ty = opt_ty.optionalChild(mod); - var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined; const some_info: struct { off: i32, ty: Type } = if (opt_ty.optionalReprIsPayload(mod)) - .{ .off = 0, .ty = if (pl_ty.isSlice(mod)) pl_ty.slicePtrFieldType(&ptr_buf, mod) else pl_ty } + .{ .off = 0, .ty = if (pl_ty.isSlice(mod)) pl_ty.slicePtrFieldType(mod) else pl_ty } else .{ .off = @intCast(i32, pl_ty.abiSize(mod)), .ty = Type.bool }; @@ -10803,8 +10795,7 @@ fn airMemset(self: *Self, inst: Air.Inst.Index, safety: bool) !void { // here to elide it. 
switch (dst_ptr_ty.ptrSize(mod)) { .Slice => { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const slice_ptr_ty = dst_ptr_ty.slicePtrFieldType(&buf, mod); + const slice_ptr_ty = dst_ptr_ty.slicePtrFieldType(mod); // TODO: this only handles slices stored in the stack const ptr = dst_ptr; diff --git a/src/codegen.zig b/src/codegen.zig index 9eb294feac..8e145a3b32 100644 --- a/src/codegen.zig +++ b/src/codegen.zig @@ -347,8 +347,7 @@ pub fn generateSymbol( const slice = typed_value.val.castTag(.slice).?.data; // generate ptr - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const slice_ptr_field_type = typed_value.ty.slicePtrFieldType(&buf, mod); + const slice_ptr_field_type = typed_value.ty.slicePtrFieldType(mod); switch (try generateSymbol(bin_file, src_loc, .{ .ty = slice_ptr_field_type, .val = slice.ptr, @@ -850,10 +849,9 @@ fn lowerParentPtr( reloc_info.offset(@intCast(u32, switch (field_ptr.container_ty.zigTypeTag(mod)) { .Pointer => offset: { assert(field_ptr.container_ty.isSlice(mod)); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; break :offset switch (field_ptr.field_index) { 0 => 0, - 1 => field_ptr.container_ty.slicePtrFieldType(&buf, mod).abiSize(mod), + 1 => field_ptr.container_ty.slicePtrFieldType(mod).abiSize(mod), else => unreachable, }; }, @@ -952,8 +950,7 @@ fn lowerDeclRef( const mod = bin_file.options.module.?; if (typed_value.ty.isSlice(mod)) { // generate ptr - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const slice_ptr_field_type = typed_value.ty.slicePtrFieldType(&buf, mod); + const slice_ptr_field_type = typed_value.ty.slicePtrFieldType(mod); switch (try generateSymbol(bin_file, src_loc, .{ .ty = slice_ptr_field_type, .val = typed_value.val, diff --git a/src/codegen/c.zig b/src/codegen/c.zig index 601382c1fd..c2a108d68e 100644 --- a/src/codegen/c.zig +++ b/src/codegen/c.zig @@ -566,8 +566,7 @@ pub const DeclGen = struct { try writer.writeAll("){ .ptr = "); } - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - try dg.renderValue(writer, ty.slicePtrFieldType(&buf, mod), val.slicePtr(), .Initializer); + try dg.renderValue(writer, ty.slicePtrFieldType(mod), val.slicePtr(), .Initializer); const len_val = try mod.intValue(Type.usize, val.sliceLen(mod)); @@ -631,11 +630,7 @@ pub const DeclGen = struct { // Ensure complete type definition is visible before accessing fields. 
_ = try dg.typeToIndex(field_ptr.container_ty, .complete); - var container_ptr_pl: Type.Payload.Pointer = .{ - .data = ptr_ty.ptrInfo(mod), - }; - container_ptr_pl.data.pointee_type = field_ptr.container_ty; - const container_ptr_ty = Type.initPayload(&container_ptr_pl.base); + const container_ptr_ty = try mod.adjustPtrTypeChild(ptr_ty, field_ptr.container_ty); switch (fieldLocation( field_ptr.container_ty, @@ -661,11 +656,7 @@ pub const DeclGen = struct { try dg.writeCValue(writer, field); }, .byte_offset => |byte_offset| { - var u8_ptr_pl: Type.Payload.Pointer = .{ - .data = ptr_ty.ptrInfo(mod), - }; - u8_ptr_pl.data.pointee_type = Type.u8; - const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base); + const u8_ptr_ty = try mod.adjustPtrTypeChild(ptr_ty, Type.u8); const byte_offset_val = try mod.intValue(Type.usize, byte_offset); @@ -788,8 +779,7 @@ pub const DeclGen = struct { } try writer.writeAll("{("); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = ty.slicePtrFieldType(&buf, mod); + const ptr_ty = ty.slicePtrFieldType(mod); try dg.renderType(writer, ptr_ty); return writer.print("){x}, {0x}}}", .{try dg.fmtIntLiteral(Type.usize, val, .Other)}); } else { @@ -1068,10 +1058,9 @@ pub const DeclGen = struct { } const slice = val.castTag(.slice).?.data; - var buf: Type.SlicePtrFieldTypeBuffer = undefined; try writer.writeByte('{'); - try dg.renderValue(writer, ty.slicePtrFieldType(&buf, mod), slice.ptr, initializer_type); + try dg.renderValue(writer, ty.slicePtrFieldType(mod), slice.ptr, initializer_type); try writer.writeAll(", "); try dg.renderValue(writer, Type.usize, slice.len, initializer_type); try writer.writeByte('}'); @@ -1536,8 +1525,8 @@ pub const DeclGen = struct { switch (kind) { .forward => {}, - .complete => if (fn_info.alignment > 0) - try w.print(" zig_align_fn({})", .{fn_info.alignment}), + .complete => if (fn_info.alignment.toByteUnitsOptional()) |a| + try w.print(" zig_align_fn({})", .{a}), else => unreachable, } @@ -1561,8 +1550,8 @@ pub const DeclGen = struct { ); switch (kind) { - .forward => if (fn_info.alignment > 0) - try w.print(" zig_align_fn({})", .{fn_info.alignment}), + .forward => if (fn_info.alignment.toByteUnitsOptional()) |a| + try w.print(" zig_align_fn({})", .{a}), .complete => {}, else => unreachable, } @@ -4062,8 +4051,7 @@ fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue { try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs }); const inst_ty = f.typeOfIndex(inst); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = inst_ty.slicePtrFieldType(&buf, mod); + const ptr_ty = inst_ty.slicePtrFieldType(mod); const writer = f.object.writer(); const local = try f.allocLocal(inst, inst_ty); @@ -5047,7 +5035,6 @@ fn airIsNull( const operand_ty = f.typeOf(un_op); const optional_ty = if (is_ptr) operand_ty.childType(mod) else operand_ty; const payload_ty = optional_ty.optionalChild(mod); - var slice_ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined; const rhs = if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) TypedValue{ .ty = Type.bool, .val = Value.true } @@ -5058,7 +5045,7 @@ fn airIsNull( TypedValue{ .ty = payload_ty, .val = try mod.intValue(payload_ty, 0) } else if (payload_ty.isSlice(mod) and optional_ty.optionalReprIsPayload(mod)) rhs: { try writer.writeAll(".ptr"); - const slice_ptr_ty = payload_ty.slicePtrFieldType(&slice_ptr_buf, mod); + const slice_ptr_ty = payload_ty.slicePtrFieldType(mod); break :rhs TypedValue{ .ty = slice_ptr_ty, .val = Value.null }; } else rhs: { try writer.writeAll(".is_null"); @@ -5278,11 
+5265,7 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue { switch (fieldLocation(container_ty, field_ptr_ty, extra.field_index, mod)) { .begin => try f.writeCValue(writer, field_ptr_val, .Initializer), .field => |field| { - var u8_ptr_pl: Type.Payload.Pointer = .{ - .data = field_ptr_ty.ptrInfo(mod), - }; - u8_ptr_pl.data.pointee_type = Type.u8; - const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base); + const u8_ptr_ty = try mod.adjustPtrTypeChild(field_ptr_ty, Type.u8); try writer.writeAll("(("); try f.renderType(writer, u8_ptr_ty); @@ -5295,11 +5278,7 @@ fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue { try writer.writeAll("))"); }, .byte_offset => |byte_offset| { - var u8_ptr_pl: Type.Payload.Pointer = .{ - .data = field_ptr_ty.ptrInfo(mod), - }; - u8_ptr_pl.data.pointee_type = Type.u8; - const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base); + const u8_ptr_ty = try mod.adjustPtrTypeChild(field_ptr_ty, Type.u8); const byte_offset_val = try mod.intValue(Type.usize, byte_offset); @@ -5347,11 +5326,7 @@ fn fieldPtr( try f.writeCValueDerefMember(writer, container_ptr_val, field); }, .byte_offset => |byte_offset| { - var u8_ptr_pl: Type.Payload.Pointer = .{ - .data = field_ptr_ty.ptrInfo(mod), - }; - u8_ptr_pl.data.pointee_type = Type.u8; - const u8_ptr_ty = Type.initPayload(&u8_ptr_pl.base); + const u8_ptr_ty = try mod.adjustPtrTypeChild(field_ptr_ty, Type.u8); const byte_offset_val = try mod.intValue(Type.usize, byte_offset); @@ -5794,8 +5769,7 @@ fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue { // Unfortunately, C does not support any equivalent to // &(*(void *)p)[0], although LLVM does via GetElementPtr if (operand == .undef) { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - try f.writeCValue(writer, .{ .undef = inst_ty.slicePtrFieldType(&buf, mod) }, .Initializer); + try f.writeCValue(writer, .{ .undef = inst_ty.slicePtrFieldType(mod) }, .Initializer); } else if (array_ty.hasRuntimeBitsIgnoreComptime(mod)) { try writer.writeAll("&("); try f.writeCValueDeref(writer, operand); diff --git a/src/codegen/c/type.zig b/src/codegen/c/type.zig index a2af395a98..70426972af 100644 --- a/src/codegen/c/type.zig +++ b/src/codegen/c/type.zig @@ -1431,8 +1431,7 @@ pub const CType = extern union { .complete, .parameter, .global => try lookup.typeToIndex(ty, .forward), .payload => unreachable, }) |fwd_idx| { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = ty.slicePtrFieldType(&buf, mod); + const ptr_ty = ty.slicePtrFieldType(mod); if (try lookup.typeToIndex(ptr_ty, kind)) |ptr_idx| { self.storage = .{ .anon = undefined }; self.storage.anon.fields[0] = .{ diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig index 476f73cbe4..0c503edee4 100644 --- a/src/codegen/llvm.zig +++ b/src/codegen/llvm.zig @@ -1591,40 +1591,30 @@ pub const Object = struct { }, .Pointer => { // Normalize everything that the debug info does not represent. 
- const ptr_info = ty.ptrInfo(mod); + const ptr_info = ty.ptrInfoIp(mod.intern_pool); - if (ptr_info.sentinel != null or - ptr_info.@"addrspace" != .generic or + if (ptr_info.sentinel != .none or + ptr_info.address_space != .generic or ptr_info.bit_offset != 0 or ptr_info.host_size != 0 or ptr_info.vector_index != .none or - ptr_info.@"allowzero" or - !ptr_info.mutable or - ptr_info.@"volatile" or + ptr_info.is_allowzero or + ptr_info.is_const or + ptr_info.is_volatile or ptr_info.size == .Many or ptr_info.size == .C or - !ptr_info.pointee_type.hasRuntimeBitsIgnoreComptime(mod)) + !ptr_info.elem_type.toType().hasRuntimeBitsIgnoreComptime(mod)) { - var payload: Type.Payload.Pointer = .{ - .data = .{ - .pointee_type = ptr_info.pointee_type, - .sentinel = null, - .@"align" = ptr_info.@"align", - .@"addrspace" = .generic, - .bit_offset = 0, - .host_size = 0, - .@"allowzero" = false, - .mutable = true, - .@"volatile" = false, - .size = switch (ptr_info.size) { - .Many, .C, .One => .One, - .Slice => .Slice, - }, + const bland_ptr_ty = try mod.ptrType(.{ + .elem_type = if (!ptr_info.elem_type.toType().hasRuntimeBitsIgnoreComptime(mod)) + .anyopaque_type + else + ptr_info.elem_type, + .alignment = ptr_info.alignment, + .size = switch (ptr_info.size) { + .Many, .C, .One => .One, + .Slice => .Slice, }, - }; - if (!ptr_info.pointee_type.hasRuntimeBitsIgnoreComptime(mod)) { - payload.data.pointee_type = Type.anyopaque; - } - const bland_ptr_ty = Type.initPayload(&payload.base); + }); const ptr_di_ty = try o.lowerDebugType(bland_ptr_ty, resolve); // The recursive call to `lowerDebugType` means we can't use `gop` anymore. try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.init(ptr_di_ty, resolve), .{ .mod = o.module }); @@ -1632,8 +1622,7 @@ pub const Object = struct { } if (ty.isSlice(mod)) { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = ty.slicePtrFieldType(&buf, mod); + const ptr_ty = ty.slicePtrFieldType(mod); const len_ty = Type.usize; const name = try ty.nameAlloc(gpa, o.module); @@ -1711,7 +1700,7 @@ pub const Object = struct { return full_di_ty; } - const elem_di_ty = try o.lowerDebugType(ptr_info.pointee_type, .fwd); + const elem_di_ty = try o.lowerDebugType(ptr_info.elem_type.toType(), .fwd); const name = try ty.nameAlloc(gpa, o.module); defer gpa.free(name); const ptr_di_ty = dib.createPointerType( @@ -2625,8 +2614,8 @@ pub const DeclGen = struct { }, } - if (fn_info.alignment != 0) { - llvm_fn.setAlignment(@intCast(c_uint, fn_info.alignment)); + if (fn_info.alignment.toByteUnitsOptional()) |a| { + llvm_fn.setAlignment(@intCast(c_uint, a)); } // Function attributes that are independent of analysis results of the function body. 
@@ -2819,8 +2808,7 @@ pub const DeclGen = struct { .Bool => return dg.context.intType(1), .Pointer => { if (t.isSlice(mod)) { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_type = t.slicePtrFieldType(&buf, mod); + const ptr_type = t.slicePtrFieldType(mod); const fields: [2]*llvm.Type = .{ try dg.lowerType(ptr_type), @@ -3176,11 +3164,10 @@ pub const DeclGen = struct { }, .slice => { const param_ty = fn_info.param_types[it.zig_index - 1].toType(); - var buf: Type.SlicePtrFieldTypeBuffer = undefined; const ptr_ty = if (param_ty.zigTypeTag(mod) == .Optional) - param_ty.optionalChild(mod).slicePtrFieldType(&buf, mod) + param_ty.optionalChild(mod).slicePtrFieldType(mod) else - param_ty.slicePtrFieldType(&buf, mod); + param_ty.slicePtrFieldType(mod); const ptr_llvm_ty = try dg.lowerType(ptr_ty); const len_llvm_ty = try dg.lowerType(Type.usize); @@ -3368,10 +3355,9 @@ pub const DeclGen = struct { }, .slice => { const slice = tv.val.castTag(.slice).?.data; - var buf: Type.SlicePtrFieldTypeBuffer = undefined; const fields: [2]*llvm.Value = .{ try dg.lowerValue(.{ - .ty = tv.ty.slicePtrFieldType(&buf, mod), + .ty = tv.ty.slicePtrFieldType(mod), .val = slice.ptr, }), try dg.lowerValue(.{ @@ -4171,8 +4157,7 @@ pub const DeclGen = struct { ) Error!*llvm.Value { const mod = self.module; if (tv.ty.isSlice(mod)) { - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = tv.ty.slicePtrFieldType(&buf, mod); + const ptr_ty = tv.ty.slicePtrFieldType(mod); const fields: [2]*llvm.Value = .{ try self.lowerValue(.{ .ty = ptr_ty, @@ -6043,17 +6028,14 @@ pub const FuncGen = struct { const field_ptr = self.builder.buildStructGEP(struct_llvm_ty, struct_llvm_val, llvm_field.index, ""); const field_ptr_ty = try mod.ptrType(.{ .elem_type = llvm_field.ty.ip_index, - .alignment = llvm_field.alignment, + .alignment = InternPool.Alignment.fromNonzeroByteUnits(llvm_field.alignment), }); if (isByRef(field_ty, mod)) { if (canElideLoad(self, body_tail)) return field_ptr; - const field_alignment = if (llvm_field.alignment != 0) - llvm_field.alignment - else - llvm_field.ty.abiAlignment(mod); - return self.loadByRef(field_ptr, field_ty, field_alignment, false); + assert(llvm_field.alignment != 0); + return self.loadByRef(field_ptr, field_ty, llvm_field.alignment, false); } else { return self.load(field_ptr, field_ptr_ty); } @@ -6151,7 +6133,7 @@ pub const FuncGen = struct { const fn_ty = try mod.funcType(.{ .param_types = &.{}, .return_type = .void_type, - .alignment = 0, + .alignment = .none, .noalias_bits = 0, .comptime_bits = 0, .cc = .Unspecified, @@ -6655,8 +6637,7 @@ pub const FuncGen = struct { operand; if (payload_ty.isSlice(mod)) { const slice_ptr = self.builder.buildExtractValue(loaded, 0, ""); - var slice_buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = try self.dg.lowerType(payload_ty.slicePtrFieldType(&slice_buf, mod)); + const ptr_ty = try self.dg.lowerType(payload_ty.slicePtrFieldType(mod)); return self.builder.buildICmp(pred, slice_ptr, ptr_ty.constNull(), ""); } return self.builder.buildICmp(pred, loaded, optional_llvm_ty.constNull(), ""); @@ -6923,7 +6904,7 @@ pub const FuncGen = struct { const field_ptr = self.builder.buildStructGEP(struct_llvm_ty, self.err_ret_trace.?, llvm_field.index, ""); const field_ptr_ty = try mod.ptrType(.{ .elem_type = llvm_field.ty.ip_index, - .alignment = llvm_field.alignment, + .alignment = InternPool.Alignment.fromNonzeroByteUnits(llvm_field.alignment), }); return self.load(field_ptr, field_ptr_ty); } @@ -9319,14 +9300,12 @@ pub const 
FuncGen = struct { const llvm_i = llvmField(result_ty, i, mod).?.index; indices[1] = llvm_u32.constInt(llvm_i, .False); const field_ptr = self.builder.buildInBoundsGEP(llvm_result_ty, alloca_inst, &indices, indices.len, ""); - var field_ptr_payload: Type.Payload.Pointer = .{ - .data = .{ - .pointee_type = self.typeOf(elem), - .@"align" = result_ty.structFieldAlign(i, mod), - .@"addrspace" = .generic, - }, - }; - const field_ptr_ty = Type.initPayload(&field_ptr_payload.base); + const field_ptr_ty = try mod.ptrType(.{ + .elem_type = self.typeOf(elem).toIntern(), + .alignment = InternPool.Alignment.fromNonzeroByteUnits( + result_ty.structFieldAlign(i, mod), + ), + }); try self.store(field_ptr, field_ptr_ty, llvm_elem, .NotAtomic); } @@ -9350,13 +9329,9 @@ pub const FuncGen = struct { const alloca_inst = self.buildAlloca(llvm_result_ty, result_ty.abiAlignment(mod)); const array_info = result_ty.arrayInfo(mod); - var elem_ptr_payload: Type.Payload.Pointer = .{ - .data = .{ - .pointee_type = array_info.elem_type, - .@"addrspace" = .generic, - }, - }; - const elem_ptr_ty = Type.initPayload(&elem_ptr_payload.base); + const elem_ptr_ty = try mod.ptrType(.{ + .elem_type = array_info.elem_type.toIntern(), + }); for (elements, 0..) |elem, i| { const indices: [2]*llvm.Value = .{ @@ -9476,14 +9451,10 @@ pub const FuncGen = struct { // tag and the payload. const index_type = self.context.intType(32); - var field_ptr_payload: Type.Payload.Pointer = .{ - .data = .{ - .pointee_type = field.ty, - .@"align" = field_align, - .@"addrspace" = .generic, - }, - }; - const field_ptr_ty = Type.initPayload(&field_ptr_payload.base); + const field_ptr_ty = try mod.ptrType(.{ + .elem_type = field.ty.toIntern(), + .alignment = InternPool.Alignment.fromNonzeroByteUnits(field_align), + }); if (layout.tag_size == 0) { const indices: [3]*llvm.Value = .{ index_type.constNull(), diff --git a/src/codegen/spirv.zig b/src/codegen/spirv.zig index 777bb1cff9..eada74e6d4 100644 --- a/src/codegen/spirv.zig +++ b/src/codegen/spirv.zig @@ -669,8 +669,7 @@ pub const DeclGen = struct { .slice => { const slice = val.castTag(.slice).?.data; - var buf: Type.SlicePtrFieldTypeBuffer = undefined; - const ptr_ty = ty.slicePtrFieldType(&buf, mod); + const ptr_ty = ty.slicePtrFieldType(mod); try self.lower(ptr_ty, slice.ptr); try self.addInt(Type.usize, slice.len); @@ -2991,9 +2990,8 @@ pub const DeclGen = struct { if (optional_ty.optionalReprIsPayload(mod)) { // Pointer payload represents nullability: pointer or slice. 
- var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined; const ptr_ty = if (payload_ty.isSlice(mod)) - payload_ty.slicePtrFieldType(&ptr_buf, mod) + payload_ty.slicePtrFieldType(mod) else payload_ty; diff --git a/src/link/Dwarf.zig b/src/link/Dwarf.zig index 92ea2a15dc..f4f19f30d0 100644 --- a/src/link/Dwarf.zig +++ b/src/link/Dwarf.zig @@ -277,8 +277,7 @@ pub const DeclState = struct { // DW.AT.type, DW.FORM.ref4 var index = dbg_info_buffer.items.len; try dbg_info_buffer.resize(index + 4); - var buf = try arena.create(Type.SlicePtrFieldTypeBuffer); - const ptr_ty = ty.slicePtrFieldType(buf, mod); + const ptr_ty = ty.slicePtrFieldType(mod); try self.addTypeRelocGlobal(atom_index, ptr_ty, @intCast(u32, index)); // DW.AT.data_member_location, DW.FORM.udata try dbg_info_buffer.ensureUnusedCapacity(6); diff --git a/src/type.zig b/src/type.zig index 735d532c46..ebe3d52b05 100644 --- a/src/type.zig +++ b/src/type.zig @@ -42,7 +42,6 @@ pub const Type = struct { .error_set_merged, => return .ErrorSet, - .pointer, .inferred_alloc_const, .inferred_alloc_mut, => return .Pointer, @@ -250,17 +249,9 @@ pub const Type = struct { return elem_ty; } + /// Asserts the type is a pointer. pub fn ptrIsMutable(ty: Type, mod: *const Module) bool { - return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => ty.castTag(.pointer).?.data.mutable, - else => unreachable, - }, - else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { - .ptr_type => |ptr_type| !ptr_type.is_const, - else => unreachable, - }, - }; + return !mod.intern_pool.indexToKey(ty.ip_index).ptr_type.is_const; } pub const ArrayInfo = struct { @@ -277,24 +268,21 @@ pub const Type = struct { }; } - pub fn ptrInfo(ty: Type, mod: *const Module) Payload.Pointer.Data { - return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => ty.castTag(.pointer).?.data, - - else => unreachable, - }, - else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { - .ptr_type => |p| Payload.Pointer.Data.fromKey(p), - .opt_type => |child| switch (mod.intern_pool.indexToKey(child)) { - .ptr_type => |p| Payload.Pointer.Data.fromKey(p), - else => unreachable, - }, + pub fn ptrInfoIp(ty: Type, ip: InternPool) InternPool.Key.PtrType { + return switch (ip.indexToKey(ty.ip_index)) { + .ptr_type => |p| p, + .opt_type => |child| switch (ip.indexToKey(child)) { + .ptr_type => |p| p, else => unreachable, }, + else => unreachable, }; } + pub fn ptrInfo(ty: Type, mod: *const Module) Payload.Pointer.Data { + return Payload.Pointer.Data.fromKey(ptrInfoIp(ty, mod.intern_pool)); + } + pub fn eql(a: Type, b: Type, mod: *Module) bool { if (a.ip_index != .none or b.ip_index != .none) { // The InternPool data structure hashes based on Key to make interned objects @@ -335,7 +323,6 @@ pub const Type = struct { return true; }, - .pointer, .inferred_alloc_const, .inferred_alloc_mut, => { @@ -434,7 +421,6 @@ pub const Type = struct { std.hash.autoHash(hasher, ies); }, - .pointer, .inferred_alloc_const, .inferred_alloc_mut, => { @@ -512,26 +498,6 @@ pub const Type = struct { .inferred_alloc_mut, => unreachable, - .pointer => { - const payload = self.castTag(.pointer).?.data; - const sent: ?Value = if (payload.sentinel) |some| - try some.copy(allocator) - else - null; - return Tag.pointer.create(allocator, .{ - .pointee_type = try payload.pointee_type.copy(allocator), - .sentinel = sent, - .@"align" = payload.@"align", - .@"addrspace" = payload.@"addrspace", - .bit_offset = payload.bit_offset, - .host_size = payload.host_size, - .vector_index = payload.vector_index, - 
.@"allowzero" = payload.@"allowzero", - .mutable = payload.mutable, - .@"volatile" = payload.@"volatile", - .size = payload.size, - }); - }, .error_union => { const payload = self.castTag(.error_union).?.data; return Tag.error_union.create(allocator, .{ @@ -623,41 +589,6 @@ pub const Type = struct { while (true) { const t = ty.tag(); switch (t) { - .pointer => { - const payload = ty.castTag(.pointer).?.data; - if (payload.sentinel) |some| switch (payload.size) { - .One, .C => unreachable, - .Many => try writer.print("[*:{}]", .{some.fmtDebug()}), - .Slice => try writer.print("[:{}]", .{some.fmtDebug()}), - } else switch (payload.size) { - .One => try writer.writeAll("*"), - .Many => try writer.writeAll("[*]"), - .C => try writer.writeAll("[*c]"), - .Slice => try writer.writeAll("[]"), - } - if (payload.@"align" != 0 or payload.host_size != 0 or payload.vector_index != .none) { - try writer.print("align({d}", .{payload.@"align"}); - - if (payload.bit_offset != 0 or payload.host_size != 0) { - try writer.print(":{d}:{d}", .{ payload.bit_offset, payload.host_size }); - } - if (payload.vector_index == .runtime) { - try writer.writeAll(":?"); - } else if (payload.vector_index != .none) { - try writer.print(":{d}", .{@enumToInt(payload.vector_index)}); - } - try writer.writeAll(") "); - } - if (payload.@"addrspace" != .generic) { - try writer.print("addrspace(.{s}) ", .{@tagName(payload.@"addrspace")}); - } - if (!payload.mutable) try writer.writeAll("const "); - if (payload.@"volatile") try writer.writeAll("volatile "); - if (payload.@"allowzero" and payload.size != .C) try writer.writeAll("allowzero "); - - ty = payload.pointee_type; - continue; - }, .error_union => { const payload = ty.castTag(.error_union).?.data; try payload.error_set.dump("", .{}, writer); @@ -734,47 +665,6 @@ pub const Type = struct { try print(error_union.payload, writer, mod); }, - .pointer => { - const info = ty.ptrInfo(mod); - - if (info.sentinel) |s| switch (info.size) { - .One, .C => unreachable, - .Many => try writer.print("[*:{}]", .{s.fmtValue(info.pointee_type, mod)}), - .Slice => try writer.print("[:{}]", .{s.fmtValue(info.pointee_type, mod)}), - } else switch (info.size) { - .One => try writer.writeAll("*"), - .Many => try writer.writeAll("[*]"), - .C => try writer.writeAll("[*c]"), - .Slice => try writer.writeAll("[]"), - } - if (info.@"align" != 0 or info.host_size != 0 or info.vector_index != .none) { - if (info.@"align" != 0) { - try writer.print("align({d}", .{info.@"align"}); - } else { - const alignment = info.pointee_type.abiAlignment(mod); - try writer.print("align({d}", .{alignment}); - } - - if (info.bit_offset != 0 or info.host_size != 0) { - try writer.print(":{d}:{d}", .{ info.bit_offset, info.host_size }); - } - if (info.vector_index == .runtime) { - try writer.writeAll(":?"); - } else if (info.vector_index != .none) { - try writer.print(":{d}", .{@enumToInt(info.vector_index)}); - } - try writer.writeAll(") "); - } - if (info.@"addrspace" != .generic) { - try writer.print("addrspace(.{s}) ", .{@tagName(info.@"addrspace")}); - } - if (!info.mutable) try writer.writeAll("const "); - if (info.@"volatile") try writer.writeAll("volatile "); - if (info.@"allowzero" and info.size != .C) try writer.writeAll("allowzero "); - - try print(info.pointee_type, writer, mod); - }, - .error_set => { const names = ty.castTag(.error_set).?.data.names.keys(); try writer.writeAll("error{"); @@ -951,8 +841,8 @@ pub const Type = struct { try writer.writeAll("..."); } try writer.writeAll(") "); - if (fn_info.alignment 
!= 0) { - try writer.print("align({d}) ", .{fn_info.alignment}); + if (fn_info.alignment.toByteUnitsOptional()) |a| { + try writer.print("align({d}) ", .{a}); } if (fn_info.cc != .Unspecified) { try writer.writeAll("callconv(."); @@ -1032,20 +922,6 @@ pub const Type = struct { .error_set_merged, => return true, - // Pointers to zero-bit types still have a runtime address; however, pointers - // to comptime-only types do not, with the exception of function pointers. - .pointer => { - if (ignore_comptime_only) { - return true; - } else if (ty.childType(mod).zigTypeTag(mod) == .Fn) { - return !mod.typeToFunc(ty.childType(mod)).?.is_generic; - } else if (strat == .sema) { - return !(try strat.sema.typeRequiresComptime(ty)); - } else { - return !comptimeOnly(ty, mod); - } - }, - .inferred_alloc_const => unreachable, .inferred_alloc_mut => unreachable, }, @@ -1231,8 +1107,6 @@ pub const Type = struct { .empty_struct_type => false, .none => switch (ty.tag()) { - .pointer => true, - .error_set, .error_set_single, .error_set_inferred, @@ -1410,51 +1284,27 @@ pub const Type = struct { } pub fn ptrAlignmentAdvanced(ty: Type, mod: *Module, opt_sema: ?*Sema) !u32 { - switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => { - const ptr_info = ty.castTag(.pointer).?.data; - if (ptr_info.@"align" != 0) { - return ptr_info.@"align"; - } else if (opt_sema) |sema| { - const res = try ptr_info.pointee_type.abiAlignmentAdvanced(mod, .{ .sema = sema }); - return res.scalar; - } else { - return (ptr_info.pointee_type.abiAlignmentAdvanced(mod, .eager) catch unreachable).scalar; - } - }, - - else => unreachable, - }, - else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { - .ptr_type => |ptr_type| { - if (ptr_type.alignment != 0) { - return @intCast(u32, ptr_type.alignment); - } else if (opt_sema) |sema| { - const res = try ptr_type.elem_type.toType().abiAlignmentAdvanced(mod, .{ .sema = sema }); - return res.scalar; - } else { - return (ptr_type.elem_type.toType().abiAlignmentAdvanced(mod, .eager) catch unreachable).scalar; - } - }, - .opt_type => |child| return child.toType().ptrAlignmentAdvanced(mod, opt_sema), - else => unreachable, + return switch (mod.intern_pool.indexToKey(ty.ip_index)) { + .ptr_type => |ptr_type| { + if (ptr_type.alignment.toByteUnitsOptional()) |a| { + return @intCast(u32, a); + } else if (opt_sema) |sema| { + const res = try ptr_type.elem_type.toType().abiAlignmentAdvanced(mod, .{ .sema = sema }); + return res.scalar; + } else { + return (ptr_type.elem_type.toType().abiAlignmentAdvanced(mod, .eager) catch unreachable).scalar; + } }, - } + .opt_type => |child| child.toType().ptrAlignmentAdvanced(mod, opt_sema), + else => unreachable, + }; } pub fn ptrAddressSpace(ty: Type, mod: *const Module) std.builtin.AddressSpace { - return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => ty.castTag(.pointer).?.data.@"addrspace", - - else => unreachable, - }, - else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { - .ptr_type => |ptr_type| ptr_type.address_space, - .opt_type => |child| mod.intern_pool.indexToKey(child).ptr_type.address_space, - else => unreachable, - }, + return switch (mod.intern_pool.indexToKey(ty.ip_index)) { + .ptr_type => |ptr_type| ptr_type.address_space, + .opt_type => |child| mod.intern_pool.indexToKey(child).ptr_type.address_space, + else => unreachable, }; } @@ -1504,7 +1354,6 @@ pub const Type = struct { switch (ty.ip_index) { .empty_struct_type => return AbiAlignmentAdvanced{ .scalar = 0 }, .none => switch (ty.tag()) { - .pointer 
=> return AbiAlignmentAdvanced{ .scalar = @divExact(target.ptrBitWidth(), 8) }, // TODO revisit this when we have the concept of the error tag type .error_set_inferred, @@ -1541,10 +1390,11 @@ pub const Type = struct { .opt_type => return abiAlignmentAdvancedOptional(ty, mod, strat), .error_union_type => return abiAlignmentAdvancedErrorUnion(ty, mod, strat), // represents machine code; not a pointer - .func_type => |func_type| { - const alignment = @intCast(u32, func_type.alignment); - if (alignment != 0) return AbiAlignmentAdvanced{ .scalar = alignment }; - return AbiAlignmentAdvanced{ .scalar = target_util.defaultFunctionAlignment(target) }; + .func_type => |func_type| return AbiAlignmentAdvanced{ + .scalar = if (func_type.alignment.toByteUnitsOptional()) |a| + @intCast(u32, a) + else + target_util.defaultFunctionAlignment(target), }, .simple_type => |t| switch (t) { @@ -1882,11 +1732,6 @@ pub const Type = struct { .inferred_alloc_const => unreachable, .inferred_alloc_mut => unreachable, - .pointer => switch (ty.castTag(.pointer).?.data.size) { - .Slice => return AbiSizeAdvanced{ .scalar = @divExact(target.ptrBitWidth(), 8) * 2 }, - else => return AbiSizeAdvanced{ .scalar = @divExact(target.ptrBitWidth(), 8) }, - }, - // TODO revisit this when we have the concept of the error tag type .error_set_inferred, .error_set, @@ -2201,11 +2046,6 @@ pub const Type = struct { .inferred_alloc_const => unreachable, .inferred_alloc_mut => unreachable, - .pointer => switch (ty.castTag(.pointer).?.data.size) { - .Slice => return target.ptrBitWidth() * 2, - else => return target.ptrBitWidth(), - }, - .error_set, .error_set_single, .error_set_inferred, @@ -2384,8 +2224,6 @@ pub const Type = struct { .inferred_alloc_mut, => true, - .pointer => ty.castTag(.pointer).?.data.size == .One, - else => false, }, else => return switch (mod.intern_pool.indexToKey(ty.ip_index)) { @@ -2408,8 +2246,6 @@ pub const Type = struct { .inferred_alloc_mut, => .One, - .pointer => ty.castTag(.pointer).?.data.size, - else => null, }, else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { @@ -2421,10 +2257,7 @@ pub const Type = struct { pub fn isSlice(ty: Type, mod: *const Module) bool { return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => ty.castTag(.pointer).?.data.size == .Slice, - else => false, - }, + .none => false, else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { .ptr_type => |ptr_type| ptr_type.size == .Slice, else => false, @@ -2432,50 +2265,14 @@ pub const Type = struct { }; } - pub const SlicePtrFieldTypeBuffer = union { - pointer: Payload.Pointer, - }; - - pub fn slicePtrFieldType(ty: Type, buffer: *SlicePtrFieldTypeBuffer, mod: *const Module) Type { - switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => { - const payload = ty.castTag(.pointer).?.data; - assert(payload.size == .Slice); - - buffer.* = .{ - .pointer = .{ - .data = .{ - .pointee_type = payload.pointee_type, - .sentinel = payload.sentinel, - .@"align" = payload.@"align", - .@"addrspace" = payload.@"addrspace", - .bit_offset = payload.bit_offset, - .host_size = payload.host_size, - .vector_index = payload.vector_index, - .@"allowzero" = payload.@"allowzero", - .mutable = payload.mutable, - .@"volatile" = payload.@"volatile", - .size = .Many, - }, - }, - }; - return Type.initPayload(&buffer.pointer.base); - }, - - else => unreachable, - }, - else => return mod.intern_pool.slicePtrType(ty.ip_index).toType(), - } + pub fn slicePtrFieldType(ty: Type, mod: *const Module) Type { + return 
mod.intern_pool.slicePtrType(ty.ip_index).toType(); } pub fn isConstPtr(ty: Type, mod: *const Module) bool { return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => !ty.castTag(.pointer).?.data.mutable, - else => false, - }, - else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { + .none => false, + else => return switch (mod.intern_pool.indexToKey(ty.ip_index)) { .ptr_type => |ptr_type| ptr_type.is_const, else => false, }, @@ -2488,10 +2285,7 @@ pub const Type = struct { pub fn isVolatilePtrIp(ty: Type, ip: InternPool) bool { return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => ty.castTag(.pointer).?.data.@"volatile", - else => false, - }, + .none => false, else => switch (ip.indexToKey(ty.ip_index)) { .ptr_type => |ptr_type| ptr_type.is_volatile, else => false, @@ -2501,12 +2295,10 @@ pub const Type = struct { pub fn isAllowzeroPtr(ty: Type, mod: *const Module) bool { return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => ty.castTag(.pointer).?.data.@"allowzero", - else => ty.zigTypeTag(mod) == .Optional, - }, + .none => false, else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { .ptr_type => |ptr_type| ptr_type.is_allowzero, + .opt_type => true, else => false, }, }; @@ -2514,10 +2306,7 @@ pub const Type = struct { pub fn isCPtr(ty: Type, mod: *const Module) bool { return switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => ty.castTag(.pointer).?.data.size == .C, - else => false, - }, + .none => false, else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { .ptr_type => |ptr_type| ptr_type.size == .C, else => false, @@ -2526,16 +2315,9 @@ pub const Type = struct { } pub fn isPtrAtRuntime(ty: Type, mod: *const Module) bool { - switch (ty.ip_index) { - .none => switch (ty.tag()) { - .pointer => switch (ty.castTag(.pointer).?.data.size) { - .Slice => return false, - .One, .Many, .C => return true, - }, - - else => return false, - }, - else => return switch (mod.intern_pool.indexToKey(ty.ip_index)) { + return switch (ty.ip_index) { + .none => false, + else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { .ptr_type => |ptr_type| switch (ptr_type.size) { .Slice => false, .One, .Many, .C => true, @@ -2549,7 +2331,7 @@ pub const Type = struct { }, else => false, }, - } + }; } /// For pointer-like optionals, returns true, otherwise returns the allowzero property @@ -2563,47 +2345,43 @@ pub const Type = struct { /// See also `isPtrLikeOptional`. pub fn optionalReprIsPayload(ty: Type, mod: *const Module) bool { - if (ty.ip_index != .none) return switch (mod.intern_pool.indexToKey(ty.ip_index)) { - .opt_type => |child| switch (child.toType().zigTypeTag(mod)) { - .Pointer => { - const info = child.toType().ptrInfo(mod); - switch (info.size) { - .C => return false, - else => return !info.@"allowzero", - } + return switch (ty.ip_index) { + .none => false, + else => switch (mod.intern_pool.indexToKey(ty.ip_index)) { + .opt_type => |child| switch (child.toType().zigTypeTag(mod)) { + .Pointer => { + const info = child.toType().ptrInfo(mod); + return switch (info.size) { + .C => false, + else => !info.@"allowzero", + }; + }, + .ErrorSet => true, + else => false, }, - .ErrorSet => true, else => false, }, - else => false, }; - switch (ty.tag()) { - .pointer => return ty.castTag(.pointer).?.data.size == .C, - - else => return false, - } } /// Returns true if the type is optional and would be lowered to a single pointer /// address value, using 0 for null. Note that this returns true for C pointers. 
@@ -2614,14 +2392,7 @@ pub const Type = struct {
     }

     pub fn childTypeIp(ty: Type, ip: InternPool) Type {
-        return switch (ty.ip_index) {
-            .none => switch (ty.tag()) {
-                .pointer => ty.castTag(.pointer).?.data.pointee_type,
-
-                else => unreachable,
-            },
-            else => ip.childType(ty.ip_index).toType(),
-        };
+        return ip.childType(ty.ip_index).toType();
     }

     /// For *[N]T, returns T.
@@ -2634,34 +2405,19 @@ pub const Type = struct {
     /// For []T, returns T.
     /// For anyframe->T, returns T.
     pub fn elemType2(ty: Type, mod: *const Module) Type {
-        return switch (ty.ip_index) {
-            .none => switch (ty.tag()) {
-                .pointer => {
-                    const info = ty.castTag(.pointer).?.data;
-                    const child_ty = info.pointee_type;
-                    if (info.size == .One) {
-                        return child_ty.shallowElemType(mod);
-                    } else {
-                        return child_ty;
-                    }
-                },
-
-                else => unreachable,
+        return switch (mod.intern_pool.indexToKey(ty.ip_index)) {
+            .ptr_type => |ptr_type| switch (ptr_type.size) {
+                .One => ptr_type.elem_type.toType().shallowElemType(mod),
+                .Many, .C, .Slice => ptr_type.elem_type.toType(),
             },
-            else => switch (mod.intern_pool.indexToKey(ty.ip_index)) {
-                .ptr_type => |ptr_type| switch (ptr_type.size) {
-                    .One => ptr_type.elem_type.toType().shallowElemType(mod),
-                    .Many, .C, .Slice => ptr_type.elem_type.toType(),
-                },
-                .anyframe_type => |child| {
-                    assert(child != .none);
-                    return child.toType();
-                },
-                .vector_type => |vector_type| vector_type.child.toType(),
-                .array_type => |array_type| array_type.child.toType(),
-                .opt_type => |child| mod.intern_pool.childType(child).toType(),
-                else => unreachable,
+            .anyframe_type => |child| {
+                assert(child != .none);
+                return child.toType();
             },
+            .vector_type => |vector_type| vector_type.child.toType(),
+            .array_type => |array_type| array_type.child.toType(),
+            .opt_type => |child| mod.intern_pool.childType(child).toType(),
+            else => unreachable,
         };
     }

@@ -2683,21 +2439,13 @@ pub const Type = struct {
     /// Asserts that the type is an optional.
     /// Note that for C pointers this returns the type unmodified.
     pub fn optionalChild(ty: Type, mod: *const Module) Type {
-        return switch (ty.ip_index) {
-            .none => switch (ty.tag()) {
-                .pointer, // here we assume it is a C pointer
-                => return ty,
-
-                else => unreachable,
-            },
-            else => switch (mod.intern_pool.indexToKey(ty.ip_index)) {
-                .opt_type => |child| child.toType(),
-                .ptr_type => |ptr_type| b: {
-                    assert(ptr_type.size == .C);
-                    break :b ty;
-                },
-                else => unreachable,
+        return switch (mod.intern_pool.indexToKey(ty.ip_index)) {
+            .opt_type => |child| child.toType(),
+            .ptr_type => |ptr_type| b: {
+                assert(ptr_type.size == .C);
+                break :b ty;
            },
+            else => unreachable,
         };
     }
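`childTypeIp` and `elemType2` now read the element type straight out of the `InternPool` key; only a single pointer to an array (`*[N]T`) takes the extra `shallowElemType` step to peel the array. A hedged userland illustration of the same rule, via `std.meta.Elem`:

```zig
const std = @import("std");
const expect = std.testing.expect;

test "element type seen through one level of pointer" {
    var array = [_]u32{ 1, 2, 3 };
    const ptr_to_array: *[3]u32 = &array;
    const slice: []u32 = &array;

    // *[N]T and []T both ultimately hand out T elements.
    try expect(@TypeOf(ptr_to_array[0]) == u32);
    try expect(@TypeOf(slice[0]) == u32);

    // std.meta.Elem expresses the same rule in userland.
    try expect(std.meta.Elem(*[3]u32) == u32);
    try expect(std.meta.Elem([]u32) == u32);
    try expect(std.meta.Elem([*]const u8) == u8);
}
```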
@@ -2921,23 +2669,16 @@ pub const Type = struct {

     /// Asserts the type is an array, pointer or vector.
     pub fn sentinel(ty: Type, mod: *const Module) ?Value {
-        return switch (ty.ip_index) {
-            .none => switch (ty.tag()) {
-                .pointer => ty.castTag(.pointer).?.data.sentinel,
-
-                else => unreachable,
-            },
-            else => switch (mod.intern_pool.indexToKey(ty.ip_index)) {
-                .vector_type,
-                .struct_type,
-                .anon_struct_type,
-                => null,
+        return switch (mod.intern_pool.indexToKey(ty.ip_index)) {
+            .vector_type,
+            .struct_type,
+            .anon_struct_type,
+            => null,

-                .array_type => |t| if (t.sentinel != .none) t.sentinel.toValue() else null,
-                .ptr_type => |t| if (t.sentinel != .none) t.sentinel.toValue() else null,
+            .array_type => |t| if (t.sentinel != .none) t.sentinel.toValue() else null,
+            .ptr_type => |t| if (t.sentinel != .none) t.sentinel.toValue() else null,

-                else => unreachable,
-            },
+            else => unreachable,
         };
     }
@@ -3196,7 +2937,6 @@ pub const Type = struct {
             .error_set,
             .error_set_merged,
             .error_set_inferred,
-            .pointer,
             => return null,

             .inferred_alloc_const => unreachable,
@@ -3400,15 +3140,6 @@ pub const Type = struct {
                 .inferred_alloc_mut => unreachable,
                 .inferred_alloc_const => unreachable,

-                .pointer => {
-                    const child_ty = ty.childType(mod);
-                    if (child_ty.zigTypeTag(mod) == .Fn) {
-                        return false;
-                    } else {
-                        return child_ty.comptimeOnly(mod);
-                    }
-                },
-
                 .error_union => return ty.errorUnionPayload().comptimeOnly(mod),
             },
             else => switch (mod.intern_pool.indexToKey(ty.ip_index)) {
@@ -4096,7 +3827,6 @@ pub const Type = struct {
        inferred_alloc_const, // See last_no_payload_tag below.

        // After this, the tag requires a payload.
-        pointer,
        error_union,
        error_set,
        error_set_single,
@@ -4117,7 +3847,6 @@ pub const Type = struct {
                .error_set_inferred => Payload.ErrorSetInferred,
                .error_set_merged => Payload.ErrorSetMerged,

-                .pointer => Payload.Pointer,
                .error_union => Payload.ErrorUnion,
                .error_set_single => Payload.Name,
            };
@@ -4230,10 +3959,8 @@ pub const Type = struct {
            data: *Module.Fn.InferredErrorSet,
        };

+        /// TODO: remove this data structure since we have `InternPool.Key.PtrType`.
        pub const Pointer = struct {
-            pub const base_tag = Tag.pointer;
-
-            base: Payload = Payload{ .tag = base_tag },
            data: Data,

            pub const Data = struct {
@@ -4270,7 +3997,7 @@ pub const Type = struct {
                return .{
                    .pointee_type = p.elem_type.toType(),
                    .sentinel = if (p.sentinel != .none) p.sentinel.toValue() else null,
-                    .@"align" = @intCast(u32, p.alignment),
+                    .@"align" = @intCast(u32, p.alignment.toByteUnits(0)),
                    .@"addrspace" = p.address_space,
                    .bit_offset = p.bit_offset,
                    .host_size = p.host_size,
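`sentinel` likewise dispatches only on `InternPool` keys for array and pointer types. As a reminder of what a sentinel means at the language level, here is a small self-contained test, independent of the compiler internals:

```zig
const std = @import("std");
const expect = std.testing.expect;

test "sentinels are part of the type" {
    // A 0-terminated slice guarantees a 0 byte one past the last element.
    const msg: [:0]const u8 = "hello";
    try expect(msg.len == 5);
    try expect(msg[msg.len] == 0);

    // Arrays can carry a sentinel too; the compiler appends it for you.
    const arr: [3:0]u8 = .{ 'a', 'b', 'c' };
    try expect(arr[3] == 0);

    // std.meta.sentinel reads the sentinel back out of a type, if any.
    try expect(std.meta.sentinel([:0]const u8).? == 0);
    try expect(std.meta.sentinel([]const u8) == null);
}
```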
@@ -4368,11 +4095,11 @@ pub const Type = struct {
     pub const err_int = Type.u16;

     pub fn ptr(arena: Allocator, mod: *Module, data: Payload.Pointer.Data) !Type {
-        var d = data;
+        // TODO: update callsites of this function to directly call mod.ptrType
+        // and then delete this function.
+        _ = arena;

-        if (d.size == .C) {
-            d.@"allowzero" = true;
-        }
+        var d = data;

         // Canonicalize non-zero alignment. If it matches the ABI alignment of the pointee
         // type, we change it to 0 here. If this causes an assertion trip because the
@@ -4396,32 +4123,19 @@ pub const Type = struct {
             }
         }

-        ip: {
-            if (d.pointee_type.ip_index == .none) break :ip;
-
-            if (d.sentinel) |s| {
-                switch (s.ip_index) {
-                    .none, .null_value => break :ip,
-                    else => {},
-                }
-            }
-
-            return mod.ptrType(.{
-                .elem_type = d.pointee_type.ip_index,
-                .sentinel = if (d.sentinel) |s| s.ip_index else .none,
-                .alignment = d.@"align",
-                .host_size = d.host_size,
-                .bit_offset = d.bit_offset,
-                .vector_index = d.vector_index,
-                .size = d.size,
-                .is_const = !d.mutable,
-                .is_volatile = d.@"volatile",
-                .is_allowzero = d.@"allowzero",
-                .address_space = d.@"addrspace",
-            });
-        }
-
-        return Type.Tag.pointer.create(arena, d);
+        return mod.ptrType(.{
+            .elem_type = d.pointee_type.ip_index,
+            .sentinel = if (d.sentinel) |s| s.ip_index else .none,
+            .alignment = InternPool.Alignment.fromByteUnits(d.@"align"),
+            .host_size = d.host_size,
+            .bit_offset = d.bit_offset,
+            .vector_index = d.vector_index,
+            .size = d.size,
+            .is_const = !d.mutable,
+            .is_volatile = d.@"volatile",
+            .is_allowzero = d.@"allowzero",
+            .address_space = d.@"addrspace",
+        });
     }

     pub fn array(
diff --git a/src/value.zig b/src/value.zig
index 35d144f912..3100496085 100644
--- a/src/value.zig
+++ b/src/value.zig
@@ -1844,8 +1844,7 @@ pub const Value = struct {
                     return false;
                 }

-                var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;
-                const ptr_ty = ty.slicePtrFieldType(&ptr_buf, mod);
+                const ptr_ty = ty.slicePtrFieldType(mod);

                 return eqlAdvanced(a_payload.ptr, ptr_ty, b_payload.ptr, ptr_ty, mod, opt_sema);
             },
@@ -2001,8 +2000,7 @@ pub const Value = struct {
                 return false;
             }

-            var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;
-            const ptr_ty = ty.slicePtrFieldType(&ptr_buf, mod);
+            const ptr_ty = ty.slicePtrFieldType(mod);

             const a_ptr = switch (a_ty.ptrSize(mod)) {
                 .Slice => a.slicePtr(),
                 .One => a,
@@ -2121,8 +2119,7 @@ pub const Value = struct {
            .Bool, .Int, .ComptimeInt, .Pointer => switch (val.tag()) {
                .slice => {
                    const slice = val.castTag(.slice).?.data;
-                    var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;
-                    const ptr_ty = ty.slicePtrFieldType(&ptr_buf, mod);
+                    const ptr_ty = ty.slicePtrFieldType(mod);
                    hash(slice.ptr, ptr_ty, hasher, mod);
                    hash(slice.len, Type.usize, hasher, mod);
                },
@@ -2253,8 +2250,7 @@ pub const Value = struct {
            .Bool, .Int, .ComptimeInt, .Pointer, .Fn => switch (val.tag()) {
                .slice => {
                    const slice = val.castTag(.slice).?.data;
-                    var ptr_buf: Type.SlicePtrFieldTypeBuffer = undefined;
-                    const ptr_ty = ty.slicePtrFieldType(&ptr_buf, mod);
+                    const ptr_ty = ty.slicePtrFieldType(mod);
                    slice.ptr.hashUncoerced(ptr_ty, hasher, mod);
                },
                else => val.hashPtr(hasher, mod),
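The `value.zig` call sites above hash or compare a slice as its pointer part plus its length, which is why they first fetch the slice's pointer field type. A rough userland analogue of that hashing pattern (the helper name and the choice of Wyhash are illustrative, not the compiler's hashing code):

```zig
const std = @import("std");

// A slice value is hashed as its pointer plus its length rather than as one
// opaque blob; hypothetical helper for demonstration only.
fn hashSlice(hasher: *std.hash.Wyhash, slice: []const u8) void {
    std.hash.autoHash(hasher, @ptrToInt(slice.ptr));
    std.hash.autoHash(hasher, slice.len);
}

test "slice hashed as pointer plus length" {
    var hasher = std.hash.Wyhash.init(0);
    const msg: []const u8 = "hello";
    hashSlice(&hasher, msg);
    _ = hasher.final();
}
```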
