Diffstat (limited to 'src/codegen/llvm.zig')
| -rw-r--r-- | src/codegen/llvm.zig | 247 |
1 file changed, 134 insertions, 113 deletions
diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig
index cdb9addcff..ba637cb99e 100644
--- a/src/codegen/llvm.zig
+++ b/src/codegen/llvm.zig
@@ -161,6 +161,7 @@ pub fn targetTriple(allocator: Allocator, target: std.Target) ![:0]u8 {
 pub const Object = struct {
     gpa: Allocator,
+    module: *Module,
     llvm_module: *const llvm.Module,
     di_builder: ?*llvm.DIBuilder,
     /// One of these mappings:
@@ -181,7 +182,7 @@ pub const Object = struct {
     /// version of the name and incorrectly get function not found in the llvm module.
     /// * it works for functions not all globals.
     /// Therefore, this table keeps track of the mapping.
-    decl_map: std.AutoHashMapUnmanaged(*const Module.Decl, *const llvm.Value),
+    decl_map: std.AutoHashMapUnmanaged(Module.Decl.Index, *const llvm.Value),
     /// Maps Zig types to LLVM types. The table memory itself is backed by the GPA of
     /// the compiler, but the Type/Value memory here is backed by `type_map_arena`.
     /// TODO we need to remove entries from this map in response to incremental compilation
@@ -340,6 +341,7 @@ pub const Object = struct {
         return Object{
             .gpa = gpa,
+            .module = options.module.?,
             .llvm_module = llvm_module,
             .di_map = .{},
             .di_builder = opt_di_builder,
@@ -568,18 +570,20 @@ pub const Object = struct {
         air: Air,
         liveness: Liveness,
     ) !void {
-        const decl = func.owner_decl;
+        const decl_index = func.owner_decl;
+        const decl = module.declPtr(decl_index);

         var dg: DeclGen = .{
             .context = o.context,
             .object = o,
             .module = module,
+            .decl_index = decl_index,
             .decl = decl,
             .err_msg = null,
             .gpa = module.gpa,
         };

-        const llvm_func = try dg.resolveLlvmFunction(decl);
+        const llvm_func = try dg.resolveLlvmFunction(decl_index);

         if (module.align_stack_fns.get(func)) |align_info| {
             dg.addFnAttrInt(llvm_func, "alignstack", align_info.alignment);
@@ -632,7 +636,7 @@ pub const Object = struct {
         const line_number = decl.src_line + 1;
         const is_internal_linkage = decl.val.tag() != .extern_fn and
-            !dg.module.decl_exports.contains(decl);
+            !dg.module.decl_exports.contains(decl_index);
         const noret_bit: c_uint = if (fn_info.return_type.isNoReturn())
             llvm.DIFlags.NoReturn
         else
@@ -684,48 +688,51 @@ pub const Object = struct {
         fg.genBody(air.getMainBody()) catch |err| switch (err) {
             error.CodegenFail => {
                 decl.analysis = .codegen_failure;
-                try module.failed_decls.put(module.gpa, decl, dg.err_msg.?);
+                try module.failed_decls.put(module.gpa, decl_index, dg.err_msg.?);
                 dg.err_msg = null;
                 return;
             },
             else => |e| return e,
         };

-        const decl_exports = module.decl_exports.get(decl) orelse &[0]*Module.Export{};
-        try o.updateDeclExports(module, decl, decl_exports);
+        const decl_exports = module.decl_exports.get(decl_index) orelse &[0]*Module.Export{};
+        try o.updateDeclExports(module, decl_index, decl_exports);
     }

-    pub fn updateDecl(self: *Object, module: *Module, decl: *Module.Decl) !void {
+    pub fn updateDecl(self: *Object, module: *Module, decl_index: Module.Decl.Index) !void {
+        const decl = module.declPtr(decl_index);
         var dg: DeclGen = .{
             .context = self.context,
             .object = self,
             .module = module,
             .decl = decl,
+            .decl_index = decl_index,
             .err_msg = null,
             .gpa = module.gpa,
         };
         dg.genDecl() catch |err| switch (err) {
             error.CodegenFail => {
                 decl.analysis = .codegen_failure;
-                try module.failed_decls.put(module.gpa, decl, dg.err_msg.?);
+                try module.failed_decls.put(module.gpa, decl_index, dg.err_msg.?);
                 dg.err_msg = null;
                 return;
             },
             else => |e| return e,
         };

-        const decl_exports = module.decl_exports.get(decl) orelse &[0]*Module.Export{};
-        try self.updateDeclExports(module, decl, decl_exports);
+        const decl_exports = module.decl_exports.get(decl_index) orelse &[0]*Module.Export{};
+        try self.updateDeclExports(module, decl_index, decl_exports);
     }

     pub fn updateDeclExports(
         self: *Object,
-        module: *const Module,
-        decl: *const Module.Decl,
+        module: *Module,
+        decl_index: Module.Decl.Index,
         exports: []const *Module.Export,
     ) !void {
         // If the module does not already have the function, we ignore this function call
         // because we call `updateDeclExports` at the end of `updateFunc` and `updateDecl`.
-        const llvm_global = self.decl_map.get(decl) orelse return;
+        const llvm_global = self.decl_map.get(decl_index) orelse return;
+        const decl = module.declPtr(decl_index);
         if (decl.isExtern()) {
             llvm_global.setValueName(decl.name);
             llvm_global.setUnnamedAddr(.False);
@@ -798,7 +805,7 @@ pub const Object = struct {
                 }
             }
         } else {
-            const fqn = try decl.getFullyQualifiedName(module.gpa);
+            const fqn = try decl.getFullyQualifiedName(module);
             defer module.gpa.free(fqn);
             llvm_global.setValueName2(fqn.ptr, fqn.len);
             llvm_global.setLinkage(.Internal);
@@ -814,8 +821,8 @@ pub const Object = struct {
         }
     }

-    pub fn freeDecl(self: *Object, decl: *Module.Decl) void {
-        const llvm_value = self.decl_map.get(decl) orelse return;
+    pub fn freeDecl(self: *Object, decl_index: Module.Decl.Index) void {
+        const llvm_value = self.decl_map.get(decl_index) orelse return;
         llvm_value.deleteGlobal();
     }

@@ -847,7 +854,7 @@ pub const Object = struct {
         const gpa = o.gpa;
         // Be careful not to reference this `gop` variable after any recursive calls
         // to `lowerDebugType`.
-        const gop = try o.di_type_map.getOrPutContext(gpa, ty, .{ .target = o.target });
+        const gop = try o.di_type_map.getOrPutContext(gpa, ty, .{ .mod = o.module });
         if (gop.found_existing) {
             const annotated = gop.value_ptr.*;
             const di_type = annotated.toDIType();
@@ -860,7 +867,7 @@ pub const Object = struct {
             };
             return o.lowerDebugTypeImpl(entry, resolve, di_type);
         }
-        errdefer assert(o.di_type_map.orderedRemoveContext(ty, .{ .target = o.target }));
+        errdefer assert(o.di_type_map.orderedRemoveContext(ty, .{ .mod = o.module }));
         // The Type memory is ephemeral; since we want to store a longer-lived
         // reference, we need to copy it here.
         gop.key_ptr.* = try ty.copy(o.type_map_arena.allocator());
@@ -891,7 +898,7 @@ pub const Object = struct {
             .Int => {
                 const info = ty.intInfo(target);
                 assert(info.bits != 0);
-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);
                 const dwarf_encoding: c_uint = switch (info.signedness) {
                     .signed => DW.ATE.signed,
@@ -902,13 +909,14 @@ pub const Object = struct {
                 return di_type;
             },
             .Enum => {
-                const owner_decl = ty.getOwnerDecl();
+                const owner_decl_index = ty.getOwnerDecl();
+                const owner_decl = o.module.declPtr(owner_decl_index);
                 if (!ty.hasRuntimeBitsIgnoreComptime()) {
-                    const enum_di_ty = try o.makeEmptyNamespaceDIType(owner_decl);
+                    const enum_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index);
                     // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType`
                     // means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(enum_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(enum_di_ty), .{ .mod = o.module });
                     return enum_di_ty;
                 }

@@ -938,7 +946,7 @@ pub const Object = struct {
                 const di_file = try o.getDIFile(gpa, owner_decl.src_namespace.file_scope);
                 const di_scope = try o.namespaceToDebugScope(owner_decl.src_namespace);

-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);
                 var buffer: Type.Payload.Bits = undefined;
                 const int_ty = ty.intTagType(&buffer);
@@ -956,12 +964,12 @@ pub const Object = struct {
                     "",
                 );
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(enum_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(enum_di_ty), .{ .mod = o.module });
                 return enum_di_ty;
             },
             .Float => {
                 const bits = ty.floatBits(target);
-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);
                 const di_type = dib.createBasicType(name, bits, DW.ATE.float);
                 gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_type);
@@ -1009,7 +1017,7 @@ pub const Object = struct {
                     const bland_ptr_ty = Type.initPayload(&payload.base);
                     const ptr_di_ty = try o.lowerDebugType(bland_ptr_ty, resolve);
                     // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.init(ptr_di_ty, resolve), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.init(ptr_di_ty, resolve), .{ .mod = o.module });
                     return ptr_di_ty;
                 }

@@ -1018,7 +1026,7 @@ pub const Object = struct {
                     const ptr_ty = ty.slicePtrFieldType(&buf);
                     const len_ty = Type.usize;

-                    const name = try ty.nameAlloc(gpa, target);
+                    const name = try ty.nameAlloc(gpa, o.module);
                     defer gpa.free(name);
                     const di_file: ?*llvm.DIFile = null;
                     const line = 0;
@@ -1089,12 +1097,12 @@ pub const Object = struct {
                     );
                     dib.replaceTemporary(fwd_decl, full_di_ty);
                     // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .mod = o.module });
                     return full_di_ty;
                 }

                 const elem_di_ty = try o.lowerDebugType(ptr_info.pointee_type, .fwd);
-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);
                 const ptr_di_ty = dib.createPointerType(
                     elem_di_ty,
@@ -1103,7 +1111,7 @@ pub const Object = struct {
                     name,
                 );
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(ptr_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(ptr_di_ty), .{ .mod = o.module });
                 return ptr_di_ty;
             },
             .Opaque => {
@@ -1112,9 +1120,10 @@ pub const Object = struct {
                     gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_ty);
                     return di_ty;
                 }
-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);
-                const owner_decl = ty.getOwnerDecl();
+                const owner_decl_index = ty.getOwnerDecl();
+                const owner_decl = o.module.declPtr(owner_decl_index);
                 const opaque_di_ty = dib.createForwardDeclType(
                     DW.TAG.structure_type,
                     name,
@@ -1124,7 +1133,7 @@ pub const Object = struct {
                 );
                 // The recursive call to `lowerDebugType` va `namespaceToDebugScope`
                 // means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(opaque_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(opaque_di_ty), .{ .mod = o.module });
                 return opaque_di_ty;
             },
             .Array => {
@@ -1135,7 +1144,7 @@ pub const Object = struct {
                     @intCast(c_int, ty.arrayLen()),
                 );
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(array_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(array_di_ty), .{ .mod = o.module });
                 return array_di_ty;
             },
             .Vector => {
@@ -1146,11 +1155,11 @@ pub const Object = struct {
                     ty.vectorLen(),
                 );
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(vector_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(vector_di_ty), .{ .mod = o.module });
                 return vector_di_ty;
             },
             .Optional => {
-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);
                 var buf: Type.Payload.ElemType = undefined;
                 const child_ty = ty.optionalChild(&buf);
@@ -1162,7 +1171,7 @@ pub const Object = struct {
                 if (ty.isPtrLikeOptional()) {
                     const ptr_di_ty = try o.lowerDebugType(child_ty, resolve);
                     // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(ptr_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(ptr_di_ty), .{ .mod = o.module });
                     return ptr_di_ty;
                 }

@@ -1235,7 +1244,7 @@ pub const Object = struct {
                 );
                 dib.replaceTemporary(fwd_decl, full_di_ty);
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .mod = o.module });
                 return full_di_ty;
             },
             .ErrorUnion => {
@@ -1244,10 +1253,10 @@ pub const Object = struct {
                 if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
                     const err_set_di_ty = try o.lowerDebugType(err_set_ty, .full);
                     // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(err_set_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(err_set_di_ty), .{ .mod = o.module });
                     return err_set_di_ty;
                 }
-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);
                 const di_file: ?*llvm.DIFile = null;
                 const line = 0;
@@ -1332,7 +1341,7 @@ pub const Object = struct {
                 );
                 dib.replaceTemporary(fwd_decl, full_di_ty);
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .mod = o.module });
                 return full_di_ty;
             },
             .ErrorSet => {
@@ -1344,7 +1353,7 @@ pub const Object = struct {
             },
             .Struct => {
                 const compile_unit_scope = o.di_compile_unit.?.toScope();
-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);

                 if (ty.castTag(.@"struct")) |payload| {
@@ -1431,7 +1440,7 @@ pub const Object = struct {
                     );
                     dib.replaceTemporary(fwd_decl, full_di_ty);
                     // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .mod = o.module });
                     return full_di_ty;
                 }

@@ -1445,23 +1454,23 @@ pub const Object = struct {
                     // into. Therefore we can satisfy this by making an empty namespace,
                     // rather than changing the frontend to unnecessarily resolve the
                     // struct field types.
-                    const owner_decl = ty.getOwnerDecl();
-                    const struct_di_ty = try o.makeEmptyNamespaceDIType(owner_decl);
+                    const owner_decl_index = ty.getOwnerDecl();
+                    const struct_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index);
                     dib.replaceTemporary(fwd_decl, struct_di_ty);
                     // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType`
                     // means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(struct_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(struct_di_ty), .{ .mod = o.module });
                     return struct_di_ty;
                 }
             }

             if (!ty.hasRuntimeBitsIgnoreComptime()) {
-                const owner_decl = ty.getOwnerDecl();
-                const struct_di_ty = try o.makeEmptyNamespaceDIType(owner_decl);
+                const owner_decl_index = ty.getOwnerDecl();
+                const struct_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index);
                 dib.replaceTemporary(fwd_decl, struct_di_ty);
                 // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType`
                 // means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(struct_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(struct_di_ty), .{ .mod = o.module });
                 return struct_di_ty;
             }

@@ -1516,14 +1525,14 @@ pub const Object = struct {
                 );
                 dib.replaceTemporary(fwd_decl, full_di_ty);
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .mod = o.module });
                 return full_di_ty;
             },
             .Union => {
                 const compile_unit_scope = o.di_compile_unit.?.toScope();
-                const owner_decl = ty.getOwnerDecl();
+                const owner_decl_index = ty.getOwnerDecl();

-                const name = try ty.nameAlloc(gpa, target);
+                const name = try ty.nameAlloc(gpa, o.module);
                 defer gpa.free(name);

                 const fwd_decl = opt_fwd_decl orelse blk: {
@@ -1540,11 +1549,11 @@ pub const Object = struct {
                 };

                 if (!ty.hasRuntimeBitsIgnoreComptime()) {
-                    const union_di_ty = try o.makeEmptyNamespaceDIType(owner_decl);
+                    const union_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index);
                     dib.replaceTemporary(fwd_decl, union_di_ty);
                     // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType`
                     // means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(union_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(union_di_ty), .{ .mod = o.module });
                     return union_di_ty;
                 }

@@ -1572,7 +1581,7 @@ pub const Object = struct {
                     dib.replaceTemporary(fwd_decl, full_di_ty);
                     // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType`
                     // means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .mod = o.module });
                     return full_di_ty;
                 }

@@ -1626,7 +1635,7 @@ pub const Object = struct {
                 if (layout.tag_size == 0) {
                     dib.replaceTemporary(fwd_decl, union_di_ty);
                     // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(union_di_ty), .{ .target = o.target });
+                    try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(union_di_ty), .{ .mod = o.module });
                     return union_di_ty;
                 }

@@ -1685,7 +1694,7 @@ pub const Object = struct {
                 );
                 dib.replaceTemporary(fwd_decl, full_di_ty);
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(full_di_ty), .{ .mod = o.module });
                 return full_di_ty;
             },
             .Fn => {
@@ -1733,7 +1742,7 @@ pub const Object = struct {
                     0,
                 );
                 // The recursive call to `lowerDebugType` means we can't use `gop` anymore.
-                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(fn_di_ty), .{ .target = o.target });
+                try o.di_type_map.putContext(gpa, ty, AnnotatedDITypePtr.initFull(fn_di_ty), .{ .mod = o.module });
                 return fn_di_ty;
             },
             .ComptimeInt => unreachable,
@@ -1762,7 +1771,8 @@
     /// This is to be used instead of void for debug info types, to avoid tripping
     /// Assertion `!isa<DIType>(Scope) && "shouldn't make a namespace scope for a type"'
     /// when targeting CodeView (Windows).
-    fn makeEmptyNamespaceDIType(o: *Object, decl: *const Module.Decl) !*llvm.DIType {
+    fn makeEmptyNamespaceDIType(o: *Object, decl_index: Module.Decl.Index) !*llvm.DIType {
+        const decl = o.module.declPtr(decl_index);
         const fields: [0]*llvm.DIType = .{};
         return o.di_builder.?.createStructType(
             try o.namespaceToDebugScope(decl.src_namespace),
@@ -1787,6 +1797,7 @@
     object: *Object,
     module: *Module,
     decl: *Module.Decl,
+    decl_index: Module.Decl.Index,
    gpa: Allocator,
    err_msg: ?*Module.ErrorMsg,
@@ -1804,6 +1815,7 @@
    fn genDecl(dg: *DeclGen) !void {
        const decl = dg.decl;
+        const decl_index = dg.decl_index;
        assert(decl.has_tv);

        log.debug("gen: {s} type: {}, value: {}", .{
@@ -1817,7 +1829,7 @@
            _ = try dg.resolveLlvmFunction(extern_fn.data.owner_decl);
        } else {
            const target = dg.module.getTarget();
-            var global = try dg.resolveGlobalDecl(decl);
+            var global = try dg.resolveGlobalDecl(decl_index);
            global.setAlignment(decl.getAlignment(target));
            assert(decl.has_tv);
            const init_val = if (decl.val.castTag(.variable)) |payload| init_val: {
@@ -1858,7 +1870,7 @@
                // old uses.
                const new_global_ptr = new_global.constBitCast(global.typeOf());
                global.replaceAllUsesWith(new_global_ptr);
-                dg.object.decl_map.putAssumeCapacity(decl, new_global);
+                dg.object.decl_map.putAssumeCapacity(decl_index, new_global);
                new_global.takeName(global);
                global.deleteGlobal();
                global = new_global;
@@ -1869,7 +1881,7 @@
                const di_file = try dg.object.getDIFile(dg.gpa, decl.src_namespace.file_scope);

                const line_number = decl.src_line + 1;
-                const is_internal_linkage = !dg.module.decl_exports.contains(decl);
+                const is_internal_linkage = !dg.module.decl_exports.contains(decl_index);
                const di_global = dib.createGlobalVariable(
                    di_file.toScope(),
                    decl.name,
@@ -1888,12 +1900,10 @@
    /// If the llvm function does not exist, create it.
    /// Note that this can be called before the function's semantic analysis has
    /// completed, so if any attributes rely on that, they must be done in updateFunc, not here.
-    fn resolveLlvmFunction(dg: *DeclGen, decl: *Module.Decl) !*const llvm.Value {
-        return dg.resolveLlvmFunctionExtra(decl, decl.ty);
-    }
-
-    fn resolveLlvmFunctionExtra(dg: *DeclGen, decl: *Module.Decl, zig_fn_type: Type) !*const llvm.Value {
-        const gop = try dg.object.decl_map.getOrPut(dg.gpa, decl);
+    fn resolveLlvmFunction(dg: *DeclGen, decl_index: Module.Decl.Index) !*const llvm.Value {
+        const decl = dg.module.declPtr(decl_index);
+        const zig_fn_type = decl.ty;
+        const gop = try dg.object.decl_map.getOrPut(dg.gpa, decl_index);
        if (gop.found_existing) return gop.value_ptr.*;

        assert(decl.has_tv);
@@ -1903,7 +1913,7 @@
        const fn_type = try dg.llvmType(zig_fn_type);

-        const fqn = try decl.getFullyQualifiedName(dg.gpa);
+        const fqn = try decl.getFullyQualifiedName(dg.module);
        defer dg.gpa.free(fqn);

        const llvm_addrspace = dg.llvmAddressSpace(decl.@"addrspace");
@@ -1996,12 +2006,13 @@
        // TODO add target-cpu and target-features fn attributes
    }

-    fn resolveGlobalDecl(dg: *DeclGen, decl: *Module.Decl) Error!*const llvm.Value {
-        const gop = try dg.object.decl_map.getOrPut(dg.gpa, decl);
+    fn resolveGlobalDecl(dg: *DeclGen, decl_index: Module.Decl.Index) Error!*const llvm.Value {
+        const gop = try dg.object.decl_map.getOrPut(dg.gpa, decl_index);
        if (gop.found_existing) return gop.value_ptr.*;
-        errdefer assert(dg.object.decl_map.remove(decl));
+        errdefer assert(dg.object.decl_map.remove(decl_index));

-        const fqn = try decl.getFullyQualifiedName(dg.gpa);
+        const decl = dg.module.declPtr(decl_index);
+        const fqn = try decl.getFullyQualifiedName(dg.module);
        defer dg.gpa.free(fqn);

        const llvm_type = try dg.llvmType(decl.ty);
@@ -2122,7 +2133,7 @@
            },
            .Opaque => switch (t.tag()) {
                .@"opaque" => {
-                    const gop = try dg.object.type_map.getOrPutContext(gpa, t, .{ .target = target });
+                    const gop = try dg.object.type_map.getOrPutContext(gpa, t, .{ .mod = dg.module });
                    if (gop.found_existing) return gop.value_ptr.*;

                    // The Type memory is ephemeral; since we want to store a longer-lived
@@ -2130,7 +2141,7 @@
                    gop.key_ptr.* = try t.copy(dg.object.type_map_arena.allocator());

                    const opaque_obj = t.castTag(.@"opaque").?.data;
-                    const name = try opaque_obj.getFullyQualifiedName(gpa);
+                    const name = try opaque_obj.getFullyQualifiedName(dg.module);
                    defer gpa.free(name);

                    const llvm_struct_ty = dg.context.structCreateNamed(name);
@@ -2191,7 +2202,7 @@
                return dg.context.intType(16);
            },
            .Struct => {
-                const gop = try dg.object.type_map.getOrPutContext(gpa, t, .{ .target = target });
+                const gop = try dg.object.type_map.getOrPutContext(gpa, t, .{ .mod = dg.module });
                if (gop.found_existing) return gop.value_ptr.*;

                // The Type memory is ephemeral; since we want to store a longer-lived
@@ -2260,7 +2271,7 @@
                    return int_llvm_ty;
                }

-                const name = try struct_obj.getFullyQualifiedName(gpa);
+                const name = try struct_obj.getFullyQualifiedName(dg.module);
                defer gpa.free(name);

                const llvm_struct_ty = dg.context.structCreateNamed(name);
@@ -2314,7 +2325,7 @@
                return llvm_struct_ty;
            },
            .Union => {
-                const gop = try dg.object.type_map.getOrPutContext(gpa, t, .{ .target = target });
+                const gop = try dg.object.type_map.getOrPutContext(gpa, t, .{ .mod = dg.module });
                if (gop.found_existing) return gop.value_ptr.*;

                // The Type memory is ephemeral; since we want to store a longer-lived
@@ -2330,7 +2341,7 @@
                    return enum_tag_llvm_ty;
                }

-                const name = try union_obj.getFullyQualifiedName(gpa);
+                const name = try union_obj.getFullyQualifiedName(dg.module);
                defer gpa.free(name);

                const llvm_union_ty = dg.context.structCreateNamed(name);
@@ -2439,7 +2450,7 @@
            // TODO this duplicates code with Pointer but they should share the handling
            // of the tv.val.tag() and then Int should do extra constPtrToInt on top
            .Int => switch (tv.val.tag()) {
-                .decl_ref_mut => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref_mut).?.data.decl),
+                .decl_ref_mut => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref_mut).?.data.decl_index),
                .decl_ref => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref).?.data),
                else => {
                    var bigint_space: Value.BigIntSpace = undefined;
@@ -2524,12 +2535,13 @@
                }
            },
            .Pointer => switch (tv.val.tag()) {
-                .decl_ref_mut => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref_mut).?.data.decl),
+                .decl_ref_mut => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref_mut).?.data.decl_index),
                .decl_ref => return lowerDeclRefValue(dg, tv, tv.val.castTag(.decl_ref).?.data),
                .variable => {
-                    const decl = tv.val.castTag(.variable).?.data.owner_decl;
-                    decl.markAlive();
-                    const val = try dg.resolveGlobalDecl(decl);
+                    const decl_index = tv.val.castTag(.variable).?.data.owner_decl;
+                    const decl = dg.module.declPtr(decl_index);
+                    dg.module.markDeclAlive(decl);
+                    const val = try dg.resolveGlobalDecl(decl_index);
                    const llvm_var_type = try dg.llvmType(tv.ty);
                    const llvm_addrspace = dg.llvmAddressSpace(decl.@"addrspace");
                    const llvm_type = llvm_var_type.pointerType(llvm_addrspace);
@@ -2683,13 +2695,14 @@
                return dg.context.constStruct(&fields, fields.len, .False);
            },
            .Fn => {
-                const fn_decl = switch (tv.val.tag()) {
+                const fn_decl_index = switch (tv.val.tag()) {
                    .extern_fn => tv.val.castTag(.extern_fn).?.data.owner_decl,
                    .function => tv.val.castTag(.function).?.data.owner_decl,
                    else => unreachable,
                };
-                fn_decl.markAlive();
-                return dg.resolveLlvmFunction(fn_decl);
+                const fn_decl = dg.module.declPtr(fn_decl_index);
+                dg.module.markDeclAlive(fn_decl);
+                return dg.resolveLlvmFunction(fn_decl_index);
            },
            .ErrorSet => {
                const llvm_ty = try dg.llvmType(tv.ty);
@@ -2911,7 +2924,7 @@
                    });
                }
                const union_obj = tv.ty.cast(Type.Payload.Union).?.data;
-                const field_index = union_obj.tag_ty.enumTagFieldIndex(tag_and_val.tag, target).?;
+                const field_index = union_obj.tag_ty.enumTagFieldIndex(tag_and_val.tag, dg.module).?;
                assert(union_obj.haveFieldTypes());
                const field_ty = union_obj.fields.values()[field_index].ty;
                const payload = p: {
@@ -3049,17 +3062,22 @@
        llvm_ptr: *const llvm.Value,
    };

-    fn lowerParentPtrDecl(dg: *DeclGen, ptr_val: Value, decl: *Module.Decl, ptr_child_ty: Type) Error!*const llvm.Value {
-        decl.markAlive();
+    fn lowerParentPtrDecl(
+        dg: *DeclGen,
+        ptr_val: Value,
+        decl_index: Module.Decl.Index,
+        ptr_child_ty: Type,
+    ) Error!*const llvm.Value {
+        const decl = dg.module.declPtr(decl_index);
+        dg.module.markDeclAlive(decl);
        var ptr_ty_payload: Type.Payload.ElemType = .{
            .base = .{ .tag = .single_mut_pointer },
            .data = decl.ty,
        };
        const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
-        const llvm_ptr = try dg.lowerDeclRefValue(.{ .ty = ptr_ty, .val = ptr_val }, decl);
+        const llvm_ptr = try dg.lowerDeclRefValue(.{ .ty = ptr_ty, .val = ptr_val }, decl_index);

-        const target = dg.module.getTarget();
-        if (ptr_child_ty.eql(decl.ty, target)) {
+        if (ptr_child_ty.eql(decl.ty, dg.module)) {
            return llvm_ptr;
        } else {
            return llvm_ptr.constBitCast((try dg.llvmType(ptr_child_ty)).pointerType(0));
@@ -3071,7 +3089,7 @@
        var bitcast_needed: bool = undefined;
        const llvm_ptr = switch (ptr_val.tag()) {
            .decl_ref_mut => {
-                const decl = ptr_val.castTag(.decl_ref_mut).?.data.decl;
+                const decl = ptr_val.castTag(.decl_ref_mut).?.data.decl_index;
                return dg.lowerParentPtrDecl(ptr_val, decl, ptr_child_ty);
            },
            .decl_ref => {
@@ -3123,7 +3141,7 @@
                    },
                    .Struct => {
                        const field_ty = parent_ty.structFieldType(field_index);
-                        bitcast_needed = !field_ty.eql(ptr_child_ty, target);
+                        bitcast_needed = !field_ty.eql(ptr_child_ty, dg.module);

                        var ty_buf: Type.Payload.Pointer = undefined;
                        const llvm_field_index = llvmFieldIndex(parent_ty, field_index, target, &ty_buf).?;
@@ -3139,7 +3157,7 @@
            .elem_ptr => blk: {
                const elem_ptr = ptr_val.castTag(.elem_ptr).?.data;
                const parent_llvm_ptr = try dg.lowerParentPtr(elem_ptr.array_ptr, elem_ptr.elem_ty);
-                bitcast_needed = !elem_ptr.elem_ty.eql(ptr_child_ty, target);
+                bitcast_needed = !elem_ptr.elem_ty.eql(ptr_child_ty, dg.module);

                const llvm_usize = try dg.llvmType(Type.usize);
                const indices: [1]*const llvm.Value = .{
@@ -3153,7 +3171,7 @@
                var buf: Type.Payload.ElemType = undefined;
                const payload_ty = opt_payload_ptr.container_ty.optionalChild(&buf);
-                bitcast_needed = !payload_ty.eql(ptr_child_ty, target);
+                bitcast_needed = !payload_ty.eql(ptr_child_ty, dg.module);

                if (!payload_ty.hasRuntimeBitsIgnoreComptime() or payload_ty.isPtrLikeOptional()) {
                    // In this case, we represent pointer to optional the same as pointer
@@ -3173,7 +3191,7 @@
                const parent_llvm_ptr = try dg.lowerParentPtr(eu_payload_ptr.container_ptr, eu_payload_ptr.container_ty);

                const payload_ty = eu_payload_ptr.container_ty.errorUnionPayload();
-                bitcast_needed = !payload_ty.eql(ptr_child_ty, target);
+                bitcast_needed = !payload_ty.eql(ptr_child_ty, dg.module);

                if (!payload_ty.hasRuntimeBitsIgnoreComptime()) {
                    // In this case, we represent pointer to error union the same as pointer
@@ -3201,15 +3219,14 @@
    fn lowerDeclRefValue(
        self: *DeclGen,
        tv: TypedValue,
-        decl: *Module.Decl,
+        decl_index: Module.Decl.Index,
    ) Error!*const llvm.Value {
-        const target = self.module.getTarget();
        if (tv.ty.isSlice()) {
            var buf: Type.SlicePtrFieldTypeBuffer = undefined;
            const ptr_ty = tv.ty.slicePtrFieldType(&buf);
            var slice_len: Value.Payload.U64 = .{
                .base = .{ .tag = .int_u64 },
-                .data = tv.val.sliceLen(target),
+                .data = tv.val.sliceLen(self.module),
            };
            const fields: [2]*const llvm.Value = .{
                try self.genTypedValue(.{
@@ -3229,8 +3246,9 @@
        //   const bar = foo;
        //   ... &bar;
        // `bar` is just an alias and we actually want to lower a reference to `foo`.
+        const decl = self.module.declPtr(decl_index);
        if (decl.val.castTag(.function)) |func| {
-            if (func.data.owner_decl != decl) {
+            if (func.data.owner_decl != decl_index) {
                return self.lowerDeclRefValue(tv, func.data.owner_decl);
            }
        }
@@ -3240,12 +3258,12 @@
            return self.lowerPtrToVoid(tv.ty);
        }

-        decl.markAlive();
+        self.module.markDeclAlive(decl);

        const llvm_val = if (is_fn_body)
-            try self.resolveLlvmFunction(decl)
+            try self.resolveLlvmFunction(decl_index)
        else
-            try self.resolveGlobalDecl(decl);
+            try self.resolveGlobalDecl(decl_index);

        const llvm_type = try self.llvmType(tv.ty);
        if (tv.ty.zigTypeTag() == .Int) {
@@ -4405,7 +4423,8 @@
        const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
        const func = self.air.values[ty_pl.payload].castTag(.function).?.data;
-        const decl = func.owner_decl;
+        const decl_index = func.owner_decl;
+        const decl = self.dg.module.declPtr(decl_index);
        const di_file = try self.dg.object.getDIFile(self.gpa, decl.src_namespace.file_scope);
        self.di_file = di_file;
        const line_number = decl.src_line + 1;
@@ -4417,10 +4436,10 @@
            .base_line = self.base_line,
        });

-        const fqn = try decl.getFullyQualifiedName(self.gpa);
+        const fqn = try decl.getFullyQualifiedName(self.dg.module);
        defer self.gpa.free(fqn);

-        const is_internal_linkage = !self.dg.module.decl_exports.contains(decl);
+        const is_internal_linkage = !self.dg.module.decl_exports.contains(decl_index);
        const subprogram = dib.createFunction(
            di_file.toScope(),
            decl.name,
@@ -4447,7 +4466,8 @@
        const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
        const func = self.air.values[ty_pl.payload].castTag(.function).?.data;
-        const decl = func.owner_decl;
+        const mod = self.dg.module;
+        const decl = mod.declPtr(func.owner_decl);
        const di_file = try self.dg.object.getDIFile(self.gpa, decl.src_namespace.file_scope);
        self.di_file = di_file;
        const old = self.dbg_inlined.pop();
@@ -5887,7 +5907,7 @@
        if (self.dg.object.di_builder) |dib| {
            const src_index = self.getSrcArgIndex(self.arg_index - 1);
            const func = self.dg.decl.getFunction().?;
-            const lbrace_line = func.owner_decl.src_line + func.lbrace_line + 1;
+            const lbrace_line = self.dg.module.declPtr(func.owner_decl).src_line + func.lbrace_line + 1;
            const lbrace_col = func.lbrace_column + 1;
            const di_local_var = dib.createParameterVariable(
                self.di_scope.?,
@@ -6430,8 +6450,9 @@
        const operand = try self.resolveInst(un_op);
        const enum_ty = self.air.typeOf(un_op);

+        const mod = self.dg.module;
        const llvm_fn_name = try std.fmt.allocPrintZ(arena, "__zig_tag_name_{s}", .{
-            try enum_ty.getOwnerDecl().getFullyQualifiedName(arena),
+            try mod.declPtr(enum_ty.getOwnerDecl()).getFullyQualifiedName(mod),
        });

        const llvm_fn = try self.getEnumTagNameFunction(enum_ty, llvm_fn_name);
@@ -6617,7 +6638,7 @@
        for (values) |*val, i| {
            var buf: Value.ElemValueBuffer = undefined;
-            const elem = mask.elemValueBuffer(i, &buf);
+            const elem = mask.elemValueBuffer(self.dg.module, i, &buf);
            if (elem.isUndef()) {
                val.* = llvm_i32.getUndef();
            } else {
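
The diff applies one pattern throughout: decl_map and the other per-decl tables are keyed by Module.Decl.Index instead of *Module.Decl, a Decl pointer is obtained only at the point of use via module.declPtr(decl_index), and decl.markAlive() becomes module.markDeclAlive(decl). Below is a minimal, self-contained Zig sketch of that index-keyed pattern, written against era-appropriate std APIs; Decl, DeclIndex, and Module here are simplified stand-ins for illustration, not the compiler's real types.

const std = @import("std");

const DeclIndex = u32; // simplified stand-in for Module.Decl.Index

const Decl = struct {
    name: []const u8,
    alive: bool = false,
};

const Module = struct {
    decls: std.ArrayListUnmanaged(Decl) = .{},

    // Resolve an index to a pointer only at the point of use.
    fn declPtr(m: *Module, i: DeclIndex) *Decl {
        return &m.decls.items[i];
    }

    fn markDeclAlive(m: *Module, decl: *Decl) void {
        _ = m;
        decl.alive = true;
    }
};

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    var module: Module = .{};
    defer module.decls.deinit(gpa);
    try module.decls.append(gpa, .{ .name = "foo" });
    const decl_index: DeclIndex = 0;

    // Key the table by the index, as decl_map does after this change; the key
    // stays valid even if the decl storage is reallocated and pointers move.
    var decl_map: std.AutoHashMapUnmanaged(DeclIndex, u64) = .{};
    defer decl_map.deinit(gpa);
    try decl_map.put(gpa, decl_index, 42);

    const decl = module.declPtr(decl_index);
    module.markDeclAlive(decl);
    std.debug.print("{s}: alive={}, mapped={d}\n", .{ decl.name, decl.alive, decl_map.get(decl_index).? });
}

As in the diff, callers hold only a DeclIndex and call declPtr when they need the Decl's fields, so hash-map keys and cross-references stay stable across incremental updates.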
