Diffstat (limited to 'src')
-rw-r--r--  src/all_types.hpp |  13
-rw-r--r--  src/analyze.cpp   | 133
-rw-r--r--  src/analyze.hpp   |   3
-rw-r--r--  src/ir.cpp        | 163
4 files changed, 249 insertions, 63 deletions
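
The diff below adds stage1 support for inferring a struct type from an anonymous struct literal whose result location carries no type (see the new InferredStructField comments in all_types.hpp and ir.cpp). As a rough, hypothetical illustration of the kind of Zig code this is meant to accept -- not taken from this commit's tests:

    const std = @import("std");

    test "anonymous struct literal with inferred result type" {
        // No result type is given, so the compiler creates a struct type that is
        // "being inferred" and adds one field per initialized name, in order.
        const pt = .{ .x = @as(i32, 1), .y = @as(i32, 2) };
        std.debug.assert(pt.x + pt.y == 3);
    }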
diff --git a/src/all_types.hpp b/src/all_types.hpp
index 4d751bae2c..e38e857209 100644
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -1187,10 +1187,22 @@ bool fn_type_id_eql(FnTypeId *a, FnTypeId *b);
 static const uint32_t VECTOR_INDEX_NONE = UINT32_MAX;
 static const uint32_t VECTOR_INDEX_RUNTIME = UINT32_MAX - 1;
 
+struct InferredStructField {
+    ZigType *inferred_struct_type;
+    Buf *field_name;
+};
+
 struct ZigTypePointer {
     ZigType *child_type;
     ZigType *slice_parent;
+
+    // Anonymous struct literal syntax uses this when the result location has
+    // no type in it. This field is null if this pointer does not refer to
+    // a field of a currently-being-inferred struct type.
+    // When this is non-null, the pointer is pointing to the base of the inferred
+    // struct.
+    InferredStructField *inferred_struct_field;
+
     PtrLen ptr_len;
     uint32_t explicit_alignment; // 0 means use ABI alignment
 
@@ -1743,6 +1755,7 @@ struct TypeId {
     union {
         struct {
             ZigType *child_type;
+            InferredStructField *inferred_struct_field;
             PtrLen ptr_len;
             uint32_t alignment;
diff --git a/src/analyze.cpp b/src/analyze.cpp
index ae5b626c29..d8ff4f2848 100644
--- a/src/analyze.cpp
+++ b/src/analyze.cpp
@@ -486,7 +486,7 @@ ZigType *get_fn_frame_type(CodeGen *g, ZigFn *fn) {
 ZigType *get_pointer_to_type_extra2(CodeGen *g, ZigType *child_type, bool is_const, bool is_volatile,
         PtrLen ptr_len, uint32_t byte_alignment,
         uint32_t bit_offset_in_host, uint32_t host_int_bytes, bool allow_zero,
-        uint32_t vector_index)
+        uint32_t vector_index, InferredStructField *inferred_struct_field)
 {
     assert(ptr_len != PtrLenC || allow_zero);
     assert(!type_is_invalid(child_type));
@@ -509,7 +509,7 @@ ZigType *get_pointer_to_type_extra2(CodeGen *g, ZigType *child_type, bool is_con
     TypeId type_id = {};
     ZigType **parent_pointer = nullptr;
     if (host_int_bytes != 0 || is_volatile || byte_alignment != 0 || ptr_len != PtrLenSingle ||
-        allow_zero || vector_index != VECTOR_INDEX_NONE)
+        allow_zero || vector_index != VECTOR_INDEX_NONE || inferred_struct_field != nullptr)
     {
         type_id.id = ZigTypeIdPointer;
         type_id.data.pointer.child_type = child_type;
@@ -521,6 +521,7 @@ ZigType *get_pointer_to_type_extra2(CodeGen *g, ZigType *child_type, bool is_con
         type_id.data.pointer.ptr_len = ptr_len;
         type_id.data.pointer.allow_zero = allow_zero;
         type_id.data.pointer.vector_index = vector_index;
+        type_id.data.pointer.inferred_struct_field = inferred_struct_field;
 
         auto existing_entry = g->type_table.maybe_get(type_id);
         if (existing_entry)
@@ -548,8 +549,15 @@ ZigType *get_pointer_to_type_extra2(CodeGen *g, ZigType *child_type, bool is_con
     }
     buf_resize(&entry->name, 0);
     if (host_int_bytes == 0 && byte_alignment == 0 && vector_index == VECTOR_INDEX_NONE) {
-        buf_appendf(&entry->name, "%s%s%s%s%s",
-            star_str, const_str, volatile_str, allow_zero_str, buf_ptr(&child_type->name));
+        if (inferred_struct_field == nullptr) {
+            buf_appendf(&entry->name, "%s%s%s%s%s",
+                star_str, const_str, volatile_str, allow_zero_str, buf_ptr(&child_type->name));
+        } else {
+            buf_appendf(&entry->name, "(%s%s%s%s field '%s' of %s)",
+                star_str, const_str, volatile_str, allow_zero_str,
+                buf_ptr(inferred_struct_field->field_name),
+                buf_ptr(&inferred_struct_field->inferred_struct_type->name));
+        }
     } else if (host_int_bytes == 0 && vector_index == VECTOR_INDEX_NONE) {
         buf_appendf(&entry->name, "%salign(%" PRIu32 ") %s%s%s%s",
             star_str, byte_alignment, const_str, volatile_str, allow_zero_str, buf_ptr(&child_type->name));
@@ -606,6 +614,7 @@ ZigType *get_pointer_to_type_extra2(CodeGen *g, ZigType *child_type, bool is_con
     entry->data.pointer.host_int_bytes = host_int_bytes;
     entry->data.pointer.allow_zero = allow_zero;
     entry->data.pointer.vector_index = vector_index;
+    entry->data.pointer.inferred_struct_field = inferred_struct_field;
 
     if (parent_pointer) {
         *parent_pointer = entry;
@@ -620,12 +629,12 @@ ZigType *get_pointer_to_type_extra(CodeGen *g, ZigType *child_type, bool is_cons
         uint32_t bit_offset_in_host, uint32_t host_int_bytes, bool allow_zero)
 {
     return get_pointer_to_type_extra2(g, child_type, is_const, is_volatile, ptr_len,
-            byte_alignment, bit_offset_in_host, host_int_bytes, allow_zero, VECTOR_INDEX_NONE);
+            byte_alignment, bit_offset_in_host, host_int_bytes, allow_zero, VECTOR_INDEX_NONE, nullptr);
 }
 
 ZigType *get_pointer_to_type(CodeGen *g, ZigType *child_type, bool is_const) {
     return get_pointer_to_type_extra2(g, child_type, is_const, false, PtrLenSingle, 0, 0, 0, false,
-            VECTOR_INDEX_NONE);
+            VECTOR_INDEX_NONE, nullptr);
 }
 
 ZigType *get_optional_type(CodeGen *g, ZigType *child_type) {
@@ -2082,7 +2091,7 @@ static Error resolve_struct_type(CodeGen *g, ZigType *struct_type) {
     }
 
     assert(struct_type->data.structure.fields || struct_type->data.structure.src_field_count == 0);
-    assert(decl_node->type == NodeTypeContainerDecl);
+    assert(decl_node->type == NodeTypeContainerDecl || decl_node->type == NodeTypeContainerInitExpr);
 
     size_t field_count = struct_type->data.structure.src_field_count;
 
@@ -2670,7 +2679,6 @@ static Error resolve_struct_zero_bits(CodeGen *g, ZigType *struct_type) {
         return ErrorNone;
 
     AstNode *decl_node = struct_type->data.structure.decl_node;
-    assert(decl_node->type == NodeTypeContainerDecl);
 
     if (struct_type->data.structure.resolve_loop_flag_zero_bits) {
         if (struct_type->data.structure.resolve_status != ResolveStatusInvalid) {
@@ -2681,29 +2689,46 @@
         }
         return ErrorSemanticAnalyzeFail;
     }
-    struct_type->data.structure.resolve_loop_flag_zero_bits = true;
-    assert(!struct_type->data.structure.fields);
-    size_t field_count = decl_node->data.container_decl.fields.length;
-    struct_type->data.structure.src_field_count = (uint32_t)field_count;
-    struct_type->data.structure.fields = allocate<TypeStructField>(field_count);
+    size_t field_count;
+    if (decl_node->type == NodeTypeContainerDecl) {
+        field_count = decl_node->data.container_decl.fields.length;
+        struct_type->data.structure.src_field_count = (uint32_t)field_count;
+
+        src_assert(struct_type->data.structure.fields == nullptr, decl_node);
+        struct_type->data.structure.fields = allocate<TypeStructField>(field_count);
+    } else if (decl_node->type == NodeTypeContainerInitExpr) {
+        src_assert(struct_type->data.structure.is_inferred, decl_node);
+        src_assert(struct_type->data.structure.fields != nullptr, decl_node);
+
+        field_count = struct_type->data.structure.src_field_count;
+    } else zig_unreachable();
+
     struct_type->data.structure.fields_by_name.init(field_count);
 
     Scope *scope = &struct_type->data.structure.decls_scope->base;
 
     size_t gen_field_index = 0;
     for (size_t i = 0; i < field_count; i += 1) {
-        AstNode *field_node = decl_node->data.container_decl.fields.at(i);
         TypeStructField *type_struct_field = &struct_type->data.structure.fields[i];
-        type_struct_field->name = field_node->data.struct_field.name;
-        type_struct_field->decl_node = field_node;
-        if (field_node->data.struct_field.type == nullptr) {
-            add_node_error(g, field_node, buf_sprintf("struct field missing type"));
-            struct_type->data.structure.resolve_status = ResolveStatusInvalid;
-            return ErrorSemanticAnalyzeFail;
-        }
+        AstNode *field_node;
+        if (decl_node->type == NodeTypeContainerDecl) {
+            field_node = decl_node->data.container_decl.fields.at(i);
+            type_struct_field->name = field_node->data.struct_field.name;
+            type_struct_field->decl_node = field_node;
+
+            if (field_node->data.struct_field.type == nullptr) {
+                add_node_error(g, field_node, buf_sprintf("struct field missing type"));
+                struct_type->data.structure.resolve_status = ResolveStatusInvalid;
+                return ErrorSemanticAnalyzeFail;
+            }
+        } else if (decl_node->type == NodeTypeContainerInitExpr) {
+            field_node = type_struct_field->decl_node;
+
+            src_assert(type_struct_field->type_entry != nullptr, field_node);
+        } else zig_unreachable();
 
         auto field_entry = struct_type->data.structure.fields_by_name.put_unique(type_struct_field->name, type_struct_field);
         if (field_entry != nullptr) {
@@ -2714,16 +2739,21 @@ static Error resolve_struct_zero_bits(CodeGen *g, ZigType *struct_type) {
             return ErrorSemanticAnalyzeFail;
         }
 
-        ConstExprValue *field_type_val = analyze_const_value(g, scope,
-                field_node->data.struct_field.type, g->builtin_types.entry_type, nullptr, LazyOkNoUndef);
-        if (type_is_invalid(field_type_val->type)) {
-            struct_type->data.structure.resolve_status = ResolveStatusInvalid;
-            return ErrorSemanticAnalyzeFail;
-        }
-        assert(field_type_val->special != ConstValSpecialRuntime);
-        type_struct_field->type_val = field_type_val;
-        if (struct_type->data.structure.resolve_status == ResolveStatusInvalid)
-            return ErrorSemanticAnalyzeFail;
+        ConstExprValue *field_type_val;
+        if (decl_node->type == NodeTypeContainerDecl) {
+            field_type_val = analyze_const_value(g, scope,
+                    field_node->data.struct_field.type, g->builtin_types.entry_type, nullptr, LazyOkNoUndef);
+            if (type_is_invalid(field_type_val->type)) {
+                struct_type->data.structure.resolve_status = ResolveStatusInvalid;
+                return ErrorSemanticAnalyzeFail;
+            }
+            assert(field_type_val->special != ConstValSpecialRuntime);
+            type_struct_field->type_val = field_type_val;
+            if (struct_type->data.structure.resolve_status == ResolveStatusInvalid)
+                return ErrorSemanticAnalyzeFail;
+        } else if (decl_node->type == NodeTypeContainerInitExpr) {
+            field_type_val = type_struct_field->type_val;
+        } else zig_unreachable();
 
         bool field_is_opaque_type;
         if ((err = type_val_resolve_is_opaque_type(g, field_type_val, &field_is_opaque_type))) {
@@ -2807,7 +2837,7 @@ static Error resolve_struct_alignment(CodeGen *g, ZigType *struct_type) {
     }
     struct_type->data.structure.resolve_loop_flag_other = true;
 
-    assert(decl_node->type == NodeTypeContainerDecl);
+    assert(decl_node->type == NodeTypeContainerDecl || decl_node->type == NodeTypeContainerInitExpr);
 
     size_t field_count = struct_type->data.structure.src_field_count;
     bool packed = struct_type->data.structure.layout == ContainerLayoutPacked;
@@ -2817,7 +2847,8 @@ static Error resolve_struct_alignment(CodeGen *g, ZigType *struct_type) {
         if (field->gen_index == SIZE_MAX)
             continue;
 
-        AstNode *align_expr = field->decl_node->data.struct_field.align_expr;
+        AstNode *align_expr = (field->decl_node->type == NodeTypeStructField) ?
+            field->decl_node->data.struct_field.align_expr : nullptr;
         if (align_expr != nullptr) {
             if (!analyze_const_align(g, &struct_type->data.structure.decls_scope->base, align_expr,
                     &field->align))
@@ -5416,6 +5447,12 @@ OnePossibleValue type_has_one_possible_value(CodeGen *g, ZigType *type_entry) {
     if (type_entry->one_possible_value != OnePossibleValueInvalid)
         return type_entry->one_possible_value;
 
+    if (type_entry->id == ZigTypeIdStruct &&
+        type_entry->data.structure.resolve_status == ResolveStatusBeingInferred)
+    {
+        return OnePossibleValueNo;
+    }
+
     Error err;
     if ((err = type_resolve(g, type_entry, ResolveStatusZeroBitsKnown)))
         return OnePossibleValueInvalid;
@@ -5820,9 +5857,15 @@ ConstExprValue *create_const_arg_tuple(CodeGen *g, size_t arg_index_start, size_
 
 ConstExprValue *create_const_vals(size_t count) {
-    ConstGlobalRefs *global_refs = allocate<ConstGlobalRefs>(count, "ConstGlobalRefs");
-    ConstExprValue *vals = allocate<ConstExprValue>(count, "ConstExprValue");
-    for (size_t i = 0; i < count; i += 1) {
+    return realloc_const_vals(nullptr, 0, count);
+}
+
+ConstExprValue *realloc_const_vals(ConstExprValue *base, size_t old_count, size_t new_count) {
+    ConstGlobalRefs *old_global_refs = (base == nullptr) ? nullptr : base->global_refs;
+    ConstGlobalRefs *global_refs = reallocate<ConstGlobalRefs>(old_global_refs, old_count,
+            new_count, "ConstGlobalRefs");
+    ConstExprValue *vals = reallocate<ConstExprValue>(base, old_count, new_count, "ConstExprValue");
+    for (size_t i = old_count; i < new_count; i += 1) {
         vals[i].global_refs = &global_refs[i];
     }
     return vals;
@@ -7002,7 +7045,16 @@ bool type_id_eql(TypeId a, TypeId b) {
                 a.data.pointer.alignment == b.data.pointer.alignment &&
                 a.data.pointer.bit_offset_in_host == b.data.pointer.bit_offset_in_host &&
                 a.data.pointer.vector_index == b.data.pointer.vector_index &&
-                a.data.pointer.host_int_bytes == b.data.pointer.host_int_bytes;
+                a.data.pointer.host_int_bytes == b.data.pointer.host_int_bytes &&
+                (
+                    a.data.pointer.inferred_struct_field == b.data.pointer.inferred_struct_field ||
+                    (a.data.pointer.inferred_struct_field != nullptr &&
+                        b.data.pointer.inferred_struct_field != nullptr &&
+                        a.data.pointer.inferred_struct_field->inferred_struct_type ==
+                            b.data.pointer.inferred_struct_field->inferred_struct_type &&
+                        buf_eql_buf(a.data.pointer.inferred_struct_field->field_name,
+                            b.data.pointer.inferred_struct_field->field_name))
+                );
         case ZigTypeIdArray:
             return a.data.array.child_type == b.data.array.child_type &&
                    a.data.array.size == b.data.array.size;
@@ -7815,7 +7867,6 @@ static void resolve_llvm_types_struct(CodeGen *g, ZigType *struct_type, ResolveS
     ZigLLVMDIScope *di_scope;
     unsigned line;
     if (decl_node != nullptr) {
-        assert(decl_node->type == NodeTypeContainerDecl);
         Scope *scope = &struct_type->data.structure.decls_scope->base;
         ZigType *import = get_scope_import(scope);
         di_file = import->data.structure.root_struct->di_file;
@@ -8018,7 +8069,7 @@ static void resolve_llvm_types_struct(CodeGen *g, ZigType *struct_type, ResolveS
         }
         unsigned line;
         if (decl_node != nullptr) {
-            AstNode *field_node = decl_node->data.container_decl.fields.at(i);
+            AstNode *field_node = field->decl_node;
             line = field_node->line + 1;
         } else {
             line = 0;
@@ -8314,12 +8365,12 @@ static void resolve_llvm_types_pointer(CodeGen *g, ZigType *type, ResolveStatus
         if (type->data.pointer.vector_index == VECTOR_INDEX_NONE) {
             peer_type = get_pointer_to_type_extra2(g, elem_type, false, false,
                     PtrLenSingle, 0, 0, type->data.pointer.host_int_bytes, false,
-                    VECTOR_INDEX_NONE);
+                    VECTOR_INDEX_NONE, nullptr);
         } else {
             uint32_t host_vec_len = type->data.pointer.host_int_bytes;
             ZigType *host_vec_type = get_vector_type(g, host_vec_len, elem_type);
             peer_type = get_pointer_to_type_extra2(g, host_vec_type, false, false,
-                    PtrLenSingle, 0, 0, 0, false, VECTOR_INDEX_NONE);
+                    PtrLenSingle, 0, 0, 0, false, VECTOR_INDEX_NONE, nullptr);
         }
         type->llvm_type = get_llvm_type(g, peer_type);
         type->llvm_di_type = get_llvm_di_type(g, peer_type);
diff --git a/src/analyze.hpp b/src/analyze.hpp
index a6af371e25..7de51092e8 100644
--- a/src/analyze.hpp
+++ b/src/analyze.hpp
@@ -24,7 +24,7 @@ ZigType *get_pointer_to_type_extra(CodeGen *g, ZigType *child_type,
 ZigType *get_pointer_to_type_extra2(CodeGen *g, ZigType *child_type,
         bool is_const, bool is_volatile, PtrLen ptr_len, uint32_t byte_alignment,
         uint32_t bit_offset, uint32_t unaligned_bit_count,
-        bool allow_zero, uint32_t vector_index);
+        bool allow_zero, uint32_t vector_index, InferredStructField *inferred_struct_field);
 uint64_t type_size(CodeGen *g, ZigType *type_entry);
 uint64_t type_size_bits(CodeGen *g, ZigType *type_entry);
 ZigType *get_int_type(CodeGen *g, bool is_signed, uint32_t size_in_bits);
@@ -175,6 +175,7 @@ void init_const_arg_tuple(CodeGen *g, ConstExprValue *const_val, size_t arg_inde
 ConstExprValue *create_const_arg_tuple(CodeGen *g, size_t arg_index_start, size_t arg_index_end);
 ConstExprValue *create_const_vals(size_t count);
+ConstExprValue *realloc_const_vals(ConstExprValue *base, size_t old_count, size_t new_count);
 
 ZigType *make_int_type(CodeGen *g, bool is_signed, uint32_t size_in_bits);
 void expand_undef_array(CodeGen *g, ConstExprValue *const_val);
diff --git a/src/ir.cpp b/src/ir.cpp
index e278193785..fb2b0ed841 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -202,6 +202,8 @@ static Buf *get_anon_type_name(CodeGen *codegen, IrExecutable *exec, const char
         Scope *scope, AstNode *source_node, Buf *out_bare_name);
 static ResultLocCast *ir_build_cast_result_loc(IrBuilder *irb, IrInstruction *dest_type,
         ResultLoc *parent_result_loc);
+static IrInstruction *ir_analyze_struct_field_ptr(IrAnalyze *ira, IrInstruction *source_instr,
+        TypeStructField *field, IrInstruction *struct_ptr, ZigType *struct_type, bool initializing);
 
 static ConstExprValue *const_ptr_pointee_unchecked(CodeGen *g, ConstExprValue *const_val) {
     assert(get_src_ptr_type(const_val->type) != nullptr);
@@ -16321,13 +16323,81 @@ static IrInstruction *ir_analyze_store_ptr(IrAnalyze *ira, IrInstruction *source
         return ir_const_void(ira, source_instr);
     }
 
-    ZigType *child_type = ptr->value.type->data.pointer.child_type;
+    InferredStructField *isf = ptr->value.type->data.pointer.inferred_struct_field;
+    if (allow_write_through_const && isf != nullptr) {
+        // Now it's time to add the field to the struct type.
+        uint32_t old_field_count = isf->inferred_struct_type->data.structure.src_field_count;
+        uint32_t new_field_count = old_field_count + 1;
+        isf->inferred_struct_type->data.structure.src_field_count = new_field_count;
+        // This thing with max(x, 16) is a hack to allow this functionality to work without
+        // modifying the ConstExprValue layout of structs. That reworking needs to be
+        // done, but this hack lets us do it separately, in the future.
+        TypeStructField *prev_ptr = isf->inferred_struct_type->data.structure.fields;
+        isf->inferred_struct_type->data.structure.fields = reallocate(
+                isf->inferred_struct_type->data.structure.fields,
+                (old_field_count == 0) ? 0 : max(old_field_count, 16u),
+                max(new_field_count, 16u));
+        if (prev_ptr != nullptr && prev_ptr != isf->inferred_struct_type->data.structure.fields) {
+            zig_panic("TODO need to rework the layout of ZigTypeStruct. this realloc would have caused invalid pointer references");
+        }
+
+        // This reference can't live long, don't keep it around outside this block.
+        TypeStructField *field = &isf->inferred_struct_type->data.structure.fields[old_field_count];
+        field->name = isf->field_name;
+        field->type_entry = uncasted_value->value.type;
+        field->type_val = create_const_type(ira->codegen, field->type_entry);
+        field->src_index = old_field_count;
+        field->decl_node = uncasted_value->source_node;
+
+        ZigType *struct_ptr_type = get_pointer_to_type(ira->codegen, isf->inferred_struct_type, false);
+        IrInstruction *casted_ptr;
+        if (instr_is_comptime(ptr)) {
+            casted_ptr = ir_const(ira, source_instr, struct_ptr_type);
+            copy_const_val(&casted_ptr->value, &ptr->value, false);
+            casted_ptr->value.type = struct_ptr_type;
+        } else {
+            casted_ptr = ir_build_cast(&ira->new_irb, source_instr->scope,
+                    source_instr->source_node, struct_ptr_type, ptr, CastOpNoop);
+            casted_ptr->value.type = struct_ptr_type;
+        }
+        if (instr_is_comptime(casted_ptr)) {
+            ConstExprValue *ptr_val = ir_resolve_const(ira, casted_ptr, UndefBad);
+            if (!ptr_val)
+                return ira->codegen->invalid_instruction;
+            if (ptr_val->data.x_ptr.special != ConstPtrSpecialHardCodedAddr) {
+                ConstExprValue *struct_val = const_ptr_pointee(ira, ira->codegen, ptr_val,
+                        source_instr->source_node);
+                struct_val->special = ConstValSpecialStatic;
+                ConstExprValue *prev_ptr = struct_val->data.x_struct.fields;
+                // This thing with max(x, 16) is a hack to allow this functionality to work without
+                // modifying the ConstExprValue layout of structs. That reworking needs to be
+                // done, but this hack lets us do it separately, in the future.
+                struct_val->data.x_struct.fields = realloc_const_vals(struct_val->data.x_struct.fields,
+                        (old_field_count == 0) ? 0 : max(old_field_count, 16u),
+                        max(new_field_count, 16u));
+                if (prev_ptr != nullptr && prev_ptr != struct_val->data.x_struct.fields) {
+                    zig_panic("TODO need to rework the layout of ConstExprValue for structs. this realloc would have caused invalid pointer references");
+                }
+
+                ConstExprValue *field_val = &struct_val->data.x_struct.fields[old_field_count];
+                field_val->special = ConstValSpecialUndef;
+                field_val->type = field->type_entry;
+                field_val->parent.id = ConstParentIdStruct;
+                field_val->parent.data.p_struct.struct_val = struct_val;
+                field_val->parent.data.p_struct.field_index = old_field_count;
+            }
+        }
+
+        ptr = ir_analyze_struct_field_ptr(ira, source_instr, field, casted_ptr,
+                isf->inferred_struct_type, true);
+    }
 
     if (ptr->value.type->data.pointer.is_const && !allow_write_through_const) {
         ir_add_error(ira, source_instr, buf_sprintf("cannot assign to constant"));
         return ira->codegen->invalid_instruction;
     }
 
+    ZigType *child_type = ptr->value.type->data.pointer.child_type;
     IrInstruction *value = ir_implicit_cast(ira, uncasted_value, child_type);
     if (value == ira->codegen->invalid_instruction)
         return ira->codegen->invalid_instruction;
@@ -17853,7 +17923,8 @@ static IrInstruction *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruct
                 return_type = get_pointer_to_type_extra2(ira->codegen, elem_type,
                     ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
                     elem_ptr_instruction->ptr_len,
-                    get_ptr_align(ira->codegen, ptr_type), 0, host_vec_len, false, (uint32_t)index);
+                    get_ptr_align(ira->codegen, ptr_type), 0, host_vec_len, false, (uint32_t)index,
+                    nullptr);
             } else if (return_type->data.pointer.explicit_alignment != 0) {
                 // figure out the largest alignment possible
 
@@ -18094,7 +18165,8 @@ static IrInstruction *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruct
             return_type = get_pointer_to_type_extra2(ira->codegen, elem_type,
                 ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
                 elem_ptr_instruction->ptr_len,
-                get_ptr_align(ira->codegen, ptr_type), 0, host_vec_len, false, VECTOR_INDEX_RUNTIME);
+                get_ptr_align(ira->codegen, ptr_type), 0, host_vec_len, false, VECTOR_INDEX_RUNTIME,
+                nullptr);
         } else {
             // runtime known element index
             switch (type_requires_comptime(ira->codegen, return_type)) {
@@ -18210,31 +18282,34 @@ static IrInstruction *ir_analyze_struct_field_ptr(IrAnalyze *ira, IrInstruction
         case OnePossibleValueNo:
             break;
     }
-    ResolveStatus needed_resolve_status =
-        (struct_type->data.structure.layout == ContainerLayoutAuto) ?
-            ResolveStatusZeroBitsKnown : ResolveStatusSizeKnown;
-    if ((err = type_resolve(ira->codegen, struct_type, needed_resolve_status)))
-        return ira->codegen->invalid_instruction;
-    assert(struct_ptr->value.type->id == ZigTypeIdPointer);
-    uint32_t ptr_bit_offset = struct_ptr->value.type->data.pointer.bit_offset_in_host;
-    uint32_t ptr_host_int_bytes = struct_ptr->value.type->data.pointer.host_int_bytes;
-    uint32_t host_int_bytes_for_result_type = (ptr_host_int_bytes == 0) ?
-        get_host_int_bytes(ira->codegen, struct_type, field) : ptr_host_int_bytes;
     bool is_const = struct_ptr->value.type->data.pointer.is_const;
     bool is_volatile = struct_ptr->value.type->data.pointer.is_volatile;
-    ZigType *ptr_type = get_pointer_to_type_extra(ira->codegen, field_type,
-            is_const, is_volatile, PtrLenSingle, field->align,
-            (uint32_t)(ptr_bit_offset + field->bit_offset_in_host),
-            (uint32_t)host_int_bytes_for_result_type, false);
+    ZigType *ptr_type;
+    if (struct_type->data.structure.is_inferred) {
+        ptr_type = get_pointer_to_type_extra(ira->codegen, field_type,
+                is_const, is_volatile, PtrLenSingle, 0, 0, 0, false);
+    } else {
+        ResolveStatus needed_resolve_status =
+            (struct_type->data.structure.layout == ContainerLayoutAuto) ?
+                ResolveStatusZeroBitsKnown : ResolveStatusSizeKnown;
+        if ((err = type_resolve(ira->codegen, struct_type, needed_resolve_status)))
+            return ira->codegen->invalid_instruction;
+        assert(struct_ptr->value.type->id == ZigTypeIdPointer);
+        uint32_t ptr_bit_offset = struct_ptr->value.type->data.pointer.bit_offset_in_host;
+        uint32_t ptr_host_int_bytes = struct_ptr->value.type->data.pointer.host_int_bytes;
+        uint32_t host_int_bytes_for_result_type = (ptr_host_int_bytes == 0) ?
+            get_host_int_bytes(ira->codegen, struct_type, field) : ptr_host_int_bytes;
+        ptr_type = get_pointer_to_type_extra(ira->codegen, field_type,
+                is_const, is_volatile, PtrLenSingle, field->align,
+                (uint32_t)(ptr_bit_offset + field->bit_offset_in_host),
+                (uint32_t)host_int_bytes_for_result_type, false);
+    }
     if (instr_is_comptime(struct_ptr)) {
         ConstExprValue *ptr_val = ir_resolve_const(ira, struct_ptr, UndefBad);
         if (!ptr_val)
             return ira->codegen->invalid_instruction;
         if (ptr_val->data.x_ptr.special != ConstPtrSpecialHardCodedAddr) {
-            if ((err = type_resolve(ira->codegen, struct_type, ResolveStatusSizeKnown)))
-                return ira->codegen->invalid_instruction;
-
             ConstExprValue *struct_val = const_ptr_pointee(ira, ira->codegen, ptr_val, source_instr->source_node);
             if (struct_val == nullptr)
                 return ira->codegen->invalid_instruction;
@@ -18246,7 +18321,8 @@ static IrInstruction *ir_analyze_struct_field_ptr(IrAnalyze *ira, IrInstruction
                 for (size_t i = 0; i < struct_type->data.structure.src_field_count; i += 1) {
                     ConstExprValue *field_val = &struct_val->data.x_struct.fields[i];
                     field_val->special = ConstValSpecialUndef;
-                    field_val->type = struct_type->data.structure.fields[i].type_entry;
+                    field_val->type = resolve_struct_field_type(ira->codegen,
+                        &struct_type->data.structure.fields[i]);
                     field_val->parent.id = ConstParentIdStruct;
                     field_val->parent.data.p_struct.struct_val = struct_val;
                     field_val->parent.data.p_struct.field_index = i;
@@ -18275,6 +18351,40 @@ static IrInstruction *ir_analyze_struct_field_ptr(IrAnalyze *ira, IrInstruction
     return result;
 }
 
+static IrInstruction *ir_analyze_inferred_field_ptr(IrAnalyze *ira, Buf *field_name,
+    IrInstruction *source_instr, IrInstruction *container_ptr, ZigType *container_type)
+{
+    // The type of the field is not available until a store using this pointer happens.
+    // So, here we create a special pointer type which has the inferred struct type and
+    // field name encoded in the type. Later, when there is a store via this pointer,
+    // the field type will then be available, and the field will be added to the inferred
+    // struct.
+
+    ZigType *container_ptr_type = container_ptr->value.type;
+    ir_assert(container_ptr_type->id == ZigTypeIdPointer, source_instr);
+
+    InferredStructField *inferred_struct_field = allocate<InferredStructField>(1, "InferredStructField");
+    inferred_struct_field->inferred_struct_type = container_type;
+    inferred_struct_field->field_name = field_name;
+
+    ZigType *elem_type = ira->codegen->builtin_types.entry_c_void;
+    ZigType *field_ptr_type = get_pointer_to_type_extra2(ira->codegen, elem_type,
+        container_ptr_type->data.pointer.is_const, container_ptr_type->data.pointer.is_volatile,
+        PtrLenSingle, 0, 0, 0, false, VECTOR_INDEX_NONE, inferred_struct_field);
+
+    if (instr_is_comptime(container_ptr)) {
+        IrInstruction *result = ir_const(ira, source_instr, field_ptr_type);
+        copy_const_val(&result->value, &container_ptr->value, false);
+        result->value.type = field_ptr_type;
+        return result;
+    }
+
+    IrInstruction *result = ir_build_cast(&ira->new_irb, source_instr->scope,
+        source_instr->source_node, field_ptr_type, container_ptr, CastOpNoop);
+    result->value.type = field_ptr_type;
+    return result;
+}
+
 static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_name,
     IrInstruction *source_instr, IrInstruction *container_ptr, ZigType *container_type, bool initializing)
 {
@@ -18282,6 +18392,12 @@ static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_
 
     ZigType *bare_type = container_ref_type(container_type);
 
+    if (initializing && bare_type->id == ZigTypeIdStruct &&
+        bare_type->data.structure.resolve_status == ResolveStatusBeingInferred)
+    {
+        return ir_analyze_inferred_field_ptr(ira, field_name, source_instr, container_ptr, bare_type);
+    }
+
     if ((err = type_resolve(ira->codegen, bare_type, ResolveStatusZeroBitsKnown)))
         return ira->codegen->invalid_instruction;
 
@@ -20056,6 +20172,11 @@ static IrInstruction *ir_analyze_container_init_fields(IrAnalyze *ira, IrInstruc
             return ira->codegen->invalid_instruction;
     }
 
+    if (container_type->data.structure.resolve_status == ResolveStatusBeingInferred) {
+        // We're now done inferring the type.
+        container_type->data.structure.resolve_status = ResolveStatusUnstarted;
+    }
+
     if ((err = type_resolve(ira->codegen, container_type, ResolveStatusSizeKnown)))
         return ira->codegen->invalid_instruction;
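
As the new comments in ir_analyze_inferred_field_ptr and ir_analyze_store_ptr above describe, a field's type only becomes known at the store through the inferred-field pointer, so fields are appended to the inferred struct one store at a time, each taking the type of the stored value. A small hypothetical sketch of that user-visible behavior (assumed, not from this commit's tests):

    const std = @import("std");

    test "field types come from the stored values" {
        const a = .{ .value = @as(u8, 1) };
        const b = .{ .value = @as(u16, 256) };
        // Each store determines the field's type: a.value is a u8, b.value a u16,
        // purely from the coercions on the right-hand sides.
        std.debug.assert(a.value == 1 and b.value == 256);
    }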
