Diffstat (limited to 'src')
-rw-r--r--   src/all_types.hpp |  13
-rw-r--r--   src/analyze.cpp   |  41
-rw-r--r--   src/analyze.hpp   |   1
-rw-r--r--   src/bigint.cpp    |  31
-rw-r--r--   src/bigint.hpp    |   2
-rw-r--r--   src/codegen.cpp   |  45
-rw-r--r--   src/ir.cpp        | 225
-rw-r--r--   src/ir_print.cpp  |   9
8 files changed, 317 insertions, 50 deletions
diff --git a/src/all_types.hpp b/src/all_types.hpp
index 3ad4a5d84a..a18b87e90d 100644
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -1352,6 +1352,7 @@ enum BuiltinFnId {
     BuiltinFnIdCompileLog,
     BuiltinFnIdCtz,
     BuiltinFnIdClz,
+    BuiltinFnIdPopCount,
     BuiltinFnIdImport,
     BuiltinFnIdCImport,
     BuiltinFnIdErrName,
@@ -1477,6 +1478,7 @@ bool type_id_eql(TypeId a, TypeId b);
 enum ZigLLVMFnId {
     ZigLLVMFnIdCtz,
     ZigLLVMFnIdClz,
+    ZigLLVMFnIdPopCount,
     ZigLLVMFnIdOverflowArithmetic,
     ZigLLVMFnIdFloor,
     ZigLLVMFnIdCeil,
@@ -1501,6 +1503,9 @@ struct ZigLLVMFnKey {
         } clz;
         struct {
             uint32_t bit_count;
+        } pop_count;
+        struct {
+            uint32_t bit_count;
         } floating;
         struct {
             AddSubMul add_sub_mul;
@@ -2048,6 +2053,7 @@ enum IrInstructionId {
     IrInstructionIdUnionTag,
     IrInstructionIdClz,
     IrInstructionIdCtz,
+    IrInstructionIdPopCount,
     IrInstructionIdImport,
     IrInstructionIdCImport,
     IrInstructionIdCInclude,
@@ -2191,6 +2197,7 @@ struct IrInstructionSwitchBr {
     size_t case_count;
     IrInstructionSwitchBrCase *cases;
     IrInstruction *is_comptime;
+    IrInstruction *switch_prongs_void;
 };
 
 struct IrInstructionSwitchVar {
@@ -2542,6 +2549,12 @@ struct IrInstructionClz {
     IrInstruction *value;
 };
 
+struct IrInstructionPopCount {
+    IrInstruction base;
+
+    IrInstruction *value;
+};
+
 struct IrInstructionUnionTag {
     IrInstruction base;
 
diff --git a/src/analyze.cpp b/src/analyze.cpp
index ca582dfc4c..9b60f7374a 100644
--- a/src/analyze.cpp
+++ b/src/analyze.cpp
@@ -212,6 +212,43 @@ static uint8_t bits_needed_for_unsigned(uint64_t x) {
     return (upper >= x) ? base : (base + 1);
 }
 
+AstNode *type_decl_node(TypeTableEntry *type_entry) {
+    switch (type_entry->id) {
+        case TypeTableEntryIdInvalid:
+            zig_unreachable();
+        case TypeTableEntryIdStruct:
+            return type_entry->data.structure.decl_node;
+        case TypeTableEntryIdEnum:
+            return type_entry->data.enumeration.decl_node;
+        case TypeTableEntryIdUnion:
+            return type_entry->data.unionation.decl_node;
+        case TypeTableEntryIdOpaque:
+        case TypeTableEntryIdMetaType:
+        case TypeTableEntryIdVoid:
+        case TypeTableEntryIdBool:
+        case TypeTableEntryIdUnreachable:
+        case TypeTableEntryIdInt:
+        case TypeTableEntryIdFloat:
+        case TypeTableEntryIdPointer:
+        case TypeTableEntryIdArray:
+        case TypeTableEntryIdComptimeFloat:
+        case TypeTableEntryIdComptimeInt:
+        case TypeTableEntryIdUndefined:
+        case TypeTableEntryIdNull:
+        case TypeTableEntryIdOptional:
+        case TypeTableEntryIdErrorUnion:
+        case TypeTableEntryIdErrorSet:
+        case TypeTableEntryIdFn:
+        case TypeTableEntryIdNamespace:
+        case TypeTableEntryIdBlock:
+        case TypeTableEntryIdBoundFn:
+        case TypeTableEntryIdArgTuple:
+        case TypeTableEntryIdPromise:
+            return nullptr;
+    }
+    zig_unreachable();
+}
+
 bool type_is_complete(TypeTableEntry *type_entry) {
     switch (type_entry->id) {
         case TypeTableEntryIdInvalid:
@@ -5939,6 +5976,8 @@ uint32_t zig_llvm_fn_key_hash(ZigLLVMFnKey x) {
             return (uint32_t)(x.data.ctz.bit_count) * (uint32_t)810453934;
         case ZigLLVMFnIdClz:
             return (uint32_t)(x.data.clz.bit_count) * (uint32_t)2428952817;
+        case ZigLLVMFnIdPopCount:
+            return (uint32_t)(x.data.clz.bit_count) * (uint32_t)101195049;
         case ZigLLVMFnIdFloor:
             return (uint32_t)(x.data.floating.bit_count) * (uint32_t)1899859168;
         case ZigLLVMFnIdCeil:
@@ -5961,6 +6000,8 @@ bool zig_llvm_fn_key_eql(ZigLLVMFnKey a, ZigLLVMFnKey b) {
             return a.data.ctz.bit_count == b.data.ctz.bit_count;
         case ZigLLVMFnIdClz:
             return a.data.clz.bit_count == b.data.clz.bit_count;
+        case ZigLLVMFnIdPopCount:
+            return a.data.pop_count.bit_count == b.data.pop_count.bit_count;
         case ZigLLVMFnIdFloor:
         case ZigLLVMFnIdCeil:
         case ZigLLVMFnIdSqrt:
diff --git a/src/analyze.hpp b/src/analyze.hpp
index c2730197e2..5168509fe0 100644
--- a/src/analyze.hpp
+++ b/src/analyze.hpp
@@ -202,5 +202,6 @@ uint32_t get_coro_frame_align_bytes(CodeGen *g);
 bool fn_type_can_fail(FnTypeId *fn_type_id);
 bool type_can_fail(TypeTableEntry *type_entry);
 bool fn_eval_cacheable(Scope *scope, TypeTableEntry *return_type);
+AstNode *type_decl_node(TypeTableEntry *type_entry);
 
 #endif
diff --git a/src/bigint.cpp b/src/bigint.cpp
index bb227a7c3d..bf18b9a1bf 100644
--- a/src/bigint.cpp
+++ b/src/bigint.cpp
@@ -1593,6 +1593,37 @@ void bigint_append_buf(Buf *buf, const BigInt *op, uint64_t base) {
     }
 }
 
+size_t bigint_popcount_unsigned(const BigInt *bi) {
+    assert(!bi->is_negative);
+    if (bi->digit_count == 0)
+        return 0;
+
+    size_t count = 0;
+    size_t bit_count = bi->digit_count * 64;
+    for (size_t i = 0; i < bit_count; i += 1) {
+        if (bit_at_index(bi, i))
+            count += 1;
+    }
+    return count;
+}
+
+size_t bigint_popcount_signed(const BigInt *bi, size_t bit_count) {
+    if (bit_count == 0)
+        return 0;
+    if (bi->digit_count == 0)
+        return 0;
+
+    BigInt twos_comp = {0};
+    to_twos_complement(&twos_comp, bi, bit_count);
+
+    size_t count = 0;
+    for (size_t i = 0; i < bit_count; i += 1) {
+        if (bit_at_index(&twos_comp, i))
+            count += 1;
+    }
+    return count;
+}
+
 size_t bigint_ctz(const BigInt *bi, size_t bit_count) {
     if (bit_count == 0)
         return 0;
diff --git a/src/bigint.hpp b/src/bigint.hpp
index 9f044c8722..48b222a227 100644
--- a/src/bigint.hpp
+++ b/src/bigint.hpp
@@ -81,6 +81,8 @@ void bigint_append_buf(Buf *buf, const BigInt *op, uint64_t base);
 
 size_t bigint_ctz(const BigInt *bi, size_t bit_count);
 size_t bigint_clz(const BigInt *bi, size_t bit_count);
+size_t bigint_popcount_signed(const BigInt *bi, size_t bit_count);
+size_t bigint_popcount_unsigned(const BigInt *bi);
 
 size_t bigint_bits_needed(const BigInt *op);
 
diff --git a/src/codegen.cpp b/src/codegen.cpp
index bfc20cb1d2..1d51629002 100644
--- a/src/codegen.cpp
+++ b/src/codegen.cpp
@@ -2927,18 +2927,26 @@ static LLVMValueRef ir_render_elem_ptr(CodeGen *g, IrExecutable *executable, IrI
         return LLVMBuildInBoundsGEP(g->builder, array_ptr, indices, 1, "");
     } else if (array_type->id == TypeTableEntryIdStruct) {
         assert(array_type->data.structure.is_slice);
+        if (!type_has_bits(instruction->base.value.type)) {
+            if (safety_check_on) {
+                assert(LLVMGetTypeKind(LLVMTypeOf(array_ptr)) == LLVMIntegerTypeKind);
+                add_bounds_check(g, subscript_value, LLVMIntEQ, nullptr, LLVMIntULT, array_ptr);
+            }
+            return nullptr;
+        }
+
         assert(LLVMGetTypeKind(LLVMTypeOf(array_ptr)) == LLVMPointerTypeKind);
         assert(LLVMGetTypeKind(LLVMGetElementType(LLVMTypeOf(array_ptr))) == LLVMStructTypeKind);
 
         if (safety_check_on) {
-            size_t len_index = array_type->data.structure.fields[1].gen_index;
+            size_t len_index = array_type->data.structure.fields[slice_len_index].gen_index;
             assert(len_index != SIZE_MAX);
             LLVMValueRef len_ptr = LLVMBuildStructGEP(g->builder, array_ptr, (unsigned)len_index, "");
             LLVMValueRef len = gen_load_untyped(g, len_ptr, 0, false, "");
             add_bounds_check(g, subscript_value, LLVMIntEQ, nullptr, LLVMIntULT, len);
         }
 
-        size_t ptr_index = array_type->data.structure.fields[0].gen_index;
+        size_t ptr_index = array_type->data.structure.fields[slice_ptr_index].gen_index;
         assert(ptr_index != SIZE_MAX);
         LLVMValueRef ptr_ptr = LLVMBuildStructGEP(g->builder, array_ptr, (unsigned)ptr_index, "");
         LLVMValueRef ptr = gen_load_untyped(g, ptr_ptr, 0, false, "");
@@ -3353,14 +3361,22 @@ static LLVMValueRef ir_render_unwrap_maybe(CodeGen *g, IrExecutable *executable,
 static LLVMValueRef get_int_builtin_fn(CodeGen *g, TypeTableEntry *int_type, BuiltinFnId fn_id) {
     ZigLLVMFnKey key = {};
     const char *fn_name;
+    uint32_t n_args;
     if (fn_id == BuiltinFnIdCtz) {
         fn_name = "cttz";
+        n_args = 2;
         key.id = ZigLLVMFnIdCtz;
         key.data.ctz.bit_count = (uint32_t)int_type->data.integral.bit_count;
     } else if (fn_id == BuiltinFnIdClz) {
         fn_name = "ctlz";
+        n_args = 2;
         key.id = ZigLLVMFnIdClz;
         key.data.clz.bit_count = (uint32_t)int_type->data.integral.bit_count;
+    } else if (fn_id == BuiltinFnIdPopCount) {
+        fn_name = "ctpop";
+        n_args = 1;
+        key.id = ZigLLVMFnIdPopCount;
+        key.data.pop_count.bit_count = (uint32_t)int_type->data.integral.bit_count;
     } else {
         zig_unreachable();
     }
@@ -3375,7 +3391,7 @@ static LLVMValueRef get_int_builtin_fn(CodeGen *g, TypeTableEntry *int_type, Bui
         int_type->type_ref,
         LLVMInt1Type(),
     };
-    LLVMTypeRef fn_type = LLVMFunctionType(int_type->type_ref, param_types, 2, false);
+    LLVMTypeRef fn_type = LLVMFunctionType(int_type->type_ref, param_types, n_args, false);
     LLVMValueRef fn_val = LLVMAddFunction(g->module, llvm_name, fn_type);
     assert(LLVMGetIntrinsicID(fn_val));
 
@@ -3408,6 +3424,14 @@ static LLVMValueRef ir_render_ctz(CodeGen *g, IrExecutable *executable, IrInstru
     return gen_widen_or_shorten(g, false, int_type, instruction->base.value.type, wrong_size_int);
 }
 
+static LLVMValueRef ir_render_pop_count(CodeGen *g, IrExecutable *executable, IrInstructionPopCount *instruction) {
+    TypeTableEntry *int_type = instruction->value->value.type;
+    LLVMValueRef fn_val = get_int_builtin_fn(g, int_type, BuiltinFnIdPopCount);
+    LLVMValueRef operand = ir_llvm_value(g, instruction->value);
+    LLVMValueRef wrong_size_int = LLVMBuildCall(g->builder, fn_val, &operand, 1, "");
+    return gen_widen_or_shorten(g, false, int_type, instruction->base.value.type, wrong_size_int);
+}
+
 static LLVMValueRef ir_render_switch_br(CodeGen *g, IrExecutable *executable, IrInstructionSwitchBr *instruction) {
     LLVMValueRef target_value = ir_llvm_value(g, instruction->target_value);
     LLVMBasicBlockRef else_block = instruction->else_block->llvm_block;
@@ -3894,11 +3918,15 @@ static LLVMValueRef ir_render_slice(CodeGen *g, IrExecutable *executable, IrInst
         add_bounds_check(g, start_val, LLVMIntEQ, nullptr, LLVMIntULE, end_val);
     }
 
-    LLVMValueRef ptr_field_ptr = LLVMBuildStructGEP(g->builder, tmp_struct_ptr, slice_ptr_index, "");
-    LLVMValueRef slice_start_ptr = LLVMBuildInBoundsGEP(g->builder, array_ptr, &start_val, 1, "");
-    gen_store_untyped(g, slice_start_ptr, ptr_field_ptr, 0, false);
+    if (type_has_bits(array_type)) {
+        size_t gen_ptr_index = instruction->base.value.type->data.structure.fields[slice_ptr_index].gen_index;
+        LLVMValueRef ptr_field_ptr = LLVMBuildStructGEP(g->builder, tmp_struct_ptr, gen_ptr_index, "");
+        LLVMValueRef slice_start_ptr = LLVMBuildInBoundsGEP(g->builder, array_ptr, &start_val, 1, "");
+        gen_store_untyped(g, slice_start_ptr, ptr_field_ptr, 0, false);
+    }
 
-    LLVMValueRef len_field_ptr = LLVMBuildStructGEP(g->builder, tmp_struct_ptr, slice_len_index, "");
+    size_t gen_len_index = instruction->base.value.type->data.structure.fields[slice_len_index].gen_index;
+    LLVMValueRef len_field_ptr = LLVMBuildStructGEP(g->builder, tmp_struct_ptr, gen_len_index, "");
     LLVMValueRef len_value = LLVMBuildNSWSub(g->builder, end_val, start_val, "");
     gen_store_untyped(g, len_value, len_field_ptr, 0, false);
 
@@ -4730,6 +4758,8 @@ static LLVMValueRef ir_render_instruction(CodeGen *g, IrExecutable *executable,
             return ir_render_clz(g, executable, (IrInstructionClz *)instruction);
         case IrInstructionIdCtz:
             return ir_render_ctz(g, executable, (IrInstructionCtz *)instruction);
+        case IrInstructionIdPopCount:
+            return ir_render_pop_count(g, executable, (IrInstructionPopCount *)instruction);
         case IrInstructionIdSwitchBr:
             return ir_render_switch_br(g, executable, (IrInstructionSwitchBr *)instruction);
         case IrInstructionIdPhi:
@@ -6241,6 +6271,7 @@ static void define_builtin_fns(CodeGen *g) {
     create_builtin_fn(g, BuiltinFnIdCUndef, "cUndef", 1);
     create_builtin_fn(g, BuiltinFnIdCtz, "ctz", 1);
     create_builtin_fn(g, BuiltinFnIdClz, "clz", 1);
+    create_builtin_fn(g, BuiltinFnIdPopCount, "popCount", 1);
     create_builtin_fn(g, BuiltinFnIdImport, "import", 1);
     create_builtin_fn(g, BuiltinFnIdCImport, "cImport", 1);
     create_builtin_fn(g, BuiltinFnIdErrName, "errorName", 1);
diff --git a/src/ir.cpp b/src/ir.cpp
index b40c2dc36d..7f7436010e 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -82,6 +82,7 @@ struct ConstCastSliceMismatch;
 struct ConstCastErrUnionErrSetMismatch;
 struct ConstCastErrUnionPayloadMismatch;
 struct ConstCastErrSetMismatch;
+struct ConstCastTypeMismatch;
 
 struct ConstCastOnly {
     ConstCastResultId id;
@@ -92,6 +93,7 @@ struct ConstCastOnly {
         ConstCastOptionalMismatch *optional;
         ConstCastErrUnionPayloadMismatch *error_union_payload;
         ConstCastErrUnionErrSetMismatch *error_union_error_set;
+        ConstCastTypeMismatch *type_mismatch;
         ConstCastOnly *return_type;
         ConstCastOnly *async_allocator_type;
         ConstCastOnly *null_wrap_ptr_child;
@@ -100,6 +102,11 @@ struct ConstCastOnly {
     } data;
 };
 
+struct ConstCastTypeMismatch {
+    TypeTableEntry *wanted_type;
+    TypeTableEntry *actual_type;
+};
+
 struct ConstCastOptionalMismatch {
     ConstCastOnly child;
     TypeTableEntry *wanted_child;
@@ -420,6 +427,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionCtz *) {
     return IrInstructionIdCtz;
 }
 
+static constexpr IrInstructionId ir_instruction_id(IrInstructionPopCount *) {
+    return IrInstructionIdPopCount;
+}
+
 static constexpr IrInstructionId ir_instruction_id(IrInstructionUnionTag *) {
     return IrInstructionIdUnionTag;
 }
@@ -1718,8 +1729,18 @@ static IrInstruction *ir_build_ctz_from(IrBuilder *irb, IrInstruction *old_instr
     return new_instruction;
 }
 
+static IrInstruction *ir_build_pop_count(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *value) {
+    IrInstructionPopCount *instruction = ir_build_instruction<IrInstructionPopCount>(irb, scope, source_node);
+    instruction->value = value;
+
+    ir_ref_instruction(value, irb->current_basic_block);
+
+    return &instruction->base;
+}
+
 static IrInstruction *ir_build_switch_br(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *target_value,
-    IrBasicBlock *else_block, size_t case_count, IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime)
+    IrBasicBlock *else_block, size_t case_count, IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime,
+    IrInstruction *switch_prongs_void)
 {
     IrInstructionSwitchBr *instruction = ir_build_instruction<IrInstructionSwitchBr>(irb, scope, source_node);
     instruction->base.value.type = irb->codegen->builtin_types.entry_unreachable;
@@ -1729,10 +1750,12 @@ static IrInstruction *ir_build_switch_br(IrBuilder *irb, Scope *scope, AstNode *
     instruction->case_count = case_count;
     instruction->cases = cases;
     instruction->is_comptime = is_comptime;
+    instruction->switch_prongs_void = switch_prongs_void;
 
     ir_ref_instruction(target_value, irb->current_basic_block);
     if (is_comptime) ir_ref_instruction(is_comptime, irb->current_basic_block);
     ir_ref_bb(else_block);
+    if (switch_prongs_void) ir_ref_instruction(switch_prongs_void, irb->current_basic_block);
 
     for (size_t i = 0; i < case_count; i += 1) {
         ir_ref_instruction(cases[i].value, irb->current_basic_block);
@@ -1744,10 +1767,10 @@ static IrInstruction *ir_build_switch_br(IrBuilder *irb, Scope *scope, AstNode *
 
 static IrInstruction *ir_build_switch_br_from(IrBuilder *irb, IrInstruction *old_instruction,
         IrInstruction *target_value, IrBasicBlock *else_block, size_t case_count,
-        IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime)
+        IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime, IrInstruction *switch_prongs_void)
 {
     IrInstruction *new_instruction = ir_build_switch_br(irb, old_instruction->scope, old_instruction->source_node,
-            target_value, else_block, case_count, cases, is_comptime);
+            target_value, else_block, case_count, cases, is_comptime, switch_prongs_void);
     ir_link_new_instruction(new_instruction, old_instruction);
     return new_instruction;
 }
@@ -3831,6 +3854,16 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
                 IrInstruction *ctz = ir_build_ctz(irb, scope, node, arg0_value);
                 return ir_lval_wrap(irb, scope, ctz, lval);
             }
+        case BuiltinFnIdPopCount:
+            {
+                AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+                IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+                if (arg0_value == irb->codegen->invalid_instruction)
+                    return arg0_value;
+
+                IrInstruction *instr = ir_build_pop_count(irb, scope, node, arg0_value);
+                return ir_lval_wrap(irb, scope, instr, lval);
+            }
         case BuiltinFnIdClz:
             {
                 AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
@@ -6035,13 +6068,13 @@ static IrInstruction *ir_gen_switch_expr(IrBuilder *irb, Scope *scope, AstNode *
 
     }
 
-    ir_build_check_switch_prongs(irb, scope, node, target_value, check_ranges.items, check_ranges.length,
+    IrInstruction *switch_prongs_void = ir_build_check_switch_prongs(irb, scope, node, target_value, check_ranges.items, check_ranges.length,
             else_prong != nullptr);
 
     if (cases.length == 0) {
         ir_build_br(irb, scope, node, else_block, is_comptime);
     } else {
-        ir_build_switch_br(irb, scope, node, target_value, else_block, cases.length, cases.items, is_comptime);
+        ir_build_switch_br(irb, scope, node, target_value, else_block, cases.length, cases.items, is_comptime, switch_prongs_void);
     }
 
     if (!else_prong) {
@@ -6692,7 +6725,7 @@ static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *parent_scope, Ast
     cases[1].value = ir_build_const_u8(irb, parent_scope, node, 1);
     cases[1].block = cleanup_block;
     ir_build_switch_br(irb, parent_scope, node, suspend_code, irb->exec->coro_suspend_block,
-            2, cases, const_bool_false);
+            2, cases, const_bool_false, nullptr);
 
     ir_set_cursor_at_end_and_append_block(irb, cleanup_block);
     ir_gen_defers_for_block(irb, parent_scope, outer_scope, true);
@@ -6773,7 +6806,7 @@ static IrInstruction *ir_gen_suspend(IrBuilder *irb, Scope *parent_scope, AstNod
     cases[1].value = ir_mark_gen(ir_build_const_u8(irb, parent_scope, node, 1));
     cases[1].block = cleanup_block;
     ir_mark_gen(ir_build_switch_br(irb, parent_scope, node, suspend_code, irb->exec->coro_suspend_block,
-            2, cases, const_bool_false));
+            2, cases, const_bool_false, nullptr));
 
     ir_set_cursor_at_end_and_append_block(irb, cleanup_block);
     ir_gen_defers_for_block(irb, parent_scope, outer_scope, true);
@@ -7078,7 +7111,7 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
     cases[0].block = invalid_resume_block;
     cases[1].value = ir_build_const_u8(irb, scope, node, 1);
     cases[1].block = irb->exec->coro_final_cleanup_block;
-    ir_build_switch_br(irb, scope, node, suspend_code, irb->exec->coro_suspend_block, 2, cases, const_bool_false);
+    ir_build_switch_br(irb, scope, node, suspend_code, irb->exec->coro_suspend_block, 2, cases, const_bool_false, nullptr);
 
     ir_set_cursor_at_end_and_append_block(irb, irb->exec->coro_suspend_block);
     ir_build_coro_end(irb, scope, node);
@@ -8125,15 +8158,7 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
     }
 
     // pointer const
-    if (wanted_type->id == TypeTableEntryIdPointer &&
-        actual_type->id == TypeTableEntryIdPointer &&
-        (actual_type->data.pointer.ptr_len == wanted_type->data.pointer.ptr_len) &&
-        (!actual_type->data.pointer.is_const || wanted_type->data.pointer.is_const) &&
-        (!actual_type->data.pointer.is_volatile || wanted_type->data.pointer.is_volatile) &&
-        actual_type->data.pointer.bit_offset == wanted_type->data.pointer.bit_offset &&
-        actual_type->data.pointer.unaligned_bit_count == wanted_type->data.pointer.unaligned_bit_count &&
-        actual_type->data.pointer.alignment >= wanted_type->data.pointer.alignment)
-    {
+    if (wanted_type->id == TypeTableEntryIdPointer && actual_type->id == TypeTableEntryIdPointer) {
         ConstCastOnly child = types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
                 actual_type->data.pointer.child_type, source_node, !wanted_type->data.pointer.is_const);
         if (child.id != ConstCastResultIdOk) {
@@ -8142,8 +8167,17 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
             result.data.pointer_mismatch->child = child;
             result.data.pointer_mismatch->wanted_child = wanted_type->data.pointer.child_type;
             result.data.pointer_mismatch->actual_child = actual_type->data.pointer.child_type;
+            return result;
+        }
+        if ((actual_type->data.pointer.ptr_len == wanted_type->data.pointer.ptr_len) &&
+            (!actual_type->data.pointer.is_const || wanted_type->data.pointer.is_const) &&
+            (!actual_type->data.pointer.is_volatile || wanted_type->data.pointer.is_volatile) &&
+            actual_type->data.pointer.bit_offset == wanted_type->data.pointer.bit_offset &&
+            actual_type->data.pointer.unaligned_bit_count == wanted_type->data.pointer.unaligned_bit_count &&
+            actual_type->data.pointer.alignment >= wanted_type->data.pointer.alignment)
+        {
+            return result;
         }
-        return result;
     }
 
     // slice const
@@ -8338,6 +8372,9 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
     }
 
     result.id = ConstCastResultIdType;
+    result.data.type_mismatch = allocate_nonzero<ConstCastTypeMismatch>(1);
+    result.data.type_mismatch->wanted_type = wanted_type;
+    result.data.type_mismatch->actual_type = actual_type;
     return result;
 }
 
@@ -10151,6 +10188,21 @@ static void report_recursive_error(IrAnalyze *ira, AstNode *source_node, ConstCa
             report_recursive_error(ira, source_node, &cast_result->data.error_union_payload->child, msg);
             break;
         }
+        case ConstCastResultIdType: {
+            AstNode *wanted_decl_node = type_decl_node(cast_result->data.type_mismatch->wanted_type);
+            AstNode *actual_decl_node = type_decl_node(cast_result->data.type_mismatch->actual_type);
+            if (wanted_decl_node != nullptr) {
+                add_error_note(ira->codegen, parent_msg, wanted_decl_node,
+                    buf_sprintf("%s declared here",
+                        buf_ptr(&cast_result->data.type_mismatch->wanted_type->name)));
+            }
+            if (actual_decl_node != nullptr) {
+                add_error_note(ira->codegen, parent_msg, actual_decl_node,
+                    buf_sprintf("%s declared here",
+                        buf_ptr(&cast_result->data.type_mismatch->actual_type->name)));
+            }
+            break;
+        }
         case ConstCastResultIdFnAlign: // TODO
         case ConstCastResultIdFnCC: // TODO
         case ConstCastResultIdFnVarArgs: // TODO
@@ -10160,7 +10212,6 @@ static void report_recursive_error(IrAnalyze *ira, AstNode *source_node, ConstCa
         case ConstCastResultIdFnGenericArgCount: // TODO
         case ConstCastResultIdFnArg: // TODO
         case ConstCastResultIdFnArgNoAlias: // TODO
-        case ConstCastResultIdType: // TODO
        case ConstCastResultIdUnresolvedInferredErrSet: // TODO
         case ConstCastResultIdAsyncAllocatorType: // TODO
         case ConstCastResultIdNullWrapPtr: // TODO
@@ -12670,14 +12721,22 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
     // for extern functions, the var args argument is not counted.
     // for zig functions, it is.
     size_t var_args_1_or_0;
-    if (fn_type_id->cc == CallingConventionUnspecified) {
-        var_args_1_or_0 = fn_type_id->is_var_args ? 1 : 0;
-    } else {
+    if (fn_type_id->cc == CallingConventionC) {
         var_args_1_or_0 = 0;
+    } else {
+        var_args_1_or_0 = fn_type_id->is_var_args ? 1 : 0;
     }
     size_t src_param_count = fn_type_id->param_count - var_args_1_or_0;
 
     size_t call_param_count = call_instruction->arg_count + first_arg_1_or_0;
+    for (size_t i = 0; i < call_instruction->arg_count; i += 1) {
+        ConstExprValue *arg_tuple_value = &call_instruction->args[i]->other->value;
+        if (arg_tuple_value->type->id == TypeTableEntryIdArgTuple) {
+            call_param_count -= 1;
+            call_param_count += arg_tuple_value->data.x_arg_tuple.end_index -
+                arg_tuple_value->data.x_arg_tuple.start_index;
+        }
+    }
 
     AstNode *source_node = call_instruction->base.source_node;
     AstNode *fn_proto_node = fn_entry ? fn_entry->proto_node : nullptr;;
@@ -12858,11 +12917,6 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
                     buf_sprintf("calling a generic function requires compile-time known function value"));
             return ira->codegen->builtin_types.entry_invalid;
         }
-        if (call_instruction->is_async && fn_type_id->is_var_args) {
-            ir_add_error(ira, call_instruction->fn_ref,
-                    buf_sprintf("compiler bug: TODO: implement var args async functions. https://github.com/ziglang/zig/issues/557"));
-            return ira->codegen->builtin_types.entry_invalid;
-        }
 
         // Count the arguments of the function type id we are creating
         size_t new_fn_arg_count = first_arg_1_or_0;
@@ -12937,18 +12991,18 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
             if (type_is_invalid(arg->value.type))
                 return ira->codegen->builtin_types.entry_invalid;
 
-            AstNode *param_decl_node = fn_proto_node->data.fn_proto.params.at(next_proto_i);
-            assert(param_decl_node->type == NodeTypeParamDecl);
-            bool is_var_args = param_decl_node->data.param_decl.is_var_args;
-            if (is_var_args && !found_first_var_arg) {
-                first_var_arg = inst_fn_type_id.param_count;
-                found_first_var_arg = true;
-            }
-
             if (arg->value.type->id == TypeTableEntryIdArgTuple) {
                 for (size_t arg_tuple_i = arg->value.data.x_arg_tuple.start_index;
                     arg_tuple_i < arg->value.data.x_arg_tuple.end_index; arg_tuple_i += 1)
                 {
+                    AstNode *param_decl_node = fn_proto_node->data.fn_proto.params.at(next_proto_i);
+                    assert(param_decl_node->type == NodeTypeParamDecl);
+                    bool is_var_args = param_decl_node->data.param_decl.is_var_args;
+                    if (is_var_args && !found_first_var_arg) {
+                        first_var_arg = inst_fn_type_id.param_count;
+                        found_first_var_arg = true;
+                    }
+
                     VariableTableEntry *arg_var = get_fn_var_by_index(parent_fn_entry, arg_tuple_i);
                     if (arg_var == nullptr) {
                         ir_add_error(ira, arg,
@@ -12969,10 +13023,20 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
                         return ira->codegen->builtin_types.entry_invalid;
                     }
                 }
-            } else if (!ir_analyze_fn_call_generic_arg(ira, fn_proto_node, arg, &impl_fn->child_scope,
-                &next_proto_i, generic_id, &inst_fn_type_id, casted_args, impl_fn))
-            {
-                return ira->codegen->builtin_types.entry_invalid;
+            } else {
+                AstNode *param_decl_node = fn_proto_node->data.fn_proto.params.at(next_proto_i);
+                assert(param_decl_node->type == NodeTypeParamDecl);
+                bool is_var_args = param_decl_node->data.param_decl.is_var_args;
+                if (is_var_args && !found_first_var_arg) {
+                    first_var_arg = inst_fn_type_id.param_count;
+                    found_first_var_arg = true;
+                }
+
+                if (!ir_analyze_fn_call_generic_arg(ira, fn_proto_node, arg, &impl_fn->child_scope,
+                    &next_proto_i, generic_id, &inst_fn_type_id, casted_args, impl_fn))
+                {
+                    return ira->codegen->builtin_types.entry_invalid;
+                }
             }
         }
 
@@ -13220,6 +13284,8 @@ static TypeTableEntry *ir_analyze_instruction_call(IrAnalyze *ira, IrInstruction
         return ir_finish_anal(ira, cast_instruction->value.type);
     } else if (fn_ref->value.type->id == TypeTableEntryIdFn) {
         FnTableEntry *fn_table_entry = ir_resolve_fn(ira, fn_ref);
+        if (fn_table_entry == nullptr)
+            return ira->codegen->builtin_types.entry_invalid;
         return ir_analyze_fn_call(ira, call_instruction, fn_table_entry, fn_table_entry->type_entry,
             fn_ref, nullptr, is_comptime, call_instruction->fn_inline);
     } else if (fn_ref->value.type->id == TypeTableEntryIdBoundFn) {
@@ -13227,7 +13293,7 @@ static TypeTableEntry *ir_analyze_instruction_call(IrAnalyze *ira, IrInstruction
         FnTableEntry *fn_table_entry = fn_ref->value.data.x_bound_fn.fn;
         IrInstruction *first_arg_ptr = fn_ref->value.data.x_bound_fn.first_arg;
         return ir_analyze_fn_call(ira, call_instruction, fn_table_entry, fn_table_entry->type_entry,
-                nullptr, first_arg_ptr, is_comptime, call_instruction->fn_inline);
+                fn_ref, first_arg_ptr, is_comptime, call_instruction->fn_inline);
     } else {
         ir_add_error_node(ira, fn_ref->source_node,
             buf_sprintf("type '%s' not a function", buf_ptr(&fn_ref->value.type->name)));
@@ -15247,6 +15313,48 @@ static TypeTableEntry *ir_analyze_instruction_clz(IrAnalyze *ira, IrInstructionC
     }
 }
 
+static TypeTableEntry *ir_analyze_instruction_pop_count(IrAnalyze *ira, IrInstructionPopCount *instruction) {
+    IrInstruction *value = instruction->value->other;
+    if (type_is_invalid(value->value.type))
+        return ira->codegen->builtin_types.entry_invalid;
+
+    if (value->value.type->id != TypeTableEntryIdInt && value->value.type->id != TypeTableEntryIdComptimeInt) {
+        ir_add_error(ira, value,
+            buf_sprintf("expected integer type, found '%s'", buf_ptr(&value->value.type->name)));
+        return ira->codegen->builtin_types.entry_invalid;
+    }
+
+    if (instr_is_comptime(value)) {
+        ConstExprValue *val = ir_resolve_const(ira, value, UndefBad);
+        if (!val)
+            return ira->codegen->builtin_types.entry_invalid;
+        if (bigint_cmp_zero(&val->data.x_bigint) != CmpLT) {
+            size_t result = bigint_popcount_unsigned(&val->data.x_bigint);
+            ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+            bigint_init_unsigned(&out_val->data.x_bigint, result);
+            return ira->codegen->builtin_types.entry_num_lit_int;
+        }
+        if (value->value.type->id == TypeTableEntryIdComptimeInt) {
+            Buf *val_buf = buf_alloc();
+            bigint_append_buf(val_buf, &val->data.x_bigint, 10);
+            ir_add_error(ira, &instruction->base,
+                buf_sprintf("@popCount on negative %s value %s",
+                    buf_ptr(&value->value.type->name), buf_ptr(val_buf)));
+            return ira->codegen->builtin_types.entry_invalid;
+        }
+        size_t result = bigint_popcount_signed(&val->data.x_bigint, value->value.type->data.integral.bit_count);
+        ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+        bigint_init_unsigned(&out_val->data.x_bigint, result);
+        return ira->codegen->builtin_types.entry_num_lit_int;
+    }
+
+    IrInstruction *result = ir_build_pop_count(&ira->new_irb, instruction->base.scope,
+        instruction->base.source_node, value);
+    result->value.type = get_smallest_unsigned_int_type(ira->codegen, value->value.type->data.integral.bit_count);
+    ir_link_new_instruction(result, &instruction->base);
+    return result->value.type;
+}
+
 static IrInstruction *ir_analyze_union_tag(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value) {
     if (type_is_invalid(value->value.type))
         return ira->codegen->invalid_instruction;
@@ -15297,6 +15405,13 @@ static TypeTableEntry *ir_analyze_instruction_switch_br(IrAnalyze *ira,
     if (type_is_invalid(target_value->value.type))
         return ir_unreach_error(ira);
 
+    if (switch_br_instruction->switch_prongs_void != nullptr) {
+        if (type_is_invalid(switch_br_instruction->switch_prongs_void->other->value.type)) {
+            return ir_unreach_error(ira);
+        }
+    }
+
+
     size_t case_count = switch_br_instruction->case_count;
 
     bool is_comptime;
@@ -15387,7 +15502,7 @@ static TypeTableEntry *ir_analyze_instruction_switch_br(IrAnalyze *ira,
 
     IrBasicBlock *new_else_block = ir_get_new_bb(ira, switch_br_instruction->else_block, &switch_br_instruction->base);
     ir_build_switch_br_from(&ira->new_irb, &switch_br_instruction->base,
-            target_value, new_else_block, case_count, cases, nullptr);
+            target_value, new_else_block, case_count, cases, nullptr, nullptr);
     return ir_finish_anal(ira, ira->codegen->builtin_types.entry_unreachable);
 }
 
@@ -19136,16 +19251,22 @@ static TypeTableEntry *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira
             IrInstruction *start_value = range->start->other;
             if (type_is_invalid(start_value->value.type))
                 return ira->codegen->builtin_types.entry_invalid;
+            IrInstruction *casted_start_value = ir_implicit_cast(ira, start_value, switch_type);
+            if (type_is_invalid(casted_start_value->value.type))
+                return ira->codegen->builtin_types.entry_invalid;
 
             IrInstruction *end_value = range->end->other;
             if (type_is_invalid(end_value->value.type))
                 return ira->codegen->builtin_types.entry_invalid;
+            IrInstruction *casted_end_value = ir_implicit_cast(ira, end_value, switch_type);
+            if (type_is_invalid(casted_end_value->value.type))
+                return ira->codegen->builtin_types.entry_invalid;
 
-            ConstExprValue *start_val = ir_resolve_const(ira, start_value, UndefBad);
+            ConstExprValue *start_val = ir_resolve_const(ira, casted_start_value, UndefBad);
             if (!start_val)
                 return ira->codegen->builtin_types.entry_invalid;
 
-            ConstExprValue *end_val = ir_resolve_const(ira, end_value, UndefBad);
+            ConstExprValue *end_val = ir_resolve_const(ira, casted_end_value, UndefBad);
             if (!end_val)
                 return ira->codegen->builtin_types.entry_invalid;
 
@@ -19264,6 +19385,15 @@ static IrInstruction *ir_align_cast(IrAnalyze *ira, IrInstruction *target, uint3
         if (!val)
             return ira->codegen->invalid_instruction;
 
+        if (val->data.x_ptr.special == ConstPtrSpecialHardCodedAddr &&
+            val->data.x_ptr.data.hard_coded_addr.addr % align_bytes != 0)
+        {
+            ir_add_error(ira, target,
+                buf_sprintf("pointer address 0x%" ZIG_PRI_x64 " is not aligned to %" PRIu32 " bytes",
+                    val->data.x_ptr.data.hard_coded_addr.addr, align_bytes));
+            return ira->codegen->invalid_instruction;
+        }
+
         IrInstruction *result = ir_create_const(&ira->new_irb, target->scope, target->source_node, result_type);
         copy_const_val(&result->value, val, false);
         result->value.type = result_type;
@@ -19690,6 +19820,12 @@ static TypeTableEntry *ir_analyze_instruction_ptr_to_int(IrAnalyze *ira, IrInstr
         return ira->codegen->builtin_types.entry_invalid;
     }
 
+    if (!type_has_bits(target->value.type)) {
+        ir_add_error(ira, target,
+            buf_sprintf("pointer to size 0 type has no address"));
+        return ira->codegen->builtin_types.entry_invalid;
+    }
+
     if (instr_is_comptime(target)) {
         ConstExprValue *val = ir_resolve_const(ira, target, UndefBad);
         if (!val)
@@ -20493,6 +20629,8 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
             return ir_analyze_instruction_clz(ira, (IrInstructionClz *)instruction);
         case IrInstructionIdCtz:
            return ir_analyze_instruction_ctz(ira, (IrInstructionCtz *)instruction);
+        case IrInstructionIdPopCount:
+            return ir_analyze_instruction_pop_count(ira, (IrInstructionPopCount *)instruction);
         case IrInstructionIdSwitchBr:
             return ir_analyze_instruction_switch_br(ira, (IrInstructionSwitchBr *)instruction);
         case IrInstructionIdSwitchTarget:
@@ -20851,6 +20989,7 @@ bool ir_has_side_effects(IrInstruction *instruction) {
         case IrInstructionIdUnwrapOptional:
         case IrInstructionIdClz:
        case IrInstructionIdCtz:
+        case IrInstructionIdPopCount:
         case IrInstructionIdSwitchVar:
         case IrInstructionIdSwitchTarget:
         case IrInstructionIdUnionTag:
diff --git a/src/ir_print.cpp b/src/ir_print.cpp
index 5e5a71382c..780cf9e756 100644
--- a/src/ir_print.cpp
+++ b/src/ir_print.cpp
@@ -501,6 +501,12 @@ static void ir_print_ctz(IrPrint *irp, IrInstructionCtz *instruction) {
     fprintf(irp->f, ")");
 }
 
+static void ir_print_pop_count(IrPrint *irp, IrInstructionPopCount *instruction) {
+    fprintf(irp->f, "@popCount(");
+    ir_print_other_instruction(irp, instruction->value);
+    fprintf(irp->f, ")");
+}
+
 static void ir_print_switch_br(IrPrint *irp, IrInstructionSwitchBr *instruction) {
     fprintf(irp->f, "switch (");
     ir_print_other_instruction(irp, instruction->target_value);
@@ -1425,6 +1431,9 @@ static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
         case IrInstructionIdCtz:
             ir_print_ctz(irp, (IrInstructionCtz *)instruction);
             break;
+        case IrInstructionIdPopCount:
+            ir_print_pop_count(irp, (IrInstructionPopCount *)instruction);
+            break;
        case IrInstructionIdClz:
             ir_print_clz(irp, (IrInstructionClz *)instruction);
             break;
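
A note on the comptime semantics this diff implements: in ir_analyze_instruction_pop_count, a non-negative operand is counted directly (bigint_popcount_unsigned), while a negative value of a sized signed integer type is counted on its two's-complement representation at that type's bit width (bigint_popcount_signed converts with to_twos_complement and then counts set bits); a negative comptime_int is a compile error. The following standalone C++ sketch restates that rule with plain uint64_t instead of the compiler's BigInt; the function name and the test values are illustrative and not part of the commit.

    // Sketch of the signed popcount rule, assuming bit_count <= 64.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    static size_t popcount_signed(int64_t value, size_t bit_count) {
        assert(bit_count >= 1 && bit_count <= 64);
        // Reinterpret the value as its two's-complement bit pattern,
        // then mask down to bit_count bits before counting.
        uint64_t bits = (uint64_t)value;
        if (bit_count < 64)
            bits &= (UINT64_C(1) << bit_count) - 1;
        size_t count = 0;
        for (; bits != 0; bits &= bits - 1) // clear the lowest set bit each step
            count += 1;
        return count;
    }

    int main() {
        assert(popcount_signed(0b10101010, 8) == 4); // plain count for a non-negative value
        assert(popcount_signed(-1, 8) == 8);         // 8-bit -1 is 0xFF
        assert(popcount_signed(-2, 16) == 15);       // 16-bit -2 is 0xFFFE
        return 0;
    }

At runtime the same operation is delegated to LLVM's ctpop intrinsic (see get_int_builtin_fn and ir_render_pop_count above), which takes a single operand, hence the new n_args handling next to the two-argument cttz/ctlz intrinsics.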
