path: root/src/ir.cpp
author    Andrea Orru <andrea@orru.io>    2018-08-06 01:43:19 -0400
committer Andrea Orru <andrea@orru.io>    2018-08-06 01:43:19 -0400
commit    d2f5e57b68da0b16e5789ca19045ccbcb4ecfa8d (patch)
tree      e9fa3caec533a0d1e2b434868b2fde1f9240e5c8 /src/ir.cpp
parent    06614b3fa09954464c2e2f32756cacedc178a282 (diff)
parent    63a23e848a62d5f167f8d5478de9766cb24aa6eb (diff)
Merge branch 'master' into zen_stdlib
Diffstat (limited to 'src/ir.cpp')
-rw-r--r--  src/ir.cpp  6185
1 file changed, 4533 insertions(+), 1652 deletions(-)
diff --git a/src/ir.cpp b/src/ir.cpp
index 3ba58a09bd..3e423487aa 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -11,9 +11,10 @@
#include "ir.hpp"
#include "ir_print.hpp"
#include "os.hpp"
-#include "translate_c.hpp"
#include "range_set.hpp"
#include "softfloat.hpp"
+#include "translate_c.hpp"
+#include "util.hpp"
struct IrExecContext {
ConstExprValue *mem_slot_list;
@@ -38,20 +39,13 @@ struct IrAnalyze {
IrBasicBlock *const_predecessor_bb;
};
-static const LVal LVAL_NONE = { false, false, false };
-static const LVal LVAL_PTR = { true, false, false };
-
-static LVal make_lval_addr(bool is_const, bool is_volatile) {
- return { true, is_const, is_volatile };
-}
-
enum ConstCastResultId {
ConstCastResultIdOk,
ConstCastResultIdErrSet,
ConstCastResultIdErrSetGlobal,
ConstCastResultIdPointerChild,
ConstCastResultIdSliceChild,
- ConstCastResultIdNullableChild,
+ ConstCastResultIdOptionalChild,
ConstCastResultIdErrorUnionPayload,
ConstCastResultIdErrorUnionErrorSet,
ConstCastResultIdFnAlign,
@@ -66,14 +60,10 @@ enum ConstCastResultId {
ConstCastResultIdType,
ConstCastResultIdUnresolvedInferredErrSet,
ConstCastResultIdAsyncAllocatorType,
-};
-
-struct ConstCastErrSetMismatch {
- ZigList<ErrorTableEntry *> missing_errors;
+ ConstCastResultIdNullWrapPtr,
};
struct ConstCastOnly;
-
struct ConstCastArg {
size_t arg_index;
ConstCastOnly *child;
@@ -83,22 +73,70 @@ struct ConstCastArgNoAlias {
size_t arg_index;
};
+struct ConstCastOptionalMismatch;
+struct ConstCastPointerMismatch;
+struct ConstCastSliceMismatch;
+struct ConstCastErrUnionErrSetMismatch;
+struct ConstCastErrUnionPayloadMismatch;
+struct ConstCastErrSetMismatch;
+struct ConstCastTypeMismatch;
+
struct ConstCastOnly {
ConstCastResultId id;
union {
- ConstCastErrSetMismatch error_set;
- ConstCastOnly *pointer_child;
- ConstCastOnly *slice_child;
- ConstCastOnly *nullable_child;
- ConstCastOnly *error_union_payload;
- ConstCastOnly *error_union_error_set;
+ ConstCastErrSetMismatch *error_set_mismatch;
+ ConstCastPointerMismatch *pointer_mismatch;
+ ConstCastSliceMismatch *slice_mismatch;
+ ConstCastOptionalMismatch *optional;
+ ConstCastErrUnionPayloadMismatch *error_union_payload;
+ ConstCastErrUnionErrSetMismatch *error_union_error_set;
+ ConstCastTypeMismatch *type_mismatch;
ConstCastOnly *return_type;
ConstCastOnly *async_allocator_type;
+ ConstCastOnly *null_wrap_ptr_child;
ConstCastArg fn_arg;
ConstCastArgNoAlias arg_no_alias;
} data;
};
+struct ConstCastTypeMismatch {
+ TypeTableEntry *wanted_type;
+ TypeTableEntry *actual_type;
+};
+
+struct ConstCastOptionalMismatch {
+ ConstCastOnly child;
+ TypeTableEntry *wanted_child;
+ TypeTableEntry *actual_child;
+};
+
+struct ConstCastPointerMismatch {
+ ConstCastOnly child;
+ TypeTableEntry *wanted_child;
+ TypeTableEntry *actual_child;
+};
+
+struct ConstCastSliceMismatch {
+ ConstCastOnly child;
+ TypeTableEntry *wanted_child;
+ TypeTableEntry *actual_child;
+};
+
+struct ConstCastErrUnionErrSetMismatch {
+ ConstCastOnly child;
+ TypeTableEntry *wanted_err_set;
+ TypeTableEntry *actual_err_set;
+};
+
+struct ConstCastErrUnionPayloadMismatch {
+ ConstCastOnly child;
+ TypeTableEntry *wanted_payload;
+ TypeTableEntry *actual_payload;
+};
+
+struct ConstCastErrSetMismatch {
+ ZigList<ErrorTableEntry *> missing_errors;
+};
static IrInstruction *ir_gen_node(IrBuilder *irb, AstNode *node, Scope *scope);
static IrInstruction *ir_gen_node_extra(IrBuilder *irb, AstNode *node, Scope *scope, LVal lval);
@@ -108,11 +146,14 @@ static IrInstruction *ir_get_deref(IrAnalyze *ira, IrInstruction *source_instruc
static ErrorMsg *exec_add_error_node(CodeGen *codegen, IrExecutable *exec, AstNode *source_node, Buf *msg);
static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_name,
IrInstruction *source_instr, IrInstruction *container_ptr, TypeTableEntry *container_type);
-static IrInstruction *ir_get_var_ptr(IrAnalyze *ira, IrInstruction *instruction,
- VariableTableEntry *var, bool is_const_ptr, bool is_volatile_ptr);
+static IrInstruction *ir_get_var_ptr(IrAnalyze *ira, IrInstruction *instruction, VariableTableEntry *var);
+static TypeTableEntry *ir_resolve_atomic_operand_type(IrAnalyze *ira, IrInstruction *op);
+static IrInstruction *ir_lval_wrap(IrBuilder *irb, Scope *scope, IrInstruction *value, LVal lval);
+static TypeTableEntry *adjust_ptr_align(CodeGen *g, TypeTableEntry *ptr_type, uint32_t new_align);
+static TypeTableEntry *adjust_slice_align(CodeGen *g, TypeTableEntry *slice_type, uint32_t new_align);
ConstExprValue *const_ptr_pointee(CodeGen *g, ConstExprValue *const_val) {
- assert(const_val->type->id == TypeTableEntryIdPointer);
+ assert(get_codegen_ptr_type(const_val->type) != nullptr);
assert(const_val->special == ConstValSpecialStatic);
switch (const_val->data.x_ptr.special) {
case ConstPtrSpecialInvalid:
@@ -143,6 +184,8 @@ static bool ir_should_inline(IrExecutable *exec, Scope *scope) {
while (scope != nullptr) {
if (scope->id == ScopeIdCompTime)
return true;
+ if (scope->id == ScopeIdFnDef)
+ break;
scope = scope->parent;
}
return false;
@@ -369,8 +412,8 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionTestNonNull *) {
return IrInstructionIdTestNonNull;
}
-static constexpr IrInstructionId ir_instruction_id(IrInstructionUnwrapMaybe *) {
- return IrInstructionIdUnwrapMaybe;
+static constexpr IrInstructionId ir_instruction_id(IrInstructionUnwrapOptional *) {
+ return IrInstructionIdUnwrapOptional;
}
static constexpr IrInstructionId ir_instruction_id(IrInstructionClz *) {
@@ -381,6 +424,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionCtz *) {
return IrInstructionIdCtz;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionPopCount *) {
+ return IrInstructionIdPopCount;
+}
+
static constexpr IrInstructionId ir_instruction_id(IrInstructionUnionTag *) {
return IrInstructionIdUnionTag;
}
@@ -457,6 +504,38 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionTruncate *) {
return IrInstructionIdTruncate;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionIntCast *) {
+ return IrInstructionIdIntCast;
+}
+
+static constexpr IrInstructionId ir_instruction_id(IrInstructionFloatCast *) {
+ return IrInstructionIdFloatCast;
+}
+
+static constexpr IrInstructionId ir_instruction_id(IrInstructionErrSetCast *) {
+ return IrInstructionIdErrSetCast;
+}
+
+static constexpr IrInstructionId ir_instruction_id(IrInstructionToBytes *) {
+ return IrInstructionIdToBytes;
+}
+
+static constexpr IrInstructionId ir_instruction_id(IrInstructionFromBytes *) {
+ return IrInstructionIdFromBytes;
+}
+
+static constexpr IrInstructionId ir_instruction_id(IrInstructionIntToFloat *) {
+ return IrInstructionIdIntToFloat;
+}
+
+static constexpr IrInstructionId ir_instruction_id(IrInstructionFloatToInt *) {
+ return IrInstructionIdFloatToInt;
+}
+
+static constexpr IrInstructionId ir_instruction_id(IrInstructionBoolToInt *) {
+ return IrInstructionIdBoolToInt;
+}
+
static constexpr IrInstructionId ir_instruction_id(IrInstructionIntType *) {
return IrInstructionIdIntType;
}
@@ -501,6 +580,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionFrameAddress *)
return IrInstructionIdFrameAddress;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionHandle *) {
+ return IrInstructionIdHandle;
+}
+
static constexpr IrInstructionId ir_instruction_id(IrInstructionAlignOf *) {
return IrInstructionIdAlignOf;
}
@@ -521,8 +604,8 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionUnwrapErrPayload
return IrInstructionIdUnwrapErrPayload;
}
-static constexpr IrInstructionId ir_instruction_id(IrInstructionMaybeWrap *) {
- return IrInstructionIdMaybeWrap;
+static constexpr IrInstructionId ir_instruction_id(IrInstructionOptionalWrap *) {
+ return IrInstructionIdOptionalWrap;
}
static constexpr IrInstructionId ir_instruction_id(IrInstructionErrWrapPayload *) {
@@ -565,6 +648,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionIntToEnum *) {
return IrInstructionIdIntToEnum;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionEnumToInt *) {
+ return IrInstructionIdEnumToInt;
+}
+
static constexpr IrInstructionId ir_instruction_id(IrInstructionIntToErr *) {
return IrInstructionIdIntToErr;
}
@@ -585,10 +672,6 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionTypeName *) {
return IrInstructionIdTypeName;
}
-static constexpr IrInstructionId ir_instruction_id(IrInstructionCanImplicitCast *) {
- return IrInstructionIdCanImplicitCast;
-}
-
static constexpr IrInstructionId ir_instruction_id(IrInstructionDeclRef *) {
return IrInstructionIdDeclRef;
}
@@ -613,6 +696,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionOffsetOf *) {
return IrInstructionIdOffsetOf;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionTypeInfo *) {
+ return IrInstructionIdTypeInfo;
+}
+
static constexpr IrInstructionId ir_instruction_id(IrInstructionTypeId *) {
return IrInstructionIdTypeId;
}
@@ -621,8 +708,8 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionSetEvalBranchQuo
return IrInstructionIdSetEvalBranchQuota;
}
-static constexpr IrInstructionId ir_instruction_id(IrInstructionPtrTypeOf *) {
- return IrInstructionIdPtrTypeOf;
+static constexpr IrInstructionId ir_instruction_id(IrInstructionPtrType *) {
+ return IrInstructionIdPtrType;
}
static constexpr IrInstructionId ir_instruction_id(IrInstructionAlignCast *) {
@@ -709,6 +796,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionAtomicRmw *) {
return IrInstructionIdAtomicRmw;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionAtomicLoad *) {
+ return IrInstructionIdAtomicLoad;
+}
+
static constexpr IrInstructionId ir_instruction_id(IrInstructionPromiseResultType *) {
return IrInstructionIdPromiseResultType;
}
@@ -733,6 +824,10 @@ static constexpr IrInstructionId ir_instruction_id(IrInstructionMarkErrRetTraceP
return IrInstructionIdMarkErrRetTracePtr;
}
+static constexpr IrInstructionId ir_instruction_id(IrInstructionSqrt *) {
+ return IrInstructionIdSqrt;
+}
+
template<typename T>
static T *ir_create_instruction(IrBuilder *irb, Scope *scope, AstNode *source_node) {
T *special_instruction = allocate<T>(1);
@@ -988,13 +1083,9 @@ static IrInstruction *ir_build_bin_op_from(IrBuilder *irb, IrInstruction *old_in
return new_instruction;
}
-static IrInstruction *ir_build_var_ptr(IrBuilder *irb, Scope *scope, AstNode *source_node,
- VariableTableEntry *var, bool is_const, bool is_volatile)
-{
+static IrInstruction *ir_build_var_ptr(IrBuilder *irb, Scope *scope, AstNode *source_node, VariableTableEntry *var) {
IrInstructionVarPtr *instruction = ir_build_instruction<IrInstructionVarPtr>(irb, scope, source_node);
instruction->var = var;
- instruction->is_const = is_const;
- instruction->is_volatile = is_volatile;
ir_ref_var(var);
@@ -1002,12 +1093,13 @@ static IrInstruction *ir_build_var_ptr(IrBuilder *irb, Scope *scope, AstNode *so
}
static IrInstruction *ir_build_elem_ptr(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *array_ptr,
- IrInstruction *elem_index, bool safety_check_on)
+ IrInstruction *elem_index, bool safety_check_on, PtrLen ptr_len)
{
IrInstructionElemPtr *instruction = ir_build_instruction<IrInstructionElemPtr>(irb, scope, source_node);
instruction->array_ptr = array_ptr;
instruction->elem_index = elem_index;
instruction->safety_check_on = safety_check_on;
+ instruction->ptr_len = ptr_len;
ir_ref_instruction(array_ptr, irb->current_basic_block);
ir_ref_instruction(elem_index, irb->current_basic_block);
@@ -1015,13 +1107,18 @@ static IrInstruction *ir_build_elem_ptr(IrBuilder *irb, Scope *scope, AstNode *s
return &instruction->base;
}
-static IrInstruction *ir_build_elem_ptr_from(IrBuilder *irb, IrInstruction *old_instruction,
- IrInstruction *array_ptr, IrInstruction *elem_index, bool safety_check_on)
+static IrInstruction *ir_build_field_ptr_instruction(IrBuilder *irb, Scope *scope, AstNode *source_node,
+ IrInstruction *container_ptr, IrInstruction *field_name_expr)
{
- IrInstruction *new_instruction = ir_build_elem_ptr(irb, old_instruction->scope,
- old_instruction->source_node, array_ptr, elem_index, safety_check_on);
- ir_link_new_instruction(new_instruction, old_instruction);
- return new_instruction;
+ IrInstructionFieldPtr *instruction = ir_build_instruction<IrInstructionFieldPtr>(irb, scope, source_node);
+ instruction->container_ptr = container_ptr;
+ instruction->field_name_buffer = nullptr;
+ instruction->field_name_expr = field_name_expr;
+
+ ir_ref_instruction(container_ptr, irb->current_basic_block);
+ ir_ref_instruction(field_name_expr, irb->current_basic_block);
+
+ return &instruction->base;
}
static IrInstruction *ir_build_field_ptr(IrBuilder *irb, Scope *scope, AstNode *source_node,
@@ -1029,7 +1126,8 @@ static IrInstruction *ir_build_field_ptr(IrBuilder *irb, Scope *scope, AstNode *
{
IrInstructionFieldPtr *instruction = ir_build_instruction<IrInstructionFieldPtr>(irb, scope, source_node);
instruction->container_ptr = container_ptr;
- instruction->field_name = field_name;
+ instruction->field_name_buffer = field_name;
+ instruction->field_name_expr = nullptr;
ir_ref_instruction(container_ptr, irb->current_basic_block);
@@ -1071,7 +1169,8 @@ static IrInstruction *ir_build_union_field_ptr_from(IrBuilder *irb, IrInstructio
static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *source_node,
FnTableEntry *fn_entry, IrInstruction *fn_ref, size_t arg_count, IrInstruction **args,
- bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator)
+ bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator,
+ IrInstruction *new_stack)
{
IrInstructionCall *call_instruction = ir_build_instruction<IrInstructionCall>(irb, scope, source_node);
call_instruction->fn_entry = fn_entry;
@@ -1082,6 +1181,7 @@ static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *sourc
call_instruction->arg_count = arg_count;
call_instruction->is_async = is_async;
call_instruction->async_allocator = async_allocator;
+ call_instruction->new_stack = new_stack;
if (fn_ref)
ir_ref_instruction(fn_ref, irb->current_basic_block);
@@ -1089,16 +1189,19 @@ static IrInstruction *ir_build_call(IrBuilder *irb, Scope *scope, AstNode *sourc
ir_ref_instruction(args[i], irb->current_basic_block);
if (async_allocator)
ir_ref_instruction(async_allocator, irb->current_basic_block);
+ if (new_stack != nullptr)
+ ir_ref_instruction(new_stack, irb->current_basic_block);
return &call_instruction->base;
}
static IrInstruction *ir_build_call_from(IrBuilder *irb, IrInstruction *old_instruction,
FnTableEntry *fn_entry, IrInstruction *fn_ref, size_t arg_count, IrInstruction **args,
- bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator)
+ bool is_comptime, FnInline fn_inline, bool is_async, IrInstruction *async_allocator,
+ IrInstruction *new_stack)
{
IrInstruction *new_instruction = ir_build_call(irb, old_instruction->scope,
- old_instruction->source_node, fn_entry, fn_ref, arg_count, args, is_comptime, fn_inline, is_async, async_allocator);
+ old_instruction->source_node, fn_entry, fn_ref, arg_count, args, is_comptime, fn_inline, is_async, async_allocator, new_stack);
ir_link_new_instruction(new_instruction, old_instruction);
return new_instruction;
}
@@ -1160,15 +1263,16 @@ static IrInstruction *ir_build_br_from(IrBuilder *irb, IrInstruction *old_instru
return new_instruction;
}
-static IrInstruction *ir_build_ptr_type_of(IrBuilder *irb, Scope *scope, AstNode *source_node,
- IrInstruction *child_type, bool is_const, bool is_volatile, IrInstruction *align_value,
- uint32_t bit_offset_start, uint32_t bit_offset_end)
+static IrInstruction *ir_build_ptr_type(IrBuilder *irb, Scope *scope, AstNode *source_node,
+ IrInstruction *child_type, bool is_const, bool is_volatile, PtrLen ptr_len,
+ IrInstruction *align_value, uint32_t bit_offset_start, uint32_t bit_offset_end)
{
- IrInstructionPtrTypeOf *ptr_type_of_instruction = ir_build_instruction<IrInstructionPtrTypeOf>(irb, scope, source_node);
+ IrInstructionPtrType *ptr_type_of_instruction = ir_build_instruction<IrInstructionPtrType>(irb, scope, source_node);
ptr_type_of_instruction->align_value = align_value;
ptr_type_of_instruction->child_type = child_type;
ptr_type_of_instruction->is_const = is_const;
ptr_type_of_instruction->is_volatile = is_volatile;
+ ptr_type_of_instruction->ptr_len = ptr_len;
ptr_type_of_instruction->bit_offset_start = bit_offset_start;
ptr_type_of_instruction->bit_offset_end = bit_offset_end;
@@ -1551,7 +1655,7 @@ static IrInstruction *ir_build_test_nonnull_from(IrBuilder *irb, IrInstruction *
static IrInstruction *ir_build_unwrap_maybe(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *value,
bool safety_check_on)
{
- IrInstructionUnwrapMaybe *instruction = ir_build_instruction<IrInstructionUnwrapMaybe>(irb, scope, source_node);
+ IrInstructionUnwrapOptional *instruction = ir_build_instruction<IrInstructionUnwrapOptional>(irb, scope, source_node);
instruction->value = value;
instruction->safety_check_on = safety_check_on;
@@ -1570,7 +1674,7 @@ static IrInstruction *ir_build_unwrap_maybe_from(IrBuilder *irb, IrInstruction *
}
static IrInstruction *ir_build_maybe_wrap(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *value) {
- IrInstructionMaybeWrap *instruction = ir_build_instruction<IrInstructionMaybeWrap>(irb, scope, source_node);
+ IrInstructionOptionalWrap *instruction = ir_build_instruction<IrInstructionOptionalWrap>(irb, scope, source_node);
instruction->value = value;
ir_ref_instruction(value, irb->current_basic_block);
@@ -1626,8 +1730,18 @@ static IrInstruction *ir_build_ctz_from(IrBuilder *irb, IrInstruction *old_instr
return new_instruction;
}
+static IrInstruction *ir_build_pop_count(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *value) {
+ IrInstructionPopCount *instruction = ir_build_instruction<IrInstructionPopCount>(irb, scope, source_node);
+ instruction->value = value;
+
+ ir_ref_instruction(value, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
static IrInstruction *ir_build_switch_br(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *target_value,
- IrBasicBlock *else_block, size_t case_count, IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime)
+ IrBasicBlock *else_block, size_t case_count, IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime,
+ IrInstruction *switch_prongs_void)
{
IrInstructionSwitchBr *instruction = ir_build_instruction<IrInstructionSwitchBr>(irb, scope, source_node);
instruction->base.value.type = irb->codegen->builtin_types.entry_unreachable;
@@ -1637,10 +1751,12 @@ static IrInstruction *ir_build_switch_br(IrBuilder *irb, Scope *scope, AstNode *
instruction->case_count = case_count;
instruction->cases = cases;
instruction->is_comptime = is_comptime;
+ instruction->switch_prongs_void = switch_prongs_void;
ir_ref_instruction(target_value, irb->current_basic_block);
if (is_comptime) ir_ref_instruction(is_comptime, irb->current_basic_block);
ir_ref_bb(else_block);
+ if (switch_prongs_void) ir_ref_instruction(switch_prongs_void, irb->current_basic_block);
for (size_t i = 0; i < case_count; i += 1) {
ir_ref_instruction(cases[i].value, irb->current_basic_block);
@@ -1652,10 +1768,10 @@ static IrInstruction *ir_build_switch_br(IrBuilder *irb, Scope *scope, AstNode *
static IrInstruction *ir_build_switch_br_from(IrBuilder *irb, IrInstruction *old_instruction,
IrInstruction *target_value, IrBasicBlock *else_block, size_t case_count,
- IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime)
+ IrInstructionSwitchBrCase *cases, IrInstruction *is_comptime, IrInstruction *switch_prongs_void)
{
IrInstruction *new_instruction = ir_build_switch_br(irb, old_instruction->scope, old_instruction->source_node,
- target_value, else_block, case_count, cases, is_comptime);
+ target_value, else_block, case_count, cases, is_comptime, switch_prongs_void);
ir_link_new_instruction(new_instruction, old_instruction);
return new_instruction;
}
@@ -1824,38 +1940,34 @@ static IrInstruction *ir_build_embed_file(IrBuilder *irb, Scope *scope, AstNode
return &instruction->base;
}
-static IrInstruction *ir_build_cmpxchg(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *ptr,
- IrInstruction *cmp_value, IrInstruction *new_value, IrInstruction *success_order_value, IrInstruction *failure_order_value,
- AtomicOrder success_order, AtomicOrder failure_order)
+static IrInstruction *ir_build_cmpxchg(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *type_value,
+ IrInstruction *ptr, IrInstruction *cmp_value, IrInstruction *new_value,
+ IrInstruction *success_order_value, IrInstruction *failure_order_value,
+ bool is_weak,
+ TypeTableEntry *type, AtomicOrder success_order, AtomicOrder failure_order)
{
IrInstructionCmpxchg *instruction = ir_build_instruction<IrInstructionCmpxchg>(irb, scope, source_node);
+ instruction->type_value = type_value;
instruction->ptr = ptr;
instruction->cmp_value = cmp_value;
instruction->new_value = new_value;
instruction->success_order_value = success_order_value;
instruction->failure_order_value = failure_order_value;
+ instruction->is_weak = is_weak;
+ instruction->type = type;
instruction->success_order = success_order;
instruction->failure_order = failure_order;
+ if (type_value != nullptr) ir_ref_instruction(type_value, irb->current_basic_block);
ir_ref_instruction(ptr, irb->current_basic_block);
ir_ref_instruction(cmp_value, irb->current_basic_block);
ir_ref_instruction(new_value, irb->current_basic_block);
- ir_ref_instruction(success_order_value, irb->current_basic_block);
- ir_ref_instruction(failure_order_value, irb->current_basic_block);
+ if (type_value != nullptr) ir_ref_instruction(success_order_value, irb->current_basic_block);
+ if (type_value != nullptr) ir_ref_instruction(failure_order_value, irb->current_basic_block);
return &instruction->base;
}
-static IrInstruction *ir_build_cmpxchg_from(IrBuilder *irb, IrInstruction *old_instruction, IrInstruction *ptr,
- IrInstruction *cmp_value, IrInstruction *new_value, IrInstruction *success_order_value, IrInstruction *failure_order_value,
- AtomicOrder success_order, AtomicOrder failure_order)
-{
- IrInstruction *new_instruction = ir_build_cmpxchg(irb, old_instruction->scope, old_instruction->source_node,
- ptr, cmp_value, new_value, success_order_value, failure_order_value, success_order, failure_order);
- ir_link_new_instruction(new_instruction, old_instruction);
- return new_instruction;
-}
-
static IrInstruction *ir_build_fence(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *order_value, AtomicOrder order) {
IrInstructionFence *instruction = ir_build_instruction<IrInstructionFence>(irb, scope, source_node);
instruction->order_value = order_value;
@@ -1883,10 +1995,88 @@ static IrInstruction *ir_build_truncate(IrBuilder *irb, Scope *scope, AstNode *s
return &instruction->base;
}
-static IrInstruction *ir_build_truncate_from(IrBuilder *irb, IrInstruction *old_instruction, IrInstruction *dest_type, IrInstruction *target) {
- IrInstruction *new_instruction = ir_build_truncate(irb, old_instruction->scope, old_instruction->source_node, dest_type, target);
- ir_link_new_instruction(new_instruction, old_instruction);
- return new_instruction;
+static IrInstruction *ir_build_int_cast(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *dest_type, IrInstruction *target) {
+ IrInstructionIntCast *instruction = ir_build_instruction<IrInstructionIntCast>(irb, scope, source_node);
+ instruction->dest_type = dest_type;
+ instruction->target = target;
+
+ ir_ref_instruction(dest_type, irb->current_basic_block);
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_float_cast(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *dest_type, IrInstruction *target) {
+ IrInstructionFloatCast *instruction = ir_build_instruction<IrInstructionFloatCast>(irb, scope, source_node);
+ instruction->dest_type = dest_type;
+ instruction->target = target;
+
+ ir_ref_instruction(dest_type, irb->current_basic_block);
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_err_set_cast(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *dest_type, IrInstruction *target) {
+ IrInstructionErrSetCast *instruction = ir_build_instruction<IrInstructionErrSetCast>(irb, scope, source_node);
+ instruction->dest_type = dest_type;
+ instruction->target = target;
+
+ ir_ref_instruction(dest_type, irb->current_basic_block);
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_to_bytes(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *target) {
+ IrInstructionToBytes *instruction = ir_build_instruction<IrInstructionToBytes>(irb, scope, source_node);
+ instruction->target = target;
+
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_from_bytes(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *dest_child_type, IrInstruction *target) {
+ IrInstructionFromBytes *instruction = ir_build_instruction<IrInstructionFromBytes>(irb, scope, source_node);
+ instruction->dest_child_type = dest_child_type;
+ instruction->target = target;
+
+ ir_ref_instruction(dest_child_type, irb->current_basic_block);
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_int_to_float(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *dest_type, IrInstruction *target) {
+ IrInstructionIntToFloat *instruction = ir_build_instruction<IrInstructionIntToFloat>(irb, scope, source_node);
+ instruction->dest_type = dest_type;
+ instruction->target = target;
+
+ ir_ref_instruction(dest_type, irb->current_basic_block);
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_float_to_int(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *dest_type, IrInstruction *target) {
+ IrInstructionFloatToInt *instruction = ir_build_instruction<IrInstructionFloatToInt>(irb, scope, source_node);
+ instruction->dest_type = dest_type;
+ instruction->target = target;
+
+ ir_ref_instruction(dest_type, irb->current_basic_block);
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_bool_to_int(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *target) {
+ IrInstructionBoolToInt *instruction = ir_build_instruction<IrInstructionBoolToInt>(irb, scope, source_node);
+ instruction->target = target;
+
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
}
static IrInstruction *ir_build_int_type(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *is_signed, IrInstruction *bit_count) {
@@ -2054,6 +2244,17 @@ static IrInstruction *ir_build_frame_address_from(IrBuilder *irb, IrInstruction
return new_instruction;
}
+static IrInstruction *ir_build_handle(IrBuilder *irb, Scope *scope, AstNode *source_node) {
+ IrInstructionHandle *instruction = ir_build_instruction<IrInstructionHandle>(irb, scope, source_node);
+ return &instruction->base;
+}
+
+static IrInstruction *ir_build_handle_from(IrBuilder *irb, IrInstruction *old_instruction) {
+ IrInstruction *new_instruction = ir_build_handle(irb, old_instruction->scope, old_instruction->source_node);
+ ir_link_new_instruction(new_instruction, old_instruction);
+ return new_instruction;
+}
+
static IrInstruction *ir_build_overflow_op(IrBuilder *irb, Scope *scope, AstNode *source_node,
IrOverflowOp op, IrInstruction *type_value, IrInstruction *op1, IrInstruction *op2,
IrInstruction *result_ptr, TypeTableEntry *result_ptr_type)
@@ -2252,10 +2453,26 @@ static IrInstruction *ir_build_ptr_to_int(IrBuilder *irb, Scope *scope, AstNode
}
static IrInstruction *ir_build_int_to_enum(IrBuilder *irb, Scope *scope, AstNode *source_node,
- IrInstruction *target)
+ IrInstruction *dest_type, IrInstruction *target)
{
IrInstructionIntToEnum *instruction = ir_build_instruction<IrInstructionIntToEnum>(
irb, scope, source_node);
+ instruction->dest_type = dest_type;
+ instruction->target = target;
+
+ if (dest_type) ir_ref_instruction(dest_type, irb->current_basic_block);
+ ir_ref_instruction(target, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
+
+
+static IrInstruction *ir_build_enum_to_int(IrBuilder *irb, Scope *scope, AstNode *source_node,
+ IrInstruction *target)
+{
+ IrInstructionEnumToInt *instruction = ir_build_instruction<IrInstructionEnumToInt>(
+ irb, scope, source_node);
instruction->target = target;
ir_ref_instruction(target, irb->current_basic_block);
@@ -2331,20 +2548,6 @@ static IrInstruction *ir_build_type_name(IrBuilder *irb, Scope *scope, AstNode *
return &instruction->base;
}
-static IrInstruction *ir_build_can_implicit_cast(IrBuilder *irb, Scope *scope, AstNode *source_node,
- IrInstruction *type_value, IrInstruction *target_value)
-{
- IrInstructionCanImplicitCast *instruction = ir_build_instruction<IrInstructionCanImplicitCast>(
- irb, scope, source_node);
- instruction->type_value = type_value;
- instruction->target_value = target_value;
-
- ir_ref_instruction(type_value, irb->current_basic_block);
- ir_ref_instruction(target_value, irb->current_basic_block);
-
- return &instruction->base;
-}
-
static IrInstruction *ir_build_decl_ref(IrBuilder *irb, Scope *scope, AstNode *source_node,
Tld *tld, LVal lval)
{
@@ -2419,6 +2622,16 @@ static IrInstruction *ir_build_offset_of(IrBuilder *irb, Scope *scope, AstNode *
return &instruction->base;
}
+static IrInstruction *ir_build_type_info(IrBuilder *irb, Scope *scope, AstNode *source_node,
+ IrInstruction *type_value) {
+ IrInstructionTypeInfo *instruction = ir_build_instruction<IrInstructionTypeInfo>(irb, scope, source_node);
+ instruction->type_value = type_value;
+
+ ir_ref_instruction(type_value, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
static IrInstruction *ir_build_type_id(IrBuilder *irb, Scope *scope, AstNode *source_node,
IrInstruction *type_value)
{
@@ -2484,9 +2697,9 @@ static IrInstruction *ir_build_arg_type(IrBuilder *irb, Scope *scope, AstNode *s
return &instruction->base;
}
-static IrInstruction *ir_build_error_return_trace(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstructionErrorReturnTrace::Nullable nullable) {
+static IrInstruction *ir_build_error_return_trace(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstructionErrorReturnTrace::Optional optional) {
IrInstructionErrorReturnTrace *instruction = ir_build_instruction<IrInstructionErrorReturnTrace>(irb, scope, source_node);
- instruction->nullable = nullable;
+ instruction->optional = optional;
return &instruction->base;
}
@@ -2669,6 +2882,23 @@ static IrInstruction *ir_build_atomic_rmw(IrBuilder *irb, Scope *scope, AstNode
return &instruction->base;
}
+static IrInstruction *ir_build_atomic_load(IrBuilder *irb, Scope *scope, AstNode *source_node,
+ IrInstruction *operand_type, IrInstruction *ptr,
+ IrInstruction *ordering, AtomicOrder resolved_ordering)
+{
+ IrInstructionAtomicLoad *instruction = ir_build_instruction<IrInstructionAtomicLoad>(irb, scope, source_node);
+ instruction->operand_type = operand_type;
+ instruction->ptr = ptr;
+ instruction->ordering = ordering;
+ instruction->resolved_ordering = resolved_ordering;
+
+ if (operand_type != nullptr) ir_ref_instruction(operand_type, irb->current_basic_block);
+ ir_ref_instruction(ptr, irb->current_basic_block);
+ if (ordering != nullptr) ir_ref_instruction(ordering, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
static IrInstruction *ir_build_promise_result_type(IrBuilder *irb, Scope *scope, AstNode *source_node,
IrInstruction *promise_type)
{
@@ -2731,20 +2961,49 @@ static IrInstruction *ir_build_mark_err_ret_trace_ptr(IrBuilder *irb, Scope *sco
return &instruction->base;
}
+static IrInstruction *ir_build_sqrt(IrBuilder *irb, Scope *scope, AstNode *source_node, IrInstruction *type, IrInstruction *op) {
+ IrInstructionSqrt *instruction = ir_build_instruction<IrInstructionSqrt>(irb, scope, source_node);
+ instruction->type = type;
+ instruction->op = op;
+
+ if (type != nullptr) ir_ref_instruction(type, irb->current_basic_block);
+ ir_ref_instruction(op, irb->current_basic_block);
+
+ return &instruction->base;
+}
+
static void ir_count_defers(IrBuilder *irb, Scope *inner_scope, Scope *outer_scope, size_t *results) {
results[ReturnKindUnconditional] = 0;
results[ReturnKindError] = 0;
- while (inner_scope != outer_scope) {
- assert(inner_scope);
- if (inner_scope->id == ScopeIdDefer) {
- AstNode *defer_node = inner_scope->source_node;
- assert(defer_node->type == NodeTypeDefer);
- ReturnKind defer_kind = defer_node->data.defer.kind;
- results[defer_kind] += 1;
+ Scope *scope = inner_scope;
+ while (scope != outer_scope) {
+ assert(scope);
+ switch (scope->id) {
+ case ScopeIdDefer: {
+ AstNode *defer_node = scope->source_node;
+ assert(defer_node->type == NodeTypeDefer);
+ ReturnKind defer_kind = defer_node->data.defer.kind;
+ results[defer_kind] += 1;
+ scope = scope->parent;
+ continue;
+ }
+ case ScopeIdDecls:
+ case ScopeIdFnDef:
+ return;
+ case ScopeIdBlock:
+ case ScopeIdVarDecl:
+ case ScopeIdLoop:
+ case ScopeIdSuspend:
+ case ScopeIdCompTime:
+ scope = scope->parent;
+ continue;
+ case ScopeIdDeferExpr:
+ case ScopeIdCImport:
+ case ScopeIdCoroPrelude:
+ zig_unreachable();
}
- inner_scope = inner_scope->parent;
}
}
@@ -2760,27 +3019,43 @@ static bool ir_gen_defers_for_block(IrBuilder *irb, Scope *inner_scope, Scope *o
if (!scope)
return is_noreturn;
- if (scope->id == ScopeIdDefer) {
- AstNode *defer_node = scope->source_node;
- assert(defer_node->type == NodeTypeDefer);
- ReturnKind defer_kind = defer_node->data.defer.kind;
- if (defer_kind == ReturnKindUnconditional ||
- (gen_error_defers && defer_kind == ReturnKindError))
- {
- AstNode *defer_expr_node = defer_node->data.defer.expr;
- Scope *defer_expr_scope = defer_node->data.defer.expr_scope;
- IrInstruction *defer_expr_value = ir_gen_node(irb, defer_expr_node, defer_expr_scope);
- if (defer_expr_value != irb->codegen->invalid_instruction) {
- if (defer_expr_value->value.type != nullptr && defer_expr_value->value.type->id == TypeTableEntryIdUnreachable) {
- is_noreturn = true;
- } else {
- ir_mark_gen(ir_build_check_statement_is_void(irb, defer_expr_scope, defer_expr_node, defer_expr_value));
+ switch (scope->id) {
+ case ScopeIdDefer: {
+ AstNode *defer_node = scope->source_node;
+ assert(defer_node->type == NodeTypeDefer);
+ ReturnKind defer_kind = defer_node->data.defer.kind;
+ if (defer_kind == ReturnKindUnconditional ||
+ (gen_error_defers && defer_kind == ReturnKindError))
+ {
+ AstNode *defer_expr_node = defer_node->data.defer.expr;
+ Scope *defer_expr_scope = defer_node->data.defer.expr_scope;
+ IrInstruction *defer_expr_value = ir_gen_node(irb, defer_expr_node, defer_expr_scope);
+ if (defer_expr_value != irb->codegen->invalid_instruction) {
+ if (defer_expr_value->value.type != nullptr && defer_expr_value->value.type->id == TypeTableEntryIdUnreachable) {
+ is_noreturn = true;
+ } else {
+ ir_mark_gen(ir_build_check_statement_is_void(irb, defer_expr_scope, defer_expr_node, defer_expr_value));
+ }
}
}
+ scope = scope->parent;
+ continue;
}
-
+ case ScopeIdDecls:
+ case ScopeIdFnDef:
+ return is_noreturn;
+ case ScopeIdBlock:
+ case ScopeIdVarDecl:
+ case ScopeIdLoop:
+ case ScopeIdSuspend:
+ case ScopeIdCompTime:
+ scope = scope->parent;
+ continue;
+ case ScopeIdDeferExpr:
+ case ScopeIdCImport:
+ case ScopeIdCoroPrelude:
+ zig_unreachable();
}
- scope = scope->parent;
}
return is_noreturn;
}
@@ -2796,6 +3071,18 @@ static void ir_set_cursor_at_end_and_append_block(IrBuilder *irb, IrBasicBlock *
ir_set_cursor_at_end(irb, basic_block);
}
+static ScopeSuspend *get_scope_suspend(Scope *scope) {
+ while (scope) {
+ if (scope->id == ScopeIdSuspend)
+ return (ScopeSuspend *)scope;
+ if (scope->id == ScopeIdFnDef)
+ return nullptr;
+
+ scope = scope->parent;
+ }
+ return nullptr;
+}
+
static ScopeDeferExpr *get_scope_defer_expr(Scope *scope) {
while (scope) {
if (scope->id == ScopeIdDeferExpr)
@@ -2825,20 +3112,47 @@ static IrInstruction *ir_gen_async_return(IrBuilder *irb, Scope *scope, AstNode
return return_inst;
}
- ir_build_store_ptr(irb, scope, node, irb->exec->coro_result_field_ptr, return_value);
- IrInstruction *promise_type_val = ir_build_const_type(irb, scope, node,
- get_maybe_type(irb->codegen, irb->codegen->builtin_types.entry_promise));
- // TODO replace replacement_value with @intToPtr(?promise, 0x1) when it doesn't crash zig
- IrInstruction *replacement_value = irb->exec->coro_handle;
- IrInstruction *maybe_await_handle = ir_build_atomic_rmw(irb, scope, node,
- promise_type_val, irb->exec->coro_awaiter_field_ptr, nullptr, replacement_value, nullptr,
- AtomicRmwOp_xchg, AtomicOrderSeqCst);
- ir_build_store_ptr(irb, scope, node, irb->exec->await_handle_var_ptr, maybe_await_handle);
- IrInstruction *is_non_null = ir_build_test_nonnull(irb, scope, node, maybe_await_handle);
+ IrBasicBlock *suspended_block = ir_create_basic_block(irb, scope, "Suspended");
+ IrBasicBlock *not_suspended_block = ir_create_basic_block(irb, scope, "NotSuspended");
+ IrBasicBlock *store_awaiter_block = ir_create_basic_block(irb, scope, "StoreAwaiter");
+ IrBasicBlock *check_canceled_block = ir_create_basic_block(irb, scope, "CheckCanceled");
+
+ IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, scope, node, 0x7); // 0b111
+ IrInstruction *ptr_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000
+ IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
+ IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010
+ IrInstruction *promise_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_promise);
IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node, false);
- return ir_build_cond_br(irb, scope, node, is_non_null, irb->exec->coro_normal_final, irb->exec->coro_early_final,
- is_comptime);
- // the above blocks are rendered by ir_gen after the rest of codegen
+ IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
+
+ ir_build_store_ptr(irb, scope, node, irb->exec->coro_result_field_ptr, return_value);
+ IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
+ IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
+ usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, ptr_mask, nullptr,
+ AtomicRmwOp_or, AtomicOrderSeqCst);
+
+ IrInstruction *is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
+ IrInstruction *is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_suspended_bool, suspended_block, not_suspended_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, suspended_block);
+ ir_build_unreachable(irb, scope, node);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_suspended_block);
+ IrInstruction *await_handle_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
+ // if we ever add null checking safety to the ptrtoint instruction, it needs to be disabled here
+ IrInstruction *have_await_handle = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, await_handle_addr, zero, false);
+ ir_build_cond_br(irb, scope, node, have_await_handle, store_awaiter_block, check_canceled_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, store_awaiter_block);
+ IrInstruction *await_handle = ir_build_int_to_ptr(irb, scope, node, promise_type_val, await_handle_addr);
+ ir_build_store_ptr(irb, scope, node, irb->exec->await_handle_var_ptr, await_handle);
+ ir_build_br(irb, scope, node, irb->exec->coro_normal_final, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, check_canceled_block);
+ IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
+ IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
+ return ir_build_cond_br(irb, scope, node, is_canceled_bool, irb->exec->coro_final_cleanup_block, irb->exec->coro_early_final, is_comptime);
}
static IrInstruction *ir_gen_return(IrBuilder *irb, Scope *scope, AstNode *node, LVal lval) {
@@ -2923,7 +3237,7 @@ static IrInstruction *ir_gen_return(IrBuilder *irb, Scope *scope, AstNode *node,
case ReturnKindError:
{
assert(expr_node);
- IrInstruction *err_union_ptr = ir_gen_node_extra(irb, expr_node, scope, LVAL_PTR);
+ IrInstruction *err_union_ptr = ir_gen_node_extra(irb, expr_node, scope, LValPtr);
if (err_union_ptr == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
IrInstruction *err_union_val = ir_build_load_ptr(irb, scope, node, err_union_ptr);
@@ -2951,7 +3265,7 @@ static IrInstruction *ir_gen_return(IrBuilder *irb, Scope *scope, AstNode *node,
ir_set_cursor_at_end_and_append_block(irb, continue_block);
IrInstruction *unwrapped_ptr = ir_build_unwrap_err_payload(irb, scope, node, err_union_ptr, false);
- if (lval.is_ptr)
+ if (lval == LValPtr)
return unwrapped_ptr;
else
return ir_build_load_ptr(irb, scope, node, unwrapped_ptr);
@@ -2981,9 +3295,8 @@ static VariableTableEntry *create_local_var(CodeGen *codegen, AstNode *node, Sco
add_error_note(codegen, msg, existing_var->decl_node, buf_sprintf("previous declaration is here"));
variable_entry->value->type = codegen->builtin_types.entry_invalid;
} else {
- auto primitive_table_entry = codegen->primitive_type_table.maybe_get(name);
- if (primitive_table_entry) {
- TypeTableEntry *type = primitive_table_entry->value;
+ TypeTableEntry *type = get_primitive_type(codegen, name);
+ if (type != nullptr) {
add_node_error(codegen, node,
buf_sprintf("variable shadows type '%s'", buf_ptr(&type->name)));
variable_entry->value->type = codegen->builtin_types.entry_invalid;
@@ -3019,7 +3332,15 @@ static VariableTableEntry *create_local_var(CodeGen *codegen, AstNode *node, Sco
static VariableTableEntry *ir_create_var(IrBuilder *irb, AstNode *node, Scope *scope, Buf *name,
bool src_is_const, bool gen_is_const, bool is_shadowable, IrInstruction *is_comptime)
{
- VariableTableEntry *var = create_local_var(irb->codegen, node, scope, name, src_is_const, gen_is_const, is_shadowable, is_comptime);
+ bool is_underscored = name ? buf_eql_str(name, "_") : false;
+ VariableTableEntry *var = create_local_var( irb->codegen
+ , node
+ , scope
+ , (is_underscored ? nullptr : name)
+ , src_is_const
+ , gen_is_const
+ , (is_underscored ? true : is_shadowable)
+ , is_comptime );
if (is_comptime != nullptr || gen_is_const) {
var->mem_slot_index = exec_next_mem_slot(irb->exec);
var->owner_exec = irb->exec;
@@ -3086,6 +3407,9 @@ static IrInstruction *ir_gen_block(IrBuilder *irb, Scope *parent_scope, AstNode
if (block_node->data.block.name == nullptr || incoming_blocks.length == 0) {
return noreturn_return_value;
}
+
+ ir_set_cursor_at_end_and_append_block(irb, scope_block->end_block);
+ return ir_build_phi(irb, parent_scope, block_node, incoming_blocks.length, incoming_blocks.items, incoming_values.items);
} else {
incoming_blocks.append(irb->current_basic_block);
incoming_values.append(ir_mark_gen(ir_build_const_void(irb, parent_scope, block_node)));
@@ -3113,7 +3437,7 @@ static IrInstruction *ir_gen_bin_op_id(IrBuilder *irb, Scope *scope, AstNode *no
}
static IrInstruction *ir_gen_assign(IrBuilder *irb, Scope *scope, AstNode *node) {
- IrInstruction *lvalue = ir_gen_node_extra(irb, node->data.bin_op_expr.op1, scope, LVAL_PTR);
+ IrInstruction *lvalue = ir_gen_node_extra(irb, node->data.bin_op_expr.op1, scope, LValPtr);
IrInstruction *rvalue = ir_gen_node(irb, node->data.bin_op_expr.op2, scope);
if (lvalue == irb->codegen->invalid_instruction || rvalue == irb->codegen->invalid_instruction)
@@ -3124,7 +3448,7 @@ static IrInstruction *ir_gen_assign(IrBuilder *irb, Scope *scope, AstNode *node)
}
static IrInstruction *ir_gen_assign_op(IrBuilder *irb, Scope *scope, AstNode *node, IrBinOp op_id) {
- IrInstruction *lvalue = ir_gen_node_extra(irb, node->data.bin_op_expr.op1, scope, LVAL_PTR);
+ IrInstruction *lvalue = ir_gen_node_extra(irb, node->data.bin_op_expr.op1, scope, LValPtr);
if (lvalue == irb->codegen->invalid_instruction)
return lvalue;
IrInstruction *op1 = ir_build_load_ptr(irb, scope, node->data.bin_op_expr.op1, lvalue);
@@ -3226,7 +3550,7 @@ static IrInstruction *ir_gen_maybe_ok_or(IrBuilder *irb, Scope *parent_scope, As
AstNode *op1_node = node->data.bin_op_expr.op1;
AstNode *op2_node = node->data.bin_op_expr.op2;
- IrInstruction *maybe_ptr = ir_gen_node_extra(irb, op1_node, parent_scope, LVAL_PTR);
+ IrInstruction *maybe_ptr = ir_gen_node_extra(irb, op1_node, parent_scope, LValPtr);
if (maybe_ptr == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
@@ -3240,9 +3564,9 @@ static IrInstruction *ir_gen_maybe_ok_or(IrBuilder *irb, Scope *parent_scope, As
is_comptime = ir_build_test_comptime(irb, parent_scope, node, is_non_null);
}
- IrBasicBlock *ok_block = ir_create_basic_block(irb, parent_scope, "MaybeNonNull");
- IrBasicBlock *null_block = ir_create_basic_block(irb, parent_scope, "MaybeNull");
- IrBasicBlock *end_block = ir_create_basic_block(irb, parent_scope, "MaybeEnd");
+ IrBasicBlock *ok_block = ir_create_basic_block(irb, parent_scope, "OptionalNonNull");
+ IrBasicBlock *null_block = ir_create_basic_block(irb, parent_scope, "OptionalNull");
+ IrBasicBlock *end_block = ir_create_basic_block(irb, parent_scope, "OptionalEnd");
ir_build_cond_br(irb, parent_scope, node, is_non_null, ok_block, null_block, is_comptime);
ir_set_cursor_at_end_and_append_block(irb, null_block);
@@ -3371,7 +3695,7 @@ static IrInstruction *ir_gen_bin_op(IrBuilder *irb, Scope *scope, AstNode *node)
return ir_gen_bin_op_id(irb, scope, node, IrBinOpArrayMult);
case BinOpTypeMergeErrorSets:
return ir_gen_bin_op_id(irb, scope, node, IrBinOpMergeErrorSets);
- case BinOpTypeUnwrapMaybe:
+ case BinOpTypeUnwrapOptional:
return ir_gen_maybe_ok_or(irb, scope, node);
case BinOpTypeErrorUnion:
return ir_gen_error_union(irb, scope, node);
@@ -3413,7 +3737,7 @@ static IrInstruction *ir_gen_symbol(IrBuilder *irb, Scope *scope, AstNode *node,
Buf *variable_name = node->data.symbol_expr.symbol;
- if (buf_eql_str(variable_name, "_") && lval.is_ptr) {
+ if (buf_eql_str(variable_name, "_") && lval == LValPtr) {
IrInstructionConst *const_instruction = ir_build_instruction<IrInstructionConst>(irb, scope, node);
const_instruction->base.value.type = get_pointer_to_type(irb->codegen,
irb->codegen->builtin_types.entry_void, false);
@@ -3422,11 +3746,11 @@ static IrInstruction *ir_gen_symbol(IrBuilder *irb, Scope *scope, AstNode *node,
return &const_instruction->base;
}
- auto primitive_table_entry = irb->codegen->primitive_type_table.maybe_get(variable_name);
- if (primitive_table_entry) {
- IrInstruction *value = ir_build_const_type(irb, scope, node, primitive_table_entry->value);
- if (lval.is_ptr) {
- return ir_build_ref(irb, scope, node, value, lval.is_const, lval.is_volatile);
+ TypeTableEntry *primitive_type = get_primitive_type(irb->codegen, variable_name);
+ if (primitive_type != nullptr) {
+ IrInstruction *value = ir_build_const_type(irb, scope, node, primitive_type);
+ if (lval == LValPtr) {
+ return ir_build_ref(irb, scope, node, value, false, false);
} else {
return value;
}
@@ -3434,9 +3758,8 @@ static IrInstruction *ir_gen_symbol(IrBuilder *irb, Scope *scope, AstNode *node,
VariableTableEntry *var = find_variable(irb->codegen, scope, variable_name);
if (var) {
- IrInstruction *var_ptr = ir_build_var_ptr(irb, scope, node, var,
- !lval.is_ptr || lval.is_const, lval.is_ptr && lval.is_volatile);
- if (lval.is_ptr)
+ IrInstruction *var_ptr = ir_build_var_ptr(irb, scope, node, var);
+ if (lval == LValPtr)
return var_ptr;
else
return ir_build_load_ptr(irb, scope, node, var_ptr);
@@ -3462,7 +3785,7 @@ static IrInstruction *ir_gen_array_access(IrBuilder *irb, Scope *scope, AstNode
assert(node->type == NodeTypeArrayAccessExpr);
AstNode *array_ref_node = node->data.array_access_expr.array_ref_expr;
- IrInstruction *array_ref_instruction = ir_gen_node_extra(irb, array_ref_node, scope, LVAL_PTR);
+ IrInstruction *array_ref_instruction = ir_gen_node_extra(irb, array_ref_node, scope, LValPtr);
if (array_ref_instruction == irb->codegen->invalid_instruction)
return array_ref_instruction;
@@ -3472,28 +3795,24 @@ static IrInstruction *ir_gen_array_access(IrBuilder *irb, Scope *scope, AstNode
return subscript_instruction;
IrInstruction *ptr_instruction = ir_build_elem_ptr(irb, scope, node, array_ref_instruction,
- subscript_instruction, true);
- if (lval.is_ptr)
+ subscript_instruction, true, PtrLenSingle);
+ if (lval == LValPtr)
return ptr_instruction;
return ir_build_load_ptr(irb, scope, node, ptr_instruction);
}
-static IrInstruction *ir_gen_field_access(IrBuilder *irb, Scope *scope, AstNode *node, LVal lval) {
+static IrInstruction *ir_gen_field_access(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypeFieldAccessExpr);
AstNode *container_ref_node = node->data.field_access_expr.struct_expr;
Buf *field_name = node->data.field_access_expr.field_name;
- IrInstruction *container_ref_instruction = ir_gen_node_extra(irb, container_ref_node, scope, LVAL_PTR);
+ IrInstruction *container_ref_instruction = ir_gen_node_extra(irb, container_ref_node, scope, LValPtr);
if (container_ref_instruction == irb->codegen->invalid_instruction)
return container_ref_instruction;
- IrInstruction *ptr_instruction = ir_build_field_ptr(irb, scope, node, container_ref_instruction, field_name);
- if (lval.is_ptr)
- return ptr_instruction;
-
- return ir_build_load_ptr(irb, scope, node, ptr_instruction);
+ return ir_build_field_ptr(irb, scope, node, container_ref_instruction, field_name);
}
static IrInstruction *ir_gen_overflow_op(IrBuilder *irb, Scope *scope, AstNode *node, IrOverflowOp op) {
@@ -3524,7 +3843,7 @@ static IrInstruction *ir_gen_overflow_op(IrBuilder *irb, Scope *scope, AstNode *
return ir_build_overflow_op(irb, scope, node, op, type_value, op1, op2, result_ptr, nullptr);
}
-static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNode *node) {
+static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNode *node, LVal lval) {
assert(node->type == NodeTypeFnCallExpr);
AstNode *fn_ref_expr = node->data.fn_call_expr.fn_ref_expr;
@@ -3547,6 +3866,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return irb->codegen->invalid_instruction;
}
+ bool is_async = exec_is_async(irb->exec);
+
switch (builtin_fn->id) {
case BuiltinFnIdInvalid:
zig_unreachable();
@@ -3556,7 +3877,9 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
IrInstruction *arg = ir_gen_node(irb, arg_node, scope);
if (arg == irb->codegen->invalid_instruction)
return arg;
- return ir_build_typeof(irb, scope, node, arg);
+
+ IrInstruction *type_of = ir_build_typeof(irb, scope, node, arg);
+ return ir_lval_wrap(irb, scope, type_of, lval);
}
case BuiltinFnIdSetCold:
{
@@ -3565,7 +3888,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_set_cold(irb, scope, node, arg0_value);
+ IrInstruction *set_cold = ir_build_set_cold(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, set_cold, lval);
}
case BuiltinFnIdSetRuntimeSafety:
{
@@ -3574,7 +3898,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_set_runtime_safety(irb, scope, node, arg0_value);
+ IrInstruction *set_safety = ir_build_set_runtime_safety(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, set_safety, lval);
}
case BuiltinFnIdSetFloatMode:
{
@@ -3588,7 +3913,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_set_float_mode(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *set_float_mode = ir_build_set_float_mode(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, set_float_mode, lval);
}
case BuiltinFnIdSizeof:
{
@@ -3597,7 +3923,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_size_of(irb, scope, node, arg0_value);
+ IrInstruction *size_of = ir_build_size_of(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, size_of, lval);
}
case BuiltinFnIdCtz:
{
@@ -3606,7 +3933,18 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_ctz(irb, scope, node, arg0_value);
+ IrInstruction *ctz = ir_build_ctz(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, ctz, lval);
+ }
+ case BuiltinFnIdPopCount:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *instr = ir_build_pop_count(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, instr, lval);
}
case BuiltinFnIdClz:
{
@@ -3615,7 +3953,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_clz(irb, scope, node, arg0_value);
+ IrInstruction *clz = ir_build_clz(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, clz, lval);
}
case BuiltinFnIdImport:
{
@@ -3624,11 +3963,13 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_import(irb, scope, node, arg0_value);
+ IrInstruction *import = ir_build_import(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, import, lval);
}
case BuiltinFnIdCImport:
{
- return ir_build_c_import(irb, scope, node);
+ IrInstruction *c_import = ir_build_c_import(irb, scope, node);
+ return ir_lval_wrap(irb, scope, c_import, lval);
}
case BuiltinFnIdCInclude:
{
@@ -3642,7 +3983,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return irb->codegen->invalid_instruction;
}
- return ir_build_c_include(irb, scope, node, arg0_value);
+ IrInstruction *c_include = ir_build_c_include(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, c_include, lval);
}
case BuiltinFnIdCDefine:
{
@@ -3661,7 +4003,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return irb->codegen->invalid_instruction;
}
- return ir_build_c_define(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *c_define = ir_build_c_define(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, c_define, lval);
}
case BuiltinFnIdCUndef:
{
@@ -3675,7 +4018,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return irb->codegen->invalid_instruction;
}
- return ir_build_c_undef(irb, scope, node, arg0_value);
+ IrInstruction *c_undef = ir_build_c_undef(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, c_undef, lval);
}
case BuiltinFnIdMaxValue:
{
@@ -3684,7 +4028,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_max_value(irb, scope, node, arg0_value);
+ IrInstruction *max_value = ir_build_max_value(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, max_value, lval);
}
case BuiltinFnIdMinValue:
{
@@ -3693,7 +4038,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_min_value(irb, scope, node, arg0_value);
+ IrInstruction *min_value = ir_build_min_value(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, min_value, lval);
}
case BuiltinFnIdCompileErr:
{
@@ -3702,7 +4048,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_compile_err(irb, scope, node, arg0_value);
+ IrInstruction *compile_err = ir_build_compile_err(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, compile_err, lval);
}
case BuiltinFnIdCompileLog:
{
@@ -3715,7 +4062,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return irb->codegen->invalid_instruction;
}
- return ir_build_compile_log(irb, scope, node, actual_param_count, args);
+ IrInstruction *compile_log = ir_build_compile_log(irb, scope, node, actual_param_count, args);
+ return ir_lval_wrap(irb, scope, compile_log, lval);
}
case BuiltinFnIdErrName:
{
@@ -3724,7 +4072,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_err_name(irb, scope, node, arg0_value);
+ IrInstruction *err_name = ir_build_err_name(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, err_name, lval);
}
case BuiltinFnIdEmbedFile:
{
@@ -3733,9 +4082,11 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_embed_file(irb, scope, node, arg0_value);
+ IrInstruction *embed_file = ir_build_embed_file(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, embed_file, lval);
}
- case BuiltinFnIdCmpExchange:
+ case BuiltinFnIdCmpxchgWeak:
+ case BuiltinFnIdCmpxchgStrong:
{
AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
@@ -3762,9 +4113,15 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg4_value == irb->codegen->invalid_instruction)
return arg4_value;
- return ir_build_cmpxchg(irb, scope, node, arg0_value, arg1_value,
- arg2_value, arg3_value, arg4_value,
- AtomicOrderUnordered, AtomicOrderUnordered);
+ AstNode *arg5_node = node->data.fn_call_expr.params.at(5);
+ IrInstruction *arg5_value = ir_gen_node(irb, arg5_node, scope);
+ if (arg5_value == irb->codegen->invalid_instruction)
+ return arg5_value;
+
+ IrInstruction *cmpxchg = ir_build_cmpxchg(irb, scope, node, arg0_value, arg1_value,
+ arg2_value, arg3_value, arg4_value, arg5_value, (builtin_fn->id == BuiltinFnIdCmpxchgWeak),
+ nullptr, AtomicOrderUnordered, AtomicOrderUnordered);
+ return ir_lval_wrap(irb, scope, cmpxchg, lval);
}
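The single CmpExchange builtin becomes a weak and a strong variant here, distinguished by the is_weak flag handed to ir_build_cmpxchg (a sixth operand is now collected as well). As a self-contained C++ illustration of the weak/strong compare-exchange distinction these builtins expose, using std::atomic rather than anything from the compiler (increment_if_even is an invented example name):

    #include <atomic>
    #include <cassert>

    // Weak compare-exchange may fail spuriously, so callers retry in a loop;
    // the strong form only fails when the stored value actually differs.
    int increment_if_even(std::atomic<int> &v) {
        int expected = v.load(std::memory_order_acquire);
        while ((expected & 1) == 0 &&
               !v.compare_exchange_weak(expected, expected + 1,
                                        std::memory_order_acq_rel,    // success ordering
                                        std::memory_order_acquire)) { // failure ordering
            // on failure, expected now holds the current value; loop re-checks it
        }
        return expected;
    }

    int main() {
        std::atomic<int> v{4};
        increment_if_even(v);
        assert(v.load() == 5);
    }

The weak form may fail even when the value matches, so it normally sits in a retry loop as above; the strong form is the drop-in choice when no loop is wanted.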
case BuiltinFnIdFence:
{
@@ -3773,7 +4130,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_fence(irb, scope, node, arg0_value, AtomicOrderUnordered);
+ IrInstruction *fence = ir_build_fence(irb, scope, node, arg0_value, AtomicOrderUnordered);
+ return ir_lval_wrap(irb, scope, fence, lval);
}
case BuiltinFnIdDivExact:
{
@@ -3787,7 +4145,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bin_op(irb, scope, node, IrBinOpDivExact, arg0_value, arg1_value, true);
+ IrInstruction *bin_op = ir_build_bin_op(irb, scope, node, IrBinOpDivExact, arg0_value, arg1_value, true);
+ return ir_lval_wrap(irb, scope, bin_op, lval);
}
case BuiltinFnIdDivTrunc:
{
@@ -3801,7 +4160,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bin_op(irb, scope, node, IrBinOpDivTrunc, arg0_value, arg1_value, true);
+ IrInstruction *bin_op = ir_build_bin_op(irb, scope, node, IrBinOpDivTrunc, arg0_value, arg1_value, true);
+ return ir_lval_wrap(irb, scope, bin_op, lval);
}
case BuiltinFnIdDivFloor:
{
@@ -3815,7 +4175,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bin_op(irb, scope, node, IrBinOpDivFloor, arg0_value, arg1_value, true);
+ IrInstruction *bin_op = ir_build_bin_op(irb, scope, node, IrBinOpDivFloor, arg0_value, arg1_value, true);
+ return ir_lval_wrap(irb, scope, bin_op, lval);
}
case BuiltinFnIdRem:
{
@@ -3829,7 +4190,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bin_op(irb, scope, node, IrBinOpRemRem, arg0_value, arg1_value, true);
+ IrInstruction *bin_op = ir_build_bin_op(irb, scope, node, IrBinOpRemRem, arg0_value, arg1_value, true);
+ return ir_lval_wrap(irb, scope, bin_op, lval);
}
case BuiltinFnIdMod:
{
@@ -3843,7 +4205,23 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bin_op(irb, scope, node, IrBinOpRemMod, arg0_value, arg1_value, true);
+ IrInstruction *bin_op = ir_build_bin_op(irb, scope, node, IrBinOpRemMod, arg0_value, arg1_value, true);
+ return ir_lval_wrap(irb, scope, bin_op, lval);
+ }
+ case BuiltinFnIdSqrt:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *ir_sqrt = ir_build_sqrt(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, ir_sqrt, lval);
}
case BuiltinFnIdTruncate:
{
@@ -3857,7 +4235,138 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_truncate(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *truncate = ir_build_truncate(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, truncate, lval);
+ }
+ case BuiltinFnIdIntCast:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *result = ir_build_int_cast(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdFloatCast:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *result = ir_build_float_cast(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdErrSetCast:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *result = ir_build_err_set_cast(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdFromBytes:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *result = ir_build_from_bytes(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdToBytes:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *result = ir_build_to_bytes(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdIntToFloat:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *result = ir_build_int_to_float(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdFloatToInt:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *result = ir_build_float_to_int(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdErrToInt:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *result = ir_build_err_to_int(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdIntToErr:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *result = ir_build_int_to_err(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdBoolToInt:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *result = ir_build_bool_to_int(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, result, lval);
}
case BuiltinFnIdIntType:
{
@@ -3871,7 +4380,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_int_type(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *int_type = ir_build_int_type(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, int_type, lval);
}
case BuiltinFnIdMemcpy:
{
@@ -3890,7 +4400,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg2_value == irb->codegen->invalid_instruction)
return arg2_value;
- return ir_build_memcpy(irb, scope, node, arg0_value, arg1_value, arg2_value);
+ IrInstruction *ir_memcpy = ir_build_memcpy(irb, scope, node, arg0_value, arg1_value, arg2_value);
+ return ir_lval_wrap(irb, scope, ir_memcpy, lval);
}
case BuiltinFnIdMemset:
{
@@ -3909,7 +4420,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg2_value == irb->codegen->invalid_instruction)
return arg2_value;
- return ir_build_memset(irb, scope, node, arg0_value, arg1_value, arg2_value);
+ IrInstruction *ir_memset = ir_build_memset(irb, scope, node, arg0_value, arg1_value, arg2_value);
+ return ir_lval_wrap(irb, scope, ir_memset, lval);
}
case BuiltinFnIdMemberCount:
{
@@ -3918,7 +4430,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_member_count(irb, scope, node, arg0_value);
+ IrInstruction *member_count = ir_build_member_count(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, member_count, lval);
}
case BuiltinFnIdMemberType:
{
@@ -3933,7 +4446,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return arg1_value;
- return ir_build_member_type(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *member_type = ir_build_member_type(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, member_type, lval);
}
case BuiltinFnIdMemberName:
{
@@ -3948,14 +4462,54 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return arg1_value;
- return ir_build_member_name(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *member_name = ir_build_member_name(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, member_name, lval);
+ }
+ case BuiltinFnIdField:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node_extra(irb, arg0_node, scope, LValPtr);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *ptr_instruction = ir_build_field_ptr_instruction(irb, scope, node, arg0_value, arg1_value);
+
+ if (lval == LValPtr)
+ return ptr_instruction;
+
+ return ir_build_load_ptr(irb, scope, node, ptr_instruction);
+ }
+ case BuiltinFnIdTypeInfo:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *type_info = ir_build_type_info(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, type_info, lval);
}
case BuiltinFnIdBreakpoint:
- return ir_build_breakpoint(irb, scope, node);
+ return ir_lval_wrap(irb, scope, ir_build_breakpoint(irb, scope, node), lval);
case BuiltinFnIdReturnAddress:
- return ir_build_return_address(irb, scope, node);
+ return ir_lval_wrap(irb, scope, ir_build_return_address(irb, scope, node), lval);
case BuiltinFnIdFrameAddress:
- return ir_build_frame_address(irb, scope, node);
+ return ir_lval_wrap(irb, scope, ir_build_frame_address(irb, scope, node), lval);
+ case BuiltinFnIdHandle:
+ if (!irb->exec->fn_entry) {
+ add_node_error(irb->codegen, node, buf_sprintf("@handle() called outside of function definition"));
+ return irb->codegen->invalid_instruction;
+ }
+ if (!is_async) {
+ add_node_error(irb->codegen, node, buf_sprintf("@handle() in non-async function"));
+ return irb->codegen->invalid_instruction;
+ }
+ return ir_lval_wrap(irb, scope, ir_build_handle(irb, scope, node), lval);
case BuiltinFnIdAlignOf:
{
AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
@@ -3963,16 +4517,17 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_align_of(irb, scope, node, arg0_value);
+ IrInstruction *align_of = ir_build_align_of(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, align_of, lval);
}
case BuiltinFnIdAddWithOverflow:
- return ir_gen_overflow_op(irb, scope, node, IrOverflowOpAdd);
+ return ir_lval_wrap(irb, scope, ir_gen_overflow_op(irb, scope, node, IrOverflowOpAdd), lval);
case BuiltinFnIdSubWithOverflow:
- return ir_gen_overflow_op(irb, scope, node, IrOverflowOpSub);
+ return ir_lval_wrap(irb, scope, ir_gen_overflow_op(irb, scope, node, IrOverflowOpSub), lval);
case BuiltinFnIdMulWithOverflow:
- return ir_gen_overflow_op(irb, scope, node, IrOverflowOpMul);
+ return ir_lval_wrap(irb, scope, ir_gen_overflow_op(irb, scope, node, IrOverflowOpMul), lval);
case BuiltinFnIdShlWithOverflow:
- return ir_gen_overflow_op(irb, scope, node, IrOverflowOpShl);
+ return ir_lval_wrap(irb, scope, ir_gen_overflow_op(irb, scope, node, IrOverflowOpShl), lval);
case BuiltinFnIdTypeName:
{
AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
@@ -3980,21 +4535,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_type_name(irb, scope, node, arg0_value);
- }
- case BuiltinFnIdCanImplicitCast:
- {
- AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
- IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
- if (arg0_value == irb->codegen->invalid_instruction)
- return arg0_value;
-
- AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
- IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
- if (arg1_value == irb->codegen->invalid_instruction)
- return arg1_value;
-
- return ir_build_can_implicit_cast(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *type_name = ir_build_type_name(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, type_name, lval);
}
case BuiltinFnIdPanic:
{
@@ -4003,7 +4545,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_panic(irb, scope, node, arg0_value);
+ IrInstruction *panic = ir_build_panic(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, panic, lval);
}
case BuiltinFnIdPtrCast:
{
@@ -4017,7 +4560,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_ptr_cast(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *ptr_cast = ir_build_ptr_cast(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, ptr_cast, lval);
}
case BuiltinFnIdBitCast:
{
@@ -4031,7 +4575,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bit_cast(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *bit_cast = ir_build_bit_cast(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, bit_cast, lval);
}
case BuiltinFnIdIntToPtr:
{
@@ -4045,7 +4590,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_int_to_ptr(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *int_to_ptr = ir_build_int_to_ptr(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, int_to_ptr, lval);
}
case BuiltinFnIdPtrToInt:
{
@@ -4054,7 +4600,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_ptr_to_int(irb, scope, node, arg0_value);
+ IrInstruction *ptr_to_int = ir_build_ptr_to_int(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, ptr_to_int, lval);
}
case BuiltinFnIdTagName:
{
@@ -4064,7 +4611,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
return arg0_value;
IrInstruction *actual_tag = ir_build_union_tag(irb, scope, node, arg0_value);
- return ir_build_tag_name(irb, scope, node, actual_tag);
+ IrInstruction *tag_name = ir_build_tag_name(irb, scope, node, actual_tag);
+ return ir_lval_wrap(irb, scope, tag_name, lval);
}
case BuiltinFnIdTagType:
{
@@ -4073,7 +4621,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_tag_type(irb, scope, node, arg0_value);
+ IrInstruction *tag_type = ir_build_tag_type(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, tag_type, lval);
}
case BuiltinFnIdFieldParentPtr:
{
@@ -4092,7 +4641,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg2_value == irb->codegen->invalid_instruction)
return arg2_value;
- return ir_build_field_parent_ptr(irb, scope, node, arg0_value, arg1_value, arg2_value, nullptr);
+ IrInstruction *field_parent_ptr = ir_build_field_parent_ptr(irb, scope, node, arg0_value, arg1_value, arg2_value, nullptr);
+ return ir_lval_wrap(irb, scope, field_parent_ptr, lval);
}
case BuiltinFnIdOffsetOf:
{
@@ -4106,7 +4656,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_offset_of(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *offset_of = ir_build_offset_of(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, offset_of, lval);
}
case BuiltinFnIdInlineCall:
case BuiltinFnIdNoInlineCall:
@@ -4132,7 +4683,38 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
}
FnInline fn_inline = (builtin_fn->id == BuiltinFnIdInlineCall) ? FnInlineAlways : FnInlineNever;
- return ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, fn_inline, false, nullptr);
+ IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, fn_inline, false, nullptr, nullptr);
+ return ir_lval_wrap(irb, scope, call, lval);
+ }
+ case BuiltinFnIdNewStackCall:
+ {
+ if (node->data.fn_call_expr.params.length == 0) {
+ add_node_error(irb->codegen, node, buf_sprintf("expected at least 1 argument, found 0"));
+ return irb->codegen->invalid_instruction;
+ }
+
+ AstNode *new_stack_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *new_stack = ir_gen_node(irb, new_stack_node, scope);
+ if (new_stack == irb->codegen->invalid_instruction)
+ return new_stack;
+
+ AstNode *fn_ref_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *fn_ref = ir_gen_node(irb, fn_ref_node, scope);
+ if (fn_ref == irb->codegen->invalid_instruction)
+ return fn_ref;
+
+ size_t arg_count = node->data.fn_call_expr.params.length - 2;
+
+ IrInstruction **args = allocate<IrInstruction*>(arg_count);
+ for (size_t i = 0; i < arg_count; i += 1) {
+ AstNode *arg_node = node->data.fn_call_expr.params.at(i + 2);
+ args[i] = ir_gen_node(irb, arg_node, scope);
+ if (args[i] == irb->codegen->invalid_instruction)
+ return args[i];
+ }
+
+ IrInstruction *call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, false, nullptr, new_stack);
+ return ir_lval_wrap(irb, scope, call, lval);
}
case BuiltinFnIdTypeId:
{
@@ -4141,7 +4723,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_type_id(irb, scope, node, arg0_value);
+ IrInstruction *type_id = ir_build_type_id(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, type_id, lval);
}
case BuiltinFnIdShlExact:
{
@@ -4155,7 +4738,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bin_op(irb, scope, node, IrBinOpBitShiftLeftExact, arg0_value, arg1_value, true);
+ IrInstruction *bin_op = ir_build_bin_op(irb, scope, node, IrBinOpBitShiftLeftExact, arg0_value, arg1_value, true);
+ return ir_lval_wrap(irb, scope, bin_op, lval);
}
case BuiltinFnIdShrExact:
{
@@ -4169,7 +4753,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_bin_op(irb, scope, node, IrBinOpBitShiftRightExact, arg0_value, arg1_value, true);
+ IrInstruction *bin_op = ir_build_bin_op(irb, scope, node, IrBinOpBitShiftRightExact, arg0_value, arg1_value, true);
+ return ir_lval_wrap(irb, scope, bin_op, lval);
}
case BuiltinFnIdSetEvalBranchQuota:
{
@@ -4178,7 +4763,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_set_eval_branch_quota(irb, scope, node, arg0_value);
+ IrInstruction *set_eval_branch_quota = ir_build_set_eval_branch_quota(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, set_eval_branch_quota, lval);
}
case BuiltinFnIdAlignCast:
{
@@ -4192,10 +4778,14 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_align_cast(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *align_cast = ir_build_align_cast(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, align_cast, lval);
}
case BuiltinFnIdOpaqueType:
- return ir_build_opaque_type(irb, scope, node);
+ {
+ IrInstruction *opaque_type = ir_build_opaque_type(irb, scope, node);
+ return ir_lval_wrap(irb, scope, opaque_type, lval);
+ }
case BuiltinFnIdSetAlignStack:
{
AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
@@ -4203,7 +4793,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg0_value == irb->codegen->invalid_instruction)
return arg0_value;
- return ir_build_set_align_stack(irb, scope, node, arg0_value);
+ IrInstruction *set_align_stack = ir_build_set_align_stack(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, set_align_stack, lval);
}
case BuiltinFnIdArgType:
{
@@ -4217,7 +4808,8 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg1_value == irb->codegen->invalid_instruction)
return arg1_value;
- return ir_build_arg_type(irb, scope, node, arg0_value, arg1_value);
+ IrInstruction *arg_type = ir_build_arg_type(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, arg_type, lval);
}
case BuiltinFnIdExport:
{
@@ -4236,11 +4828,13 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
if (arg2_value == irb->codegen->invalid_instruction)
return arg2_value;
- return ir_build_export(irb, scope, node, arg0_value, arg1_value, arg2_value);
+ IrInstruction *ir_export = ir_build_export(irb, scope, node, arg0_value, arg1_value, arg2_value);
+ return ir_lval_wrap(irb, scope, ir_export, lval);
}
case BuiltinFnIdErrorReturnTrace:
{
- return ir_build_error_return_trace(irb, scope, node, IrInstructionErrorReturnTrace::Null);
+ IrInstruction *error_return_trace = ir_build_error_return_trace(irb, scope, node, IrInstructionErrorReturnTrace::Null);
+ return ir_lval_wrap(irb, scope, error_return_trace, lval);
}
case BuiltinFnIdAtomicRmw:
{
@@ -4274,15 +4868,61 @@ static IrInstruction *ir_gen_builtin_fn_call(IrBuilder *irb, Scope *scope, AstNo
// these 2 values don't mean anything since we passed non-null values for other args
AtomicRmwOp_xchg, AtomicOrderMonotonic);
}
+ case BuiltinFnIdAtomicLoad:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ AstNode *arg2_node = node->data.fn_call_expr.params.at(2);
+ IrInstruction *arg2_value = ir_gen_node(irb, arg2_node, scope);
+ if (arg2_value == irb->codegen->invalid_instruction)
+ return arg2_value;
+
+ return ir_build_atomic_load(irb, scope, node, arg0_value, arg1_value, arg2_value,
+ // this value does not mean anything since we passed a non-null value for the other arg
+ AtomicOrderMonotonic);
+ }
+ case BuiltinFnIdIntToEnum:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ AstNode *arg1_node = node->data.fn_call_expr.params.at(1);
+ IrInstruction *arg1_value = ir_gen_node(irb, arg1_node, scope);
+ if (arg1_value == irb->codegen->invalid_instruction)
+ return arg1_value;
+
+ IrInstruction *result = ir_build_int_to_enum(irb, scope, node, arg0_value, arg1_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
+ case BuiltinFnIdEnumToInt:
+ {
+ AstNode *arg0_node = node->data.fn_call_expr.params.at(0);
+ IrInstruction *arg0_value = ir_gen_node(irb, arg0_node, scope);
+ if (arg0_value == irb->codegen->invalid_instruction)
+ return arg0_value;
+
+ IrInstruction *result = ir_build_enum_to_int(irb, scope, node, arg0_value);
+ return ir_lval_wrap(irb, scope, result, lval);
+ }
}
zig_unreachable();
}
-static IrInstruction *ir_gen_fn_call(IrBuilder *irb, Scope *scope, AstNode *node) {
+static IrInstruction *ir_gen_fn_call(IrBuilder *irb, Scope *scope, AstNode *node, LVal lval) {
assert(node->type == NodeTypeFnCallExpr);
if (node->data.fn_call_expr.is_builtin)
- return ir_gen_builtin_fn_call(irb, scope, node);
+ return ir_gen_builtin_fn_call(irb, scope, node, lval);
AstNode *fn_ref_node = node->data.fn_call_expr.fn_ref_expr;
IrInstruction *fn_ref = ir_gen_node(irb, fn_ref_node, scope);
@@ -4308,7 +4948,8 @@ static IrInstruction *ir_gen_fn_call(IrBuilder *irb, Scope *scope, AstNode *node
}
}
- return ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator);
+ IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator, nullptr);
+ return ir_lval_wrap(irb, scope, fn_call, lval);
}
static IrInstruction *ir_gen_if_bool_expr(IrBuilder *irb, Scope *scope, AstNode *node) {
@@ -4379,30 +5020,28 @@ static IrInstruction *ir_gen_prefix_op_id_lval(IrBuilder *irb, Scope *scope, Ast
}
static IrInstruction *ir_gen_prefix_op_id(IrBuilder *irb, Scope *scope, AstNode *node, IrUnOp op_id) {
- return ir_gen_prefix_op_id_lval(irb, scope, node, op_id, LVAL_NONE);
+ return ir_gen_prefix_op_id_lval(irb, scope, node, op_id, LValNone);
}
static IrInstruction *ir_lval_wrap(IrBuilder *irb, Scope *scope, IrInstruction *value, LVal lval) {
- if (!lval.is_ptr)
+ if (lval != LValPtr)
return value;
if (value == irb->codegen->invalid_instruction)
return value;
// We needed a pointer to a value, but we got a value. So we create
// an instruction which just makes a const pointer of it.
- return ir_build_ref(irb, scope, value->source_node, value, lval.is_const, lval.is_volatile);
+ return ir_build_ref(irb, scope, value->source_node, value, false, false);
}
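In the hunks above, LVal is used as a plain enum value (LValNone, LValPtr) instead of the old struct with is_ptr/is_const/is_volatile fields, which is why every builtin arm now funnels its result through ir_lval_wrap(..., lval) rather than returning the instruction directly. A minimal stand-alone sketch of that wrapping decision, with stand-in types since the real enum and instruction definitions are outside this hunk:

    #include <cassert>

    // Stand-ins for the compiler's types; names are for illustration only.
    enum LVal { LValNone, LValPtr };

    struct Inst { bool is_ref; };

    Inst *build_ref(Inst *value) {   // models ir_build_ref: const pointer to the value
        value->is_ref = true;
        return value;
    }

    // Same decision ir_lval_wrap makes: rvalues pass through unless the caller
    // asked for a pointer, in which case a ref instruction is inserted.
    Inst *lval_wrap(Inst *value, LVal lval) {
        if (lval != LValPtr)
            return value;
        return build_ref(value);
    }

    int main() {
        Inst a{false}, b{false};
        assert(lval_wrap(&a, LValNone) == &a && !a.is_ref);
        assert(lval_wrap(&b, LValPtr)->is_ref);
    }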
-static IrInstruction *ir_gen_address_of(IrBuilder *irb, Scope *scope, AstNode *node) {
- assert(node->type == NodeTypeAddrOfExpr);
- bool is_const = node->data.addr_of_expr.is_const;
- bool is_volatile = node->data.addr_of_expr.is_volatile;
- AstNode *expr_node = node->data.addr_of_expr.op_expr;
- AstNode *align_expr = node->data.addr_of_expr.align_expr;
-
- if (align_expr == nullptr && !is_const && !is_volatile) {
- return ir_gen_node_extra(irb, expr_node, scope, make_lval_addr(is_const, is_volatile));
- }
+static IrInstruction *ir_gen_pointer_type(IrBuilder *irb, Scope *scope, AstNode *node) {
+ assert(node->type == NodeTypePointerType);
+ PtrLen ptr_len = (node->data.pointer_type.star_token->id == TokenIdStar ||
+ node->data.pointer_type.star_token->id == TokenIdStarStar) ? PtrLenSingle : PtrLenUnknown;
+ bool is_const = node->data.pointer_type.is_const;
+ bool is_volatile = node->data.pointer_type.is_volatile;
+ AstNode *expr_node = node->data.pointer_type.op_expr;
+ AstNode *align_expr = node->data.pointer_type.align_expr;
IrInstruction *align_value;
if (align_expr != nullptr) {
@@ -4418,27 +5057,27 @@ static IrInstruction *ir_gen_address_of(IrBuilder *irb, Scope *scope, AstNode *n
return child_type;
uint32_t bit_offset_start = 0;
- if (node->data.addr_of_expr.bit_offset_start != nullptr) {
- if (!bigint_fits_in_bits(node->data.addr_of_expr.bit_offset_start, 32, false)) {
+ if (node->data.pointer_type.bit_offset_start != nullptr) {
+ if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_start, 32, false)) {
Buf *val_buf = buf_alloc();
- bigint_append_buf(val_buf, node->data.addr_of_expr.bit_offset_start, 10);
+ bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_start, 10);
exec_add_error_node(irb->codegen, irb->exec, node,
buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
return irb->codegen->invalid_instruction;
}
- bit_offset_start = bigint_as_unsigned(node->data.addr_of_expr.bit_offset_start);
+ bit_offset_start = bigint_as_unsigned(node->data.pointer_type.bit_offset_start);
}
uint32_t bit_offset_end = 0;
- if (node->data.addr_of_expr.bit_offset_end != nullptr) {
- if (!bigint_fits_in_bits(node->data.addr_of_expr.bit_offset_end, 32, false)) {
+ if (node->data.pointer_type.bit_offset_end != nullptr) {
+ if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_end, 32, false)) {
Buf *val_buf = buf_alloc();
- bigint_append_buf(val_buf, node->data.addr_of_expr.bit_offset_end, 10);
+ bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_end, 10);
exec_add_error_node(irb->codegen, irb->exec, node,
buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
return irb->codegen->invalid_instruction;
}
- bit_offset_end = bigint_as_unsigned(node->data.addr_of_expr.bit_offset_end);
+ bit_offset_end = bigint_as_unsigned(node->data.pointer_type.bit_offset_end);
}
if ((bit_offset_start != 0 || bit_offset_end != 0) && bit_offset_start >= bit_offset_end) {
@@ -4447,14 +5086,14 @@ static IrInstruction *ir_gen_address_of(IrBuilder *irb, Scope *scope, AstNode *n
return irb->codegen->invalid_instruction;
}
- return ir_build_ptr_type_of(irb, scope, node, child_type, is_const, is_volatile,
- align_value, bit_offset_start, bit_offset_end);
+ return ir_build_ptr_type(irb, scope, node, child_type, is_const, is_volatile,
+ ptr_len, align_value, bit_offset_start, bit_offset_end);
}
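The bit-offset handling in ir_gen_pointer_type enforces two constraints: each offset must fit in a u32, and when either offset is non-zero the range must be non-empty (start strictly less than end). The same checks on plain integers, as a sketch (check_bit_offsets is an invented helper; the compiler operates on BigInt values and emits compile errors rather than returning strings):

    #include <cassert>
    #include <cstdint>
    #include <optional>
    #include <string>

    // Mirrors the validation in ir_gen_pointer_type; messages are paraphrased.
    std::optional<std::string> check_bit_offsets(uint64_t start, uint64_t end) {
        if (start > UINT32_MAX || end > UINT32_MAX)
            return "value too large for u32 bit offset";
        if ((start != 0 || end != 0) && start >= end)
            return "bit offset start must be less than bit offset end";
        return std::nullopt;
    }

    int main() {
        assert(!check_bit_offsets(0, 0));            // no packed bit range requested
        assert(!check_bit_offsets(0, 3));            // valid 3-bit range
        assert(check_bit_offsets(3, 3).has_value()); // empty range rejected
    }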
static IrInstruction *ir_gen_err_assert_ok(IrBuilder *irb, Scope *scope, AstNode *source_node, AstNode *expr_node,
LVal lval)
{
- IrInstruction *err_union_ptr = ir_gen_node_extra(irb, expr_node, scope, LVAL_PTR);
+ IrInstruction *err_union_ptr = ir_gen_node_extra(irb, expr_node, scope, LValPtr);
if (err_union_ptr == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
@@ -4462,27 +5101,12 @@ static IrInstruction *ir_gen_err_assert_ok(IrBuilder *irb, Scope *scope, AstNode
if (payload_ptr == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
- if (lval.is_ptr)
+ if (lval == LValPtr)
return payload_ptr;
return ir_build_load_ptr(irb, scope, source_node, payload_ptr);
}
-static IrInstruction *ir_gen_maybe_assert_ok(IrBuilder *irb, Scope *scope, AstNode *node, LVal lval) {
- assert(node->type == NodeTypePrefixOpExpr);
- AstNode *expr_node = node->data.prefix_op_expr.primary_expr;
-
- IrInstruction *maybe_ptr = ir_gen_node_extra(irb, expr_node, scope, LVAL_PTR);
- if (maybe_ptr == irb->codegen->invalid_instruction)
- return irb->codegen->invalid_instruction;
-
- IrInstruction *unwrapped_ptr = ir_build_unwrap_maybe(irb, scope, node, maybe_ptr, true);
- if (lval.is_ptr)
- return unwrapped_ptr;
-
- return ir_build_load_ptr(irb, scope, node, unwrapped_ptr);
-}
-
static IrInstruction *ir_gen_bool_not(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypePrefixOpExpr);
AstNode *expr_node = node->data.prefix_op_expr.primary_expr;
@@ -4510,12 +5134,12 @@ static IrInstruction *ir_gen_prefix_op_expr(IrBuilder *irb, Scope *scope, AstNod
return ir_lval_wrap(irb, scope, ir_gen_prefix_op_id(irb, scope, node, IrUnOpNegation), lval);
case PrefixOpNegationWrap:
return ir_lval_wrap(irb, scope, ir_gen_prefix_op_id(irb, scope, node, IrUnOpNegationWrap), lval);
- case PrefixOpDereference:
- return ir_gen_prefix_op_id_lval(irb, scope, node, IrUnOpDereference, lval);
- case PrefixOpMaybe:
- return ir_lval_wrap(irb, scope, ir_gen_prefix_op_id(irb, scope, node, IrUnOpMaybe), lval);
- case PrefixOpUnwrapMaybe:
- return ir_gen_maybe_assert_ok(irb, scope, node, lval);
+ case PrefixOpOptional:
+ return ir_lval_wrap(irb, scope, ir_gen_prefix_op_id(irb, scope, node, IrUnOpOptional), lval);
+ case PrefixOpAddrOf: {
+ AstNode *expr_node = node->data.prefix_op_expr.primary_expr;
+ return ir_lval_wrap(irb, scope, ir_gen_node_extra(irb, expr_node, scope, LValPtr), lval);
+ }
}
zig_unreachable();
}
@@ -4570,6 +5194,11 @@ static IrInstruction *ir_gen_var_decl(IrBuilder *irb, Scope *scope, AstNode *nod
AstNodeVariableDeclaration *variable_declaration = &node->data.variable_declaration;
+ if (buf_eql_str(variable_declaration->symbol, "_")) {
+ add_node_error(irb->codegen, node, buf_sprintf("`_` is not a declarable symbol"));
+ return irb->codegen->invalid_instruction;
+ }
+
IrInstruction *type_instruction;
if (variable_declaration->type != nullptr) {
type_instruction = ir_gen_node(irb, variable_declaration->type, scope);
@@ -4582,6 +5211,7 @@ static IrInstruction *ir_gen_var_decl(IrBuilder *irb, Scope *scope, AstNode *nod
bool is_shadowable = false;
bool is_const = variable_declaration->is_const;
bool is_extern = variable_declaration->is_extern;
+
IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node,
ir_should_inline(irb->exec, scope) || variable_declaration->is_comptime);
VariableTableEntry *var = ir_create_var(irb, node, scope, variable_declaration->symbol,
@@ -4654,7 +5284,7 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
} else {
payload_scope = scope;
}
- IrInstruction *err_val_ptr = ir_gen_node_extra(irb, node->data.while_expr.condition, scope, LVAL_PTR);
+ IrInstruction *err_val_ptr = ir_gen_node_extra(irb, node->data.while_expr.condition, scope, LValPtr);
if (err_val_ptr == irb->codegen->invalid_instruction)
return err_val_ptr;
IrInstruction *err_val = ir_build_load_ptr(irb, scope, node->data.while_expr.condition, err_val_ptr);
@@ -4689,8 +5319,10 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
if (body_result == irb->codegen->invalid_instruction)
return body_result;
- if (!instr_is_unreachable(body_result))
+ if (!instr_is_unreachable(body_result)) {
+ ir_mark_gen(ir_build_check_statement_is_void(irb, payload_scope, node->data.while_expr.body, body_result));
ir_mark_gen(ir_build_br(irb, payload_scope, node, continue_block, is_comptime));
+ }
if (continue_expr_node) {
ir_set_cursor_at_end_and_append_block(irb, continue_block);
@@ -4737,7 +5369,7 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
VariableTableEntry *payload_var = ir_create_var(irb, symbol_node, scope, var_symbol,
true, false, false, is_comptime);
Scope *child_scope = payload_var->child_scope;
- IrInstruction *maybe_val_ptr = ir_gen_node_extra(irb, node->data.while_expr.condition, scope, LVAL_PTR);
+ IrInstruction *maybe_val_ptr = ir_gen_node_extra(irb, node->data.while_expr.condition, scope, LValPtr);
if (maybe_val_ptr == irb->codegen->invalid_instruction)
return maybe_val_ptr;
IrInstruction *maybe_val = ir_build_load_ptr(irb, scope, node->data.while_expr.condition, maybe_val_ptr);
@@ -4769,8 +5401,10 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
if (body_result == irb->codegen->invalid_instruction)
return body_result;
- if (!instr_is_unreachable(body_result))
+ if (!instr_is_unreachable(body_result)) {
+ ir_mark_gen(ir_build_check_statement_is_void(irb, child_scope, node->data.while_expr.body, body_result));
ir_mark_gen(ir_build_br(irb, child_scope, node, continue_block, is_comptime));
+ }
if (continue_expr_node) {
ir_set_cursor_at_end_and_append_block(irb, continue_block);
@@ -4830,8 +5464,10 @@ static IrInstruction *ir_gen_while_expr(IrBuilder *irb, Scope *scope, AstNode *n
if (body_result == irb->codegen->invalid_instruction)
return body_result;
- if (!instr_is_unreachable(body_result))
+ if (!instr_is_unreachable(body_result)) {
+ ir_mark_gen(ir_build_check_statement_is_void(irb, scope, node->data.while_expr.body, body_result));
ir_mark_gen(ir_build_br(irb, scope, node, continue_block, is_comptime));
+ }
if (continue_expr_node) {
ir_set_cursor_at_end_and_append_block(irb, continue_block);
@@ -4881,7 +5517,7 @@ static IrInstruction *ir_gen_for_expr(IrBuilder *irb, Scope *parent_scope, AstNo
}
assert(elem_node->type == NodeTypeSymbol);
- IrInstruction *array_val_ptr = ir_gen_node_extra(irb, array_node, parent_scope, LVAL_PTR);
+ IrInstruction *array_val_ptr = ir_gen_node_extra(irb, array_node, parent_scope, LValPtr);
if (array_val_ptr == irb->codegen->invalid_instruction)
return array_val_ptr;
@@ -4905,7 +5541,7 @@ static IrInstruction *ir_gen_for_expr(IrBuilder *irb, Scope *parent_scope, AstNo
IrInstruction *undefined_value = ir_build_const_undefined(irb, child_scope, elem_node);
ir_build_var_decl(irb, child_scope, elem_node, elem_var, elem_var_type, nullptr, undefined_value);
- IrInstruction *elem_var_ptr = ir_build_var_ptr(irb, child_scope, node, elem_var, false, false);
+ IrInstruction *elem_var_ptr = ir_build_var_ptr(irb, child_scope, node, elem_var);
AstNode *index_var_source_node;
VariableTableEntry *index_var;
@@ -4923,7 +5559,7 @@ static IrInstruction *ir_gen_for_expr(IrBuilder *irb, Scope *parent_scope, AstNo
IrInstruction *zero = ir_build_const_usize(irb, child_scope, node, 0);
IrInstruction *one = ir_build_const_usize(irb, child_scope, node, 1);
ir_build_var_decl(irb, child_scope, index_var_source_node, index_var, usize, nullptr, zero);
- IrInstruction *index_ptr = ir_build_var_ptr(irb, child_scope, node, index_var, false, false);
+ IrInstruction *index_ptr = ir_build_var_ptr(irb, child_scope, node, index_var);
IrBasicBlock *cond_block = ir_create_basic_block(irb, child_scope, "ForCond");
@@ -4943,7 +5579,7 @@ static IrInstruction *ir_gen_for_expr(IrBuilder *irb, Scope *parent_scope, AstNo
ir_mark_gen(ir_build_cond_br(irb, child_scope, node, cond, body_block, else_block, is_comptime));
ir_set_cursor_at_end_and_append_block(irb, body_block);
- IrInstruction *elem_ptr = ir_build_elem_ptr(irb, child_scope, node, array_val_ptr, index_val, false);
+ IrInstruction *elem_ptr = ir_build_elem_ptr(irb, child_scope, node, array_val_ptr, index_val, false, PtrLenSingle);
IrInstruction *elem_val;
if (node->data.for_expr.elem_is_ptr) {
elem_val = elem_ptr;
@@ -5168,16 +5804,16 @@ static IrInstruction *ir_gen_test_expr(IrBuilder *irb, Scope *scope, AstNode *no
AstNode *else_node = node->data.test_expr.else_node;
bool var_is_ptr = node->data.test_expr.var_is_ptr;
- IrInstruction *maybe_val_ptr = ir_gen_node_extra(irb, expr_node, scope, LVAL_PTR);
+ IrInstruction *maybe_val_ptr = ir_gen_node_extra(irb, expr_node, scope, LValPtr);
if (maybe_val_ptr == irb->codegen->invalid_instruction)
return maybe_val_ptr;
IrInstruction *maybe_val = ir_build_load_ptr(irb, scope, node, maybe_val_ptr);
IrInstruction *is_non_null = ir_build_test_nonnull(irb, scope, node, maybe_val);
- IrBasicBlock *then_block = ir_create_basic_block(irb, scope, "MaybeThen");
- IrBasicBlock *else_block = ir_create_basic_block(irb, scope, "MaybeElse");
- IrBasicBlock *endif_block = ir_create_basic_block(irb, scope, "MaybeEndIf");
+ IrBasicBlock *then_block = ir_create_basic_block(irb, scope, "OptionalThen");
+ IrBasicBlock *else_block = ir_create_basic_block(irb, scope, "OptionalElse");
+ IrBasicBlock *endif_block = ir_create_basic_block(irb, scope, "OptionalEndIf");
IrInstruction *is_comptime;
if (ir_should_inline(irb->exec, scope)) {
@@ -5246,7 +5882,7 @@ static IrInstruction *ir_gen_if_err_expr(IrBuilder *irb, Scope *scope, AstNode *
Buf *var_symbol = node->data.if_err_expr.var_symbol;
Buf *err_symbol = node->data.if_err_expr.err_symbol;
- IrInstruction *err_val_ptr = ir_gen_node_extra(irb, target_node, scope, LVAL_PTR);
+ IrInstruction *err_val_ptr = ir_gen_node_extra(irb, target_node, scope, LValPtr);
if (err_val_ptr == irb->codegen->invalid_instruction)
return err_val_ptr;
@@ -5372,7 +6008,7 @@ static IrInstruction *ir_gen_switch_expr(IrBuilder *irb, Scope *scope, AstNode *
assert(node->type == NodeTypeSwitchExpr);
AstNode *target_node = node->data.switch_expr.expr;
- IrInstruction *target_value_ptr = ir_gen_node_extra(irb, target_node, scope, LVAL_PTR);
+ IrInstruction *target_value_ptr = ir_gen_node_extra(irb, target_node, scope, LValPtr);
if (target_value_ptr == irb->codegen->invalid_instruction)
return target_value_ptr;
IrInstruction *target_value = ir_build_switch_target(irb, scope, node, target_value_ptr);
@@ -5536,13 +6172,13 @@ static IrInstruction *ir_gen_switch_expr(IrBuilder *irb, Scope *scope, AstNode *
}
- ir_build_check_switch_prongs(irb, scope, node, target_value, check_ranges.items, check_ranges.length,
+ IrInstruction *switch_prongs_void = ir_build_check_switch_prongs(irb, scope, node, target_value, check_ranges.items, check_ranges.length,
else_prong != nullptr);
if (cases.length == 0) {
ir_build_br(irb, scope, node, else_block, is_comptime);
} else {
- ir_build_switch_br(irb, scope, node, target_value, else_block, cases.length, cases.items, is_comptime);
+ ir_build_switch_br(irb, scope, node, target_value, else_block, cases.length, cases.items, is_comptime, switch_prongs_void);
}
if (!else_prong) {
@@ -5630,6 +6266,9 @@ static IrInstruction *ir_gen_break(IrBuilder *irb, Scope *break_scope, AstNode *
assert(this_block_scope->end_block != nullptr);
return ir_gen_return_from_block(irb, break_scope, node, this_block_scope);
}
+ } else if (search_scope->id == ScopeIdSuspend) {
+ add_node_error(irb->codegen, node, buf_sprintf("cannot break out of suspend block"));
+ return irb->codegen->invalid_instruction;
}
search_scope = search_scope->parent;
}
@@ -5729,7 +6368,7 @@ static IrInstruction *ir_gen_slice(IrBuilder *irb, Scope *scope, AstNode *node)
AstNode *start_node = slice_expr->start;
AstNode *end_node = slice_expr->end;
- IrInstruction *ptr_value = ir_gen_node_extra(irb, array_node, scope, LVAL_PTR);
+ IrInstruction *ptr_value = ir_gen_node_extra(irb, array_node, scope, LValPtr);
if (ptr_value == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
@@ -5763,11 +6402,11 @@ static IrInstruction *ir_gen_err_ok_or(IrBuilder *irb, Scope *parent_scope, AstN
add_node_error(irb->codegen, var_node, buf_sprintf("unused variable: '%s'", buf_ptr(var_name)));
return irb->codegen->invalid_instruction;
}
- return ir_gen_err_assert_ok(irb, parent_scope, node, op1_node, LVAL_NONE);
+ return ir_gen_err_assert_ok(irb, parent_scope, node, op1_node, LValNone);
}
- IrInstruction *err_union_ptr = ir_gen_node_extra(irb, op1_node, parent_scope, LVAL_PTR);
+ IrInstruction *err_union_ptr = ir_gen_node_extra(irb, op1_node, parent_scope, LValPtr);
if (err_union_ptr == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
@@ -5970,16 +6609,10 @@ static IrInstruction *ir_gen_err_set_decl(IrBuilder *irb, Scope *parent_scope, A
buf_init_from_buf(&err_set_type->name, type_name);
err_set_type->is_copyable = true;
err_set_type->data.error_set.err_count = err_count;
-
- if (err_count == 0) {
- err_set_type->zero_bits = true;
- err_set_type->di_type = irb->codegen->builtin_types.entry_void->di_type;
- } else {
- err_set_type->type_ref = irb->codegen->builtin_types.entry_global_error_set->type_ref;
- err_set_type->di_type = irb->codegen->builtin_types.entry_global_error_set->di_type;
- irb->codegen->error_di_types.append(&err_set_type->di_type);
- err_set_type->data.error_set.errors = allocate<ErrorTableEntry *>(err_count);
- }
+ err_set_type->type_ref = irb->codegen->builtin_types.entry_global_error_set->type_ref;
+ err_set_type->di_type = irb->codegen->builtin_types.entry_global_error_set->di_type;
+ irb->codegen->error_di_types.append(&err_set_type->di_type);
+ err_set_type->data.error_set.errors = allocate<ErrorTableEntry *>(err_count);
ErrorTableEntry **errors = allocate<ErrorTableEntry *>(irb->codegen->errors_by_index.length + err_count);
@@ -6071,30 +6704,150 @@ static IrInstruction *ir_gen_fn_proto(IrBuilder *irb, Scope *parent_scope, AstNo
async_allocator_type_value, is_var_args);
}
-static IrInstruction *ir_gen_cancel(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
+static IrInstruction *ir_gen_cancel_target(IrBuilder *irb, Scope *scope, AstNode *node,
+ IrInstruction *target_inst, bool cancel_non_suspended, bool cancel_awaited)
+{
+ IrBasicBlock *done_block = ir_create_basic_block(irb, scope, "CancelDone");
+ IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, scope, "NotCanceled");
+ IrBasicBlock *pre_return_block = ir_create_basic_block(irb, scope, "PreReturn");
+ IrBasicBlock *post_return_block = ir_create_basic_block(irb, scope, "PostReturn");
+ IrBasicBlock *do_cancel_block = ir_create_basic_block(irb, scope, "DoCancel");
+
+ IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
+ IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
+ IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node, false);
+ IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
+ IrInstruction *promise_T_type_val = ir_build_const_type(irb, scope, node,
+ get_promise_type(irb->codegen, irb->codegen->builtin_types.entry_void));
+ IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, scope, node, 0x7); // 0b111
+ IrInstruction *ptr_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000
+ IrInstruction *await_mask = ir_build_const_usize(irb, scope, node, 0x4); // 0b100
+ IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010
+
+ // TODO relies on Zig not re-ordering fields
+ IrInstruction *casted_target_inst = ir_build_ptr_cast(irb, scope, node, promise_T_type_val, target_inst);
+ IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, scope, node, casted_target_inst);
+ Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
+ IrInstruction *atomic_state_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
+ atomic_state_field_name);
+
+ // set the is_canceled bit
+ IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
+ usize_type_val, atomic_state_ptr, nullptr, is_canceled_mask, nullptr,
+ AtomicRmwOp_or, AtomicOrderSeqCst);
+
+ IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
+ IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_canceled_bool, done_block, not_canceled_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
+ IrInstruction *awaiter_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
+ IrInstruction *is_returned_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpEq, awaiter_addr, ptr_mask, false);
+ ir_build_cond_br(irb, scope, node, is_returned_bool, post_return_block, pre_return_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, post_return_block);
+ if (cancel_awaited) {
+ ir_build_br(irb, scope, node, do_cancel_block, is_comptime);
+ } else {
+ IrInstruction *is_awaited_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, await_mask, false);
+ IrInstruction *is_awaited_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_awaited_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_awaited_bool, done_block, do_cancel_block, is_comptime);
+ }
+
+ ir_set_cursor_at_end_and_append_block(irb, pre_return_block);
+ if (cancel_awaited) {
+ if (cancel_non_suspended) {
+ ir_build_br(irb, scope, node, do_cancel_block, is_comptime);
+ } else {
+ IrInstruction *is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
+ IrInstruction *is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_suspended_bool, do_cancel_block, done_block, is_comptime);
+ }
+ } else {
+ ir_build_br(irb, scope, node, done_block, is_comptime);
+ }
+
+ ir_set_cursor_at_end_and_append_block(irb, do_cancel_block);
+ ir_build_cancel(irb, scope, node, target_inst);
+ ir_build_br(irb, scope, node, done_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, done_block);
+ return ir_build_const_void(irb, scope, node);
+}
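ir_gen_cancel_target open-codes a state machine over an atomic word in the coroutine promise: bit 0 is is_canceled, bit 1 is is_suspended, bit 2 is the awaited flag, and the remaining bits hold the awaiter address, treated as "already returned" when they are all ones (the is_returned_bool comparison). A self-contained sketch of the same decision on a plain integer, mirroring the branch structure above (should_cancel is an invented name; this is an illustration, not the generated IR):

    #include <cassert>
    #include <cstdint>

    // Bit layout assumed from the constants used in ir_gen_cancel_target.
    constexpr uintptr_t kIsCanceled  = 0x1;             // 0b001
    constexpr uintptr_t kIsSuspended = 0x2;             // 0b010
    constexpr uintptr_t kIsAwaited   = 0x4;             // 0b100
    constexpr uintptr_t kPtrMask     = ~uintptr_t(0x7); // awaiter handle, 0b111...000

    // prev_state is the word read before the is_canceled bit was set atomically.
    // Returns true when the cancellation path should actually run.
    bool should_cancel(uintptr_t prev_state, bool cancel_non_suspended, bool cancel_awaited) {
        if (prev_state & kIsCanceled)
            return false;                       // someone already canceled it
        uintptr_t awaiter = prev_state & kPtrMask;
        if (awaiter == kPtrMask) {              // coroutine already returned
            if (cancel_awaited)
                return true;
            return (prev_state & kIsAwaited) == 0;
        }
        if (!cancel_awaited)                    // not yet returned
            return false;
        if (cancel_non_suspended)
            return true;
        return (prev_state & kIsSuspended) != 0;
    }

    int main() {
        assert(!should_cancel(kIsCanceled, false, true));  // already canceled: no-op
        assert(should_cancel(kPtrMask, false, false));     // returned, never awaited
        assert(should_cancel(kIsSuspended, false, true));  // suspended frame
    }

With the flags ir_gen_cancel passes below (cancel_non_suspended=false, cancel_awaited=true), a plain cancel runs the cancellation path for a frame that has already returned or is currently suspended, and is a no-op for one that is mid-execution or already canceled.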
+
+static IrInstruction *ir_gen_cancel(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypeCancel);
- IrInstruction *target_inst = ir_gen_node(irb, node->data.cancel_expr.expr, parent_scope);
+ IrInstruction *target_inst = ir_gen_node(irb, node->data.cancel_expr.expr, scope);
if (target_inst == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
- return ir_build_cancel(irb, parent_scope, node, target_inst);
+ return ir_gen_cancel_target(irb, scope, node, target_inst, false, true);
+}
+
+static IrInstruction *ir_gen_resume_target(IrBuilder *irb, Scope *scope, AstNode *node,
+ IrInstruction *target_inst)
+{
+ IrBasicBlock *done_block = ir_create_basic_block(irb, scope, "ResumeDone");
+ IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, scope, "NotCanceled");
+ IrBasicBlock *suspended_block = ir_create_basic_block(irb, scope, "IsSuspended");
+ IrBasicBlock *not_suspended_block = ir_create_basic_block(irb, scope, "IsNotSuspended");
+
+ IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
+ IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
+ IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010
+ IrInstruction *and_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, is_suspended_mask);
+ IrInstruction *is_comptime = ir_build_const_bool(irb, scope, node, false);
+ IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
+ IrInstruction *promise_T_type_val = ir_build_const_type(irb, scope, node,
+ get_promise_type(irb->codegen, irb->codegen->builtin_types.entry_void));
+
+ // TODO relies on Zig not re-ordering fields
+ IrInstruction *casted_target_inst = ir_build_ptr_cast(irb, scope, node, promise_T_type_val, target_inst);
+ IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, scope, node, casted_target_inst);
+ Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
+ IrInstruction *atomic_state_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
+ atomic_state_field_name);
+
+ // clear the is_suspended bit
+ IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
+ usize_type_val, atomic_state_ptr, nullptr, and_mask, nullptr,
+ AtomicRmwOp_and, AtomicOrderSeqCst);
+
+ IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
+ IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_canceled_bool, done_block, not_canceled_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
+ IrInstruction *is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
+ IrInstruction *is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_suspended_bool, suspended_block, not_suspended_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_suspended_block);
+ ir_build_unreachable(irb, scope, node);
+
+ ir_set_cursor_at_end_and_append_block(irb, suspended_block);
+ ir_build_coro_resume(irb, scope, node, target_inst);
+ ir_build_br(irb, scope, node, done_block, is_comptime);
+
+ ir_set_cursor_at_end_and_append_block(irb, done_block);
+ return ir_build_const_void(irb, scope, node);
}
-static IrInstruction *ir_gen_resume(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
+static IrInstruction *ir_gen_resume(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypeResume);
- IrInstruction *target_inst = ir_gen_node(irb, node->data.resume_expr.expr, parent_scope);
+ IrInstruction *target_inst = ir_gen_node(irb, node->data.resume_expr.expr, scope);
if (target_inst == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
- return ir_build_coro_resume(irb, parent_scope, node, target_inst);
+ return ir_gen_resume_target(irb, scope, node, target_inst);
}
-static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
+static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypeAwaitExpr);
- IrInstruction *target_inst = ir_gen_node(irb, node->data.await_expr.expr, parent_scope);
+ IrInstruction *target_inst = ir_gen_node(irb, node->data.await_expr.expr, scope);
if (target_inst == irb->codegen->invalid_instruction)
return irb->codegen->invalid_instruction;
@@ -6108,7 +6861,7 @@ static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *parent_scope, Ast
return irb->codegen->invalid_instruction;
}
- ScopeDeferExpr *scope_defer_expr = get_scope_defer_expr(parent_scope);
+ ScopeDeferExpr *scope_defer_expr = get_scope_defer_expr(scope);
if (scope_defer_expr) {
if (!scope_defer_expr->reported_err) {
add_node_error(irb->codegen, node, buf_sprintf("cannot await inside defer expression"));
@@ -6119,85 +6872,157 @@ static IrInstruction *ir_gen_await_expr(IrBuilder *irb, Scope *parent_scope, Ast
Scope *outer_scope = irb->exec->begin_scope;
- IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, parent_scope, node, target_inst);
+ IrInstruction *coro_promise_ptr = ir_build_coro_promise(irb, scope, node, target_inst);
Buf *result_ptr_field_name = buf_create_from_str(RESULT_PTR_FIELD_NAME);
- IrInstruction *result_ptr_field_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, result_ptr_field_name);
+ IrInstruction *result_ptr_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, result_ptr_field_name);
if (irb->codegen->have_err_ret_tracing) {
- IrInstruction *err_ret_trace_ptr = ir_build_error_return_trace(irb, parent_scope, node, IrInstructionErrorReturnTrace::NonNull);
+ IrInstruction *err_ret_trace_ptr = ir_build_error_return_trace(irb, scope, node, IrInstructionErrorReturnTrace::NonNull);
Buf *err_ret_trace_ptr_field_name = buf_create_from_str(ERR_RET_TRACE_PTR_FIELD_NAME);
- IrInstruction *err_ret_trace_ptr_field_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, err_ret_trace_ptr_field_name);
- ir_build_store_ptr(irb, parent_scope, node, err_ret_trace_ptr_field_ptr, err_ret_trace_ptr);
- }
-
- Buf *awaiter_handle_field_name = buf_create_from_str(AWAITER_HANDLE_FIELD_NAME);
- IrInstruction *awaiter_field_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr,
- awaiter_handle_field_name);
-
- IrInstruction *const_bool_false = ir_build_const_bool(irb, parent_scope, node, false);
- VariableTableEntry *result_var = ir_create_var(irb, node, parent_scope, nullptr,
+ IrInstruction *err_ret_trace_ptr_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, err_ret_trace_ptr_field_name);
+ ir_build_store_ptr(irb, scope, node, err_ret_trace_ptr_field_ptr, err_ret_trace_ptr);
+ }
+
+ IrBasicBlock *already_awaited_block = ir_create_basic_block(irb, scope, "AlreadyAwaited");
+ IrBasicBlock *not_awaited_block = ir_create_basic_block(irb, scope, "NotAwaited");
+ IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, scope, "NotCanceled");
+ IrBasicBlock *yes_suspend_block = ir_create_basic_block(irb, scope, "YesSuspend");
+ IrBasicBlock *no_suspend_block = ir_create_basic_block(irb, scope, "NoSuspend");
+ IrBasicBlock *merge_block = ir_create_basic_block(irb, scope, "MergeSuspend");
+ IrBasicBlock *cleanup_block = ir_create_basic_block(irb, scope, "SuspendCleanup");
+ IrBasicBlock *resume_block = ir_create_basic_block(irb, scope, "SuspendResume");
+ IrBasicBlock *cancel_target_block = ir_create_basic_block(irb, scope, "CancelTarget");
+ IrBasicBlock *do_cancel_block = ir_create_basic_block(irb, scope, "DoCancel");
+ IrBasicBlock *do_defers_block = ir_create_basic_block(irb, scope, "DoDefers");
+ IrBasicBlock *destroy_block = ir_create_basic_block(irb, scope, "DestroyBlock");
+ IrBasicBlock *my_suspended_block = ir_create_basic_block(irb, scope, "AlreadySuspended");
+ IrBasicBlock *my_not_suspended_block = ir_create_basic_block(irb, scope, "NotAlreadySuspended");
+ IrBasicBlock *do_suspend_block = ir_create_basic_block(irb, scope, "DoSuspend");
+
+ Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
+ IrInstruction *atomic_state_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
+ atomic_state_field_name);
+
+ IrInstruction *promise_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_promise);
+ IrInstruction *const_bool_false = ir_build_const_bool(irb, scope, node, false);
+ IrInstruction *undefined_value = ir_build_const_undefined(irb, scope, node);
+ IrInstruction *usize_type_val = ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_usize);
+ IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
+ IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, scope, node, 0x7); // 0b111
+ IrInstruction *ptr_mask = ir_build_un_op(irb, scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000
+ IrInstruction *await_mask = ir_build_const_usize(irb, scope, node, 0x4); // 0b100
+ IrInstruction *is_canceled_mask = ir_build_const_usize(irb, scope, node, 0x1); // 0b001
+ IrInstruction *is_suspended_mask = ir_build_const_usize(irb, scope, node, 0x2); // 0b010
+
+ VariableTableEntry *result_var = ir_create_var(irb, node, scope, nullptr,
false, false, true, const_bool_false);
- IrInstruction *undefined_value = ir_build_const_undefined(irb, parent_scope, node);
- IrInstruction *target_promise_type = ir_build_typeof(irb, parent_scope, node, target_inst);
- IrInstruction *promise_result_type = ir_build_promise_result_type(irb, parent_scope, node, target_promise_type);
- ir_build_await_bookkeeping(irb, parent_scope, node, promise_result_type);
- ir_build_var_decl(irb, parent_scope, node, result_var, promise_result_type, nullptr, undefined_value);
- IrInstruction *my_result_var_ptr = ir_build_var_ptr(irb, parent_scope, node, result_var, false, false);
- ir_build_store_ptr(irb, parent_scope, node, result_ptr_field_ptr, my_result_var_ptr);
- IrInstruction *save_token = ir_build_coro_save(irb, parent_scope, node, irb->exec->coro_handle);
- IrInstruction *promise_type_val = ir_build_const_type(irb, parent_scope, node,
- get_maybe_type(irb->codegen, irb->codegen->builtin_types.entry_promise));
- IrInstruction *maybe_await_handle = ir_build_atomic_rmw(irb, parent_scope, node,
- promise_type_val, awaiter_field_ptr, nullptr, irb->exec->coro_handle, nullptr,
- AtomicRmwOp_xchg, AtomicOrderSeqCst);
- IrInstruction *is_non_null = ir_build_test_nonnull(irb, parent_scope, node, maybe_await_handle);
- IrBasicBlock *yes_suspend_block = ir_create_basic_block(irb, parent_scope, "YesSuspend");
- IrBasicBlock *no_suspend_block = ir_create_basic_block(irb, parent_scope, "NoSuspend");
- IrBasicBlock *merge_block = ir_create_basic_block(irb, parent_scope, "MergeSuspend");
- ir_build_cond_br(irb, parent_scope, node, is_non_null, no_suspend_block, yes_suspend_block, const_bool_false);
+ IrInstruction *target_promise_type = ir_build_typeof(irb, scope, node, target_inst);
+ IrInstruction *promise_result_type = ir_build_promise_result_type(irb, scope, node, target_promise_type);
+ ir_build_await_bookkeeping(irb, scope, node, promise_result_type);
+ ir_build_var_decl(irb, scope, node, result_var, promise_result_type, nullptr, undefined_value);
+ IrInstruction *my_result_var_ptr = ir_build_var_ptr(irb, scope, node, result_var);
+ ir_build_store_ptr(irb, scope, node, result_ptr_field_ptr, my_result_var_ptr);
+ IrInstruction *save_token = ir_build_coro_save(irb, scope, node, irb->exec->coro_handle);
+
+ IrInstruction *coro_handle_addr = ir_build_ptr_to_int(irb, scope, node, irb->exec->coro_handle);
+ IrInstruction *mask_bits = ir_build_bin_op(irb, scope, node, IrBinOpBinOr, coro_handle_addr, await_mask, false);
+ IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
+ usize_type_val, atomic_state_ptr, nullptr, mask_bits, nullptr,
+ AtomicRmwOp_or, AtomicOrderSeqCst);
+
+ IrInstruction *is_awaited_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, await_mask, false);
+ IrInstruction *is_awaited_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_awaited_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_awaited_bool, already_awaited_block, not_awaited_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, already_awaited_block);
+ ir_build_unreachable(irb, scope, node);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_awaited_block);
+ IrInstruction *await_handle_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
+ IrInstruction *is_non_null = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, await_handle_addr, zero, false);
+ IrInstruction *is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
+ IrInstruction *is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
+ ir_build_cond_br(irb, scope, node, is_canceled_bool, cancel_target_block, not_canceled_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
+ ir_build_cond_br(irb, scope, node, is_non_null, no_suspend_block, yes_suspend_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, cancel_target_block);
+ ir_build_cancel(irb, scope, node, target_inst);
+ ir_mark_gen(ir_build_br(irb, scope, node, cleanup_block, const_bool_false));
ir_set_cursor_at_end_and_append_block(irb, no_suspend_block);
if (irb->codegen->have_err_ret_tracing) {
Buf *err_ret_trace_field_name = buf_create_from_str(ERR_RET_TRACE_FIELD_NAME);
- IrInstruction *src_err_ret_trace_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, err_ret_trace_field_name);
- IrInstruction *dest_err_ret_trace_ptr = ir_build_error_return_trace(irb, parent_scope, node, IrInstructionErrorReturnTrace::NonNull);
- ir_build_merge_err_ret_traces(irb, parent_scope, node, coro_promise_ptr, src_err_ret_trace_ptr, dest_err_ret_trace_ptr);
+ IrInstruction *src_err_ret_trace_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, err_ret_trace_field_name);
+ IrInstruction *dest_err_ret_trace_ptr = ir_build_error_return_trace(irb, scope, node, IrInstructionErrorReturnTrace::NonNull);
+ ir_build_merge_err_ret_traces(irb, scope, node, coro_promise_ptr, src_err_ret_trace_ptr, dest_err_ret_trace_ptr);
}
Buf *result_field_name = buf_create_from_str(RESULT_FIELD_NAME);
- IrInstruction *promise_result_ptr = ir_build_field_ptr(irb, parent_scope, node, coro_promise_ptr, result_field_name);
- IrInstruction *no_suspend_result = ir_build_load_ptr(irb, parent_scope, node, promise_result_ptr);
- ir_build_cancel(irb, parent_scope, node, target_inst);
- ir_build_br(irb, parent_scope, node, merge_block, const_bool_false);
+ IrInstruction *promise_result_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, result_field_name);
+ // If handle_is_ptr is true for the result type, this does not actually perform a load. But we need the
+ // value copied out, because we're about to destroy that memory. So we store it into our result variable.
+ IrInstruction *no_suspend_result = ir_build_load_ptr(irb, scope, node, promise_result_ptr);
+ ir_build_store_ptr(irb, scope, node, my_result_var_ptr, no_suspend_result);
+ ir_build_cancel(irb, scope, node, target_inst);
+ ir_build_br(irb, scope, node, merge_block, const_bool_false);
+
ir_set_cursor_at_end_and_append_block(irb, yes_suspend_block);
- IrInstruction *suspend_code = ir_build_coro_suspend(irb, parent_scope, node, save_token, const_bool_false);
- IrBasicBlock *cleanup_block = ir_create_basic_block(irb, parent_scope, "SuspendCleanup");
- IrBasicBlock *resume_block = ir_create_basic_block(irb, parent_scope, "SuspendResume");
+ IrInstruction *my_prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
+ usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, is_suspended_mask, nullptr,
+ AtomicRmwOp_or, AtomicOrderSeqCst);
+ IrInstruction *my_is_suspended_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, my_prev_atomic_value, is_suspended_mask, false);
+ IrInstruction *my_is_suspended_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, my_is_suspended_value, zero, false);
+ ir_build_cond_br(irb, scope, node, my_is_suspended_bool, my_suspended_block, my_not_suspended_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, my_suspended_block);
+ ir_build_unreachable(irb, scope, node);
+
+ ir_set_cursor_at_end_and_append_block(irb, my_not_suspended_block);
+ IrInstruction *my_is_canceled_value = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, my_prev_atomic_value, is_canceled_mask, false);
+ IrInstruction *my_is_canceled_bool = ir_build_bin_op(irb, scope, node, IrBinOpCmpNotEq, my_is_canceled_value, zero, false);
+ ir_build_cond_br(irb, scope, node, my_is_canceled_bool, cleanup_block, do_suspend_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, do_suspend_block);
+ IrInstruction *suspend_code = ir_build_coro_suspend(irb, scope, node, save_token, const_bool_false);
IrInstructionSwitchBrCase *cases = allocate<IrInstructionSwitchBrCase>(2);
- cases[0].value = ir_build_const_u8(irb, parent_scope, node, 0);
+ cases[0].value = ir_build_const_u8(irb, scope, node, 0);
cases[0].block = resume_block;
- cases[1].value = ir_build_const_u8(irb, parent_scope, node, 1);
- cases[1].block = cleanup_block;
- ir_build_switch_br(irb, parent_scope, node, suspend_code, irb->exec->coro_suspend_block,
- 2, cases, const_bool_false);
+ cases[1].value = ir_build_const_u8(irb, scope, node, 1);
+ cases[1].block = destroy_block;
+ ir_build_switch_br(irb, scope, node, suspend_code, irb->exec->coro_suspend_block,
+ 2, cases, const_bool_false, nullptr);
+
+ ir_set_cursor_at_end_and_append_block(irb, destroy_block);
+ ir_gen_cancel_target(irb, scope, node, target_inst, false, true);
+ ir_mark_gen(ir_build_br(irb, scope, node, cleanup_block, const_bool_false));
ir_set_cursor_at_end_and_append_block(irb, cleanup_block);
- ir_gen_defers_for_block(irb, parent_scope, outer_scope, true);
- ir_mark_gen(ir_build_br(irb, parent_scope, node, irb->exec->coro_final_cleanup_block, const_bool_false));
+ IrInstruction *my_mask_bits = ir_build_bin_op(irb, scope, node, IrBinOpBinOr, ptr_mask, is_canceled_mask, false);
+ IrInstruction *b_my_prev_atomic_value = ir_build_atomic_rmw(irb, scope, node,
+ usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, my_mask_bits, nullptr,
+ AtomicRmwOp_or, AtomicOrderSeqCst);
+ IrInstruction *my_await_handle_addr = ir_build_bin_op(irb, scope, node, IrBinOpBinAnd, b_my_prev_atomic_value, ptr_mask, false);
+ IrInstruction *dont_have_my_await_handle = ir_build_bin_op(irb, scope, node, IrBinOpCmpEq, my_await_handle_addr, zero, false);
+ IrInstruction *dont_destroy_ourselves = ir_build_bin_op(irb, scope, node, IrBinOpBoolAnd, dont_have_my_await_handle, is_canceled_bool, false);
+ ir_build_cond_br(irb, scope, node, dont_have_my_await_handle, do_defers_block, do_cancel_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, do_cancel_block);
+ IrInstruction *my_await_handle = ir_build_int_to_ptr(irb, scope, node, promise_type_val, my_await_handle_addr);
+ ir_gen_cancel_target(irb, scope, node, my_await_handle, true, false);
+ ir_mark_gen(ir_build_br(irb, scope, node, do_defers_block, const_bool_false));
+
+ ir_set_cursor_at_end_and_append_block(irb, do_defers_block);
+ ir_gen_defers_for_block(irb, scope, outer_scope, true);
+ ir_mark_gen(ir_build_cond_br(irb, scope, node, dont_destroy_ourselves, irb->exec->coro_early_final, irb->exec->coro_final_cleanup_block, const_bool_false));
ir_set_cursor_at_end_and_append_block(irb, resume_block);
- IrInstruction *yes_suspend_result = ir_build_load_ptr(irb, parent_scope, node, my_result_var_ptr);
- ir_build_br(irb, parent_scope, node, merge_block, const_bool_false);
+ ir_build_br(irb, scope, node, merge_block, const_bool_false);
ir_set_cursor_at_end_and_append_block(irb, merge_block);
- IrBasicBlock **incoming_blocks = allocate<IrBasicBlock *>(2);
- IrInstruction **incoming_values = allocate<IrInstruction *>(2);
- incoming_blocks[0] = resume_block;
- incoming_values[0] = yes_suspend_result;
- incoming_blocks[1] = no_suspend_block;
- incoming_values[1] = no_suspend_result;
- return ir_build_phi(irb, parent_scope, node, 2, incoming_blocks, incoming_values);
+ return ir_build_load_ptr(irb, scope, node, my_result_var_ptr);
}
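
The await lowering above packs the awaiter's coroutine handle together with three status flags into the single usize atomic state field, using the 0x7 / ~0x7 split for the flag bits versus the pointer bits. Below is a minimal standalone sketch of that packing, with std::atomic standing in for the IR-level atomic_rmw; the names and the main() here are illustrative, not the compiler's own code.

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Flag bits mirroring the constants built above: 0b001 canceled,
    // 0b010 suspended, 0b100 awaited; the remaining bits hold the
    // awaiter handle, whose low three bits are assumed to be zero.
    constexpr uintptr_t kCanceled  = 0x1;
    constexpr uintptr_t kSuspended = 0x2;
    constexpr uintptr_t kAwaited   = 0x4;
    constexpr uintptr_t kPtrMask   = ~uintptr_t(0x7);

    // await: publish our handle and the awaited bit in one atomic OR,
    // then decide what to do from the previous value.
    uintptr_t register_awaiter(std::atomic<uintptr_t> &state, void *handle) {
        uintptr_t addr = reinterpret_cast<uintptr_t>(handle);
        assert((addr & ~kPtrMask) == 0);
        uintptr_t prev = state.fetch_or(addr | kAwaited, std::memory_order_seq_cst);
        // (prev & kAwaited)  -> double await; the IR builds unreachable for this
        // (prev & kCanceled) -> cancel the target instead of suspending
        // (prev & kPtrMask)  -> pointer bits already set; take the no-suspend path
        return prev;
    }

    // resume: clear the suspended bit and inspect what was there before,
    // matching the atomic_rmw(and) at the top of ir_gen_resume_target.
    bool clear_suspended(std::atomic<uintptr_t> &state) {
        uintptr_t prev = state.fetch_and(~kSuspended, std::memory_order_seq_cst);
        return (prev & kSuspended) != 0; // false -> resuming a non-suspended coroutine
    }

    int main() {
        std::atomic<uintptr_t> state{0};
        alignas(8) static char fake_handle[1];
        uintptr_t prev = register_awaiter(state, fake_handle);
        return (prev == 0 && clear_suspended(state) == false) ? 0 : 1;
    }
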
static IrInstruction *ir_gen_suspend(IrBuilder *irb, Scope *parent_scope, AstNode *node) {
@@ -6216,54 +7041,105 @@ static IrInstruction *ir_gen_suspend(IrBuilder *irb, Scope *parent_scope, AstNod
ScopeDeferExpr *scope_defer_expr = get_scope_defer_expr(parent_scope);
if (scope_defer_expr) {
if (!scope_defer_expr->reported_err) {
- add_node_error(irb->codegen, node, buf_sprintf("cannot suspend inside defer expression"));
+ ErrorMsg *msg = add_node_error(irb->codegen, node, buf_sprintf("cannot suspend inside defer expression"));
+ add_error_note(irb->codegen, msg, scope_defer_expr->base.source_node, buf_sprintf("defer here"));
scope_defer_expr->reported_err = true;
}
return irb->codegen->invalid_instruction;
}
+ ScopeSuspend *existing_suspend_scope = get_scope_suspend(parent_scope);
+ if (existing_suspend_scope) {
+ if (!existing_suspend_scope->reported_err) {
+ ErrorMsg *msg = add_node_error(irb->codegen, node, buf_sprintf("cannot suspend inside suspend block"));
+ add_error_note(irb->codegen, msg, existing_suspend_scope->base.source_node, buf_sprintf("other suspend block here"));
+ existing_suspend_scope->reported_err = true;
+ }
+ return irb->codegen->invalid_instruction;
+ }
Scope *outer_scope = irb->exec->begin_scope;
-
- IrInstruction *suspend_code;
+ IrBasicBlock *cleanup_block = ir_create_basic_block(irb, parent_scope, "SuspendCleanup");
+ IrBasicBlock *resume_block = ir_create_basic_block(irb, parent_scope, "SuspendResume");
+ IrBasicBlock *suspended_block = ir_create_basic_block(irb, parent_scope, "AlreadySuspended");
+ IrBasicBlock *canceled_block = ir_create_basic_block(irb, parent_scope, "IsCanceled");
+ IrBasicBlock *not_canceled_block = ir_create_basic_block(irb, parent_scope, "NotCanceled");
+ IrBasicBlock *not_suspended_block = ir_create_basic_block(irb, parent_scope, "NotAlreadySuspended");
+ IrBasicBlock *cancel_awaiter_block = ir_create_basic_block(irb, parent_scope, "CancelAwaiter");
+
+ IrInstruction *promise_type_val = ir_build_const_type(irb, parent_scope, node, irb->codegen->builtin_types.entry_promise);
+ IrInstruction *const_bool_true = ir_build_const_bool(irb, parent_scope, node, true);
IrInstruction *const_bool_false = ir_build_const_bool(irb, parent_scope, node, false);
+ IrInstruction *usize_type_val = ir_build_const_type(irb, parent_scope, node, irb->codegen->builtin_types.entry_usize);
+ IrInstruction *is_canceled_mask = ir_build_const_usize(irb, parent_scope, node, 0x1); // 0b001
+ IrInstruction *is_suspended_mask = ir_build_const_usize(irb, parent_scope, node, 0x2); // 0b010
+ IrInstruction *zero = ir_build_const_usize(irb, parent_scope, node, 0);
+ IrInstruction *inverted_ptr_mask = ir_build_const_usize(irb, parent_scope, node, 0x7); // 0b111
+ IrInstruction *ptr_mask = ir_build_un_op(irb, parent_scope, node, IrUnOpBinNot, inverted_ptr_mask); // 0b111...000
+
+ IrInstruction *prev_atomic_value = ir_build_atomic_rmw(irb, parent_scope, node,
+ usize_type_val, irb->exec->atomic_state_field_ptr, nullptr, is_suspended_mask, nullptr,
+ AtomicRmwOp_or, AtomicOrderSeqCst);
+
+ IrInstruction *is_canceled_value = ir_build_bin_op(irb, parent_scope, node, IrBinOpBinAnd, prev_atomic_value, is_canceled_mask, false);
+ IrInstruction *is_canceled_bool = ir_build_bin_op(irb, parent_scope, node, IrBinOpCmpNotEq, is_canceled_value, zero, false);
+ ir_build_cond_br(irb, parent_scope, node, is_canceled_bool, canceled_block, not_canceled_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, canceled_block);
+ IrInstruction *await_handle_addr = ir_build_bin_op(irb, parent_scope, node, IrBinOpBinAnd, prev_atomic_value, ptr_mask, false);
+ IrInstruction *have_await_handle = ir_build_bin_op(irb, parent_scope, node, IrBinOpCmpNotEq, await_handle_addr, zero, false);
+ IrBasicBlock *post_canceled_block = irb->current_basic_block;
+ ir_build_cond_br(irb, parent_scope, node, have_await_handle, cancel_awaiter_block, cleanup_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, cancel_awaiter_block);
+ IrInstruction *await_handle = ir_build_int_to_ptr(irb, parent_scope, node, promise_type_val, await_handle_addr);
+ ir_gen_cancel_target(irb, parent_scope, node, await_handle, true, false);
+ IrBasicBlock *post_cancel_awaiter_block = irb->current_basic_block;
+ ir_build_br(irb, parent_scope, node, cleanup_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_canceled_block);
+ IrInstruction *is_suspended_value = ir_build_bin_op(irb, parent_scope, node, IrBinOpBinAnd, prev_atomic_value, is_suspended_mask, false);
+ IrInstruction *is_suspended_bool = ir_build_bin_op(irb, parent_scope, node, IrBinOpCmpNotEq, is_suspended_value, zero, false);
+ ir_build_cond_br(irb, parent_scope, node, is_suspended_bool, suspended_block, not_suspended_block, const_bool_false);
+
+ ir_set_cursor_at_end_and_append_block(irb, suspended_block);
+ ir_build_unreachable(irb, parent_scope, node);
+
+ ir_set_cursor_at_end_and_append_block(irb, not_suspended_block);
+ IrInstruction *suspend_code;
if (node->data.suspend.block == nullptr) {
suspend_code = ir_build_coro_suspend(irb, parent_scope, node, nullptr, const_bool_false);
} else {
- assert(node->data.suspend.promise_symbol != nullptr);
- assert(node->data.suspend.promise_symbol->type == NodeTypeSymbol);
- Buf *promise_symbol_name = node->data.suspend.promise_symbol->data.symbol_expr.symbol;
Scope *child_scope;
- if (!buf_eql_str(promise_symbol_name, "_")) {
- VariableTableEntry *promise_var = ir_create_var(irb, node, parent_scope, promise_symbol_name,
- true, true, false, const_bool_false);
- ir_build_var_decl(irb, parent_scope, node, promise_var, nullptr, nullptr, irb->exec->coro_handle);
- child_scope = promise_var->child_scope;
- } else {
- child_scope = parent_scope;
- }
+ ScopeSuspend *suspend_scope = create_suspend_scope(node, parent_scope);
+ suspend_scope->resume_block = resume_block;
+ child_scope = &suspend_scope->base;
IrInstruction *save_token = ir_build_coro_save(irb, child_scope, node, irb->exec->coro_handle);
ir_gen_node(irb, node->data.suspend.block, child_scope);
- suspend_code = ir_build_coro_suspend(irb, parent_scope, node, save_token, const_bool_false);
+ suspend_code = ir_mark_gen(ir_build_coro_suspend(irb, parent_scope, node, save_token, const_bool_false));
}
- IrBasicBlock *cleanup_block = ir_create_basic_block(irb, parent_scope, "SuspendCleanup");
- IrBasicBlock *resume_block = ir_create_basic_block(irb, parent_scope, "SuspendResume");
-
IrInstructionSwitchBrCase *cases = allocate<IrInstructionSwitchBrCase>(2);
- cases[0].value = ir_build_const_u8(irb, parent_scope, node, 0);
+ cases[0].value = ir_mark_gen(ir_build_const_u8(irb, parent_scope, node, 0));
cases[0].block = resume_block;
- cases[1].value = ir_build_const_u8(irb, parent_scope, node, 1);
- cases[1].block = cleanup_block;
- ir_build_switch_br(irb, parent_scope, node, suspend_code, irb->exec->coro_suspend_block,
- 2, cases, const_bool_false);
+ cases[1].value = ir_mark_gen(ir_build_const_u8(irb, parent_scope, node, 1));
+ cases[1].block = canceled_block;
+ ir_mark_gen(ir_build_switch_br(irb, parent_scope, node, suspend_code, irb->exec->coro_suspend_block,
+ 2, cases, const_bool_false, nullptr));
ir_set_cursor_at_end_and_append_block(irb, cleanup_block);
+ IrBasicBlock **incoming_blocks = allocate<IrBasicBlock *>(2);
+ IrInstruction **incoming_values = allocate<IrInstruction *>(2);
+ incoming_blocks[0] = post_canceled_block;
+ incoming_values[0] = const_bool_true;
+ incoming_blocks[1] = post_cancel_awaiter_block;
+ incoming_values[1] = const_bool_false;
+ IrInstruction *destroy_ourselves = ir_build_phi(irb, parent_scope, node, 2, incoming_blocks, incoming_values);
ir_gen_defers_for_block(irb, parent_scope, outer_scope, true);
- ir_mark_gen(ir_build_br(irb, parent_scope, node, irb->exec->coro_final_cleanup_block, const_bool_false));
+ ir_mark_gen(ir_build_cond_br(irb, parent_scope, node, destroy_ourselves, irb->exec->coro_final_cleanup_block, irb->exec->coro_early_final, const_bool_false));
ir_set_cursor_at_end_and_append_block(irb, resume_block);
- return ir_build_const_void(irb, parent_scope, node);
+ return ir_mark_gen(ir_build_const_void(irb, parent_scope, node));
}
static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scope,
@@ -6296,13 +7172,11 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
case NodeTypeSymbol:
return ir_gen_symbol(irb, scope, node, lval);
case NodeTypeFnCallExpr:
- return ir_lval_wrap(irb, scope, ir_gen_fn_call(irb, scope, node), lval);
+ return ir_gen_fn_call(irb, scope, node, lval);
case NodeTypeIfBoolExpr:
return ir_lval_wrap(irb, scope, ir_gen_if_bool_expr(irb, scope, node), lval);
case NodeTypePrefixOpExpr:
return ir_gen_prefix_op_expr(irb, scope, node, lval);
- case NodeTypeAddrOfExpr:
- return ir_lval_wrap(irb, scope, ir_gen_address_of(irb, scope, node), lval);
case NodeTypeContainerInitExpr:
return ir_lval_wrap(irb, scope, ir_gen_container_init_expr(irb, scope, node), lval);
case NodeTypeVariableDeclaration:
@@ -6316,13 +7190,44 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
case NodeTypeReturnExpr:
return ir_gen_return(irb, scope, node, lval);
case NodeTypeFieldAccessExpr:
- return ir_gen_field_access(irb, scope, node, lval);
+ {
+ IrInstruction *ptr_instruction = ir_gen_field_access(irb, scope, node);
+ if (ptr_instruction == irb->codegen->invalid_instruction)
+ return ptr_instruction;
+ if (lval == LValPtr)
+ return ptr_instruction;
+
+ return ir_build_load_ptr(irb, scope, node, ptr_instruction);
+ }
+ case NodeTypePtrDeref: {
+ AstNode *expr_node = node->data.ptr_deref_expr.target;
+ IrInstruction *value = ir_gen_node_extra(irb, expr_node, scope, lval);
+ if (value == irb->codegen->invalid_instruction)
+ return value;
+
+ return ir_build_un_op(irb, scope, node, IrUnOpDereference, value);
+ }
+ case NodeTypeUnwrapOptional: {
+ AstNode *expr_node = node->data.unwrap_optional.expr;
+
+ IrInstruction *maybe_ptr = ir_gen_node_extra(irb, expr_node, scope, LValPtr);
+ if (maybe_ptr == irb->codegen->invalid_instruction)
+ return irb->codegen->invalid_instruction;
+
+ IrInstruction *unwrapped_ptr = ir_build_unwrap_maybe(irb, scope, node, maybe_ptr, true);
+ if (lval == LValPtr)
+ return unwrapped_ptr;
+
+ return ir_build_load_ptr(irb, scope, node, unwrapped_ptr);
+ }
case NodeTypeThisLiteral:
return ir_lval_wrap(irb, scope, ir_gen_this_literal(irb, scope, node), lval);
case NodeTypeBoolLiteral:
return ir_lval_wrap(irb, scope, ir_gen_bool_literal(irb, scope, node), lval);
case NodeTypeArrayType:
return ir_lval_wrap(irb, scope, ir_gen_array_type(irb, scope, node), lval);
+ case NodeTypePointerType:
+ return ir_lval_wrap(irb, scope, ir_gen_pointer_type(irb, scope, node), lval);
case NodeTypePromiseType:
return ir_lval_wrap(irb, scope, ir_gen_promise_type(irb, scope, node), lval);
case NodeTypeStringLiteral:
@@ -6380,7 +7285,7 @@ static IrInstruction *ir_gen_node_extra(IrBuilder *irb, AstNode *node, Scope *sc
}
static IrInstruction *ir_gen_node(IrBuilder *irb, AstNode *node, Scope *scope) {
- return ir_gen_node_extra(irb, node, scope, LVAL_NONE);
+ return ir_gen_node_extra(irb, node, scope, LValNone);
}
static void invalidate_exec(IrExecutable *exec) {
@@ -6434,15 +7339,14 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
IrInstruction *coro_frame_type_value = ir_build_const_type(irb, coro_scope, node, coro_frame_type);
// TODO mark this var decl as "no safety" e.g. disable initializing the undef value to 0xaa
ir_build_var_decl(irb, coro_scope, node, promise_var, coro_frame_type_value, nullptr, undef);
- coro_promise_ptr = ir_build_var_ptr(irb, coro_scope, node, promise_var, false, false);
+ coro_promise_ptr = ir_build_var_ptr(irb, coro_scope, node, promise_var);
VariableTableEntry *await_handle_var = ir_create_var(irb, node, coro_scope, nullptr, false, false, true, const_bool_false);
IrInstruction *null_value = ir_build_const_null(irb, coro_scope, node);
IrInstruction *await_handle_type_val = ir_build_const_type(irb, coro_scope, node,
- get_maybe_type(irb->codegen, irb->codegen->builtin_types.entry_promise));
+ get_optional_type(irb->codegen, irb->codegen->builtin_types.entry_promise));
ir_build_var_decl(irb, coro_scope, node, await_handle_var, await_handle_type_val, nullptr, null_value);
- irb->exec->await_handle_var_ptr = ir_build_var_ptr(irb, coro_scope, node,
- await_handle_var, false, false);
+ irb->exec->await_handle_var_ptr = ir_build_var_ptr(irb, coro_scope, node, await_handle_var);
u8_ptr_type = ir_build_const_type(irb, coro_scope, node,
get_pointer_to_type(irb->codegen, irb->codegen->builtin_types.entry_u8, false));
@@ -6473,10 +7377,11 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
IrInstruction *coro_mem_ptr = ir_build_ptr_cast(irb, coro_scope, node, u8_ptr_type, maybe_coro_mem_ptr);
irb->exec->coro_handle = ir_build_coro_begin(irb, coro_scope, node, coro_id, coro_mem_ptr);
- Buf *awaiter_handle_field_name = buf_create_from_str(AWAITER_HANDLE_FIELD_NAME);
- irb->exec->coro_awaiter_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
- awaiter_handle_field_name);
- ir_build_store_ptr(irb, scope, node, irb->exec->coro_awaiter_field_ptr, null_value);
+ Buf *atomic_state_field_name = buf_create_from_str(ATOMIC_STATE_FIELD_NAME);
+ irb->exec->atomic_state_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr,
+ atomic_state_field_name);
+ IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
+ ir_build_store_ptr(irb, scope, node, irb->exec->atomic_state_field_ptr, zero);
Buf *result_field_name = buf_create_from_str(RESULT_FIELD_NAME);
irb->exec->coro_result_field_ptr = ir_build_field_ptr(irb, scope, node, coro_promise_ptr, result_field_name);
result_ptr_field_name = buf_create_from_str(RESULT_PTR_FIELD_NAME);
@@ -6494,7 +7399,6 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
// coordinate with builtin.zig
Buf *index_name = buf_create_from_str("index");
IrInstruction *index_ptr = ir_build_field_ptr(irb, scope, node, err_ret_trace_ptr, index_name);
- IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
ir_build_store_ptr(irb, scope, node, index_ptr, zero);
Buf *instruction_addresses_name = buf_create_from_str("instruction_addresses");
@@ -6511,7 +7415,7 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
irb->exec->coro_final_cleanup_block = ir_create_basic_block(irb, scope, "FinalCleanup");
}
- IrInstruction *result = ir_gen_node_extra(irb, node, scope, LVAL_NONE);
+ IrInstruction *result = ir_gen_node_extra(irb, node, scope, LValNone);
assert(result);
if (irb->exec->invalid)
return false;
@@ -6533,7 +7437,7 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
cases[0].block = invalid_resume_block;
cases[1].value = ir_build_const_u8(irb, scope, node, 1);
cases[1].block = irb->exec->coro_final_cleanup_block;
- ir_build_switch_br(irb, scope, node, suspend_code, irb->exec->coro_suspend_block, 2, cases, const_bool_false);
+ ir_build_switch_br(irb, scope, node, suspend_code, irb->exec->coro_suspend_block, 2, cases, const_bool_false, nullptr);
ir_set_cursor_at_end_and_append_block(irb, irb->exec->coro_suspend_block);
ir_build_coro_end(irb, scope, node);
@@ -6544,9 +7448,13 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
ir_set_cursor_at_end_and_append_block(irb, irb->exec->coro_normal_final);
if (type_has_bits(return_type)) {
+ IrInstruction *u8_ptr_type_unknown_len = ir_build_const_type(irb, scope, node,
+ get_pointer_to_type_extra(irb->codegen, irb->codegen->builtin_types.entry_u8,
+ false, false, PtrLenUnknown, get_abi_alignment(irb->codegen, irb->codegen->builtin_types.entry_u8),
+ 0, 0));
IrInstruction *result_ptr = ir_build_load_ptr(irb, scope, node, irb->exec->coro_result_ptr_field_ptr);
- IrInstruction *result_ptr_as_u8_ptr = ir_build_ptr_cast(irb, scope, node, u8_ptr_type, result_ptr);
- IrInstruction *return_value_ptr_as_u8_ptr = ir_build_ptr_cast(irb, scope, node, u8_ptr_type,
+ IrInstruction *result_ptr_as_u8_ptr = ir_build_ptr_cast(irb, scope, node, u8_ptr_type_unknown_len, result_ptr);
+ IrInstruction *return_value_ptr_as_u8_ptr = ir_build_ptr_cast(irb, scope, node, u8_ptr_type_unknown_len,
irb->exec->coro_result_field_ptr);
IrInstruction *return_type_inst = ir_build_const_type(irb, scope, node,
fn_entry->type_entry->data.fn.fn_type_id.return_type);
@@ -6559,6 +7467,12 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
IrInstruction *dest_err_ret_trace_ptr = ir_build_load_ptr(irb, scope, node, err_ret_trace_ptr_field_ptr);
ir_build_merge_err_ret_traces(irb, scope, node, coro_promise_ptr, err_ret_trace_ptr, dest_err_ret_trace_ptr);
}
+ // Before we destroy the coroutine frame, we need to load the target promise into
+ // a register or local variable which does not get spilled into the frame,
+ // otherwise llvm tries to access memory inside the destroyed frame.
+ IrInstruction *unwrapped_await_handle_ptr = ir_build_unwrap_maybe(irb, scope, node,
+ irb->exec->await_handle_var_ptr, false);
+ IrInstruction *await_handle_in_block = ir_build_load_ptr(irb, scope, node, unwrapped_await_handle_ptr);
ir_build_br(irb, scope, node, check_free_block, const_bool_false);
ir_set_cursor_at_end_and_append_block(irb, irb->exec->coro_final_cleanup_block);
@@ -6573,6 +7487,14 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
incoming_values[1] = const_bool_true;
IrInstruction *resume_awaiter = ir_build_phi(irb, scope, node, 2, incoming_blocks, incoming_values);
+ IrBasicBlock **merge_incoming_blocks = allocate<IrBasicBlock *>(2);
+ IrInstruction **merge_incoming_values = allocate<IrInstruction *>(2);
+ merge_incoming_blocks[0] = irb->exec->coro_final_cleanup_block;
+ merge_incoming_values[0] = ir_build_const_undefined(irb, scope, node);
+ merge_incoming_blocks[1] = irb->exec->coro_normal_final;
+ merge_incoming_values[1] = await_handle_in_block;
+ IrInstruction *awaiter_handle = ir_build_phi(irb, scope, node, 2, merge_incoming_blocks, merge_incoming_values);
+
Buf *free_field_name = buf_create_from_str(ASYNC_FREE_FIELD_NAME);
IrInstruction *implicit_allocator_ptr = ir_build_get_implicit_allocator(irb, scope, node,
ImplicitAllocatorIdLocalVar);
@@ -6580,25 +7502,26 @@ bool ir_gen(CodeGen *codegen, AstNode *node, Scope *scope, IrExecutable *ir_exec
IrInstruction *free_fn = ir_build_load_ptr(irb, scope, node, free_fn_ptr);
IrInstruction *zero = ir_build_const_usize(irb, scope, node, 0);
IrInstruction *coro_mem_ptr_maybe = ir_build_coro_free(irb, scope, node, coro_id, irb->exec->coro_handle);
- IrInstruction *coro_mem_ptr = ir_build_ptr_cast(irb, scope, node, u8_ptr_type, coro_mem_ptr_maybe);
+ IrInstruction *u8_ptr_type_unknown_len = ir_build_const_type(irb, scope, node,
+ get_pointer_to_type_extra(irb->codegen, irb->codegen->builtin_types.entry_u8,
+ false, false, PtrLenUnknown, get_abi_alignment(irb->codegen, irb->codegen->builtin_types.entry_u8),
+ 0, 0));
+ IrInstruction *coro_mem_ptr = ir_build_ptr_cast(irb, scope, node, u8_ptr_type_unknown_len, coro_mem_ptr_maybe);
IrInstruction *coro_mem_ptr_ref = ir_build_ref(irb, scope, node, coro_mem_ptr, true, false);
- IrInstruction *coro_size_ptr = ir_build_var_ptr(irb, scope, node, coro_size_var, true, false);
+ IrInstruction *coro_size_ptr = ir_build_var_ptr(irb, scope, node, coro_size_var);
IrInstruction *coro_size = ir_build_load_ptr(irb, scope, node, coro_size_ptr);
IrInstruction *mem_slice = ir_build_slice(irb, scope, node, coro_mem_ptr_ref, zero, coro_size, false);
size_t arg_count = 2;
IrInstruction **args = allocate<IrInstruction *>(arg_count);
args[0] = implicit_allocator_ptr; // self
args[1] = mem_slice; // old_mem
- ir_build_call(irb, scope, node, nullptr, free_fn, arg_count, args, false, FnInlineAuto, false, nullptr);
+ ir_build_call(irb, scope, node, nullptr, free_fn, arg_count, args, false, FnInlineAuto, false, nullptr, nullptr);
IrBasicBlock *resume_block = ir_create_basic_block(irb, scope, "Resume");
ir_build_cond_br(irb, scope, node, resume_awaiter, resume_block, irb->exec->coro_suspend_block, const_bool_false);
ir_set_cursor_at_end_and_append_block(irb, resume_block);
- IrInstruction *unwrapped_await_handle_ptr = ir_build_unwrap_maybe(irb, scope, node,
- irb->exec->await_handle_var_ptr, false);
- IrInstruction *awaiter_handle = ir_build_load_ptr(irb, scope, node, unwrapped_await_handle_ptr);
- ir_build_coro_resume(irb, scope, node, awaiter_handle);
+ ir_gen_resume_target(irb, scope, node, awaiter_handle);
ir_build_br(irb, scope, node, irb->exec->coro_suspend_block, const_bool_false);
}
@@ -6671,17 +7594,22 @@ static bool ir_emit_global_runtime_side_effect(IrAnalyze *ira, IrInstruction *so
}
static bool const_val_fits_in_num_lit(ConstExprValue *const_val, TypeTableEntry *num_lit_type) {
- return ((num_lit_type->id == TypeTableEntryIdNumLitFloat &&
- (const_val->type->id == TypeTableEntryIdFloat || const_val->type->id == TypeTableEntryIdNumLitFloat)) ||
- (num_lit_type->id == TypeTableEntryIdNumLitInt &&
- (const_val->type->id == TypeTableEntryIdInt || const_val->type->id == TypeTableEntryIdNumLitInt)));
+ return ((num_lit_type->id == TypeTableEntryIdComptimeFloat &&
+ (const_val->type->id == TypeTableEntryIdFloat || const_val->type->id == TypeTableEntryIdComptimeFloat)) ||
+ (num_lit_type->id == TypeTableEntryIdComptimeInt &&
+ (const_val->type->id == TypeTableEntryIdInt || const_val->type->id == TypeTableEntryIdComptimeInt)));
}
static bool float_has_fraction(ConstExprValue *const_val) {
- if (const_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (const_val->type->id == TypeTableEntryIdComptimeFloat) {
return bigfloat_has_fraction(&const_val->data.x_bigfloat);
} else if (const_val->type->id == TypeTableEntryIdFloat) {
switch (const_val->type->data.floating.bit_count) {
+ case 16:
+ {
+ float16_t floored = f16_roundToInt(const_val->data.x_f16, softfloat_round_minMag, false);
+ return !f16_eq(floored, const_val->data.x_f16);
+ }
case 32:
return floorf(const_val->data.x_f32) != const_val->data.x_f32;
case 64:
@@ -6701,10 +7629,13 @@ static bool float_has_fraction(ConstExprValue *const_val) {
}
static void float_append_buf(Buf *buf, ConstExprValue *const_val) {
- if (const_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (const_val->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_append_buf(buf, &const_val->data.x_bigfloat);
} else if (const_val->type->id == TypeTableEntryIdFloat) {
switch (const_val->type->data.floating.bit_count) {
+ case 16:
+ buf_appendf(buf, "%f", zig_f16_to_double(const_val->data.x_f16));
+ break;
case 32:
buf_appendf(buf, "%f", const_val->data.x_f32);
break;
@@ -6736,10 +7667,21 @@ static void float_append_buf(Buf *buf, ConstExprValue *const_val) {
}
static void float_init_bigint(BigInt *bigint, ConstExprValue *const_val) {
- if (const_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (const_val->type->id == TypeTableEntryIdComptimeFloat) {
bigint_init_bigfloat(bigint, &const_val->data.x_bigfloat);
} else if (const_val->type->id == TypeTableEntryIdFloat) {
switch (const_val->type->data.floating.bit_count) {
+ case 16:
+ {
+ double x = zig_f16_to_double(const_val->data.x_f16);
+ if (x >= 0) {
+ bigint_init_unsigned(bigint, (uint64_t)x);
+ } else {
+ bigint_init_unsigned(bigint, (uint64_t)-x);
+ bigint->is_negative = true;
+ }
+ break;
+ }
case 32:
if (const_val->data.x_f32 >= 0) {
bigint_init_unsigned(bigint, (uint64_t)(const_val->data.x_f32));
@@ -6772,10 +7714,13 @@ static void float_init_bigint(BigInt *bigint, ConstExprValue *const_val) {
}
static void float_init_bigfloat(ConstExprValue *dest_val, BigFloat *bigfloat) {
- if (dest_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (dest_val->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_init_bigfloat(&dest_val->data.x_bigfloat, bigfloat);
} else if (dest_val->type->id == TypeTableEntryIdFloat) {
switch (dest_val->type->data.floating.bit_count) {
+ case 16:
+ dest_val->data.x_f16 = bigfloat_to_f16(bigfloat);
+ break;
case 32:
dest_val->data.x_f32 = bigfloat_to_f32(bigfloat);
break;
@@ -6793,11 +7738,39 @@ static void float_init_bigfloat(ConstExprValue *dest_val, BigFloat *bigfloat) {
}
}
+static void float_init_f16(ConstExprValue *dest_val, float16_t x) {
+ if (dest_val->type->id == TypeTableEntryIdComptimeFloat) {
+ bigfloat_init_16(&dest_val->data.x_bigfloat, x);
+ } else if (dest_val->type->id == TypeTableEntryIdFloat) {
+ switch (dest_val->type->data.floating.bit_count) {
+ case 16:
+ dest_val->data.x_f16 = x;
+ break;
+ case 32:
+ dest_val->data.x_f32 = zig_f16_to_double(x);
+ break;
+ case 64:
+ dest_val->data.x_f64 = zig_f16_to_double(x);
+ break;
+ case 128:
+ f16_to_f128M(x, &dest_val->data.x_f128);
+ break;
+ default:
+ zig_unreachable();
+ }
+ } else {
+ zig_unreachable();
+ }
+}
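
All comptime f16 arithmetic above goes through the softfloat f16_* helpers on a raw binary16 bit pattern rather than native floating point. A tiny illustration of that style follows, assuming the Berkeley SoftFloat 3 header that provides float16_t and f16_add is on the include path; the bit patterns are written by hand here and are not taken from the compiler.

    #include <cstdio>
    extern "C" {
    #include "softfloat.h"   // assumption: Berkeley SoftFloat 3 headers available
    }

    int main() {
        // binary16 bit patterns: 0x3E00 is 1.5, 0x3400 is 0.25
        float16_t a = { 0x3E00 };
        float16_t b = { 0x3400 };
        float16_t sum = f16_add(a, b);
        std::printf("0x%04X\n", (unsigned)sum.v); // 0x3F00, i.e. 1.75 in binary16
        return 0;
    }
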
+
static void float_init_f32(ConstExprValue *dest_val, float x) {
- if (dest_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (dest_val->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_init_32(&dest_val->data.x_bigfloat, x);
} else if (dest_val->type->id == TypeTableEntryIdFloat) {
switch (dest_val->type->data.floating.bit_count) {
+ case 16:
+ dest_val->data.x_f16 = zig_double_to_f16(x);
+ break;
case 32:
dest_val->data.x_f32 = x;
break;
@@ -6820,10 +7793,13 @@ static void float_init_f32(ConstExprValue *dest_val, float x) {
}
static void float_init_f64(ConstExprValue *dest_val, double x) {
- if (dest_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (dest_val->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_init_64(&dest_val->data.x_bigfloat, x);
} else if (dest_val->type->id == TypeTableEntryIdFloat) {
switch (dest_val->type->data.floating.bit_count) {
+ case 16:
+ dest_val->data.x_f16 = zig_double_to_f16(x);
+ break;
case 32:
dest_val->data.x_f32 = x;
break;
@@ -6846,10 +7822,13 @@ static void float_init_f64(ConstExprValue *dest_val, double x) {
}
static void float_init_f128(ConstExprValue *dest_val, float128_t x) {
- if (dest_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (dest_val->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_init_128(&dest_val->data.x_bigfloat, x);
} else if (dest_val->type->id == TypeTableEntryIdFloat) {
switch (dest_val->type->data.floating.bit_count) {
+ case 16:
+ dest_val->data.x_f16 = f128M_to_f16(&x);
+ break;
case 32:
{
float32_t f32_val = f128M_to_f32(&x);
@@ -6876,10 +7855,13 @@ static void float_init_f128(ConstExprValue *dest_val, float128_t x) {
}
static void float_init_float(ConstExprValue *dest_val, ConstExprValue *src_val) {
- if (src_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (src_val->type->id == TypeTableEntryIdComptimeFloat) {
float_init_bigfloat(dest_val, &src_val->data.x_bigfloat);
} else if (src_val->type->id == TypeTableEntryIdFloat) {
switch (src_val->type->data.floating.bit_count) {
+ case 16:
+ float_init_f16(dest_val, src_val->data.x_f16);
+ break;
case 32:
float_init_f32(dest_val, src_val->data.x_f32);
break;
@@ -6899,10 +7881,18 @@ static void float_init_float(ConstExprValue *dest_val, ConstExprValue *src_val)
static Cmp float_cmp(ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
return bigfloat_cmp(&op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ if (f16_lt(op1->data.x_f16, op2->data.x_f16)) {
+ return CmpLT;
+ } else if (f16_lt(op2->data.x_f16, op1->data.x_f16)) {
+ return CmpGT;
+ } else {
+ return CmpEQ;
+ }
case 32:
if (op1->data.x_f32 > op2->data.x_f32) {
return CmpGT;
@@ -6936,10 +7926,21 @@ static Cmp float_cmp(ConstExprValue *op1, ConstExprValue *op2) {
}
static Cmp float_cmp_zero(ConstExprValue *op) {
- if (op->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op->type->id == TypeTableEntryIdComptimeFloat) {
return bigfloat_cmp_zero(&op->data.x_bigfloat);
} else if (op->type->id == TypeTableEntryIdFloat) {
switch (op->type->data.floating.bit_count) {
+ case 16:
+ {
+ const float16_t zero = zig_double_to_f16(0);
+ if (f16_lt(op->data.x_f16, zero)) {
+ return CmpLT;
+ } else if (f16_lt(zero, op->data.x_f16)) {
+ return CmpGT;
+ } else {
+ return CmpEQ;
+ }
+ }
case 32:
if (op->data.x_f32 < 0.0) {
return CmpLT;
@@ -6977,10 +7978,13 @@ static Cmp float_cmp_zero(ConstExprValue *op) {
static void float_add(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_add(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_add(op1->data.x_f16, op2->data.x_f16);
+ return;
case 32:
out_val->data.x_f32 = op1->data.x_f32 + op2->data.x_f32;
return;
@@ -7001,10 +8005,13 @@ static void float_add(ConstExprValue *out_val, ConstExprValue *op1, ConstExprVal
static void float_sub(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_sub(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_sub(op1->data.x_f16, op2->data.x_f16);
+ return;
case 32:
out_val->data.x_f32 = op1->data.x_f32 - op2->data.x_f32;
return;
@@ -7025,10 +8032,13 @@ static void float_sub(ConstExprValue *out_val, ConstExprValue *op1, ConstExprVal
static void float_mul(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_mul(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_mul(op1->data.x_f16, op2->data.x_f16);
+ return;
case 32:
out_val->data.x_f32 = op1->data.x_f32 * op2->data.x_f32;
return;
@@ -7049,10 +8059,13 @@ static void float_mul(ConstExprValue *out_val, ConstExprValue *op1, ConstExprVal
static void float_div(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_div(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_div(op1->data.x_f16, op2->data.x_f16);
+ return;
case 32:
out_val->data.x_f32 = op1->data.x_f32 / op2->data.x_f32;
return;
@@ -7073,25 +8086,19 @@ static void float_div(ConstExprValue *out_val, ConstExprValue *op1, ConstExprVal
static void float_div_trunc(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_div_trunc(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_div(op1->data.x_f16, op2->data.x_f16);
+ out_val->data.x_f16 = f16_roundToInt(out_val->data.x_f16, softfloat_round_minMag, false);
+ return;
case 32:
- out_val->data.x_f32 = op1->data.x_f32 / op2->data.x_f32;
- if (out_val->data.x_f32 >= 0.0) {
- out_val->data.x_f32 = floorf(out_val->data.x_f32);
- } else {
- out_val->data.x_f32 = ceilf(out_val->data.x_f32);
- }
+ out_val->data.x_f32 = truncf(op1->data.x_f32 / op2->data.x_f32);
return;
case 64:
- out_val->data.x_f64 = op1->data.x_f64 / op2->data.x_f64;
- if (out_val->data.x_f64 >= 0.0) {
- out_val->data.x_f64 = floor(out_val->data.x_f64);
- } else {
- out_val->data.x_f64 = ceil(out_val->data.x_f64);
- }
+ out_val->data.x_f64 = trunc(op1->data.x_f64 / op2->data.x_f64);
return;
case 128:
f128M_div(&op1->data.x_f128, &op2->data.x_f128, &out_val->data.x_f128);
@@ -7108,10 +8115,14 @@ static void float_div_trunc(ConstExprValue *out_val, ConstExprValue *op1, ConstE
static void float_div_floor(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_div_floor(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_div(op1->data.x_f16, op2->data.x_f16);
+ out_val->data.x_f16 = f16_roundToInt(out_val->data.x_f16, softfloat_round_min, false);
+ return;
case 32:
out_val->data.x_f32 = floorf(op1->data.x_f32 / op2->data.x_f32);
return;
@@ -7133,10 +8144,13 @@ static void float_div_floor(ConstExprValue *out_val, ConstExprValue *op1, ConstE
static void float_rem(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_rem(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_rem(op1->data.x_f16, op2->data.x_f16);
+ return;
case 32:
out_val->data.x_f32 = fmodf(op1->data.x_f32, op2->data.x_f32);
return;
@@ -7154,13 +8168,34 @@ static void float_rem(ConstExprValue *out_val, ConstExprValue *op1, ConstExprVal
}
}
+// c = a - b * floor(a / b)
+static float16_t zig_f16_mod(float16_t a, float16_t b) {
+ float16_t c;
+ c = f16_div(a, b);
+ c = f16_roundToInt(c, softfloat_round_min, true);
+ c = f16_mul(b, c);
+ c = f16_sub(a, c);
+ return c;
+}
+
+// c = a - b * floor(a / b)
+static void zig_f128M_mod(const float128_t* a, const float128_t* b, float128_t* c) {
+ f128M_div(a, b, c);
+ f128M_roundToInt(c, softfloat_round_min, true, c);
+ f128M_mul(b, c, c);
+ f128M_sub(a, c, c);
+}
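
Both helpers compute a floored modulo, c = a - b * floor(a / b) (softfloat_round_min rounds toward negative infinity), which agrees with the nested-fmod expressions in the 32- and 64-bit cases of float_mod below. A quick double-precision sanity check of that equivalence, in plain C++ rather than compiler code:

    #include <cmath>
    #include <cstdio>

    int main() {
        double a = -7.5, b = 3.0;
        double floored = a - b * std::floor(a / b);          // 1.5
        double nested  = std::fmod(std::fmod(a, b) + b, b);  // 1.5
        std::printf("%f %f\n", floored, nested);             // result takes the sign of b
        return 0;
    }
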
+
static void float_mod(ConstExprValue *out_val, ConstExprValue *op1, ConstExprValue *op2) {
assert(op1->type == op2->type);
out_val->type = op1->type;
- if (op1->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op1->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_mod(&out_val->data.x_bigfloat, &op1->data.x_bigfloat, &op2->data.x_bigfloat);
} else if (op1->type->id == TypeTableEntryIdFloat) {
switch (op1->type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = zig_f16_mod(op1->data.x_f16, op2->data.x_f16);
+ return;
case 32:
out_val->data.x_f32 = fmodf(fmodf(op1->data.x_f32, op2->data.x_f32) + op2->data.x_f32, op2->data.x_f32);
return;
@@ -7168,9 +8203,7 @@ static void float_mod(ConstExprValue *out_val, ConstExprValue *op1, ConstExprVal
out_val->data.x_f64 = fmod(fmod(op1->data.x_f64, op2->data.x_f64) + op2->data.x_f64, op2->data.x_f64);
return;
case 128:
- f128M_rem(&op1->data.x_f128, &op2->data.x_f128, &out_val->data.x_f128);
- f128M_add(&out_val->data.x_f128, &op2->data.x_f128, &out_val->data.x_f128);
- f128M_rem(&out_val->data.x_f128, &op2->data.x_f128, &out_val->data.x_f128);
+ zig_f128M_mod(&op1->data.x_f128, &op2->data.x_f128, &out_val->data.x_f128);
return;
default:
zig_unreachable();
@@ -7182,10 +8215,16 @@ static void float_mod(ConstExprValue *out_val, ConstExprValue *op1, ConstExprVal
static void float_negate(ConstExprValue *out_val, ConstExprValue *op) {
out_val->type = op->type;
- if (op->type->id == TypeTableEntryIdNumLitFloat) {
+ if (op->type->id == TypeTableEntryIdComptimeFloat) {
bigfloat_negate(&out_val->data.x_bigfloat, &op->data.x_bigfloat);
} else if (op->type->id == TypeTableEntryIdFloat) {
switch (op->type->data.floating.bit_count) {
+ case 16:
+ {
+ const float16_t zero = zig_double_to_f16(0);
+ out_val->data.x_f16 = f16_sub(zero, op->data.x_f16);
+ return;
+ }
case 32:
out_val->data.x_f32 = -op->data.x_f32;
return;
@@ -7208,6 +8247,9 @@ static void float_negate(ConstExprValue *out_val, ConstExprValue *op) {
void float_write_ieee597(ConstExprValue *op, uint8_t *buf, bool is_big_endian) {
if (op->type->id == TypeTableEntryIdFloat) {
switch (op->type->data.floating.bit_count) {
+ case 16:
+ memcpy(buf, &op->data.x_f16, 2); // TODO wrong when compiler is big endian
+ return;
case 32:
memcpy(buf, &op->data.x_f32, 4); // TODO wrong when compiler is big endian
return;
@@ -7228,6 +8270,9 @@ void float_write_ieee597(ConstExprValue *op, uint8_t *buf, bool is_big_endian) {
void float_read_ieee597(ConstExprValue *val, uint8_t *buf, bool is_big_endian) {
if (val->type->id == TypeTableEntryIdFloat) {
switch (val->type->data.floating.bit_count) {
+ case 16:
+ memcpy(&val->data.x_f16, buf, 2); // TODO wrong when compiler is big endian
+ return;
case 32:
memcpy(&val->data.x_f32, buf, 4); // TODO wrong when compiler is big endian
return;
@@ -7256,9 +8301,9 @@ static bool ir_num_lit_fits_in_other_type(IrAnalyze *ira, IrInstruction *instruc
assert(const_val->special != ConstValSpecialRuntime);
bool const_val_is_int = (const_val->type->id == TypeTableEntryIdInt ||
- const_val->type->id == TypeTableEntryIdNumLitInt);
+ const_val->type->id == TypeTableEntryIdComptimeInt);
bool const_val_is_float = (const_val->type->id == TypeTableEntryIdFloat ||
- const_val->type->id == TypeTableEntryIdNumLitFloat);
+ const_val->type->id == TypeTableEntryIdComptimeFloat);
if (other_type->id == TypeTableEntryIdFloat) {
return true;
} else if (other_type->id == TypeTableEntryIdInt && const_val_is_int) {
@@ -7278,7 +8323,7 @@ static bool ir_num_lit_fits_in_other_type(IrAnalyze *ira, IrInstruction *instruc
}
} else if (const_val_fits_in_num_lit(const_val, other_type)) {
return true;
- } else if (other_type->id == TypeTableEntryIdMaybe) {
+ } else if (other_type->id == TypeTableEntryIdOptional) {
TypeTableEntry *child_type = other_type->data.maybe.child_type;
if (const_val_fits_in_num_lit(const_val, child_type)) {
return true;
@@ -7302,7 +8347,7 @@ static bool ir_num_lit_fits_in_other_type(IrAnalyze *ira, IrInstruction *instruc
return true;
}
}
- if (explicit_cast && (other_type->id == TypeTableEntryIdInt || other_type->id == TypeTableEntryIdNumLitInt) &&
+ if (explicit_cast && (other_type->id == TypeTableEntryIdInt || other_type->id == TypeTableEntryIdComptimeInt) &&
const_val_is_float)
{
if (float_has_fraction(const_val)) {
@@ -7315,7 +8360,7 @@ static bool ir_num_lit_fits_in_other_type(IrAnalyze *ira, IrInstruction *instruc
buf_ptr(&other_type->name)));
return false;
} else {
- if (other_type->id == TypeTableEntryIdNumLitInt) {
+ if (other_type->id == TypeTableEntryIdComptimeInt) {
return true;
} else {
BigInt bigint;
@@ -7356,38 +8401,16 @@ static bool slice_is_const(TypeTableEntry *type) {
return type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.is_const;
}
-static bool resolve_inferred_error_set(IrAnalyze *ira, TypeTableEntry *err_set_type, AstNode *source_node) {
- assert(err_set_type->id == TypeTableEntryIdErrorSet);
- FnTableEntry *infer_fn = err_set_type->data.error_set.infer_fn;
- if (infer_fn != nullptr) {
- if (infer_fn->anal_state == FnAnalStateInvalid) {
- return false;
- } else if (infer_fn->anal_state == FnAnalStateReady) {
- analyze_fn_body(ira->codegen, infer_fn);
- if (err_set_type->data.error_set.infer_fn != nullptr) {
- assert(ira->codegen->errors.length != 0);
- return false;
- }
- } else {
- ir_add_error_node(ira, source_node,
- buf_sprintf("cannot resolve inferred error set '%s': function '%s' not fully analyzed yet",
- buf_ptr(&err_set_type->name), buf_ptr(&err_set_type->data.error_set.infer_fn->symbol_name)));
- return false;
- }
- }
- return true;
-}
-
static TypeTableEntry *get_error_set_intersection(IrAnalyze *ira, TypeTableEntry *set1, TypeTableEntry *set2,
AstNode *source_node)
{
assert(set1->id == TypeTableEntryIdErrorSet);
assert(set2->id == TypeTableEntryIdErrorSet);
- if (!resolve_inferred_error_set(ira, set1, source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, set1, source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
- if (!resolve_inferred_error_set(ira, set2, source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, set2, source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
if (type_is_global_error_set(set1)) {
@@ -7433,89 +8456,122 @@ static TypeTableEntry *get_error_set_intersection(IrAnalyze *ira, TypeTableEntry
}
-static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry *expected_type,
- TypeTableEntry *actual_type, AstNode *source_node)
+static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry *wanted_type,
+ TypeTableEntry *actual_type, AstNode *source_node, bool wanted_is_mutable)
{
CodeGen *g = ira->codegen;
ConstCastOnly result = {};
result.id = ConstCastResultIdOk;
- if (expected_type == actual_type)
+ if (wanted_type == actual_type)
return result;
- // pointer const
- if (expected_type->id == TypeTableEntryIdPointer &&
- actual_type->id == TypeTableEntryIdPointer &&
- (!actual_type->data.pointer.is_const || expected_type->data.pointer.is_const) &&
- (!actual_type->data.pointer.is_volatile || expected_type->data.pointer.is_volatile) &&
- actual_type->data.pointer.bit_offset == expected_type->data.pointer.bit_offset &&
- actual_type->data.pointer.unaligned_bit_count == expected_type->data.pointer.unaligned_bit_count &&
- actual_type->data.pointer.alignment >= expected_type->data.pointer.alignment)
+ // * and [*] can do a const-cast-only to ?* and ?[*], respectively
+ // but not if there is a mutable parent pointer
+ // and not if the pointer is zero bits
+ if (!wanted_is_mutable && wanted_type->id == TypeTableEntryIdOptional &&
+ wanted_type->data.maybe.child_type->id == TypeTableEntryIdPointer &&
+ actual_type->id == TypeTableEntryIdPointer && type_has_bits(actual_type))
{
- ConstCastOnly child = types_match_const_cast_only(ira, expected_type->data.pointer.child_type, actual_type->data.pointer.child_type, source_node);
+ ConstCastOnly child = types_match_const_cast_only(ira,
+ wanted_type->data.maybe.child_type, actual_type, source_node, wanted_is_mutable);
if (child.id != ConstCastResultIdOk) {
- result.id = ConstCastResultIdPointerChild;
- result.data.pointer_child = allocate_nonzero<ConstCastOnly>(1);
- *result.data.pointer_child = child;
+ result.id = ConstCastResultIdNullWrapPtr;
+ result.data.null_wrap_ptr_child = allocate_nonzero<ConstCastOnly>(1);
+ *result.data.null_wrap_ptr_child = child;
}
return result;
}
+ // pointer const
+ if (wanted_type->id == TypeTableEntryIdPointer && actual_type->id == TypeTableEntryIdPointer) {
+ ConstCastOnly child = types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
+ actual_type->data.pointer.child_type, source_node, !wanted_type->data.pointer.is_const);
+ if (child.id != ConstCastResultIdOk) {
+ result.id = ConstCastResultIdPointerChild;
+ result.data.pointer_mismatch = allocate_nonzero<ConstCastPointerMismatch>(1);
+ result.data.pointer_mismatch->child = child;
+ result.data.pointer_mismatch->wanted_child = wanted_type->data.pointer.child_type;
+ result.data.pointer_mismatch->actual_child = actual_type->data.pointer.child_type;
+ return result;
+ }
+ if ((actual_type->data.pointer.ptr_len == wanted_type->data.pointer.ptr_len) &&
+ (!actual_type->data.pointer.is_const || wanted_type->data.pointer.is_const) &&
+ (!actual_type->data.pointer.is_volatile || wanted_type->data.pointer.is_volatile) &&
+ actual_type->data.pointer.bit_offset == wanted_type->data.pointer.bit_offset &&
+ actual_type->data.pointer.unaligned_bit_count == wanted_type->data.pointer.unaligned_bit_count &&
+ actual_type->data.pointer.alignment >= wanted_type->data.pointer.alignment)
+ {
+ return result;
+ }
+ }
+
// slice const
- if (is_slice(expected_type) && is_slice(actual_type)) {
+ if (is_slice(wanted_type) && is_slice(actual_type)) {
TypeTableEntry *actual_ptr_type = actual_type->data.structure.fields[slice_ptr_index].type_entry;
- TypeTableEntry *expected_ptr_type = expected_type->data.structure.fields[slice_ptr_index].type_entry;
- if ((!actual_ptr_type->data.pointer.is_const || expected_ptr_type->data.pointer.is_const) &&
- (!actual_ptr_type->data.pointer.is_volatile || expected_ptr_type->data.pointer.is_volatile) &&
- actual_ptr_type->data.pointer.bit_offset == expected_ptr_type->data.pointer.bit_offset &&
- actual_ptr_type->data.pointer.unaligned_bit_count == expected_ptr_type->data.pointer.unaligned_bit_count &&
- actual_ptr_type->data.pointer.alignment >= expected_ptr_type->data.pointer.alignment)
+ TypeTableEntry *wanted_ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
+ if ((!actual_ptr_type->data.pointer.is_const || wanted_ptr_type->data.pointer.is_const) &&
+ (!actual_ptr_type->data.pointer.is_volatile || wanted_ptr_type->data.pointer.is_volatile) &&
+ actual_ptr_type->data.pointer.bit_offset == wanted_ptr_type->data.pointer.bit_offset &&
+ actual_ptr_type->data.pointer.unaligned_bit_count == wanted_ptr_type->data.pointer.unaligned_bit_count &&
+ actual_ptr_type->data.pointer.alignment >= wanted_ptr_type->data.pointer.alignment)
{
- ConstCastOnly child = types_match_const_cast_only(ira, expected_ptr_type->data.pointer.child_type,
- actual_ptr_type->data.pointer.child_type, source_node);
+ ConstCastOnly child = types_match_const_cast_only(ira, wanted_ptr_type->data.pointer.child_type,
+ actual_ptr_type->data.pointer.child_type, source_node, !wanted_ptr_type->data.pointer.is_const);
if (child.id != ConstCastResultIdOk) {
result.id = ConstCastResultIdSliceChild;
- result.data.slice_child = allocate_nonzero<ConstCastOnly>(1);
- *result.data.slice_child = child;
+ result.data.slice_mismatch = allocate_nonzero<ConstCastSliceMismatch>(1);
+ result.data.slice_mismatch->child = child;
+ result.data.slice_mismatch->actual_child = actual_ptr_type->data.pointer.child_type;
+ result.data.slice_mismatch->wanted_child = wanted_ptr_type->data.pointer.child_type;
}
return result;
}
}
// maybe
- if (expected_type->id == TypeTableEntryIdMaybe && actual_type->id == TypeTableEntryIdMaybe) {
- ConstCastOnly child = types_match_const_cast_only(ira, expected_type->data.maybe.child_type, actual_type->data.maybe.child_type, source_node);
+ if (wanted_type->id == TypeTableEntryIdOptional && actual_type->id == TypeTableEntryIdOptional) {
+ ConstCastOnly child = types_match_const_cast_only(ira, wanted_type->data.maybe.child_type,
+ actual_type->data.maybe.child_type, source_node, wanted_is_mutable);
if (child.id != ConstCastResultIdOk) {
- result.id = ConstCastResultIdNullableChild;
- result.data.nullable_child = allocate_nonzero<ConstCastOnly>(1);
- *result.data.nullable_child = child;
+ result.id = ConstCastResultIdOptionalChild;
+ result.data.optional = allocate_nonzero<ConstCastOptionalMismatch>(1);
+ result.data.optional->child = child;
+ result.data.optional->wanted_child = wanted_type->data.maybe.child_type;
+ result.data.optional->actual_child = actual_type->data.maybe.child_type;
}
return result;
}
// error union
- if (expected_type->id == TypeTableEntryIdErrorUnion && actual_type->id == TypeTableEntryIdErrorUnion) {
- ConstCastOnly payload_child = types_match_const_cast_only(ira, expected_type->data.error_union.payload_type, actual_type->data.error_union.payload_type, source_node);
+ if (wanted_type->id == TypeTableEntryIdErrorUnion && actual_type->id == TypeTableEntryIdErrorUnion) {
+ ConstCastOnly payload_child = types_match_const_cast_only(ira, wanted_type->data.error_union.payload_type,
+ actual_type->data.error_union.payload_type, source_node, wanted_is_mutable);
if (payload_child.id != ConstCastResultIdOk) {
result.id = ConstCastResultIdErrorUnionPayload;
- result.data.error_union_payload = allocate_nonzero<ConstCastOnly>(1);
- *result.data.error_union_payload = payload_child;
+ result.data.error_union_payload = allocate_nonzero<ConstCastErrUnionPayloadMismatch>(1);
+ result.data.error_union_payload->child = payload_child;
+ result.data.error_union_payload->wanted_payload = wanted_type->data.error_union.payload_type;
+ result.data.error_union_payload->actual_payload = actual_type->data.error_union.payload_type;
return result;
}
- ConstCastOnly error_set_child = types_match_const_cast_only(ira, expected_type->data.error_union.err_set_type, actual_type->data.error_union.err_set_type, source_node);
+ ConstCastOnly error_set_child = types_match_const_cast_only(ira, wanted_type->data.error_union.err_set_type,
+ actual_type->data.error_union.err_set_type, source_node, wanted_is_mutable);
if (error_set_child.id != ConstCastResultIdOk) {
result.id = ConstCastResultIdErrorUnionErrorSet;
- result.data.error_union_error_set = allocate_nonzero<ConstCastOnly>(1);
- *result.data.error_union_error_set = error_set_child;
+ result.data.error_union_error_set = allocate_nonzero<ConstCastErrUnionErrSetMismatch>(1);
+ result.data.error_union_error_set->child = error_set_child;
+ result.data.error_union_error_set->wanted_err_set = wanted_type->data.error_union.err_set_type;
+ result.data.error_union_error_set->actual_err_set = actual_type->data.error_union.err_set_type;
return result;
}
return result;
}
// error set
- if (expected_type->id == TypeTableEntryIdErrorSet && actual_type->id == TypeTableEntryIdErrorSet) {
+ if (wanted_type->id == TypeTableEntryIdErrorSet && actual_type->id == TypeTableEntryIdErrorSet) {
TypeTableEntry *contained_set = actual_type;
- TypeTableEntry *container_set = expected_type;
+ TypeTableEntry *container_set = wanted_type;
// if the container set is inferred, then this will always work.
if (container_set->data.error_set.infer_fn != nullptr) {
@@ -7526,7 +8582,7 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
return result;
}
- if (!resolve_inferred_error_set(ira, contained_set, source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, contained_set, source_node)) {
result.id = ConstCastResultIdUnresolvedInferredErrSet;
return result;
}
@@ -7548,44 +8604,46 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
if (error_entry == nullptr) {
if (result.id == ConstCastResultIdOk) {
result.id = ConstCastResultIdErrSet;
+ result.data.error_set_mismatch = allocate<ConstCastErrSetMismatch>(1);
}
- result.data.error_set.missing_errors.append(contained_error_entry);
+ result.data.error_set_mismatch->missing_errors.append(contained_error_entry);
}
}
free(errors);
return result;
}
- if (expected_type == ira->codegen->builtin_types.entry_promise &&
+ if (wanted_type == ira->codegen->builtin_types.entry_promise &&
actual_type->id == TypeTableEntryIdPromise)
{
return result;
}
// fn
- if (expected_type->id == TypeTableEntryIdFn &&
+ if (wanted_type->id == TypeTableEntryIdFn &&
actual_type->id == TypeTableEntryIdFn)
{
- if (expected_type->data.fn.fn_type_id.alignment > actual_type->data.fn.fn_type_id.alignment) {
+ if (wanted_type->data.fn.fn_type_id.alignment > actual_type->data.fn.fn_type_id.alignment) {
result.id = ConstCastResultIdFnAlign;
return result;
}
- if (expected_type->data.fn.fn_type_id.cc != actual_type->data.fn.fn_type_id.cc) {
+ if (wanted_type->data.fn.fn_type_id.cc != actual_type->data.fn.fn_type_id.cc) {
result.id = ConstCastResultIdFnCC;
return result;
}
- if (expected_type->data.fn.fn_type_id.is_var_args != actual_type->data.fn.fn_type_id.is_var_args) {
+ if (wanted_type->data.fn.fn_type_id.is_var_args != actual_type->data.fn.fn_type_id.is_var_args) {
result.id = ConstCastResultIdFnVarArgs;
return result;
}
- if (expected_type->data.fn.is_generic != actual_type->data.fn.is_generic) {
+ if (wanted_type->data.fn.is_generic != actual_type->data.fn.is_generic) {
result.id = ConstCastResultIdFnIsGeneric;
return result;
}
- if (!expected_type->data.fn.is_generic &&
+ if (!wanted_type->data.fn.is_generic &&
actual_type->data.fn.fn_type_id.return_type->id != TypeTableEntryIdUnreachable)
{
- ConstCastOnly child = types_match_const_cast_only(ira, expected_type->data.fn.fn_type_id.return_type, actual_type->data.fn.fn_type_id.return_type, source_node);
+ ConstCastOnly child = types_match_const_cast_only(ira, wanted_type->data.fn.fn_type_id.return_type,
+ actual_type->data.fn.fn_type_id.return_type, source_node, false);
if (child.id != ConstCastResultIdOk) {
result.id = ConstCastResultIdFnReturnType;
result.data.return_type = allocate_nonzero<ConstCastOnly>(1);
@@ -7593,9 +8651,11 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
return result;
}
}
- if (!expected_type->data.fn.is_generic && expected_type->data.fn.fn_type_id.cc == CallingConventionAsync) {
- ConstCastOnly child = types_match_const_cast_only(ira, actual_type->data.fn.fn_type_id.async_allocator_type,
- expected_type->data.fn.fn_type_id.async_allocator_type, source_node);
+ if (!wanted_type->data.fn.is_generic && wanted_type->data.fn.fn_type_id.cc == CallingConventionAsync) {
+ ConstCastOnly child = types_match_const_cast_only(ira,
+ actual_type->data.fn.fn_type_id.async_allocator_type,
+ wanted_type->data.fn.fn_type_id.async_allocator_type,
+ source_node, false);
if (child.id != ConstCastResultIdOk) {
result.id = ConstCastResultIdAsyncAllocatorType;
result.data.async_allocator_type = allocate_nonzero<ConstCastOnly>(1);
@@ -7603,22 +8663,23 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
return result;
}
}
- if (expected_type->data.fn.fn_type_id.param_count != actual_type->data.fn.fn_type_id.param_count) {
+ if (wanted_type->data.fn.fn_type_id.param_count != actual_type->data.fn.fn_type_id.param_count) {
result.id = ConstCastResultIdFnArgCount;
return result;
}
- if (expected_type->data.fn.fn_type_id.next_param_index != actual_type->data.fn.fn_type_id.next_param_index) {
+ if (wanted_type->data.fn.fn_type_id.next_param_index != actual_type->data.fn.fn_type_id.next_param_index) {
result.id = ConstCastResultIdFnGenericArgCount;
return result;
}
- assert(expected_type->data.fn.is_generic ||
- expected_type->data.fn.fn_type_id.next_param_index == expected_type->data.fn.fn_type_id.param_count);
- for (size_t i = 0; i < expected_type->data.fn.fn_type_id.next_param_index; i += 1) {
+ assert(wanted_type->data.fn.is_generic ||
+ wanted_type->data.fn.fn_type_id.next_param_index == wanted_type->data.fn.fn_type_id.param_count);
+ for (size_t i = 0; i < wanted_type->data.fn.fn_type_id.next_param_index; i += 1) {
// note it's reversed for parameters
FnTypeParamInfo *actual_param_info = &actual_type->data.fn.fn_type_id.param_info[i];
- FnTypeParamInfo *expected_param_info = &expected_type->data.fn.fn_type_id.param_info[i];
+ FnTypeParamInfo *expected_param_info = &wanted_type->data.fn.fn_type_id.param_info[i];
- ConstCastOnly arg_child = types_match_const_cast_only(ira, actual_param_info->type, expected_param_info->type, source_node);
+ ConstCastOnly arg_child = types_match_const_cast_only(ira, actual_param_info->type,
+ expected_param_info->type, source_node, false);
if (arg_child.id != ConstCastResultIdOk) {
result.id = ConstCastResultIdFnArg;
result.data.fn_arg.arg_index = i;
@@ -7637,271 +8698,19 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
}
result.id = ConstCastResultIdType;
+ result.data.type_mismatch = allocate_nonzero<ConstCastTypeMismatch>(1);
+ result.data.type_mismatch->wanted_type = wanted_type;
+ result.data.type_mismatch->actual_type = actual_type;
return result;
}
-enum ImplicitCastMatchResult {
- ImplicitCastMatchResultNo,
- ImplicitCastMatchResultYes,
- ImplicitCastMatchResultReportedError,
-};
-
-static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira, TypeTableEntry *expected_type,
- TypeTableEntry *actual_type, IrInstruction *value)
-{
- AstNode *source_node = value->source_node;
- ConstCastOnly const_cast_result = types_match_const_cast_only(ira, expected_type, actual_type, source_node);
- if (const_cast_result.id == ConstCastResultIdOk) {
- return ImplicitCastMatchResultYes;
- }
-
- // if we got here with error sets, make an error showing the incompatibilities
- ZigList<ErrorTableEntry *> *missing_errors = nullptr;
- if (const_cast_result.id == ConstCastResultIdErrSet) {
- missing_errors = &const_cast_result.data.error_set.missing_errors;
- }
- if (const_cast_result.id == ConstCastResultIdErrorUnionErrorSet) {
- if (const_cast_result.data.error_union_error_set->id == ConstCastResultIdErrSet) {
- missing_errors = &const_cast_result.data.error_union_error_set->data.error_set.missing_errors;
- } else if (const_cast_result.data.error_union_error_set->id == ConstCastResultIdErrSetGlobal) {
- ErrorMsg *msg = ir_add_error(ira, value,
- buf_sprintf("expected '%s', found '%s'", buf_ptr(&expected_type->name), buf_ptr(&actual_type->name)));
- add_error_note(ira->codegen, msg, value->source_node,
- buf_sprintf("unable to cast global error set into smaller set"));
- return ImplicitCastMatchResultReportedError;
- }
- } else if (const_cast_result.id == ConstCastResultIdErrSetGlobal) {
- ErrorMsg *msg = ir_add_error(ira, value,
- buf_sprintf("expected '%s', found '%s'", buf_ptr(&expected_type->name), buf_ptr(&actual_type->name)));
- add_error_note(ira->codegen, msg, value->source_node,
- buf_sprintf("unable to cast global error set into smaller set"));
- return ImplicitCastMatchResultReportedError;
- }
- if (missing_errors != nullptr) {
- ErrorMsg *msg = ir_add_error(ira, value,
- buf_sprintf("expected '%s', found '%s'", buf_ptr(&expected_type->name), buf_ptr(&actual_type->name)));
- for (size_t i = 0; i < missing_errors->length; i += 1) {
- ErrorTableEntry *error_entry = missing_errors->at(i);
- add_error_note(ira->codegen, msg, error_entry->decl_node,
- buf_sprintf("'error.%s' not a member of destination error set", buf_ptr(&error_entry->name)));
- }
-
- return ImplicitCastMatchResultReportedError;
- }
-
- // implicit conversion from non maybe type to maybe type
- if (expected_type->id == TypeTableEntryIdMaybe &&
- ir_types_match_with_implicit_cast(ira, expected_type->data.maybe.child_type, actual_type, value))
- {
- return ImplicitCastMatchResultYes;
- }
-
- // implicit conversion from null literal to maybe type
- if (expected_type->id == TypeTableEntryIdMaybe &&
- actual_type->id == TypeTableEntryIdNullLit)
- {
- return ImplicitCastMatchResultYes;
- }
-
- // implicit T to U!T
- if (expected_type->id == TypeTableEntryIdErrorUnion &&
- ir_types_match_with_implicit_cast(ira, expected_type->data.error_union.payload_type, actual_type, value))
- {
- return ImplicitCastMatchResultYes;
- }
-
- // implicit conversion from error set to error union type
- if (expected_type->id == TypeTableEntryIdErrorUnion &&
- actual_type->id == TypeTableEntryIdErrorSet)
- {
- return ImplicitCastMatchResultYes;
- }
-
- // implicit conversion from T to U!?T
- if (expected_type->id == TypeTableEntryIdErrorUnion &&
- expected_type->data.error_union.payload_type->id == TypeTableEntryIdMaybe &&
- ir_types_match_with_implicit_cast(ira,
- expected_type->data.error_union.payload_type->data.maybe.child_type,
- actual_type, value))
- {
- return ImplicitCastMatchResultYes;
- }
-
- // implicit widening conversion
- if (expected_type->id == TypeTableEntryIdInt &&
- actual_type->id == TypeTableEntryIdInt &&
- expected_type->data.integral.is_signed == actual_type->data.integral.is_signed &&
- expected_type->data.integral.bit_count >= actual_type->data.integral.bit_count)
- {
- return ImplicitCastMatchResultYes;
- }
-
- // small enough unsigned ints can get casted to large enough signed ints
- if (expected_type->id == TypeTableEntryIdInt && expected_type->data.integral.is_signed &&
- actual_type->id == TypeTableEntryIdInt && !actual_type->data.integral.is_signed &&
- expected_type->data.integral.bit_count > actual_type->data.integral.bit_count)
- {
- return ImplicitCastMatchResultYes;
- }
-
- // implicit float widening conversion
- if (expected_type->id == TypeTableEntryIdFloat &&
- actual_type->id == TypeTableEntryIdFloat &&
- expected_type->data.floating.bit_count >= actual_type->data.floating.bit_count)
- {
- return ImplicitCastMatchResultYes;
- }
-
- // implicit [N]T to []const T
- if (is_slice(expected_type) && actual_type->id == TypeTableEntryIdArray) {
- TypeTableEntry *ptr_type = expected_type->data.structure.fields[slice_ptr_index].type_entry;
- assert(ptr_type->id == TypeTableEntryIdPointer);
-
- if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
- {
- return ImplicitCastMatchResultYes;
- }
- }
-
- // implicit &const [N]T to []const T
- if (is_slice(expected_type) &&
- actual_type->id == TypeTableEntryIdPointer &&
- actual_type->data.pointer.is_const &&
- actual_type->data.pointer.child_type->id == TypeTableEntryIdArray)
- {
- TypeTableEntry *ptr_type = expected_type->data.structure.fields[slice_ptr_index].type_entry;
- assert(ptr_type->id == TypeTableEntryIdPointer);
-
- TypeTableEntry *array_type = actual_type->data.pointer.child_type;
-
- if ((ptr_type->data.pointer.is_const || array_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, array_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
- {
- return ImplicitCastMatchResultYes;
- }
- }
-
- // implicit [N]T to &const []const T
- if (expected_type->id == TypeTableEntryIdPointer &&
- expected_type->data.pointer.is_const &&
- is_slice(expected_type->data.pointer.child_type) &&
- actual_type->id == TypeTableEntryIdArray)
- {
- TypeTableEntry *ptr_type =
- expected_type->data.pointer.child_type->data.structure.fields[slice_ptr_index].type_entry;
- assert(ptr_type->id == TypeTableEntryIdPointer);
- if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
- {
- return ImplicitCastMatchResultYes;
- }
- }
-
- // implicit [N]T to ?[]const T
- if (expected_type->id == TypeTableEntryIdMaybe &&
- is_slice(expected_type->data.maybe.child_type) &&
- actual_type->id == TypeTableEntryIdArray)
- {
- TypeTableEntry *ptr_type =
- expected_type->data.maybe.child_type->data.structure.fields[slice_ptr_index].type_entry;
- assert(ptr_type->id == TypeTableEntryIdPointer);
- if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
- {
- return ImplicitCastMatchResultYes;
- }
- }
-
-
- // implicit number literal to typed number
- // implicit number literal to &const integer
- if (actual_type->id == TypeTableEntryIdNumLitFloat ||
- actual_type->id == TypeTableEntryIdNumLitInt)
- {
- if (expected_type->id == TypeTableEntryIdPointer &&
- expected_type->data.pointer.is_const)
- {
- if (ir_num_lit_fits_in_other_type(ira, value, expected_type->data.pointer.child_type, false)) {
- return ImplicitCastMatchResultYes;
- } else {
- return ImplicitCastMatchResultReportedError;
- }
- } else if (ir_num_lit_fits_in_other_type(ira, value, expected_type, false)) {
- return ImplicitCastMatchResultYes;
- } else {
- return ImplicitCastMatchResultReportedError;
- }
- }
-
- // implicit typed number to integer or float literal.
- // works when the number is known
- if (value->value.special == ConstValSpecialStatic) {
- if (actual_type->id == TypeTableEntryIdInt && expected_type->id == TypeTableEntryIdNumLitInt) {
- return ImplicitCastMatchResultYes;
- } else if (actual_type->id == TypeTableEntryIdFloat && expected_type->id == TypeTableEntryIdNumLitFloat) {
- return ImplicitCastMatchResultYes;
- }
- }
-
- // implicit union to its enum tag type
- if (expected_type->id == TypeTableEntryIdEnum && actual_type->id == TypeTableEntryIdUnion &&
- (actual_type->data.unionation.decl_node->data.container_decl.auto_enum ||
- actual_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr))
- {
- type_ensure_zero_bits_known(ira->codegen, actual_type);
- if (actual_type->data.unionation.tag_type == expected_type) {
- return ImplicitCastMatchResultYes;
- }
- }
-
- // implicit enum to union which has the enum as the tag type
- if (expected_type->id == TypeTableEntryIdUnion && actual_type->id == TypeTableEntryIdEnum &&
- (expected_type->data.unionation.decl_node->data.container_decl.auto_enum ||
- expected_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr))
- {
- type_ensure_zero_bits_known(ira->codegen, expected_type);
- if (expected_type->data.unionation.tag_type == actual_type) {
- return ImplicitCastMatchResultYes;
- }
- }
-
- // implicit enum to &const union which has the enum as the tag type
- if (actual_type->id == TypeTableEntryIdEnum && expected_type->id == TypeTableEntryIdPointer) {
- TypeTableEntry *union_type = expected_type->data.pointer.child_type;
- if (union_type->data.unionation.decl_node->data.container_decl.auto_enum ||
- union_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr)
- {
- type_ensure_zero_bits_known(ira->codegen, union_type);
- if (union_type->data.unionation.tag_type == actual_type) {
- return ImplicitCastMatchResultYes;
- }
- }
- }
-
- // implicit undefined literal to anything
- if (actual_type->id == TypeTableEntryIdUndefLit) {
- return ImplicitCastMatchResultYes;
- }
-
- // implicitly take a const pointer to something
- if (!type_requires_comptime(actual_type)) {
- TypeTableEntry *const_ptr_actual = get_pointer_to_type(ira->codegen, actual_type, true);
- if (types_match_const_cast_only(ira, expected_type, const_ptr_actual, source_node).id == ConstCastResultIdOk) {
- return ImplicitCastMatchResultYes;
- }
- }
-
- return ImplicitCastMatchResultNo;
-}
-
static void update_errors_helper(CodeGen *g, ErrorTableEntry ***errors, size_t *errors_count) {
size_t old_errors_count = *errors_count;
*errors_count = g->errors_by_index.length;
*errors = reallocate(*errors, old_errors_count, *errors_count);
}
-static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_node, IrInstruction **instructions, size_t instruction_count) {
+static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_node, TypeTableEntry *expected_type, IrInstruction **instructions, size_t instruction_count) {
assert(instruction_count >= 1);
IrInstruction *prev_inst = instructions[0];
if (type_is_invalid(prev_inst->value.type)) {
@@ -7915,7 +8724,7 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
err_set_type = ira->codegen->builtin_types.entry_global_error_set;
} else {
err_set_type = prev_inst->value.type;
- if (!resolve_inferred_error_set(ira, err_set_type, prev_inst->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, err_set_type, prev_inst->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
update_errors_helper(ira->codegen, &errors, &errors_count);
@@ -7928,7 +8737,7 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
}
}
- bool any_are_null = (prev_inst->value.type->id == TypeTableEntryIdNullLit);
+ bool any_are_null = (prev_inst->value.type->id == TypeTableEntryIdNull);
bool convert_to_const_slice = false;
for (size_t i = 1; i < instruction_count; i += 1) {
IrInstruction *cur_inst = instructions[i];
@@ -7948,23 +8757,13 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
continue;
}
- if (prev_type->id == TypeTableEntryIdNullLit) {
- prev_inst = cur_inst;
- continue;
- }
-
- if (cur_type->id == TypeTableEntryIdNullLit) {
- any_are_null = true;
- continue;
- }
-
if (prev_type->id == TypeTableEntryIdErrorSet) {
assert(err_set_type != nullptr);
if (cur_type->id == TypeTableEntryIdErrorSet) {
if (type_is_global_error_set(err_set_type)) {
continue;
}
- if (!resolve_inferred_error_set(ira, cur_type, cur_inst->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, cur_type, cur_inst->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
if (type_is_global_error_set(cur_type)) {
@@ -8030,7 +8829,7 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
continue;
}
TypeTableEntry *cur_err_set_type = cur_type->data.error_union.err_set_type;
- if (!resolve_inferred_error_set(ira, cur_err_set_type, cur_inst->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, cur_err_set_type, cur_inst->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
if (type_is_global_error_set(cur_err_set_type)) {
@@ -8093,7 +8892,7 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
if (err_set_type != nullptr && type_is_global_error_set(err_set_type)) {
continue;
}
- if (!resolve_inferred_error_set(ira, cur_type, cur_inst->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, cur_type, cur_inst->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
@@ -8138,9 +8937,9 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
TypeTableEntry *cur_payload_type = cur_type->data.error_union.payload_type;
bool const_cast_prev = types_match_const_cast_only(ira, prev_payload_type, cur_payload_type,
- source_node).id == ConstCastResultIdOk;
+ source_node, false).id == ConstCastResultIdOk;
bool const_cast_cur = types_match_const_cast_only(ira, cur_payload_type, prev_payload_type,
- source_node).id == ConstCastResultIdOk;
+ source_node, false).id == ConstCastResultIdOk;
if (const_cast_prev || const_cast_cur) {
if (const_cast_cur) {
@@ -8150,11 +8949,11 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
TypeTableEntry *prev_err_set_type = (err_set_type == nullptr) ? prev_type->data.error_union.err_set_type : err_set_type;
TypeTableEntry *cur_err_set_type = cur_type->data.error_union.err_set_type;
- if (!resolve_inferred_error_set(ira, prev_err_set_type, cur_inst->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, prev_err_set_type, cur_inst->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
- if (!resolve_inferred_error_set(ira, cur_err_set_type, cur_inst->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, cur_err_set_type, cur_inst->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
@@ -8217,11 +9016,21 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
}
}
- if (types_match_const_cast_only(ira, prev_type, cur_type, source_node).id == ConstCastResultIdOk) {
+ if (prev_type->id == TypeTableEntryIdNull) {
+ prev_inst = cur_inst;
+ continue;
+ }
+
+ if (cur_type->id == TypeTableEntryIdNull) {
+ any_are_null = true;
+ continue;
+ }
+
+ if (types_match_const_cast_only(ira, prev_type, cur_type, source_node, false).id == ConstCastResultIdOk) {
continue;
}
- if (types_match_const_cast_only(ira, cur_type, prev_type, source_node).id == ConstCastResultIdOk) {
+ if (types_match_const_cast_only(ira, cur_type, prev_type, source_node, false).id == ConstCastResultIdOk) {
prev_inst = cur_inst;
continue;
}
@@ -8244,17 +9053,19 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
}
if (prev_type->id == TypeTableEntryIdErrorUnion &&
- types_match_const_cast_only(ira, prev_type->data.error_union.payload_type, cur_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira, prev_type->data.error_union.payload_type, cur_type,
+ source_node, false).id == ConstCastResultIdOk)
{
continue;
}
if (cur_type->id == TypeTableEntryIdErrorUnion &&
- types_match_const_cast_only(ira, cur_type->data.error_union.payload_type, prev_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira, cur_type->data.error_union.payload_type, prev_type,
+ source_node, false).id == ConstCastResultIdOk)
{
if (err_set_type != nullptr) {
TypeTableEntry *cur_err_set_type = cur_type->data.error_union.err_set_type;
- if (!resolve_inferred_error_set(ira, cur_err_set_type, cur_inst->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, cur_err_set_type, cur_inst->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
if (type_is_global_error_set(cur_err_set_type) || type_is_global_error_set(err_set_type)) {
@@ -8271,30 +9082,32 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
continue;
}
- if (prev_type->id == TypeTableEntryIdMaybe &&
- types_match_const_cast_only(ira, prev_type->data.maybe.child_type, cur_type, source_node).id == ConstCastResultIdOk)
+ if (prev_type->id == TypeTableEntryIdOptional &&
+ types_match_const_cast_only(ira, prev_type->data.maybe.child_type, cur_type,
+ source_node, false).id == ConstCastResultIdOk)
{
continue;
}
- if (cur_type->id == TypeTableEntryIdMaybe &&
- types_match_const_cast_only(ira, cur_type->data.maybe.child_type, prev_type, source_node).id == ConstCastResultIdOk)
+ if (cur_type->id == TypeTableEntryIdOptional &&
+ types_match_const_cast_only(ira, cur_type->data.maybe.child_type, prev_type,
+ source_node, false).id == ConstCastResultIdOk)
{
prev_inst = cur_inst;
continue;
}
- if (cur_type->id == TypeTableEntryIdUndefLit) {
+ if (cur_type->id == TypeTableEntryIdUndefined) {
continue;
}
- if (prev_type->id == TypeTableEntryIdUndefLit) {
+ if (prev_type->id == TypeTableEntryIdUndefined) {
prev_inst = cur_inst;
continue;
}
- if (prev_type->id == TypeTableEntryIdNumLitInt ||
- prev_type->id == TypeTableEntryIdNumLitFloat)
+ if (prev_type->id == TypeTableEntryIdComptimeInt ||
+ prev_type->id == TypeTableEntryIdComptimeFloat)
{
if (ir_num_lit_fits_in_other_type(ira, prev_inst, cur_type, false)) {
prev_inst = cur_inst;
@@ -8304,8 +9117,8 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
}
}
- if (cur_type->id == TypeTableEntryIdNumLitInt ||
- cur_type->id == TypeTableEntryIdNumLitFloat)
+ if (cur_type->id == TypeTableEntryIdComptimeInt ||
+ cur_type->id == TypeTableEntryIdComptimeFloat)
{
if (ir_num_lit_fits_in_other_type(ira, cur_inst, prev_type, false)) {
continue;
@@ -8315,8 +9128,9 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
}
if (cur_type->id == TypeTableEntryIdArray && prev_type->id == TypeTableEntryIdArray &&
- cur_type->data.array.len != prev_type->data.array.len &&
- types_match_const_cast_only(ira, cur_type->data.array.child_type, prev_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ cur_type->data.array.len != prev_type->data.array.len &&
+ types_match_const_cast_only(ira, cur_type->data.array.child_type, prev_type->data.array.child_type,
+ source_node, false).id == ConstCastResultIdOk)
{
convert_to_const_slice = true;
prev_inst = cur_inst;
@@ -8324,8 +9138,9 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
}
if (cur_type->id == TypeTableEntryIdArray && prev_type->id == TypeTableEntryIdArray &&
- cur_type->data.array.len != prev_type->data.array.len &&
- types_match_const_cast_only(ira, prev_type->data.array.child_type, cur_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ cur_type->data.array.len != prev_type->data.array.len &&
+ types_match_const_cast_only(ira, prev_type->data.array.child_type, cur_type->data.array.child_type,
+ source_node, false).id == ConstCastResultIdOk)
{
convert_to_const_slice = true;
continue;
@@ -8334,8 +9149,9 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
if (cur_type->id == TypeTableEntryIdArray && is_slice(prev_type) &&
(prev_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.is_const ||
cur_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, prev_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.child_type,
- cur_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira,
+ prev_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.child_type,
+ cur_type->data.array.child_type, source_node, false).id == ConstCastResultIdOk)
{
convert_to_const_slice = false;
continue;
@@ -8344,8 +9160,9 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
if (prev_type->id == TypeTableEntryIdArray && is_slice(cur_type) &&
(cur_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.is_const ||
prev_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, cur_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.child_type,
- prev_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira,
+ cur_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.child_type,
+ prev_type->data.array.child_type, source_node, false).id == ConstCastResultIdOk)
{
prev_inst = cur_inst;
convert_to_const_slice = false;
@@ -8390,7 +9207,11 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
if (convert_to_const_slice) {
assert(prev_inst->value.type->id == TypeTableEntryIdArray);
- TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, prev_inst->value.type->data.array.child_type, true);
+ TypeTableEntry *ptr_type = get_pointer_to_type_extra(
+ ira->codegen, prev_inst->value.type->data.array.child_type,
+ true, false, PtrLenUnknown,
+ get_abi_alignment(ira->codegen, prev_inst->value.type->data.array.child_type),
+ 0, 0);
TypeTableEntry *slice_type = get_slice_type(ira->codegen, ptr_type);
if (err_set_type != nullptr) {
return get_error_union_type(ira->codegen, err_set_type, slice_type);
@@ -8400,34 +9221,36 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
} else if (err_set_type != nullptr) {
if (prev_inst->value.type->id == TypeTableEntryIdErrorSet) {
return err_set_type;
+ } else if (prev_inst->value.type->id == TypeTableEntryIdErrorUnion) {
+ return get_error_union_type(ira->codegen, err_set_type, prev_inst->value.type->data.error_union.payload_type);
+ } else if (expected_type != nullptr && expected_type->id == TypeTableEntryIdErrorUnion) {
+ return get_error_union_type(ira->codegen, err_set_type, expected_type->data.error_union.payload_type);
} else {
- if (prev_inst->value.type->id == TypeTableEntryIdNumLitInt ||
- prev_inst->value.type->id == TypeTableEntryIdNumLitFloat)
+ if (prev_inst->value.type->id == TypeTableEntryIdComptimeInt ||
+ prev_inst->value.type->id == TypeTableEntryIdComptimeFloat)
{
ir_add_error_node(ira, source_node,
buf_sprintf("unable to make error union out of number literal"));
return ira->codegen->builtin_types.entry_invalid;
- } else if (prev_inst->value.type->id == TypeTableEntryIdNullLit) {
+ } else if (prev_inst->value.type->id == TypeTableEntryIdNull) {
ir_add_error_node(ira, source_node,
buf_sprintf("unable to make error union out of null literal"));
return ira->codegen->builtin_types.entry_invalid;
- } else if (prev_inst->value.type->id == TypeTableEntryIdErrorUnion) {
- return get_error_union_type(ira->codegen, err_set_type, prev_inst->value.type->data.error_union.payload_type);
} else {
return get_error_union_type(ira->codegen, err_set_type, prev_inst->value.type);
}
}
- } else if (any_are_null && prev_inst->value.type->id != TypeTableEntryIdNullLit) {
- if (prev_inst->value.type->id == TypeTableEntryIdNumLitInt ||
- prev_inst->value.type->id == TypeTableEntryIdNumLitFloat)
+ } else if (any_are_null && prev_inst->value.type->id != TypeTableEntryIdNull) {
+ if (prev_inst->value.type->id == TypeTableEntryIdComptimeInt ||
+ prev_inst->value.type->id == TypeTableEntryIdComptimeFloat)
{
ir_add_error_node(ira, source_node,
buf_sprintf("unable to make maybe out of number literal"));
return ira->codegen->builtin_types.entry_invalid;
- } else if (prev_inst->value.type->id == TypeTableEntryIdMaybe) {
+ } else if (prev_inst->value.type->id == TypeTableEntryIdOptional) {
return prev_inst->value.type;
} else {
- return get_maybe_type(ira->codegen, prev_inst->value.type);
+ return get_optional_type(ira->codegen, prev_inst->value.type);
}
} else {
return prev_inst->value.type;
@@ -8448,10 +9271,15 @@ static void copy_const_val(ConstExprValue *dest, ConstExprValue *src, bool same_
*dest = *src;
if (!same_global_refs) {
dest->global_refs = global_refs;
+ if (dest->type->id == TypeTableEntryIdStruct) {
+ dest->data.x_struct.fields = allocate_nonzero<ConstExprValue>(dest->type->data.structure.src_field_count);
+ memcpy(dest->data.x_struct.fields, src->data.x_struct.fields, sizeof(ConstExprValue) * dest->type->data.structure.src_field_count);
+ }
}
}
-static void eval_const_expr_implicit_cast(CastOp cast_op,
+static bool eval_const_expr_implicit_cast(IrAnalyze *ira, IrInstruction *source_instr,
+ CastOp cast_op,
ConstExprValue *other_val, TypeTableEntry *other_type,
ConstExprValue *const_val, TypeTableEntry *new_type)
{
@@ -8462,17 +9290,23 @@ static void eval_const_expr_implicit_cast(CastOp cast_op,
case CastOpNoCast:
zig_unreachable();
case CastOpErrSet:
+ case CastOpBitCast:
+ case CastOpPtrOfArrayToSlice:
zig_panic("TODO");
case CastOpNoop:
{
- copy_const_val(const_val, other_val, other_val->special == ConstValSpecialStatic);
+ bool same_global_refs = other_val->special == ConstValSpecialStatic;
+ copy_const_val(const_val, other_val, same_global_refs);
const_val->type = new_type;
break;
}
case CastOpNumLitToConcrete:
- if (other_val->type->id == TypeTableEntryIdNumLitFloat) {
+ if (other_val->type->id == TypeTableEntryIdComptimeFloat) {
assert(new_type->id == TypeTableEntryIdFloat);
switch (new_type->data.floating.bit_count) {
+ case 16:
+ const_val->data.x_f16 = bigfloat_to_f16(&other_val->data.x_bigfloat);
+ break;
case 32:
const_val->data.x_f32 = bigfloat_to_f32(&other_val->data.x_bigfloat);
break;
@@ -8485,7 +9319,7 @@ static void eval_const_expr_implicit_cast(CastOp cast_op,
default:
zig_unreachable();
}
- } else if (other_val->type->id == TypeTableEntryIdNumLitInt) {
+ } else if (other_val->type->id == TypeTableEntryIdComptimeInt) {
bigint_init_bigint(&const_val->data.x_bigint, &other_val->data.x_bigint);
} else {
zig_unreachable();
@@ -8503,6 +9337,9 @@ static void eval_const_expr_implicit_cast(CastOp cast_op,
BigFloat bigfloat;
bigfloat_init_bigint(&bigfloat, &other_val->data.x_bigint);
switch (new_type->data.floating.bit_count) {
+ case 16:
+ const_val->data.x_f16 = bigfloat_to_f16(&bigfloat);
+ break;
case 32:
const_val->data.x_f32 = bigfloat_to_f32(&bigfloat);
break;
@@ -8520,6 +9357,20 @@ static void eval_const_expr_implicit_cast(CastOp cast_op,
}
case CastOpFloatToInt:
float_init_bigint(&const_val->data.x_bigint, other_val);
+ if (new_type->id == TypeTableEntryIdInt) {
+ if (!bigint_fits_in_bits(&const_val->data.x_bigint, new_type->data.integral.bit_count,
+ new_type->data.integral.is_signed))
+ {
+ Buf *int_buf = buf_alloc();
+ bigint_append_buf(int_buf, &const_val->data.x_bigint, 10);
+
+ ir_add_error(ira, source_instr,
+ buf_sprintf("integer value '%s' cannot be stored in type '%s'",
+ buf_ptr(int_buf), buf_ptr(&new_type->name)));
+ return false;
+ }
+ }
+
const_val->special = ConstValSpecialStatic;
break;
case CastOpBoolToInt:
@@ -8527,17 +9378,21 @@ static void eval_const_expr_implicit_cast(CastOp cast_op,
const_val->special = ConstValSpecialStatic;
break;
}
+ return true;
}
static IrInstruction *ir_resolve_cast(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value,
TypeTableEntry *wanted_type, CastOp cast_op, bool need_alloca)
{
- if (value->value.special != ConstValSpecialRuntime &&
+ if ((instr_is_comptime(value) || !type_has_bits(wanted_type)) &&
cast_op != CastOpResizeSlice && cast_op != CastOpBytesToSlice)
{
IrInstruction *result = ir_create_const(&ira->new_irb, source_instr->scope,
source_instr->source_node, wanted_type);
- eval_const_expr_implicit_cast(cast_op, &value->value, value->value.type,
- &result->value, wanted_type);
+ if (!eval_const_expr_implicit_cast(ira, source_instr, cast_op, &value->value, value->value.type,
+ &result->value, wanted_type))
+ {
+ return ira->codegen->invalid_instruction;
+ }
return result;
} else {
IrInstruction *result = ir_build_cast(&ira->new_irb, source_instr->scope, source_instr->source_node, wanted_type, value, cast_op);
@@ -8549,17 +9404,69 @@ static IrInstruction *ir_resolve_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
+static IrInstruction *ir_resolve_ptr_of_array_to_unknown_len_ptr(IrAnalyze *ira, IrInstruction *source_instr,
+ IrInstruction *value, TypeTableEntry *wanted_type)
+{
+ assert(value->value.type->id == TypeTableEntryIdPointer);
+ wanted_type = adjust_ptr_align(ira->codegen, wanted_type, value->value.type->data.pointer.alignment);
+
+ if (instr_is_comptime(value)) {
+ ConstExprValue *pointee = const_ptr_pointee(ira->codegen, &value->value);
+ if (pointee->special != ConstValSpecialRuntime) {
+ IrInstruction *result = ir_create_const(&ira->new_irb, source_instr->scope,
+ source_instr->source_node, wanted_type);
+ result->value.type = wanted_type;
+ result->value.data.x_ptr.special = ConstPtrSpecialBaseArray;
+ result->value.data.x_ptr.mut = value->value.data.x_ptr.mut;
+ result->value.data.x_ptr.data.base_array.array_val = pointee;
+ result->value.data.x_ptr.data.base_array.elem_index = 0;
+ result->value.data.x_ptr.data.base_array.is_cstr = false;
+ return result;
+ }
+ }
+
+ IrInstruction *result = ir_build_cast(&ira->new_irb, source_instr->scope, source_instr->source_node,
+ wanted_type, value, CastOpBitCast);
+ result->value.type = wanted_type;
+ return result;
+}
+
+static IrInstruction *ir_resolve_ptr_of_array_to_slice(IrAnalyze *ira, IrInstruction *source_instr,
+ IrInstruction *value, TypeTableEntry *wanted_type)
+{
+ wanted_type = adjust_slice_align(ira->codegen, wanted_type, value->value.type->data.pointer.alignment);
+
+ if (instr_is_comptime(value)) {
+ ConstExprValue *pointee = const_ptr_pointee(ira->codegen, &value->value);
+ if (pointee->special != ConstValSpecialRuntime) {
+ assert(value->value.type->id == TypeTableEntryIdPointer);
+ TypeTableEntry *array_type = value->value.type->data.pointer.child_type;
+ assert(is_slice(wanted_type));
+ bool is_const = wanted_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.is_const;
+
+ IrInstruction *result = ir_create_const(&ira->new_irb, source_instr->scope,
+ source_instr->source_node, wanted_type);
+ init_const_slice(ira->codegen, &result->value, pointee, 0, array_type->data.array.len, is_const);
+ result->value.data.x_struct.fields[slice_ptr_index].data.x_ptr.mut =
+ value->value.data.x_ptr.mut;
+ result->value.type = wanted_type;
+ return result;
+ }
+ }
+
+ IrInstruction *result = ir_build_cast(&ira->new_irb, source_instr->scope, source_instr->source_node,
+ wanted_type, value, CastOpPtrOfArrayToSlice);
+ result->value.type = wanted_type;
+ ir_add_alloca(ira, result, wanted_type);
+ return result;
+}
+
static bool is_container(TypeTableEntry *type) {
return type->id == TypeTableEntryIdStruct ||
type->id == TypeTableEntryIdEnum ||
type->id == TypeTableEntryIdUnion;
}
-static bool is_u8(TypeTableEntry *type) {
- return type->id == TypeTableEntryIdInt &&
- !type->data.integral.is_signed && type->data.integral.bit_count == 8;
-}
-
static IrBasicBlock *ir_get_new_bb(IrAnalyze *ira, IrBasicBlock *old_bb, IrInstruction *ref_old_instruction) {
assert(old_bb);
@@ -8661,26 +9568,9 @@ static TypeTableEntry *ir_finish_anal(IrAnalyze *ira, TypeTableEntry *result_typ
}
static IrInstruction *ir_get_const(IrAnalyze *ira, IrInstruction *old_instruction) {
- IrInstruction *new_instruction;
- if (old_instruction->id == IrInstructionIdVarPtr) {
- IrInstructionVarPtr *old_var_ptr_instruction = (IrInstructionVarPtr *)old_instruction;
- IrInstructionVarPtr *var_ptr_instruction = ir_create_instruction<IrInstructionVarPtr>(&ira->new_irb,
- old_instruction->scope, old_instruction->source_node);
- var_ptr_instruction->var = old_var_ptr_instruction->var;
- new_instruction = &var_ptr_instruction->base;
- } else if (old_instruction->id == IrInstructionIdFieldPtr) {
- IrInstructionFieldPtr *field_ptr_instruction = ir_create_instruction<IrInstructionFieldPtr>(&ira->new_irb,
- old_instruction->scope, old_instruction->source_node);
- new_instruction = &field_ptr_instruction->base;
- } else if (old_instruction->id == IrInstructionIdElemPtr) {
- IrInstructionElemPtr *elem_ptr_instruction = ir_create_instruction<IrInstructionElemPtr>(&ira->new_irb,
- old_instruction->scope, old_instruction->source_node);
- new_instruction = &elem_ptr_instruction->base;
- } else {
- IrInstructionConst *const_instruction = ir_create_instruction<IrInstructionConst>(&ira->new_irb,
- old_instruction->scope, old_instruction->source_node);
- new_instruction = &const_instruction->base;
- }
+ IrInstructionConst *const_instruction = ir_create_instruction<IrInstructionConst>(&ira->new_irb,
+ old_instruction->scope, old_instruction->source_node);
+ IrInstruction *new_instruction = &const_instruction->base;
new_instruction->value.special = ConstValSpecialStatic;
return new_instruction;
}
@@ -8700,34 +9590,15 @@ static IrInstruction *ir_get_const_ptr(IrAnalyze *ira, IrInstruction *instructio
ConstExprValue *pointee, TypeTableEntry *pointee_type,
ConstPtrMut ptr_mut, bool ptr_is_const, bool ptr_is_volatile, uint32_t ptr_align)
{
- if (pointee_type->id == TypeTableEntryIdMetaType) {
- TypeTableEntry *type_entry = pointee->data.x_type;
- if (type_entry->id == TypeTableEntryIdUnreachable) {
- ir_add_error(ira, instruction, buf_sprintf("pointer to noreturn not allowed"));
- return ira->codegen->invalid_instruction;
- }
-
- IrInstruction *const_instr = ir_get_const(ira, instruction);
- ConstExprValue *const_val = &const_instr->value;
- const_val->type = pointee_type;
- type_ensure_zero_bits_known(ira->codegen, type_entry);
- if (type_is_invalid(type_entry)) {
- return ira->codegen->invalid_instruction;
- }
- const_val->data.x_type = get_pointer_to_type_extra(ira->codegen, type_entry,
- ptr_is_const, ptr_is_volatile, get_abi_alignment(ira->codegen, type_entry), 0, 0);
- return const_instr;
- } else {
- TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, pointee_type,
- ptr_is_const, ptr_is_volatile, ptr_align, 0, 0);
- IrInstruction *const_instr = ir_get_const(ira, instruction);
- ConstExprValue *const_val = &const_instr->value;
- const_val->type = ptr_type;
- const_val->data.x_ptr.special = ConstPtrSpecialRef;
- const_val->data.x_ptr.mut = ptr_mut;
- const_val->data.x_ptr.data.ref.pointee = pointee;
- return const_instr;
- }
+ TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, pointee_type,
+ ptr_is_const, ptr_is_volatile, PtrLenSingle, ptr_align, 0, 0);
+ IrInstruction *const_instr = ir_get_const(ira, instruction);
+ ConstExprValue *const_val = &const_instr->value;
+ const_val->type = ptr_type;
+ const_val->data.x_ptr.special = ConstPtrSpecialRef;
+ const_val->data.x_ptr.mut = ptr_mut;
+ const_val->data.x_ptr.data.ref.pointee = pointee;
+ return const_instr;
}
static TypeTableEntry *ir_analyze_const_ptr(IrAnalyze *ira, IrInstruction *instruction,
@@ -8757,6 +9628,9 @@ static ConstExprValue *ir_resolve_const(IrAnalyze *ira, IrInstruction *value, Un
case ConstValSpecialStatic:
return &value->value;
case ConstValSpecialRuntime:
+ if (!type_has_bits(value->value.type)) {
+ return &value->value;
+ }
ir_add_error(ira, value, buf_sprintf("unable to evaluate constant expression"));
return nullptr;
case ConstValSpecialUndef:
@@ -8861,7 +9735,7 @@ static FnTableEntry *ir_resolve_fn(IrAnalyze *ira, IrInstruction *fn_value) {
}
static IrInstruction *ir_analyze_maybe_wrap(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value, TypeTableEntry *wanted_type) {
- assert(wanted_type->id == TypeTableEntryIdMaybe);
+ assert(wanted_type->id == TypeTableEntryIdOptional);
if (instr_is_comptime(value)) {
TypeTableEntry *payload_type = wanted_type->data.maybe.child_type;
@@ -8869,21 +9743,25 @@ static IrInstruction *ir_analyze_maybe_wrap(IrAnalyze *ira, IrInstruction *sourc
if (type_is_invalid(casted_payload->value.type))
return ira->codegen->invalid_instruction;
- ConstExprValue *val = ir_resolve_const(ira, casted_payload, UndefBad);
+ ConstExprValue *val = ir_resolve_const(ira, casted_payload, UndefOk);
if (!val)
return ira->codegen->invalid_instruction;
IrInstructionConst *const_instruction = ir_create_instruction<IrInstructionConst>(&ira->new_irb,
source_instr->scope, source_instr->source_node);
- const_instruction->base.value.type = wanted_type;
const_instruction->base.value.special = ConstValSpecialStatic;
- const_instruction->base.value.data.x_maybe = val;
+ if (get_codegen_ptr_type(wanted_type) != nullptr) {
+ copy_const_val(&const_instruction->base.value, val, val->data.x_ptr.mut == ConstPtrMutComptimeConst);
+ } else {
+ const_instruction->base.value.data.x_optional = val;
+ }
+ const_instruction->base.value.type = wanted_type;
return &const_instruction->base;
}
IrInstruction *result = ir_build_maybe_wrap(&ira->new_irb, source_instr->scope, source_instr->source_node, value);
result->value.type = wanted_type;
- result->value.data.rh_maybe = RuntimeHintMaybeNonNull;
+ result->value.data.rh_maybe = RuntimeHintOptionalNonNull;
ir_add_alloca(ira, result, wanted_type);
return result;
}
@@ -8930,7 +9808,7 @@ static IrInstruction *ir_analyze_err_set_cast(IrAnalyze *ira, IrInstruction *sou
if (!val)
return ira->codegen->invalid_instruction;
- if (!resolve_inferred_error_set(ira, wanted_type, source_instr->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, wanted_type, source_instr->source_node)) {
return ira->codegen->invalid_instruction;
}
if (!type_is_global_error_set(wanted_type)) {
@@ -9025,16 +9903,21 @@ static IrInstruction *ir_analyze_cast_ref(IrAnalyze *ira, IrInstruction *source_
}
static IrInstruction *ir_analyze_null_to_maybe(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value, TypeTableEntry *wanted_type) {
- assert(wanted_type->id == TypeTableEntryIdMaybe);
+ assert(wanted_type->id == TypeTableEntryIdOptional);
assert(instr_is_comptime(value));
ConstExprValue *val = ir_resolve_const(ira, value, UndefBad);
assert(val);
IrInstructionConst *const_instruction = ir_create_instruction<IrInstructionConst>(&ira->new_irb, source_instr->scope, source_instr->source_node);
- const_instruction->base.value.type = wanted_type;
const_instruction->base.value.special = ConstValSpecialStatic;
- const_instruction->base.value.data.x_maybe = nullptr;
+ if (get_codegen_ptr_type(wanted_type) != nullptr) {
+ const_instruction->base.value.data.x_ptr.special = ConstPtrSpecialHardCodedAddr;
+ const_instruction->base.value.data.x_ptr.data.hard_coded_addr.addr = 0;
+ } else {
+ const_instruction->base.value.data.x_optional = nullptr;
+ }
+ const_instruction->base.value.type = wanted_type;
return &const_instruction->base;
}
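Editorial note: in this hunk and in the maybe-wrap hunk above, an optional whose payload is a pointer no longer stores a separate x_optional payload; the constant reuses the pointer's own representation, with a hard-coded address of 0 standing for null. A tiny illustrative stand-in for that encoding (simplified names, not the compiler's types):

#include <stdint.h>

// Optional pointer that shares the pointer's storage: address 0 encodes
// null, any nonzero value is a real pointee address.
struct OptionalPtrRepr {
    uintptr_t addr;
    bool is_null() const { return addr == 0; }
};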
@@ -9044,25 +9927,17 @@ static IrInstruction *ir_get_ref(IrAnalyze *ira, IrInstruction *source_instructi
if (type_is_invalid(value->value.type))
return ira->codegen->invalid_instruction;
- if (value->id == IrInstructionIdLoadPtr) {
- IrInstructionLoadPtr *load_ptr_inst = (IrInstructionLoadPtr *) value;
- if (load_ptr_inst->ptr->value.type->data.pointer.is_const) {
- return load_ptr_inst->ptr;
- }
- }
-
if (instr_is_comptime(value)) {
ConstExprValue *val = ir_resolve_const(ira, value, UndefOk);
if (!val)
return ira->codegen->invalid_instruction;
- bool final_is_const = (value->value.type->id == TypeTableEntryIdMetaType) ? is_const : true;
return ir_get_const_ptr(ira, source_instruction, val, value->value.type,
- ConstPtrMutComptimeConst, final_is_const, is_volatile,
+ ConstPtrMutComptimeConst, is_const, is_volatile,
get_abi_alignment(ira->codegen, value->value.type));
}
TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, value->value.type,
- is_const, is_volatile, get_abi_alignment(ira->codegen, value->value.type), 0, 0);
+ is_const, is_volatile, PtrLenSingle, get_abi_alignment(ira->codegen, value->value.type), 0, 0);
IrInstruction *new_instruction = ir_build_ref(&ira->new_irb, source_instruction->scope,
source_instruction->source_node, value, is_const, is_volatile);
new_instruction->value.type = ptr_type;
@@ -9114,6 +9989,8 @@ static IrInstruction *ir_analyze_array_to_slice(IrAnalyze *ira, IrInstruction *s
IrInstruction *result = ir_build_slice(&ira->new_irb, source_instr->scope,
source_instr->source_node, array_ptr, start, end, false);
result->value.type = wanted_type;
+ result->value.data.rh_slice.id = RuntimeHintSliceIdLen;
+ result->value.data.rh_slice.len = array_type->data.array.len;
ir_add_alloca(ira, result, result->value.type);
return result;
@@ -9202,6 +10079,8 @@ static IrInstruction *ir_analyze_enum_to_union(IrAnalyze *ira, IrInstruction *so
TypeUnionField *union_field = find_union_field_by_tag(wanted_type, &val->data.x_enum_tag);
assert(union_field != nullptr);
type_ensure_zero_bits_known(ira->codegen, union_field->type_entry);
+ if (type_is_invalid(union_field->type_entry))
+ return ira->codegen->invalid_instruction;
if (!union_field->type_entry->zero_bits) {
AstNode *field_node = wanted_type->data.unionation.decl_node->data.container_decl.fields.at(
union_field->enum_field->decl_index);
@@ -9332,7 +10211,7 @@ static IrInstruction *ir_analyze_int_to_enum(IrAnalyze *ira, IrInstruction *sour
}
IrInstruction *result = ir_build_int_to_enum(&ira->new_irb, source_instr->scope,
- source_instr->source_node, target);
+ source_instr->source_node, nullptr, target);
result->value.type = wanted_type;
return result;
}
@@ -9346,9 +10225,9 @@ static IrInstruction *ir_analyze_number_to_literal(IrAnalyze *ira, IrInstruction
IrInstruction *result = ir_create_const(&ira->new_irb, source_instr->scope,
source_instr->source_node, wanted_type);
- if (wanted_type->id == TypeTableEntryIdNumLitFloat) {
+ if (wanted_type->id == TypeTableEntryIdComptimeFloat) {
float_init_float(&result->value, val);
- } else if (wanted_type->id == TypeTableEntryIdNumLitInt) {
+ } else if (wanted_type->id == TypeTableEntryIdComptimeInt) {
bigint_init_bigint(&result->value.data.x_bigint, &val->data.x_bigint);
} else {
zig_unreachable();
@@ -9371,7 +10250,7 @@ static IrInstruction *ir_analyze_int_to_err(IrAnalyze *ira, IrInstruction *sourc
IrInstruction *result = ir_create_const(&ira->new_irb, source_instr->scope,
source_instr->source_node, wanted_type);
- if (!resolve_inferred_error_set(ira, wanted_type, source_instr->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, wanted_type, source_instr->source_node)) {
return ira->codegen->invalid_instruction;
}
@@ -9469,7 +10348,7 @@ static IrInstruction *ir_analyze_err_to_int(IrAnalyze *ira, IrInstruction *sourc
zig_unreachable();
}
if (!type_is_global_error_set(err_set_type)) {
- if (!resolve_inferred_error_set(ira, err_set_type, source_instr->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, err_set_type, source_instr->source_node)) {
return ira->codegen->invalid_instruction;
}
if (err_set_type->data.error_set.err_count == 0) {
@@ -9501,6 +10380,140 @@ static IrInstruction *ir_analyze_err_to_int(IrAnalyze *ira, IrInstruction *sourc
return result;
}
+static IrInstruction *ir_analyze_ptr_to_array(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *target,
+ TypeTableEntry *wanted_type)
+{
+ assert(wanted_type->id == TypeTableEntryIdPointer);
+ wanted_type = adjust_ptr_align(ira->codegen, wanted_type, target->value.type->data.pointer.alignment);
+ TypeTableEntry *array_type = wanted_type->data.pointer.child_type;
+ assert(array_type->id == TypeTableEntryIdArray);
+ assert(array_type->data.array.len == 1);
+
+ if (instr_is_comptime(target)) {
+ ConstExprValue *val = ir_resolve_const(ira, target, UndefBad);
+ if (!val)
+ return ira->codegen->invalid_instruction;
+
+ assert(val->type->id == TypeTableEntryIdPointer);
+ ConstExprValue *pointee = const_ptr_pointee(ira->codegen, val);
+ if (pointee->special != ConstValSpecialRuntime) {
+ ConstExprValue *array_val = create_const_vals(1);
+ array_val->special = ConstValSpecialStatic;
+ array_val->type = array_type;
+ array_val->data.x_array.special = ConstArraySpecialNone;
+ array_val->data.x_array.s_none.elements = pointee;
+ array_val->data.x_array.s_none.parent.id = ConstParentIdScalar;
+ array_val->data.x_array.s_none.parent.data.p_scalar.scalar_val = pointee;
+
+ IrInstructionConst *const_instruction = ir_create_instruction<IrInstructionConst>(&ira->new_irb,
+ source_instr->scope, source_instr->source_node);
+ const_instruction->base.value.type = wanted_type;
+ const_instruction->base.value.special = ConstValSpecialStatic;
+ const_instruction->base.value.data.x_ptr.special = ConstPtrSpecialRef;
+ const_instruction->base.value.data.x_ptr.data.ref.pointee = array_val;
+ const_instruction->base.value.data.x_ptr.mut = val->data.x_ptr.mut;
+ return &const_instruction->base;
+ }
+ }
+
+ // pointer to array and pointer to single item are represented the same way at runtime
+ IrInstruction *result = ir_build_cast(&ira->new_irb, target->scope, target->source_node,
+ wanted_type, target, CastOpBitCast);
+ result->value.type = wanted_type;
+ return result;
+}
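+
The runtime case of ir_analyze_ptr_to_array lowers to a plain bit cast because a pointer to a single item and a pointer to a one-element array hold the same address. A standalone C++ analogy, offered as an illustration rather than compiler code:

    // Illustration: &arr[0] (pointer to one element) and &arr (pointer to the whole
    // one-element array) carry the same address, so converting between the two views
    // does not change the runtime value.
    int main() {
        int arr[1] = {42};
        int *as_single = &arr[0];
        int (*as_array)[1] = &arr;
        return (static_cast<void *>(as_single) == static_cast<void *>(as_array)) ? 0 : 1;
    }
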
+
+static void report_recursive_error(IrAnalyze *ira, AstNode *source_node, ConstCastOnly *cast_result,
+ ErrorMsg *parent_msg)
+{
+ switch (cast_result->id) {
+ case ConstCastResultIdOk:
+ zig_unreachable();
+ case ConstCastResultIdOptionalChild: {
+ ErrorMsg *msg = add_error_note(ira->codegen, parent_msg, source_node,
+ buf_sprintf("optional type child '%s' cannot cast into optional type child '%s'",
+ buf_ptr(&cast_result->data.optional->actual_child->name),
+ buf_ptr(&cast_result->data.optional->wanted_child->name)));
+ report_recursive_error(ira, source_node, &cast_result->data.optional->child, msg);
+ break;
+ }
+ case ConstCastResultIdErrorUnionErrorSet: {
+ ErrorMsg *msg = add_error_note(ira->codegen, parent_msg, source_node,
+ buf_sprintf("error set '%s' cannot cast into error set '%s'",
+ buf_ptr(&cast_result->data.error_union_error_set->actual_err_set->name),
+ buf_ptr(&cast_result->data.error_union_error_set->wanted_err_set->name)));
+ report_recursive_error(ira, source_node, &cast_result->data.error_union_error_set->child, msg);
+ break;
+ }
+ case ConstCastResultIdErrSet: {
+ ZigList<ErrorTableEntry *> *missing_errors = &cast_result->data.error_set_mismatch->missing_errors;
+ for (size_t i = 0; i < missing_errors->length; i += 1) {
+ ErrorTableEntry *error_entry = missing_errors->at(i);
+ add_error_note(ira->codegen, parent_msg, error_entry->decl_node,
+ buf_sprintf("'error.%s' not a member of destination error set", buf_ptr(&error_entry->name)));
+ }
+ break;
+ }
+ case ConstCastResultIdErrSetGlobal: {
+ add_error_note(ira->codegen, parent_msg, source_node,
+ buf_sprintf("cannot cast global error set into smaller set"));
+ break;
+ }
+ case ConstCastResultIdPointerChild: {
+ ErrorMsg *msg = add_error_note(ira->codegen, parent_msg, source_node,
+ buf_sprintf("pointer type child '%s' cannot cast into pointer type child '%s'",
+ buf_ptr(&cast_result->data.pointer_mismatch->actual_child->name),
+ buf_ptr(&cast_result->data.pointer_mismatch->wanted_child->name)));
+ report_recursive_error(ira, source_node, &cast_result->data.pointer_mismatch->child, msg);
+ break;
+ }
+ case ConstCastResultIdSliceChild: {
+ ErrorMsg *msg = add_error_note(ira->codegen, parent_msg, source_node,
+ buf_sprintf("slice type child '%s' cannot cast into slice type child '%s'",
+ buf_ptr(&cast_result->data.slice_mismatch->actual_child->name),
+ buf_ptr(&cast_result->data.slice_mismatch->wanted_child->name)));
+ report_recursive_error(ira, source_node, &cast_result->data.slice_mismatch->child, msg);
+ break;
+ }
+ case ConstCastResultIdErrorUnionPayload: {
+ ErrorMsg *msg = add_error_note(ira->codegen, parent_msg, source_node,
+ buf_sprintf("error union payload '%s' cannot cast into error union payload '%s'",
+ buf_ptr(&cast_result->data.error_union_payload->actual_payload->name),
+ buf_ptr(&cast_result->data.error_union_payload->wanted_payload->name)));
+ report_recursive_error(ira, source_node, &cast_result->data.error_union_payload->child, msg);
+ break;
+ }
+ case ConstCastResultIdType: {
+ AstNode *wanted_decl_node = type_decl_node(cast_result->data.type_mismatch->wanted_type);
+ AstNode *actual_decl_node = type_decl_node(cast_result->data.type_mismatch->actual_type);
+ if (wanted_decl_node != nullptr) {
+ add_error_note(ira->codegen, parent_msg, wanted_decl_node,
+ buf_sprintf("%s declared here",
+ buf_ptr(&cast_result->data.type_mismatch->wanted_type->name)));
+ }
+ if (actual_decl_node != nullptr) {
+ add_error_note(ira->codegen, parent_msg, actual_decl_node,
+ buf_sprintf("%s declared here",
+ buf_ptr(&cast_result->data.type_mismatch->actual_type->name)));
+ }
+ break;
+ }
+ case ConstCastResultIdFnAlign: // TODO
+ case ConstCastResultIdFnCC: // TODO
+ case ConstCastResultIdFnVarArgs: // TODO
+ case ConstCastResultIdFnIsGeneric: // TODO
+ case ConstCastResultIdFnReturnType: // TODO
+ case ConstCastResultIdFnArgCount: // TODO
+ case ConstCastResultIdFnGenericArgCount: // TODO
+ case ConstCastResultIdFnArg: // TODO
+ case ConstCastResultIdFnArgNoAlias: // TODO
+ case ConstCastResultIdUnresolvedInferredErrSet: // TODO
+ case ConstCastResultIdAsyncAllocatorType: // TODO
+ case ConstCastResultIdNullWrapPtr: // TODO
+ break;
+ }
+}
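+
report_recursive_error attaches one note per nesting level of the failed const cast to the top-level error, then recurses into the child mismatch. A small standalone sketch of that reporting pattern; Mismatch and report_recursive are illustrative names and the messages are hypothetical:

    // Sketch: every level appends a note describing its own mismatch, then recurses
    // into the more specific child mismatch, yielding a chain of notes.
    #include <cstdio>
    #include <string>

    struct Mismatch {
        std::string wanted;
        std::string actual;
        const Mismatch *child;  // more specific mismatch, or nullptr
    };

    static void report_recursive(const Mismatch *m, int depth) {
        if (m == nullptr)
            return;
        std::printf("%*snote: '%s' cannot cast into '%s'\n",
                    depth * 2, "", m->actual.c_str(), m->wanted.c_str());
        report_recursive(m->child, depth + 1);
    }

    int main() {
        Mismatch inner = {"u32", "i32", nullptr};
        Mismatch outer = {"*const u32", "*const i32", &inner};
        std::printf("error: expected type '?*const u32', found '?*const i32'\n");
        report_recursive(&outer, 1);
        return 0;
    }
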
+
static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_instr,
TypeTableEntry *wanted_type, IrInstruction *value)
{
@@ -9511,60 +10524,52 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
return ira->codegen->invalid_instruction;
}
- // explicit match or non-const to const
- if (types_match_const_cast_only(ira, wanted_type, actual_type, source_node).id == ConstCastResultIdOk) {
+ // perfect match or non-const to const
+ ConstCastOnly const_cast_result = types_match_const_cast_only(ira, wanted_type, actual_type,
+ source_node, false);
+ if (const_cast_result.id == ConstCastResultIdOk) {
return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpNoop, false);
}
- // explicit cast from bool to int
+ // widening conversion
if (wanted_type->id == TypeTableEntryIdInt &&
- actual_type->id == TypeTableEntryIdBool)
- {
- return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpBoolToInt, false);
- }
-
- // explicit widening or shortening cast
- if ((wanted_type->id == TypeTableEntryIdInt &&
- actual_type->id == TypeTableEntryIdInt) ||
- (wanted_type->id == TypeTableEntryIdFloat &&
- actual_type->id == TypeTableEntryIdFloat))
+ actual_type->id == TypeTableEntryIdInt &&
+ wanted_type->data.integral.is_signed == actual_type->data.integral.is_signed &&
+ wanted_type->data.integral.bit_count >= actual_type->data.integral.bit_count)
{
return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
}
- // explicit error set cast
- if (wanted_type->id == TypeTableEntryIdErrorSet &&
- actual_type->id == TypeTableEntryIdErrorSet)
+ // small enough unsigned ints can get casted to large enough signed ints
+ if (wanted_type->id == TypeTableEntryIdInt && wanted_type->data.integral.is_signed &&
+ actual_type->id == TypeTableEntryIdInt && !actual_type->data.integral.is_signed &&
+ wanted_type->data.integral.bit_count > actual_type->data.integral.bit_count)
{
- return ir_analyze_err_set_cast(ira, source_instr, value, wanted_type);
+ return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
}
- // explicit cast from int to float
+ // float widening conversion
if (wanted_type->id == TypeTableEntryIdFloat &&
- actual_type->id == TypeTableEntryIdInt)
+ actual_type->id == TypeTableEntryIdFloat &&
+ wanted_type->data.floating.bit_count >= actual_type->data.floating.bit_count)
{
- return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpIntToFloat, false);
+ return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
}
- // explicit cast from float to int
- if (wanted_type->id == TypeTableEntryIdInt &&
- actual_type->id == TypeTableEntryIdFloat)
- {
- return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpFloatToInt, false);
- }
- // explicit cast from [N]T to []const T
+ // cast from [N]T to []const T
if (is_slice(wanted_type) && actual_type->id == TypeTableEntryIdArray) {
TypeTableEntry *ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
assert(ptr_type->id == TypeTableEntryIdPointer);
if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
+ source_node, false).id == ConstCastResultIdOk)
{
return ir_analyze_array_to_slice(ira, source_instr, value, wanted_type);
}
}
- // explicit cast from &const [N]T to []const T
+ // cast from *const [N]T to []const T
if (is_slice(wanted_type) &&
actual_type->id == TypeTableEntryIdPointer &&
actual_type->data.pointer.is_const &&
@@ -9576,13 +10581,14 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
TypeTableEntry *array_type = actual_type->data.pointer.child_type;
if ((ptr_type->data.pointer.is_const || array_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, array_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, array_type->data.array.child_type,
+ source_node, false).id == ConstCastResultIdOk)
{
return ir_analyze_array_to_slice(ira, source_instr, value, wanted_type);
}
}
- // explicit cast from [N]T to &const []const N
+ // cast from [N]T to *const []const T
if (wanted_type->id == TypeTableEntryIdPointer &&
wanted_type->data.pointer.is_const &&
is_slice(wanted_type->data.pointer.child_type) &&
@@ -9592,7 +10598,8 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
wanted_type->data.pointer.child_type->data.structure.fields[slice_ptr_index].type_entry;
assert(ptr_type->id == TypeTableEntryIdPointer);
if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
+ source_node, false).id == ConstCastResultIdOk)
{
IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.pointer.child_type, value);
if (type_is_invalid(cast1->value.type))
@@ -9606,8 +10613,8 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from [N]T to ?[]const N
- if (wanted_type->id == TypeTableEntryIdMaybe &&
+ // cast from [N]T to ?[]const T
+ if (wanted_type->id == TypeTableEntryIdOptional &&
is_slice(wanted_type->data.maybe.child_type) &&
actual_type->id == TypeTableEntryIdArray)
{
@@ -9615,7 +10622,8 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
wanted_type->data.maybe.child_type->data.structure.fields[slice_ptr_index].type_entry;
assert(ptr_type->id == TypeTableEntryIdPointer);
if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
+ source_node, false).id == ConstCastResultIdOk)
{
IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.maybe.child_type, value);
if (type_is_invalid(cast1->value.type))
@@ -9629,59 +10637,47 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from []T to []u8 or []u8 to []T
- if (is_slice(wanted_type) && is_slice(actual_type)) {
- TypeTableEntry *wanted_ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
- TypeTableEntry *actual_ptr_type = actual_type->data.structure.fields[slice_ptr_index].type_entry;
- if ((is_u8(wanted_ptr_type->data.pointer.child_type) || is_u8(actual_ptr_type->data.pointer.child_type)) &&
- (wanted_ptr_type->data.pointer.is_const || !actual_ptr_type->data.pointer.is_const))
- {
- uint32_t src_align_bytes = get_ptr_align(actual_ptr_type);
- uint32_t dest_align_bytes = get_ptr_align(wanted_ptr_type);
-
- if (dest_align_bytes > src_align_bytes) {
- ErrorMsg *msg = ir_add_error(ira, source_instr,
- buf_sprintf("cast increases pointer alignment"));
- add_error_note(ira->codegen, msg, source_instr->source_node,
- buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&actual_type->name), src_align_bytes));
- add_error_note(ira->codegen, msg, source_instr->source_node,
- buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&wanted_type->name), dest_align_bytes));
- return ira->codegen->invalid_instruction;
- }
-
- if (!ir_emit_global_runtime_side_effect(ira, source_instr))
- return ira->codegen->invalid_instruction;
- return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpResizeSlice, true);
- }
+ // *[N]T to [*]T
+ if (wanted_type->id == TypeTableEntryIdPointer &&
+ wanted_type->data.pointer.ptr_len == PtrLenUnknown &&
+ actual_type->id == TypeTableEntryIdPointer &&
+ actual_type->data.pointer.ptr_len == PtrLenSingle &&
+ actual_type->data.pointer.child_type->id == TypeTableEntryIdArray &&
+ actual_type->data.pointer.alignment >= wanted_type->data.pointer.alignment &&
+ types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
+ actual_type->data.pointer.child_type->data.array.child_type, source_node,
+ !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
+ {
+ return ir_resolve_ptr_of_array_to_unknown_len_ptr(ira, source_instr, value, wanted_type);
}
- // explicit cast from [N]u8 to []const T
+ // *[N]T to []T
if (is_slice(wanted_type) &&
- wanted_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.is_const &&
- actual_type->id == TypeTableEntryIdArray &&
- is_u8(actual_type->data.array.child_type))
+ actual_type->id == TypeTableEntryIdPointer &&
+ actual_type->data.pointer.ptr_len == PtrLenSingle &&
+ actual_type->data.pointer.child_type->id == TypeTableEntryIdArray)
{
- if (!ir_emit_global_runtime_side_effect(ira, source_instr))
- return ira->codegen->invalid_instruction;
- uint64_t child_type_size = type_size(ira->codegen,
- wanted_type->data.structure.fields[slice_ptr_index].type_entry->data.pointer.child_type);
- if (actual_type->data.array.len % child_type_size == 0) {
- return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpBytesToSlice, true);
- } else {
- ir_add_error_node(ira, source_instr->source_node,
- buf_sprintf("unable to convert %s to %s: size mismatch",
- buf_ptr(&actual_type->name), buf_ptr(&wanted_type->name)));
- return ira->codegen->invalid_instruction;
+ TypeTableEntry *slice_ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
+ assert(slice_ptr_type->id == TypeTableEntryIdPointer);
+ if (types_match_const_cast_only(ira, slice_ptr_type->data.pointer.child_type,
+ actual_type->data.pointer.child_type->data.array.child_type, source_node,
+ !slice_ptr_type->data.pointer.is_const).id == ConstCastResultIdOk)
+ {
+ return ir_resolve_ptr_of_array_to_slice(ira, source_instr, value, wanted_type);
}
}
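
The new *[N]T to []T path works because a slice is only a pointer plus a length, and a pointer to a fixed-size array supplies both. A standalone C++ analogy, assuming an illustrative SliceInt struct rather than the compiler's slice layout:

    // A slice modeled as { pointer, length }; the length comes from the array type,
    // so no runtime information is lost when building the slice.
    #include <cstddef>

    struct SliceInt {
        const int *ptr;
        std::size_t len;
    };

    template <std::size_t N>
    static SliceInt slice_from_array(const int (&arr)[N]) {
        return SliceInt{arr, N};  // first element pointer + compile-time length
    }

    int main() {
        int data[3] = {1, 2, 3};
        SliceInt s = slice_from_array(data);
        return (s.len == 3 && s.ptr[2] == 3) ? 0 : 1;
    }
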
- // explicit cast from child type of maybe type to maybe type
- if (wanted_type->id == TypeTableEntryIdMaybe) {
+
+ // cast from T to ?T
+ // note that the *T to ?*T case is handled via the "ConstCastOnly" mechanism
+ if (wanted_type->id == TypeTableEntryIdOptional) {
TypeTableEntry *wanted_child_type = wanted_type->data.maybe.child_type;
- if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node).id == ConstCastResultIdOk) {
+ if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node,
+ false).id == ConstCastResultIdOk)
+ {
return ir_analyze_maybe_wrap(ira, source_instr, value, wanted_type);
- } else if (actual_type->id == TypeTableEntryIdNumLitInt ||
- actual_type->id == TypeTableEntryIdNumLitFloat)
+ } else if (actual_type->id == TypeTableEntryIdComptimeInt ||
+ actual_type->id == TypeTableEntryIdComptimeFloat)
{
if (ir_num_lit_fits_in_other_type(ira, value, wanted_child_type, true)) {
return ir_analyze_maybe_wrap(ira, source_instr, value, wanted_type);
@@ -9704,19 +10700,21 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from null literal to maybe type
- if (wanted_type->id == TypeTableEntryIdMaybe &&
- actual_type->id == TypeTableEntryIdNullLit)
+ // cast from null literal to maybe type
+ if (wanted_type->id == TypeTableEntryIdOptional &&
+ actual_type->id == TypeTableEntryIdNull)
{
return ir_analyze_null_to_maybe(ira, source_instr, value, wanted_type);
}
- // explicit cast from child type of error type to error type
+ // cast from child type of error type to error type
if (wanted_type->id == TypeTableEntryIdErrorUnion) {
- if (types_match_const_cast_only(ira, wanted_type->data.error_union.payload_type, actual_type, source_node).id == ConstCastResultIdOk) {
+ if (types_match_const_cast_only(ira, wanted_type->data.error_union.payload_type, actual_type,
+ source_node, false).id == ConstCastResultIdOk)
+ {
return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
- } else if (actual_type->id == TypeTableEntryIdNumLitInt ||
- actual_type->id == TypeTableEntryIdNumLitFloat)
+ } else if (actual_type->id == TypeTableEntryIdComptimeInt ||
+ actual_type->id == TypeTableEntryIdComptimeFloat)
{
if (ir_num_lit_fits_in_other_type(ira, value, wanted_type->data.error_union.payload_type, true)) {
return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
@@ -9726,7 +10724,7 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from [N]T to %[]const T
+ // cast from [N]T to E![]const T
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
is_slice(wanted_type->data.error_union.payload_type) &&
actual_type->id == TypeTableEntryIdArray)
@@ -9735,7 +10733,8 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
wanted_type->data.error_union.payload_type->data.structure.fields[slice_ptr_index].type_entry;
assert(ptr_type->id == TypeTableEntryIdPointer);
if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
- types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type, source_node).id == ConstCastResultIdOk)
+ types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
+ source_node, false).id == ConstCastResultIdOk)
{
IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
if (type_is_invalid(cast1->value.type))
@@ -9749,23 +10748,23 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from error set to error union type
+ // cast from error set to error union type
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
actual_type->id == TypeTableEntryIdErrorSet)
{
return ir_analyze_err_wrap_code(ira, source_instr, value, wanted_type);
}
- // explicit cast from T to %?T
+ // cast from T to E!?T
if (wanted_type->id == TypeTableEntryIdErrorUnion &&
- wanted_type->data.error_union.payload_type->id == TypeTableEntryIdMaybe &&
- actual_type->id != TypeTableEntryIdMaybe)
+ wanted_type->data.error_union.payload_type->id == TypeTableEntryIdOptional &&
+ actual_type->id != TypeTableEntryIdOptional)
{
TypeTableEntry *wanted_child_type = wanted_type->data.error_union.payload_type->data.maybe.child_type;
- if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node).id == ConstCastResultIdOk ||
- actual_type->id == TypeTableEntryIdNullLit ||
- actual_type->id == TypeTableEntryIdNumLitInt ||
- actual_type->id == TypeTableEntryIdNumLitFloat)
+ if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node, false).id == ConstCastResultIdOk ||
+ actual_type->id == TypeTableEntryIdNull ||
+ actual_type->id == TypeTableEntryIdComptimeInt ||
+ actual_type->id == TypeTableEntryIdComptimeFloat)
{
IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
if (type_is_invalid(cast1->value.type))
@@ -9779,11 +10778,14 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from number literal to another type
- // explicit cast from number literal to &const integer
- if (actual_type->id == TypeTableEntryIdNumLitFloat ||
- actual_type->id == TypeTableEntryIdNumLitInt)
+ // cast from number literal to another type
+ // cast from number literal to *const integer
+ if (actual_type->id == TypeTableEntryIdComptimeFloat ||
+ actual_type->id == TypeTableEntryIdComptimeInt)
{
+ ensure_complete_type(ira->codegen, wanted_type);
+ if (type_is_invalid(wanted_type))
+ return ira->codegen->invalid_instruction;
if (wanted_type->id == TypeTableEntryIdEnum) {
IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.enumeration.tag_int_type, value);
if (type_is_invalid(cast1->value.type))
@@ -9808,9 +10810,9 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
return cast2;
} else if (ir_num_lit_fits_in_other_type(ira, value, wanted_type, true)) {
CastOp op;
- if ((actual_type->id == TypeTableEntryIdNumLitFloat &&
+ if ((actual_type->id == TypeTableEntryIdComptimeFloat &&
wanted_type->id == TypeTableEntryIdFloat) ||
- (actual_type->id == TypeTableEntryIdNumLitInt &&
+ (actual_type->id == TypeTableEntryIdComptimeInt &&
wanted_type->id == TypeTableEntryIdInt))
{
op = CastOpNumLitToConcrete;
@@ -9827,41 +10829,16 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from typed number to integer or float literal.
+ // cast from typed number to integer or float literal.
// works when the number is known at compile time
if (instr_is_comptime(value) &&
- ((actual_type->id == TypeTableEntryIdInt && wanted_type->id == TypeTableEntryIdNumLitInt) ||
- (actual_type->id == TypeTableEntryIdFloat && wanted_type->id == TypeTableEntryIdNumLitFloat)))
+ ((actual_type->id == TypeTableEntryIdInt && wanted_type->id == TypeTableEntryIdComptimeInt) ||
+ (actual_type->id == TypeTableEntryIdFloat && wanted_type->id == TypeTableEntryIdComptimeFloat)))
{
return ir_analyze_number_to_literal(ira, source_instr, value, wanted_type);
}
- // explicit cast from T!void to integer type which can fit it
- bool actual_type_is_void_err = actual_type->id == TypeTableEntryIdErrorUnion &&
- !type_has_bits(actual_type->data.error_union.payload_type);
- bool actual_type_is_err_set = actual_type->id == TypeTableEntryIdErrorSet;
- if ((actual_type_is_void_err || actual_type_is_err_set) && wanted_type->id == TypeTableEntryIdInt) {
- return ir_analyze_err_to_int(ira, source_instr, value, wanted_type);
- }
-
- // explicit cast from integer to error set
- if (wanted_type->id == TypeTableEntryIdErrorSet && actual_type->id == TypeTableEntryIdInt &&
- !actual_type->data.integral.is_signed)
- {
- return ir_analyze_int_to_err(ira, source_instr, value, wanted_type);
- }
-
- // explicit cast from integer to enum type with no payload
- if (actual_type->id == TypeTableEntryIdInt && wanted_type->id == TypeTableEntryIdEnum) {
- return ir_analyze_int_to_enum(ira, source_instr, value, wanted_type);
- }
-
- // explicit cast from enum type with no payload to integer
- if (wanted_type->id == TypeTableEntryIdInt && actual_type->id == TypeTableEntryIdEnum) {
- return ir_analyze_enum_to_int(ira, source_instr, value, wanted_type);
- }
-
- // explicit cast from union to the enum type of the union
+ // cast from union to the enum type of the union
if (actual_type->id == TypeTableEntryIdUnion && wanted_type->id == TypeTableEntryIdEnum) {
type_ensure_zero_bits_known(ira->codegen, actual_type);
if (type_is_invalid(actual_type))
@@ -9872,7 +10849,7 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit enum to union which has the enum as the tag type
+ // enum to union which has the enum as the tag type
if (wanted_type->id == TypeTableEntryIdUnion && actual_type->id == TypeTableEntryIdEnum &&
(wanted_type->data.unionation.decl_node->data.container_decl.auto_enum ||
wanted_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr))
@@ -9883,7 +10860,7 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit enum to &const union which has the enum as the tag type
+ // enum to *const union which has the enum as the tag type
if (actual_type->id == TypeTableEntryIdEnum && wanted_type->id == TypeTableEntryIdPointer) {
TypeTableEntry *union_type = wanted_type->data.pointer.child_type;
if (union_type->data.unionation.decl_node->data.container_decl.auto_enum ||
@@ -9904,23 +10881,63 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
}
}
- // explicit cast from undefined to anything
- if (actual_type->id == TypeTableEntryIdUndefLit) {
+ // cast from *T to *[1]T
+ if (wanted_type->id == TypeTableEntryIdPointer && wanted_type->data.pointer.ptr_len == PtrLenSingle &&
+ actual_type->id == TypeTableEntryIdPointer && actual_type->data.pointer.ptr_len == PtrLenSingle)
+ {
+ TypeTableEntry *array_type = wanted_type->data.pointer.child_type;
+ if (array_type->id == TypeTableEntryIdArray && array_type->data.array.len == 1 &&
+ types_match_const_cast_only(ira, array_type->data.array.child_type,
+ actual_type->data.pointer.child_type, source_node,
+ !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
+ {
+ if (wanted_type->data.pointer.alignment > actual_type->data.pointer.alignment) {
+ ErrorMsg *msg = ir_add_error(ira, source_instr, buf_sprintf("cast increases pointer alignment"));
+ add_error_note(ira->codegen, msg, value->source_node,
+ buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&actual_type->name),
+ actual_type->data.pointer.alignment));
+ add_error_note(ira->codegen, msg, source_instr->source_node,
+ buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&wanted_type->name),
+ wanted_type->data.pointer.alignment));
+ return ira->codegen->invalid_instruction;
+ }
+ return ir_analyze_ptr_to_array(ira, source_instr, value, wanted_type);
+ }
+ }
+
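+
The alignment guard above follows the general rule that a pointer cast may keep or lower the required alignment but never raise it, since the pointed-to object may not actually be that strictly aligned. A trivial standalone sketch of the rule; cast_ok is an illustrative helper, not a function in ir.cpp:

    // Mirrors the check: reject the cast when the wanted alignment exceeds the
    // actual alignment of the source pointer type.
    #include <cstdio>

    static bool cast_ok(unsigned actual_align, unsigned wanted_align) {
        return wanted_align <= actual_align;
    }

    int main() {
        std::printf("align 4 -> align 1: %s\n", cast_ok(4, 1) ? "ok" : "error");
        std::printf("align 1 -> align 4: %s\n", cast_ok(1, 4) ? "ok" : "error");
        return 0;
    }
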
+ // cast from T to *T where T is zero bits
+ if (wanted_type->id == TypeTableEntryIdPointer && wanted_type->data.pointer.ptr_len == PtrLenSingle &&
+ types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
+ actual_type, source_node, !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
+ {
+ type_ensure_zero_bits_known(ira->codegen, actual_type);
+ if (type_is_invalid(actual_type)) {
+ return ira->codegen->invalid_instruction;
+ }
+ if (!type_has_bits(actual_type)) {
+ return ir_get_ref(ira, source_instr, value, false, false);
+ }
+ }
+
+
+ // cast from undefined to anything
+ if (actual_type->id == TypeTableEntryIdUndefined) {
return ir_analyze_undefined_to_anything(ira, source_instr, value, wanted_type);
}
- // explicit cast from something to const pointer of it
+ // cast from something to const pointer of it
if (!type_requires_comptime(actual_type)) {
TypeTableEntry *const_ptr_actual = get_pointer_to_type(ira->codegen, actual_type, true);
- if (types_match_const_cast_only(ira, wanted_type, const_ptr_actual, source_node).id == ConstCastResultIdOk) {
+ if (types_match_const_cast_only(ira, wanted_type, const_ptr_actual, source_node, false).id == ConstCastResultIdOk) {
return ir_analyze_cast_ref(ira, source_instr, value, wanted_type);
}
}
- ir_add_error_node(ira, source_instr->source_node,
- buf_sprintf("invalid cast from type '%s' to '%s'",
- buf_ptr(&actual_type->name),
- buf_ptr(&wanted_type->name)));
+ ErrorMsg *parent_msg = ir_add_error_node(ira, source_instr->source_node,
+ buf_sprintf("expected type '%s', found '%s'",
+ buf_ptr(&wanted_type->name),
+ buf_ptr(&actual_type->name)));
+ report_recursive_error(ira, source_instr->source_node, &const_cast_result, parent_msg);
return ira->codegen->invalid_instruction;
}
@@ -9937,29 +10954,7 @@ static IrInstruction *ir_implicit_cast(IrAnalyze *ira, IrInstruction *value, Typ
if (value->value.type->id == TypeTableEntryIdUnreachable)
return value;
- ImplicitCastMatchResult result = ir_types_match_with_implicit_cast(ira, expected_type, value->value.type, value);
- switch (result) {
- case ImplicitCastMatchResultNo:
- ir_add_error(ira, value,
- buf_sprintf("expected type '%s', found '%s'",
- buf_ptr(&expected_type->name),
- buf_ptr(&value->value.type->name)));
- return ira->codegen->invalid_instruction;
-
- case ImplicitCastMatchResultYes:
- return ir_analyze_cast(ira, value, expected_type, value);
- case ImplicitCastMatchResultReportedError:
- return ira->codegen->invalid_instruction;
- }
-
- zig_unreachable();
-}
-
-static IrInstruction *ir_implicit_byval_const_ref_cast(IrAnalyze *ira, IrInstruction *inst) {
- if (type_is_copyable(ira->codegen, inst->value.type))
- return inst;
- TypeTableEntry *const_ref_type = get_pointer_to_type(ira->codegen, inst->value.type, true);
- return ir_implicit_cast(ira, inst, const_ref_type);
+ return ir_analyze_cast(ira, value, expected_type, value);
}
static IrInstruction *ir_get_deref(IrAnalyze *ira, IrInstruction *source_instruction, IrInstruction *ptr) {
@@ -9977,6 +10972,7 @@ static IrInstruction *ir_get_deref(IrAnalyze *ira, IrInstruction *source_instruc
IrInstruction *result = ir_create_const(&ira->new_irb, source_instruction->scope,
source_instruction->source_node, child_type);
copy_const_val(&result->value, pointee, ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst);
+ result->value.type = child_type;
return result;
}
}
@@ -9986,21 +10982,6 @@ static IrInstruction *ir_get_deref(IrAnalyze *ira, IrInstruction *source_instruc
source_instruction->source_node, ptr);
load_ptr_instruction->value.type = child_type;
return load_ptr_instruction;
- } else if (type_entry->id == TypeTableEntryIdMetaType) {
- ConstExprValue *ptr_val = ir_resolve_const(ira, ptr, UndefBad);
- if (!ptr_val)
- return ira->codegen->invalid_instruction;
-
- TypeTableEntry *ptr_type = ptr_val->data.x_type;
- if (ptr_type->id == TypeTableEntryIdPointer) {
- TypeTableEntry *child_type = ptr_type->data.pointer.child_type;
- return ir_create_const_type(&ira->new_irb, source_instruction->scope,
- source_instruction->source_node, child_type);
- } else {
- ir_add_error(ira, source_instruction,
- buf_sprintf("attempt to dereference non pointer type '%s'", buf_ptr(&ptr_type->name)));
- return ira->codegen->invalid_instruction;
- }
} else {
ir_add_error_node(ira, source_instruction->source_node,
buf_sprintf("attempt to dereference non pointer type '%s'",
@@ -10044,11 +11025,11 @@ static bool ir_resolve_align(IrAnalyze *ira, IrInstruction *value, uint32_t *out
return true;
}
-static bool ir_resolve_usize(IrAnalyze *ira, IrInstruction *value, uint64_t *out) {
+static bool ir_resolve_unsigned(IrAnalyze *ira, IrInstruction *value, TypeTableEntry *int_type, uint64_t *out) {
if (type_is_invalid(value->value.type))
return false;
- IrInstruction *casted_value = ir_implicit_cast(ira, value, ira->codegen->builtin_types.entry_usize);
+ IrInstruction *casted_value = ir_implicit_cast(ira, value, int_type);
if (type_is_invalid(casted_value->value.type))
return false;
@@ -10060,6 +11041,10 @@ static bool ir_resolve_usize(IrAnalyze *ira, IrInstruction *value, uint64_t *out
return true;
}
+static bool ir_resolve_usize(IrAnalyze *ira, IrInstruction *value, uint64_t *out) {
+ return ir_resolve_unsigned(ira, value, ira->codegen->builtin_types.entry_usize, out);
+}
+
static bool ir_resolve_bool(IrAnalyze *ira, IrInstruction *value, bool *out) {
if (type_is_invalid(value->value.type))
return false;
@@ -10169,7 +11154,9 @@ static Buf *ir_resolve_str(IrAnalyze *ira, IrInstruction *value) {
if (type_is_invalid(value->value.type))
return nullptr;
- TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, true);
+ TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, ira->codegen->builtin_types.entry_u8,
+ true, false, PtrLenUnknown,
+ get_abi_alignment(ira->codegen, ira->codegen->builtin_types.entry_u8), 0, 0);
TypeTableEntry *str_type = get_slice_type(ira->codegen, ptr_type);
IrInstruction *casted_value = ir_implicit_cast(ira, value, str_type);
if (type_is_invalid(casted_value->value.type))
@@ -10267,10 +11254,15 @@ static TypeTableEntry *ir_analyze_bin_op_bool(IrAnalyze *ira, IrInstructionBinOp
if (casted_op2 == ira->codegen->invalid_instruction)
return ira->codegen->builtin_types.entry_invalid;
- ConstExprValue *op1_val = &casted_op1->value;
- ConstExprValue *op2_val = &casted_op2->value;
- if (op1_val->special != ConstValSpecialRuntime && op2_val->special != ConstValSpecialRuntime) {
+ if (instr_is_comptime(casted_op1) && instr_is_comptime(casted_op2)) {
ConstExprValue *out_val = ir_build_const_from(ira, &bin_op_instruction->base);
+ ConstExprValue *op1_val = ir_resolve_const(ira, casted_op1, UndefBad);
+ if (op1_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *op2_val = ir_resolve_const(ira, casted_op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
assert(casted_op1->value.type->id == TypeTableEntryIdBool);
assert(casted_op2->value.type->id == TypeTableEntryIdBool);
@@ -10307,6 +11299,16 @@ static bool resolve_cmp_op_id(IrBinOp op_id, Cmp cmp) {
}
}
+static bool optional_value_is_null(ConstExprValue *val) {
+ assert(val->special == ConstValSpecialStatic);
+ if (get_codegen_ptr_type(val->type) != nullptr) {
+ return val->data.x_ptr.special == ConstPtrSpecialHardCodedAddr &&
+ val->data.x_ptr.data.hard_coded_addr.addr == 0;
+ } else {
+ return val->data.x_optional == nullptr;
+ }
+}
+
static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp *bin_op_instruction) {
IrInstruction *op1 = bin_op_instruction->op1->other;
IrInstruction *op2 = bin_op_instruction->op2->other;
@@ -10315,19 +11317,19 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
IrBinOp op_id = bin_op_instruction->op_id;
bool is_equality_cmp = (op_id == IrBinOpCmpEq || op_id == IrBinOpCmpNotEq);
if (is_equality_cmp &&
- ((op1->value.type->id == TypeTableEntryIdNullLit && op2->value.type->id == TypeTableEntryIdMaybe) ||
- (op2->value.type->id == TypeTableEntryIdNullLit && op1->value.type->id == TypeTableEntryIdMaybe) ||
- (op1->value.type->id == TypeTableEntryIdNullLit && op2->value.type->id == TypeTableEntryIdNullLit)))
+ ((op1->value.type->id == TypeTableEntryIdNull && op2->value.type->id == TypeTableEntryIdOptional) ||
+ (op2->value.type->id == TypeTableEntryIdNull && op1->value.type->id == TypeTableEntryIdOptional) ||
+ (op1->value.type->id == TypeTableEntryIdNull && op2->value.type->id == TypeTableEntryIdNull)))
{
- if (op1->value.type->id == TypeTableEntryIdNullLit && op2->value.type->id == TypeTableEntryIdNullLit) {
+ if (op1->value.type->id == TypeTableEntryIdNull && op2->value.type->id == TypeTableEntryIdNull) {
ConstExprValue *out_val = ir_build_const_from(ira, &bin_op_instruction->base);
out_val->data.x_bool = (op_id == IrBinOpCmpEq);
return ira->codegen->builtin_types.entry_bool;
}
IrInstruction *maybe_op;
- if (op1->value.type->id == TypeTableEntryIdNullLit) {
+ if (op1->value.type->id == TypeTableEntryIdNull) {
maybe_op = op2;
- } else if (op2->value.type->id == TypeTableEntryIdNullLit) {
+ } else if (op2->value.type->id == TypeTableEntryIdNull) {
maybe_op = op1;
} else {
zig_unreachable();
@@ -10336,7 +11338,7 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
ConstExprValue *maybe_val = ir_resolve_const(ira, maybe_op, UndefBad);
if (!maybe_val)
return ira->codegen->builtin_types.entry_invalid;
- bool is_null = (maybe_val->data.x_maybe == nullptr);
+ bool is_null = optional_value_is_null(maybe_val);
ConstExprValue *out_val = ir_build_const_from(ira, &bin_op_instruction->base);
out_val->data.x_bool = (op_id == IrBinOpCmpEq) ? is_null : !is_null;
return ira->codegen->builtin_types.entry_bool;
@@ -10364,7 +11366,7 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
return ira->codegen->builtin_types.entry_invalid;
}
- if (!resolve_inferred_error_set(ira, intersect_type, source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, intersect_type, source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
@@ -10410,9 +11412,14 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
}
}
- ConstExprValue *op1_val = &op1->value;
- ConstExprValue *op2_val = &op2->value;
- if (value_is_comptime(op1_val) && value_is_comptime(op2_val)) {
+ if (instr_is_comptime(op1) && instr_is_comptime(op2)) {
+ ConstExprValue *op1_val = ir_resolve_const(ira, op1, UndefBad);
+ if (op1_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+ ConstExprValue *op2_val = ir_resolve_const(ira, op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
bool answer;
bool are_equal = op1_val->data.x_err_set->value == op2_val->data.x_err_set->value;
if (op_id == IrBinOpCmpEq) {
@@ -10435,22 +11442,23 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
}
IrInstruction *instructions[] = {op1, op2};
- TypeTableEntry *resolved_type = ir_resolve_peer_types(ira, source_node, instructions, 2);
+ TypeTableEntry *resolved_type = ir_resolve_peer_types(ira, source_node, nullptr, instructions, 2);
if (type_is_invalid(resolved_type))
return resolved_type;
type_ensure_zero_bits_known(ira->codegen, resolved_type);
if (type_is_invalid(resolved_type))
return resolved_type;
-
+ bool operator_allowed;
switch (resolved_type->id) {
case TypeTableEntryIdInvalid:
zig_unreachable(); // handled above
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
case TypeTableEntryIdInt:
case TypeTableEntryIdFloat:
+ operator_allowed = true;
break;
case TypeTableEntryIdBool:
@@ -10465,32 +11473,27 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
case TypeTableEntryIdBoundFn:
case TypeTableEntryIdArgTuple:
case TypeTableEntryIdPromise:
- if (!is_equality_cmp) {
- ir_add_error_node(ira, source_node,
- buf_sprintf("operator not allowed for type '%s'", buf_ptr(&resolved_type->name)));
- return ira->codegen->builtin_types.entry_invalid;
- }
- break;
-
case TypeTableEntryIdEnum:
- if (!is_equality_cmp) {
- ir_add_error_node(ira, source_node,
- buf_sprintf("operator not allowed for type '%s'", buf_ptr(&resolved_type->name)));
- return ira->codegen->builtin_types.entry_invalid;
- }
+ operator_allowed = is_equality_cmp;
break;
case TypeTableEntryIdUnreachable:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdUnion:
- ir_add_error_node(ira, source_node,
- buf_sprintf("operator not allowed for type '%s'", buf_ptr(&resolved_type->name)));
- return ira->codegen->builtin_types.entry_invalid;
+ operator_allowed = false;
+ break;
+ case TypeTableEntryIdOptional:
+ operator_allowed = is_equality_cmp && get_codegen_ptr_type(resolved_type) != nullptr;
+ break;
+ }
+ if (!operator_allowed) {
+ ir_add_error_node(ira, source_node,
+ buf_sprintf("operator not allowed for type '%s'", buf_ptr(&resolved_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
}
IrInstruction *casted_op1 = ir_implicit_cast(ira, op1, resolved_type);
@@ -10501,15 +11504,20 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
if (casted_op2 == ira->codegen->invalid_instruction)
return ira->codegen->builtin_types.entry_invalid;
- ConstExprValue *op1_val = &casted_op1->value;
- ConstExprValue *op2_val = &casted_op2->value;
bool one_possible_value = !type_requires_comptime(resolved_type) && !type_has_bits(resolved_type);
- if (one_possible_value || (value_is_comptime(op1_val) && value_is_comptime(op2_val))) {
+ if (one_possible_value || (instr_is_comptime(casted_op1) && instr_is_comptime(casted_op2))) {
+ ConstExprValue *op1_val = one_possible_value ? &casted_op1->value : ir_resolve_const(ira, casted_op1, UndefBad);
+ if (op1_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+ ConstExprValue *op2_val = one_possible_value ? &casted_op2->value : ir_resolve_const(ira, casted_op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
bool answer;
- if (resolved_type->id == TypeTableEntryIdNumLitFloat || resolved_type->id == TypeTableEntryIdFloat) {
+ if (resolved_type->id == TypeTableEntryIdComptimeFloat || resolved_type->id == TypeTableEntryIdFloat) {
Cmp cmp_result = float_cmp(op1_val, op2_val);
answer = resolve_cmp_op_id(op_id, cmp_result);
- } else if (resolved_type->id == TypeTableEntryIdNumLitInt || resolved_type->id == TypeTableEntryIdInt) {
+ } else if (resolved_type->id == TypeTableEntryIdComptimeInt || resolved_type->id == TypeTableEntryIdInt) {
Cmp cmp_result = bigint_cmp(&op1_val->data.x_bigint, &op2_val->data.x_bigint);
answer = resolve_cmp_op_id(op_id, cmp_result);
} else {
@@ -10532,11 +11540,17 @@ static TypeTableEntry *ir_analyze_bin_op_cmp(IrAnalyze *ira, IrInstructionBinOp
if (resolved_type->id == TypeTableEntryIdInt && !resolved_type->data.integral.is_signed) {
ConstExprValue *known_left_val;
IrBinOp flipped_op_id;
- if (value_is_comptime(op1_val)) {
- known_left_val = op1_val;
+ if (instr_is_comptime(casted_op1)) {
+ known_left_val = ir_resolve_const(ira, casted_op1, UndefBad);
+ if (known_left_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
flipped_op_id = op_id;
- } else if (value_is_comptime(op2_val)) {
- known_left_val = op2_val;
+ } else if (instr_is_comptime(casted_op2)) {
+ known_left_val = ir_resolve_const(ira, casted_op2, UndefBad);
+ if (known_left_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
if (op_id == IrBinOpCmpLessThan) {
flipped_op_id = IrBinOpCmpGreaterThan;
} else if (op_id == IrBinOpCmpGreaterThan) {
@@ -10573,12 +11587,12 @@ static int ir_eval_math_op(TypeTableEntry *type_entry, ConstExprValue *op1_val,
bool is_int;
bool is_float;
Cmp op2_zcmp;
- if (type_entry->id == TypeTableEntryIdInt || type_entry->id == TypeTableEntryIdNumLitInt) {
+ if (type_entry->id == TypeTableEntryIdInt || type_entry->id == TypeTableEntryIdComptimeInt) {
is_int = true;
is_float = false;
op2_zcmp = bigint_cmp_zero(&op2_val->data.x_bigint);
} else if (type_entry->id == TypeTableEntryIdFloat ||
- type_entry->id == TypeTableEntryIdNumLitFloat)
+ type_entry->id == TypeTableEntryIdComptimeFloat)
{
is_int = false;
is_float = true;
@@ -10752,7 +11766,7 @@ static TypeTableEntry *ir_analyze_bit_shift(IrAnalyze *ira, IrInstructionBinOp *
if (type_is_invalid(op1->value.type))
return ira->codegen->builtin_types.entry_invalid;
- if (op1->value.type->id != TypeTableEntryIdInt && op1->value.type->id != TypeTableEntryIdNumLitInt) {
+ if (op1->value.type->id != TypeTableEntryIdInt && op1->value.type->id != TypeTableEntryIdComptimeInt) {
ir_add_error(ira, &bin_op_instruction->base,
buf_sprintf("bit shifting operation expected integer type, found '%s'",
buf_ptr(&op1->value.type->name)));
@@ -10765,7 +11779,7 @@ static TypeTableEntry *ir_analyze_bit_shift(IrAnalyze *ira, IrInstructionBinOp *
IrInstruction *casted_op2;
IrBinOp op_id = bin_op_instruction->op_id;
- if (op1->value.type->id == TypeTableEntryIdNumLitInt) {
+ if (op1->value.type->id == TypeTableEntryIdComptimeInt) {
casted_op2 = op2;
if (op_id == IrBinOpBitShiftLeftLossy) {
@@ -10781,6 +11795,26 @@ static TypeTableEntry *ir_analyze_bit_shift(IrAnalyze *ira, IrInstructionBinOp *
} else {
TypeTableEntry *shift_amt_type = get_smallest_unsigned_int_type(ira->codegen,
op1->value.type->data.integral.bit_count - 1);
+ if (bin_op_instruction->op_id == IrBinOpBitShiftLeftLossy &&
+ op2->value.type->id == TypeTableEntryIdComptimeInt) {
+ if (!bigint_fits_in_bits(&op2->value.data.x_bigint,
+ shift_amt_type->data.integral.bit_count,
+ op2->value.data.x_bigint.is_negative)) {
+ Buf *val_buf = buf_alloc();
+ bigint_append_buf(val_buf, &op2->value.data.x_bigint, 10);
+ ErrorMsg* msg = ir_add_error(ira,
+ &bin_op_instruction->base,
+ buf_sprintf("RHS of shift is too large for LHS type"));
+ add_error_note(
+ ira->codegen,
+ msg,
+ op2->source_node,
+ buf_sprintf("value %s cannot fit into type %s",
+ buf_ptr(val_buf),
+ buf_ptr(&shift_amt_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ }
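
The new check above rejects a compile-time shift amount that cannot fit in the shift-amount type, which for an N-bit LHS only has to represent 0 through N-1. A standalone sketch of the arithmetic, assuming the illustrative helper bits_needed rather than the compiler's bigint routines:

    // For a 64-bit LHS the shift-amount type is u6 (it must hold 0..63), so a
    // compile-time RHS of 64 needs 7 bits and is rejected.
    #include <cstdint>
    #include <cstdio>

    static unsigned bits_needed(std::uint64_t max_value) {
        unsigned bits = 0;
        while (max_value > 0) {
            bits += 1;
            max_value >>= 1;
        }
        return bits;
    }

    int main() {
        unsigned lhs_bits = 64;                                // e.g. shifting a u64
        unsigned shift_type_bits = bits_needed(lhs_bits - 1);  // 6 bits for 0..63
        std::uint64_t rhs = 64;                                // compile-time RHS
        bool fits = bits_needed(rhs) <= shift_type_bits;
        std::printf("shift by %llu %s into u%u\n",
                    (unsigned long long)rhs, fits ? "fits" : "does not fit", shift_type_bits);
        return 0;
    }
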
casted_op2 = ir_implicit_cast(ira, op2, shift_amt_type);
if (casted_op2 == ira->codegen->invalid_instruction)
@@ -10788,8 +11822,14 @@ static TypeTableEntry *ir_analyze_bit_shift(IrAnalyze *ira, IrInstructionBinOp *
}
if (instr_is_comptime(op1) && instr_is_comptime(casted_op2)) {
- ConstExprValue *op1_val = &op1->value;
- ConstExprValue *op2_val = &casted_op2->value;
+ ConstExprValue *op1_val = ir_resolve_const(ira, op1, UndefBad);
+ if (op1_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *op2_val = ir_resolve_const(ira, casted_op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
IrInstruction *result_instruction = ir_get_const(ira, &bin_op_instruction->base);
ir_link_new_instruction(result_instruction, &bin_op_instruction->base);
ConstExprValue *out_val = &result_instruction->value;
@@ -10810,7 +11850,7 @@ static TypeTableEntry *ir_analyze_bit_shift(IrAnalyze *ira, IrInstructionBinOp *
ir_num_lit_fits_in_other_type(ira, result_instruction, op1->value.type, false);
return op1->value.type;
- } else if (op1->value.type->id == TypeTableEntryIdNumLitInt) {
+ } else if (op1->value.type->id == TypeTableEntryIdComptimeInt) {
ir_add_error(ira, &bin_op_instruction->base,
buf_sprintf("LHS of shift must be an integer type, or RHS must be compile-time known"));
return ira->codegen->builtin_types.entry_invalid;
@@ -10823,22 +11863,44 @@ static TypeTableEntry *ir_analyze_bit_shift(IrAnalyze *ira, IrInstructionBinOp *
static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp *bin_op_instruction) {
IrInstruction *op1 = bin_op_instruction->op1->other;
+ if (type_is_invalid(op1->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
IrInstruction *op2 = bin_op_instruction->op2->other;
+ if (type_is_invalid(op2->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrBinOp op_id = bin_op_instruction->op_id;
+
+ // look for pointer math
+ if (op1->value.type->id == TypeTableEntryIdPointer && op1->value.type->data.pointer.ptr_len == PtrLenUnknown &&
+ (op_id == IrBinOpAdd || op_id == IrBinOpSub))
+ {
+ IrInstruction *casted_op2 = ir_implicit_cast(ira, op2, ira->codegen->builtin_types.entry_usize);
+ if (casted_op2 == ira->codegen->invalid_instruction)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *result = ir_build_bin_op(&ira->new_irb, bin_op_instruction->base.scope,
+ bin_op_instruction->base.source_node, op_id, op1, casted_op2, true);
+ result->value.type = op1->value.type;
+ ir_link_new_instruction(result, &bin_op_instruction->base);
+ return result->value.type;
+ }
+
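
The pointer math branch above only coerces the right operand to usize and keeps the unknown-length pointer type as the result, analogous to pointer arithmetic in C and C++. A standalone illustration, not taken from the compiler:

    // Adding an integer to a pointer yields a pointer of the same type, offset by
    // that many elements; this mirrors [*]T + usize keeping the [*]T type.
    #include <cstddef>

    int main() {
        int buf[4] = {1, 2, 3, 4};
        int *p = buf;            // plays the role of a [*]T unknown-length pointer
        std::size_t offset = 2;  // plays the role of the usize-coerced RHS
        int *q = p + offset;     // result has the same pointer type as the LHS
        return (q[0] == 3) ? 0 : 1;
    }
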
IrInstruction *instructions[] = {op1, op2};
- TypeTableEntry *resolved_type = ir_resolve_peer_types(ira, bin_op_instruction->base.source_node, instructions, 2);
+ TypeTableEntry *resolved_type = ir_resolve_peer_types(ira, bin_op_instruction->base.source_node, nullptr, instructions, 2);
if (type_is_invalid(resolved_type))
return resolved_type;
- IrBinOp op_id = bin_op_instruction->op_id;
- bool is_int = resolved_type->id == TypeTableEntryIdInt || resolved_type->id == TypeTableEntryIdNumLitInt;
- bool is_float = resolved_type->id == TypeTableEntryIdFloat || resolved_type->id == TypeTableEntryIdNumLitFloat;
+ bool is_int = resolved_type->id == TypeTableEntryIdInt || resolved_type->id == TypeTableEntryIdComptimeInt;
+ bool is_float = resolved_type->id == TypeTableEntryIdFloat || resolved_type->id == TypeTableEntryIdComptimeFloat;
bool is_signed_div = (
(resolved_type->id == TypeTableEntryIdInt && resolved_type->data.integral.is_signed) ||
resolved_type->id == TypeTableEntryIdFloat ||
- (resolved_type->id == TypeTableEntryIdNumLitFloat &&
+ (resolved_type->id == TypeTableEntryIdComptimeFloat &&
((bigfloat_cmp_zero(&op1->value.data.x_bigfloat) != CmpGT) !=
(bigfloat_cmp_zero(&op2->value.data.x_bigfloat) != CmpGT))) ||
- (resolved_type->id == TypeTableEntryIdNumLitInt &&
+ (resolved_type->id == TypeTableEntryIdComptimeInt &&
((bigint_cmp_zero(&op1->value.data.x_bigint) != CmpGT) !=
(bigint_cmp_zero(&op2->value.data.x_bigint) != CmpGT)))
);
@@ -10846,7 +11908,15 @@ static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp
if (is_signed_div) {
bool ok = false;
if (instr_is_comptime(op1) && instr_is_comptime(op2)) {
- if (bigint_cmp_zero(&op2->value.data.x_bigint) == CmpEQ) {
+ ConstExprValue *op1_val = ir_resolve_const(ira, op1, UndefBad);
+ if (op1_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *op2_val = ir_resolve_const(ira, op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (bigint_cmp_zero(&op2_val->data.x_bigint) == CmpEQ) {
// the division by zero error will be caught later, but we don't have a
// division function ambiguity problem.
op_id = IrBinOpDivTrunc;
@@ -10854,8 +11924,8 @@ static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp
} else {
BigInt trunc_result;
BigInt floor_result;
- bigint_div_trunc(&trunc_result, &op1->value.data.x_bigint, &op2->value.data.x_bigint);
- bigint_div_floor(&floor_result, &op1->value.data.x_bigint, &op2->value.data.x_bigint);
+ bigint_div_trunc(&trunc_result, &op1_val->data.x_bigint, &op2_val->data.x_bigint);
+ bigint_div_floor(&floor_result, &op1_val->data.x_bigint, &op2_val->data.x_bigint);
if (bigint_cmp(&trunc_result, &floor_result) == CmpEQ) {
ok = true;
op_id = IrBinOpDivTrunc;
@@ -10876,7 +11946,15 @@ static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp
if (is_signed_div && (is_int || is_float)) {
bool ok = false;
if (instr_is_comptime(op1) && instr_is_comptime(op2)) {
+ ConstExprValue *op1_val = ir_resolve_const(ira, op1, UndefBad);
+ if (op1_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
if (is_int) {
+ ConstExprValue *op2_val = ir_resolve_const(ira, op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
if (bigint_cmp_zero(&op2->value.data.x_bigint) == CmpEQ) {
// the division by zero error will be caught later, but we don't
// have a remainder function ambiguity problem
@@ -10884,14 +11962,19 @@ static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp
} else {
BigInt rem_result;
BigInt mod_result;
- bigint_rem(&rem_result, &op1->value.data.x_bigint, &op2->value.data.x_bigint);
- bigint_mod(&mod_result, &op1->value.data.x_bigint, &op2->value.data.x_bigint);
+ bigint_rem(&rem_result, &op1_val->data.x_bigint, &op2_val->data.x_bigint);
+ bigint_mod(&mod_result, &op1_val->data.x_bigint, &op2_val->data.x_bigint);
ok = bigint_cmp(&rem_result, &mod_result) == CmpEQ;
}
} else {
IrInstruction *casted_op2 = ir_implicit_cast(ira, op2, resolved_type);
if (casted_op2 == ira->codegen->invalid_instruction)
return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *op2_val = ir_resolve_const(ira, casted_op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
if (float_cmp_zero(&casted_op2->value) == CmpEQ) {
// the division by zero error will be caught later, but we don't
// have a remainder function ambiguity problem
@@ -10899,8 +11982,8 @@ static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp
} else {
ConstExprValue rem_result;
ConstExprValue mod_result;
- float_rem(&rem_result, &op1->value, &casted_op2->value);
- float_mod(&mod_result, &op1->value, &casted_op2->value);
+ float_rem(&rem_result, op1_val, op2_val);
+ float_mod(&mod_result, op1_val, op2_val);
ok = float_cmp(&rem_result, &mod_result) == CmpEQ;
}
}
@@ -10939,7 +12022,7 @@ static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp
return ira->codegen->builtin_types.entry_invalid;
}
- if (resolved_type->id == TypeTableEntryIdNumLitInt) {
+ if (resolved_type->id == TypeTableEntryIdComptimeInt) {
if (op_id == IrBinOpAddWrap) {
op_id = IrBinOpAdd;
} else if (op_id == IrBinOpSubWrap) {
@@ -10958,8 +12041,13 @@ static TypeTableEntry *ir_analyze_bin_op_math(IrAnalyze *ira, IrInstructionBinOp
return ira->codegen->builtin_types.entry_invalid;
if (instr_is_comptime(casted_op1) && instr_is_comptime(casted_op2)) {
- ConstExprValue *op1_val = &casted_op1->value;
- ConstExprValue *op2_val = &casted_op2->value;
+ ConstExprValue *op1_val = ir_resolve_const(ira, casted_op1, UndefBad);
+ if (op1_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+ ConstExprValue *op2_val = ir_resolve_const(ira, casted_op2, UndefBad);
+ if (op2_val == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
IrInstruction *result_instruction = ir_get_const(ira, &bin_op_instruction->base);
ir_link_new_instruction(result_instruction, &bin_op_instruction->base);
ConstExprValue *out_val = &result_instruction->value;
@@ -11101,7 +12189,8 @@ static TypeTableEntry *ir_analyze_array_cat(IrAnalyze *ira, IrInstructionBinOp *
out_array_val = out_val;
} else if (is_slice(op1_type) || is_slice(op2_type)) {
- TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, child_type, true);
+ TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, child_type,
+ true, false, PtrLenUnknown, get_abi_alignment(ira->codegen, child_type), 0, 0);
result_type = get_slice_type(ira->codegen, ptr_type);
out_array_val = create_const_vals(1);
out_array_val->special = ConstValSpecialStatic;
@@ -11121,7 +12210,9 @@ static TypeTableEntry *ir_analyze_array_cat(IrAnalyze *ira, IrInstructionBinOp *
} else {
new_len += 1; // null byte
- result_type = get_pointer_to_type(ira->codegen, child_type, true);
+ // TODO make this `[*]null T` instead of `[*]T`
+ result_type = get_pointer_to_type_extra(ira->codegen, child_type, true, false,
+ PtrLenUnknown, get_abi_alignment(ira->codegen, child_type), 0, 0);
out_array_val = create_const_vals(1);
out_array_val->special = ConstValSpecialStatic;
@@ -11131,9 +12222,16 @@ static TypeTableEntry *ir_analyze_array_cat(IrAnalyze *ira, IrInstructionBinOp *
out_val->data.x_ptr.data.base_array.array_val = out_array_val;
out_val->data.x_ptr.data.base_array.elem_index = 0;
}
- out_array_val->data.x_array.s_none.elements = create_const_vals(new_len);
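+ // Concatenating two entirely undefined arrays yields an undefined result; no elements need to be materialized.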
+ if (op1_array_val->data.x_array.special == ConstArraySpecialUndef &&
+ op2_array_val->data.x_array.special == ConstArraySpecialUndef) {
+ out_array_val->data.x_array.special = ConstArraySpecialUndef;
+ return result_type;
+ }
+
+ out_array_val->data.x_array.s_none.elements = create_const_vals(new_len);
expand_undef_array(ira->codegen, op1_array_val);
+ expand_undef_array(ira->codegen, op2_array_val);
size_t next_index = 0;
for (size_t i = op1_array_index; i < op1_array_end; i += 1, next_index += 1) {
@@ -11185,10 +12283,14 @@ static TypeTableEntry *ir_analyze_array_mult(IrAnalyze *ira, IrInstructionBinOp
}
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
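+ // Repeating an undefined array yields an undefined array of the new length.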
+ if (array_val->data.x_array.special == ConstArraySpecialUndef) {
+ out_val->data.x_array.special = ConstArraySpecialUndef;
- out_val->data.x_array.s_none.elements = create_const_vals(new_array_len);
+ TypeTableEntry *child_type = array_type->data.array.child_type;
+ return get_array_type(ira->codegen, child_type, new_array_len);
+ }
- expand_undef_array(ira->codegen, array_val);
+ out_val->data.x_array.s_none.elements = create_const_vals(new_array_len);
uint64_t i = 0;
for (uint64_t x = 0; x < mult_amt; x += 1) {
@@ -11220,11 +12322,11 @@ static TypeTableEntry *ir_analyze_merge_error_sets(IrAnalyze *ira, IrInstruction
return ira->codegen->builtin_types.entry_type;
}
- if (!resolve_inferred_error_set(ira, op1_type, instruction->op1->other->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, op1_type, instruction->op1->other->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
- if (!resolve_inferred_error_set(ira, op2_type, instruction->op2->other->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, op2_type, instruction->op2->other->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
@@ -11290,61 +12392,6 @@ static TypeTableEntry *ir_analyze_instruction_bin_op(IrAnalyze *ira, IrInstructi
zig_unreachable();
}
-enum VarClassRequired {
- VarClassRequiredAny,
- VarClassRequiredConst,
- VarClassRequiredIllegal,
-};
-
-static VarClassRequired get_var_class_required(TypeTableEntry *type_entry) {
- switch (type_entry->id) {
- case TypeTableEntryIdInvalid:
- zig_unreachable();
- case TypeTableEntryIdUnreachable:
- return VarClassRequiredIllegal;
- case TypeTableEntryIdBool:
- case TypeTableEntryIdInt:
- case TypeTableEntryIdFloat:
- case TypeTableEntryIdVoid:
- case TypeTableEntryIdErrorSet:
- case TypeTableEntryIdFn:
- case TypeTableEntryIdPromise:
- return VarClassRequiredAny;
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdBlock:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdOpaque:
- case TypeTableEntryIdMetaType:
- case TypeTableEntryIdNamespace:
- case TypeTableEntryIdBoundFn:
- case TypeTableEntryIdArgTuple:
- return VarClassRequiredConst;
-
- case TypeTableEntryIdPointer:
- if (type_entry->data.pointer.child_type->id == TypeTableEntryIdOpaque) {
- return VarClassRequiredAny;
- } else {
- return get_var_class_required(type_entry->data.pointer.child_type);
- }
- case TypeTableEntryIdArray:
- return get_var_class_required(type_entry->data.array.child_type);
- case TypeTableEntryIdMaybe:
- return get_var_class_required(type_entry->data.maybe.child_type);
- case TypeTableEntryIdErrorUnion:
- return get_var_class_required(type_entry->data.error_union.payload_type);
-
- case TypeTableEntryIdStruct:
- case TypeTableEntryIdEnum:
- case TypeTableEntryIdUnion:
- // TODO check the fields of these things and make sure that they don't recursively
- // contain any of the other variable classes
- return VarClassRequiredAny;
- }
- zig_unreachable();
-}
-
static TypeTableEntry *ir_analyze_instruction_decl_var(IrAnalyze *ira, IrInstructionDeclVar *decl_var_instruction) {
VariableTableEntry *var = decl_var_instruction->var;
@@ -11379,36 +12426,41 @@ static TypeTableEntry *ir_analyze_instruction_decl_var(IrAnalyze *ira, IrInstruc
if (type_is_invalid(result_type)) {
result_type = ira->codegen->builtin_types.entry_invalid;
} else {
- switch (get_var_class_required(result_type)) {
- case VarClassRequiredIllegal:
+ type_ensure_zero_bits_known(ira->codegen, result_type);
+ if (type_is_invalid(result_type)) {
+ result_type = ira->codegen->builtin_types.entry_invalid;
+ }
+ }
+
+ if (!type_is_invalid(result_type)) {
+ if (result_type->id == TypeTableEntryIdUnreachable ||
+ result_type->id == TypeTableEntryIdOpaque)
+ {
+ ir_add_error_node(ira, source_node,
+ buf_sprintf("variable of type '%s' not allowed", buf_ptr(&result_type->name)));
+ result_type = ira->codegen->builtin_types.entry_invalid;
+ } else if (type_requires_comptime(result_type)) {
+ var_class_requires_const = true;
+ if (!var->gen_is_const && !is_comptime_var) {
ir_add_error_node(ira, source_node,
- buf_sprintf("variable of type '%s' not allowed", buf_ptr(&result_type->name)));
+ buf_sprintf("variable of type '%s' must be const or comptime",
+ buf_ptr(&result_type->name)));
result_type = ira->codegen->builtin_types.entry_invalid;
- break;
- case VarClassRequiredConst:
+ }
+ } else {
+ if (casted_init_value->value.special == ConstValSpecialStatic &&
+ casted_init_value->value.type->id == TypeTableEntryIdFn &&
+ casted_init_value->value.data.x_ptr.data.fn.fn_entry->fn_inline == FnInlineAlways)
+ {
var_class_requires_const = true;
if (!var->src_is_const && !is_comptime_var) {
- ir_add_error_node(ira, source_node,
- buf_sprintf("variable of type '%s' must be const or comptime",
- buf_ptr(&result_type->name)));
+ ErrorMsg *msg = ir_add_error_node(ira, source_node,
+ buf_sprintf("functions marked inline must be stored in const or comptime var"));
+ AstNode *proto_node = casted_init_value->value.data.x_ptr.data.fn.fn_entry->proto_node;
+ add_error_note(ira->codegen, msg, proto_node, buf_sprintf("declared here"));
result_type = ira->codegen->builtin_types.entry_invalid;
}
- break;
- case VarClassRequiredAny:
- if (casted_init_value->value.special == ConstValSpecialStatic &&
- casted_init_value->value.type->id == TypeTableEntryIdFn &&
- casted_init_value->value.data.x_ptr.data.fn.fn_entry->fn_inline == FnInlineAlways)
- {
- var_class_requires_const = true;
- if (!var->src_is_const && !is_comptime_var) {
- ErrorMsg *msg = ir_add_error_node(ira, source_node,
- buf_sprintf("functions marked inline must be stored in const or comptime var"));
- AstNode *proto_node = casted_init_value->value.data.x_ptr.data.fn.fn_entry->proto_node;
- add_error_note(ira->codegen, msg, proto_node, buf_sprintf("declared here"));
- result_type = ira->codegen->builtin_types.entry_invalid;
- }
- }
- break;
+ }
}
}
@@ -11432,7 +12484,8 @@ static TypeTableEntry *ir_analyze_instruction_decl_var(IrAnalyze *ira, IrInstruc
if (var->mem_slot_index != SIZE_MAX) {
assert(var->mem_slot_index < ira->exec_context.mem_slot_count);
ConstExprValue *mem_slot = &ira->exec_context.mem_slot_list[var->mem_slot_index];
- *mem_slot = casted_init_value->value;
+ copy_const_val(mem_slot, &casted_init_value->value,
+ !is_comptime_var || var->gen_is_const);
if (is_comptime_var || (var_class_requires_const && var->gen_is_const)) {
ir_build_const_from(ira, &decl_var_instruction->base);
@@ -11578,11 +12631,11 @@ static TypeTableEntry *ir_analyze_instruction_export(IrAnalyze *ira, IrInstructi
case TypeTableEntryIdMetaType:
case TypeTableEntryIdVoid:
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdNamespace:
@@ -11602,11 +12655,11 @@ static TypeTableEntry *ir_analyze_instruction_export(IrAnalyze *ira, IrInstructi
case TypeTableEntryIdFloat:
case TypeTableEntryIdPointer:
case TypeTableEntryIdArray:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
zig_panic("TODO export const value of type %s", buf_ptr(&target->value.type->name));
@@ -11633,22 +12686,24 @@ static bool exec_has_err_ret_trace(CodeGen *g, IrExecutable *exec) {
static TypeTableEntry *ir_analyze_instruction_error_return_trace(IrAnalyze *ira,
IrInstructionErrorReturnTrace *instruction)
{
- if (instruction->nullable == IrInstructionErrorReturnTrace::Null) {
+ if (instruction->optional == IrInstructionErrorReturnTrace::Null) {
TypeTableEntry *ptr_to_stack_trace_type = get_ptr_to_stack_trace_type(ira->codegen);
- TypeTableEntry *nullable_type = get_maybe_type(ira->codegen, ptr_to_stack_trace_type);
+ TypeTableEntry *optional_type = get_optional_type(ira->codegen, ptr_to_stack_trace_type);
if (!exec_has_err_ret_trace(ira->codegen, ira->new_irb.exec)) {
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
- out_val->data.x_maybe = nullptr;
- return nullable_type;
+ assert(get_codegen_ptr_type(optional_type) != nullptr);
+ out_val->data.x_ptr.special = ConstPtrSpecialHardCodedAddr;
+ out_val->data.x_ptr.data.hard_coded_addr.addr = 0;
+ return optional_type;
}
IrInstruction *new_instruction = ir_build_error_return_trace(&ira->new_irb, instruction->base.scope,
- instruction->base.source_node, instruction->nullable);
+ instruction->base.source_node, instruction->optional);
ir_link_new_instruction(new_instruction, &instruction->base);
- return nullable_type;
+ return optional_type;
} else {
assert(ira->codegen->have_err_ret_tracing);
IrInstruction *new_instruction = ir_build_error_return_trace(&ira->new_irb, instruction->base.scope,
- instruction->base.source_node, instruction->nullable);
+ instruction->base.source_node, instruction->optional);
ir_link_new_instruction(new_instruction, &instruction->base);
return get_ptr_to_stack_trace_type(ira->codegen);
}
@@ -11706,7 +12761,7 @@ IrInstruction *ir_get_implicit_allocator(IrAnalyze *ira, IrInstruction *source_i
{
VariableTableEntry *coro_allocator_var = ira->old_irb.exec->coro_allocator_var;
assert(coro_allocator_var != nullptr);
- IrInstruction *var_ptr_inst = ir_get_var_ptr(ira, source_instr, coro_allocator_var, true, false);
+ IrInstruction *var_ptr_inst = ir_get_var_ptr(ira, source_instr, coro_allocator_var);
IrInstruction *result = ir_get_deref(ira, source_instr, var_ptr_inst);
assert(result->value.type != nullptr);
return result;
@@ -11749,7 +12804,7 @@ static IrInstruction *ir_analyze_async_call(IrAnalyze *ira, IrInstructionCall *c
TypeTableEntry *async_return_type = get_error_union_type(ira->codegen, alloc_fn_error_set_type, promise_type);
IrInstruction *result = ir_build_call(&ira->new_irb, call_instruction->base.scope, call_instruction->base.source_node,
- fn_entry, fn_ref, arg_count, casted_args, false, FnInlineAuto, true, async_allocator_inst);
+ fn_entry, fn_ref, arg_count, casted_args, false, FnInlineAuto, true, async_allocator_inst, nullptr);
result->value.type = async_return_type;
return result;
}
@@ -11799,7 +12854,7 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
IrInstruction *casted_arg;
if (is_var_args) {
arg_part_of_generic_id = true;
- casted_arg = ir_implicit_byval_const_ref_cast(ira, arg);
+ casted_arg = arg;
} else {
if (param_decl_node->data.param_decl.var_token == nullptr) {
AstNode *param_type_node = param_decl_node->data.param_decl.type;
@@ -11812,12 +12867,12 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
return false;
} else {
arg_part_of_generic_id = true;
- casted_arg = ir_implicit_byval_const_ref_cast(ira, arg);
+ casted_arg = arg;
}
}
bool comptime_arg = param_decl_node->data.param_decl.is_inline ||
- casted_arg->value.type->id == TypeTableEntryIdNumLitInt || casted_arg->value.type->id == TypeTableEntryIdNumLitFloat;
+ casted_arg->value.type->id == TypeTableEntryIdComptimeInt || casted_arg->value.type->id == TypeTableEntryIdComptimeFloat;
ConstExprValue *arg_val;
@@ -11835,6 +12890,7 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
}
Buf *param_name = param_decl_node->data.param_decl.name;
+ if (!param_name) return false;
if (!is_var_args) {
VariableTableEntry *var = add_variable(ira->codegen, param_decl_node,
*child_scope, param_name, true, arg_val, nullptr);
@@ -11842,11 +12898,11 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
var->shadowable = !comptime_arg;
*next_proto_i += 1;
- } else if (casted_arg->value.type->id == TypeTableEntryIdNumLitInt ||
- casted_arg->value.type->id == TypeTableEntryIdNumLitFloat)
+ } else if (casted_arg->value.type->id == TypeTableEntryIdComptimeInt ||
+ casted_arg->value.type->id == TypeTableEntryIdComptimeFloat)
{
ir_add_error(ira, casted_arg,
- buf_sprintf("compiler bug: integer and float literals in var args function must be casted. https://github.com/zig-lang/zig/issues/557"));
+ buf_sprintf("compiler bug: integer and float literals in var args function must be casted. https://github.com/ziglang/zig/issues/557"));
return false;
}
@@ -11887,7 +12943,7 @@ static VariableTableEntry *get_fn_var_by_index(FnTableEntry *fn_entry, size_t in
}
static IrInstruction *ir_get_var_ptr(IrAnalyze *ira, IrInstruction *instruction,
- VariableTableEntry *var, bool is_const_ptr, bool is_volatile_ptr)
+ VariableTableEntry *var)
{
if (var->mem_slot_index != SIZE_MAX && var->owner_exec->analysis == nullptr) {
assert(ira->codegen->errors.length != 0);
@@ -11913,8 +12969,8 @@ static IrInstruction *ir_get_var_ptr(IrAnalyze *ira, IrInstruction *instruction,
}
}
- bool is_const = (var->value->type->id == TypeTableEntryIdMetaType) ? is_const_ptr : var->src_is_const;
- bool is_volatile = (var->value->type->id == TypeTableEntryIdMetaType) ? is_volatile_ptr : false;
+ bool is_const = var->src_is_const;
+ bool is_volatile = false;
if (mem_slot != nullptr) {
switch (mem_slot->special) {
case ConstValSpecialRuntime:
@@ -11940,9 +12996,9 @@ static IrInstruction *ir_get_var_ptr(IrAnalyze *ira, IrInstruction *instruction,
no_mem_slot:
IrInstruction *var_ptr_instruction = ir_build_var_ptr(&ira->new_irb,
- instruction->scope, instruction->source_node, var, is_const, is_volatile);
+ instruction->scope, instruction->source_node, var);
var_ptr_instruction->value.type = get_pointer_to_type_extra(ira->codegen, var->value->type,
- var->src_is_const, is_volatile, var->align_bytes, 0, 0);
+ var->src_is_const, is_volatile, PtrLenSingle, var->align_bytes, 0, 0);
type_ensure_zero_bits_known(ira->codegen, var->value->type);
bool in_fn_scope = (scope_fn_entry(var->parent_scope) != nullptr);
@@ -11961,14 +13017,22 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
// for extern functions, the var args argument is not counted.
// for zig functions, it is.
size_t var_args_1_or_0;
- if (fn_type_id->cc == CallingConventionUnspecified) {
- var_args_1_or_0 = fn_type_id->is_var_args ? 1 : 0;
- } else {
+ if (fn_type_id->cc == CallingConventionC) {
var_args_1_or_0 = 0;
+ } else {
+ var_args_1_or_0 = fn_type_id->is_var_args ? 1 : 0;
}
size_t src_param_count = fn_type_id->param_count - var_args_1_or_0;
size_t call_param_count = call_instruction->arg_count + first_arg_1_or_0;
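+ // A var args arg tuple expands into its component arguments, so count each entry as a separate call parameter.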
+ for (size_t i = 0; i < call_instruction->arg_count; i += 1) {
+ ConstExprValue *arg_tuple_value = &call_instruction->args[i]->other->value;
+ if (arg_tuple_value->type->id == TypeTableEntryIdArgTuple) {
+ call_param_count -= 1;
+ call_param_count += arg_tuple_value->data.x_arg_tuple.end_index -
+ arg_tuple_value->data.x_arg_tuple.start_index;
+ }
+ }
AstNode *source_node = call_instruction->base.source_node;
AstNode *fn_proto_node = fn_entry ? fn_entry->proto_node : nullptr;
@@ -12031,9 +13095,18 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
size_t next_proto_i = 0;
if (first_arg_ptr) {
- IrInstruction *first_arg;
assert(first_arg_ptr->value.type->id == TypeTableEntryIdPointer);
- if (handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
+
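+ // If the first parameter's type is already resolved to a non-pointer, the self argument must be dereferenced rather than passed by pointer.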
+ bool first_arg_known_bare = false;
+ if (fn_type_id->next_param_index >= 1) {
+ TypeTableEntry *param_type = fn_type_id->param_info[next_proto_i].type;
+ if (type_is_invalid(param_type))
+ return ira->codegen->builtin_types.entry_invalid;
+ first_arg_known_bare = param_type->id != TypeTableEntryIdPointer;
+ }
+
+ IrInstruction *first_arg;
+ if (!first_arg_known_bare && handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
first_arg = first_arg_ptr;
} else {
first_arg = ir_get_deref(ira, first_arg_ptr, first_arg_ptr);
@@ -12047,7 +13120,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
if (fn_proto_node->data.fn_proto.is_var_args) {
ir_add_error(ira, &call_instruction->base,
- buf_sprintf("compiler bug: unable to call var args function at compile time. https://github.com/zig-lang/zig/issues/313"));
+ buf_sprintf("compiler bug: unable to call var args function at compile time. https://github.com/ziglang/zig/issues/313"));
return ira->codegen->builtin_types.entry_invalid;
}
@@ -12119,17 +13192,27 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
return ir_finish_anal(ira, return_type);
}
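+ // If this call supplies a new stack, it must implicitly cast to a []u8 slice.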
+ IrInstruction *casted_new_stack = nullptr;
+ if (call_instruction->new_stack != nullptr) {
+ TypeTableEntry *u8_ptr = get_pointer_to_type_extra(ira->codegen, ira->codegen->builtin_types.entry_u8,
+ false, false, PtrLenUnknown,
+ get_abi_alignment(ira->codegen, ira->codegen->builtin_types.entry_u8), 0, 0);
+ TypeTableEntry *u8_slice = get_slice_type(ira->codegen, u8_ptr);
+ IrInstruction *new_stack = call_instruction->new_stack->other;
+ if (type_is_invalid(new_stack->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ casted_new_stack = ir_implicit_cast(ira, new_stack, u8_slice);
+ if (type_is_invalid(casted_new_stack->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
if (fn_type->data.fn.is_generic) {
if (!fn_entry) {
ir_add_error(ira, call_instruction->fn_ref,
buf_sprintf("calling a generic function requires compile-time known function value"));
return ira->codegen->builtin_types.entry_invalid;
}
- if (call_instruction->is_async && fn_type_id->is_var_args) {
- ir_add_error(ira, call_instruction->fn_ref,
- buf_sprintf("compiler bug: TODO: implement var args async functions. https://github.com/zig-lang/zig/issues/557"));
- return ira->codegen->builtin_types.entry_invalid;
- }
// Count the arguments of the function type id we are creating
size_t new_fn_arg_count = first_arg_1_or_0;
@@ -12168,9 +13251,18 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
size_t next_proto_i = 0;
if (first_arg_ptr) {
- IrInstruction *first_arg;
assert(first_arg_ptr->value.type->id == TypeTableEntryIdPointer);
- if (handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
+
+ bool first_arg_known_bare = false;
+ if (fn_type_id->next_param_index >= 1) {
+ TypeTableEntry *param_type = fn_type_id->param_info[next_proto_i].type;
+ if (type_is_invalid(param_type))
+ return ira->codegen->builtin_types.entry_invalid;
+ first_arg_known_bare = param_type->id != TypeTableEntryIdPointer;
+ }
+
+ IrInstruction *first_arg;
+ if (!first_arg_known_bare && handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
first_arg = first_arg_ptr;
} else {
first_arg = ir_get_deref(ira, first_arg_ptr, first_arg_ptr);
@@ -12195,25 +13287,25 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
if (type_is_invalid(arg->value.type))
return ira->codegen->builtin_types.entry_invalid;
- AstNode *param_decl_node = fn_proto_node->data.fn_proto.params.at(next_proto_i);
- assert(param_decl_node->type == NodeTypeParamDecl);
- bool is_var_args = param_decl_node->data.param_decl.is_var_args;
- if (is_var_args && !found_first_var_arg) {
- first_var_arg = inst_fn_type_id.param_count;
- found_first_var_arg = true;
- }
-
if (arg->value.type->id == TypeTableEntryIdArgTuple) {
for (size_t arg_tuple_i = arg->value.data.x_arg_tuple.start_index;
arg_tuple_i < arg->value.data.x_arg_tuple.end_index; arg_tuple_i += 1)
{
+ AstNode *param_decl_node = fn_proto_node->data.fn_proto.params.at(next_proto_i);
+ assert(param_decl_node->type == NodeTypeParamDecl);
+ bool is_var_args = param_decl_node->data.param_decl.is_var_args;
+ if (is_var_args && !found_first_var_arg) {
+ first_var_arg = inst_fn_type_id.param_count;
+ found_first_var_arg = true;
+ }
+
VariableTableEntry *arg_var = get_fn_var_by_index(parent_fn_entry, arg_tuple_i);
if (arg_var == nullptr) {
ir_add_error(ira, arg,
- buf_sprintf("compiler bug: var args can't handle void. https://github.com/zig-lang/zig/issues/557"));
+ buf_sprintf("compiler bug: var args can't handle void. https://github.com/ziglang/zig/issues/557"));
return ira->codegen->builtin_types.entry_invalid;
}
- IrInstruction *arg_var_ptr_inst = ir_get_var_ptr(ira, arg, arg_var, true, false);
+ IrInstruction *arg_var_ptr_inst = ir_get_var_ptr(ira, arg, arg_var);
if (type_is_invalid(arg_var_ptr_inst->value.type))
return ira->codegen->builtin_types.entry_invalid;
@@ -12227,10 +13319,20 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
return ira->codegen->builtin_types.entry_invalid;
}
}
- } else if (!ir_analyze_fn_call_generic_arg(ira, fn_proto_node, arg, &impl_fn->child_scope,
- &next_proto_i, generic_id, &inst_fn_type_id, casted_args, impl_fn))
- {
- return ira->codegen->builtin_types.entry_invalid;
+ } else {
+ AstNode *param_decl_node = fn_proto_node->data.fn_proto.params.at(next_proto_i);
+ assert(param_decl_node->type == NodeTypeParamDecl);
+ bool is_var_args = param_decl_node->data.param_decl.is_var_args;
+ if (is_var_args && !found_first_var_arg) {
+ first_var_arg = inst_fn_type_id.param_count;
+ found_first_var_arg = true;
+ }
+
+ if (!ir_analyze_fn_call_generic_arg(ira, fn_proto_node, arg, &impl_fn->child_scope,
+ &next_proto_i, generic_id, &inst_fn_type_id, casted_args, impl_fn))
+ {
+ return ira->codegen->builtin_types.entry_invalid;
+ }
}
}
@@ -12273,6 +13375,10 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
inst_fn_type_id.return_type = specified_return_type;
}
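+ // Resolve the return type's zero-bit status before checking whether it forces a comptime call.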
+ type_ensure_zero_bits_known(ira->codegen, specified_return_type);
+ if (type_is_invalid(specified_return_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
if (type_requires_comptime(specified_return_type)) {
// Throw out our work and call the function as if it were comptime.
return ir_analyze_fn_call(ira, call_instruction, fn_entry, fn_type, fn_ref, first_arg_ptr, true, FnInlineAuto);
@@ -12299,10 +13405,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
return ira->codegen->builtin_types.entry_invalid;
}
if (inst_fn_type_id.async_allocator_type == nullptr) {
- IrInstruction *casted_inst = ir_implicit_byval_const_ref_cast(ira, uncasted_async_allocator_inst);
- if (type_is_invalid(casted_inst->value.type))
- return ira->codegen->builtin_types.entry_invalid;
- inst_fn_type_id.async_allocator_type = casted_inst->value.type;
+ inst_fn_type_id.async_allocator_type = uncasted_async_allocator_inst->value.type;
}
async_allocator_inst = ir_implicit_cast(ira, uncasted_async_allocator_inst, inst_fn_type_id.async_allocator_type);
if (type_is_invalid(async_allocator_inst->value.type))
@@ -12323,6 +13426,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
impl_fn->ir_executable.parent_exec = ira->new_irb.exec;
impl_fn->analyzed_executable.source_node = call_instruction->base.source_node;
impl_fn->analyzed_executable.parent_exec = ira->new_irb.exec;
+ impl_fn->analyzed_executable.backward_branch_quota = ira->new_irb.exec->backward_branch_quota;
impl_fn->analyzed_executable.is_generic_instantiation = true;
ira->codegen->fn_defs.append(impl_fn);
@@ -12345,7 +13449,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
assert(async_allocator_inst == nullptr);
IrInstruction *new_call_instruction = ir_build_call_from(&ira->new_irb, &call_instruction->base,
impl_fn, nullptr, impl_param_count, casted_args, false, fn_inline,
- call_instruction->is_async, nullptr);
+ call_instruction->is_async, nullptr, casted_new_stack);
ir_add_alloca(ira, new_call_instruction, return_type);
@@ -12363,9 +13467,16 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
IrInstruction **casted_args = allocate<IrInstruction *>(call_param_count);
size_t next_arg_index = 0;
if (first_arg_ptr) {
- IrInstruction *first_arg;
assert(first_arg_ptr->value.type->id == TypeTableEntryIdPointer);
- if (handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type)) {
+
+ TypeTableEntry *param_type = fn_type_id->param_info[next_arg_index].type;
+ if (type_is_invalid(param_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *first_arg;
+ if (param_type->id == TypeTableEntryIdPointer &&
+ handle_is_ptr(first_arg_ptr->value.type->data.pointer.child_type))
+ {
first_arg = first_arg_ptr;
} else {
first_arg = ir_get_deref(ira, first_arg_ptr, first_arg_ptr);
@@ -12373,10 +13484,6 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
return ira->codegen->builtin_types.entry_invalid;
}
- TypeTableEntry *param_type = fn_type_id->param_info[next_arg_index].type;
- if (type_is_invalid(param_type))
- return ira->codegen->builtin_types.entry_invalid;
-
IrInstruction *casted_arg = ir_implicit_cast(ira, first_arg, param_type);
if (type_is_invalid(casted_arg->value.type))
return ira->codegen->builtin_types.entry_invalid;
@@ -12436,7 +13543,7 @@ static TypeTableEntry *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCall *cal
IrInstruction *new_call_instruction = ir_build_call_from(&ira->new_irb, &call_instruction->base,
- fn_entry, fn_ref, call_param_count, casted_args, false, fn_inline, false, nullptr);
+ fn_entry, fn_ref, call_param_count, casted_args, false, fn_inline, false, nullptr, casted_new_stack);
ir_add_alloca(ira, new_call_instruction, return_type);
return ir_finish_anal(ira, return_type);
@@ -12474,6 +13581,8 @@ static TypeTableEntry *ir_analyze_instruction_call(IrAnalyze *ira, IrInstruction
return ir_finish_anal(ira, cast_instruction->value.type);
} else if (fn_ref->value.type->id == TypeTableEntryIdFn) {
FnTableEntry *fn_table_entry = ir_resolve_fn(ira, fn_ref);
+ if (fn_table_entry == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
return ir_analyze_fn_call(ira, call_instruction, fn_table_entry, fn_table_entry->type_entry,
fn_ref, nullptr, is_comptime, call_instruction->fn_inline);
} else if (fn_ref->value.type->id == TypeTableEntryIdBoundFn) {
@@ -12481,7 +13590,7 @@ static TypeTableEntry *ir_analyze_instruction_call(IrAnalyze *ira, IrInstruction
FnTableEntry *fn_table_entry = fn_ref->value.data.x_bound_fn.fn;
IrInstruction *first_arg_ptr = fn_ref->value.data.x_bound_fn.first_arg;
return ir_analyze_fn_call(ira, call_instruction, fn_table_entry, fn_table_entry->type_entry,
- nullptr, first_arg_ptr, is_comptime, call_instruction->fn_inline);
+ fn_ref, first_arg_ptr, is_comptime, call_instruction->fn_inline);
} else {
ir_add_error_node(ira, fn_ref->source_node,
buf_sprintf("type '%s' not a function", buf_ptr(&fn_ref->value.type->name)));
@@ -12507,6 +13616,12 @@ static TypeTableEntry *ir_analyze_dereference(IrAnalyze *ira, IrInstructionUnOp
if (type_is_invalid(ptr_type)) {
return ira->codegen->builtin_types.entry_invalid;
} else if (ptr_type->id == TypeTableEntryIdPointer) {
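+ // Unknown-length pointers cannot be dereferenced directly; element access requires index syntax.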
+ if (ptr_type->data.pointer.ptr_len == PtrLenUnknown) {
+ ir_add_error_node(ira, un_op_instruction->base.source_node,
+ buf_sprintf("index syntax required for unknown-length pointer type '%s'",
+ buf_ptr(&ptr_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
child_type = ptr_type->data.pointer.child_type;
} else {
ir_add_error_node(ira, un_op_instruction->base.source_node,
@@ -12519,7 +13634,11 @@ static TypeTableEntry *ir_analyze_dereference(IrAnalyze *ira, IrInstructionUnOp
// one of the ptr instructions
if (instr_is_comptime(value)) {
- ConstExprValue *pointee = const_ptr_pointee(ira->codegen, &value->value);
+ ConstExprValue *comptime_value = ir_resolve_const(ira, value, UndefBad);
+ if (comptime_value == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *pointee = const_ptr_pointee(ira->codegen, comptime_value);
if (pointee->type == child_type) {
ConstExprValue *out_val = ir_build_const_from(ira, &un_op_instruction->base);
copy_const_val(out_val, pointee, value->value.data.x_ptr.mut == ConstPtrMutComptimeConst);
@@ -12536,6 +13655,10 @@ static TypeTableEntry *ir_analyze_maybe(IrAnalyze *ira, IrInstructionUnOp *un_op
TypeTableEntry *type_entry = ir_resolve_type(ira, value);
if (type_is_invalid(type_entry))
return ira->codegen->builtin_types.entry_invalid;
+ ensure_complete_type(ira->codegen, type_entry);
+ if (type_is_invalid(type_entry))
+ return ira->codegen->builtin_types.entry_invalid;
+
switch (type_entry->id) {
case TypeTableEntryIdInvalid:
zig_unreachable();
@@ -12547,11 +13670,11 @@ static TypeTableEntry *ir_analyze_maybe(IrAnalyze *ira, IrInstructionUnOp *un_op
case TypeTableEntryIdPointer:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
@@ -12564,13 +13687,13 @@ static TypeTableEntry *ir_analyze_maybe(IrAnalyze *ira, IrInstructionUnOp *un_op
case TypeTableEntryIdPromise:
{
ConstExprValue *out_val = ir_build_const_from(ira, &un_op_instruction->base);
- out_val->data.x_type = get_maybe_type(ira->codegen, type_entry);
+ out_val->data.x_type = get_optional_type(ira->codegen, type_entry);
return ira->codegen->builtin_types.entry_type;
}
case TypeTableEntryIdUnreachable:
case TypeTableEntryIdOpaque:
ir_add_error_node(ira, un_op_instruction->base.source_node,
- buf_sprintf("type '%s' not nullable", buf_ptr(&type_entry->name)));
+ buf_sprintf("type '%s' not optional", buf_ptr(&type_entry->name)));
return ira->codegen->builtin_types.entry_invalid;
}
zig_unreachable();
@@ -12584,10 +13707,10 @@ static TypeTableEntry *ir_analyze_negation(IrAnalyze *ira, IrInstructionUnOp *un
bool is_wrap_op = (un_op_instruction->op_id == IrUnOpNegationWrap);
- bool is_float = (expr_type->id == TypeTableEntryIdFloat || expr_type->id == TypeTableEntryIdNumLitFloat);
+ bool is_float = (expr_type->id == TypeTableEntryIdFloat || expr_type->id == TypeTableEntryIdComptimeFloat);
if ((expr_type->id == TypeTableEntryIdInt && expr_type->data.integral.is_signed) ||
- expr_type->id == TypeTableEntryIdNumLitInt || (is_float && !is_wrap_op))
+ expr_type->id == TypeTableEntryIdComptimeInt || (is_float && !is_wrap_op))
{
if (instr_is_comptime(value)) {
ConstExprValue *target_const_val = ir_resolve_const(ira, value, UndefBad);
@@ -12603,7 +13726,7 @@ static TypeTableEntry *ir_analyze_negation(IrAnalyze *ira, IrInstructionUnOp *un
} else {
bigint_negate(&out_val->data.x_bigint, &target_const_val->data.x_bigint);
}
- if (is_wrap_op || is_float || expr_type->id == TypeTableEntryIdNumLitInt) {
+ if (is_wrap_op || is_float || expr_type->id == TypeTableEntryIdComptimeInt) {
return expr_type;
}
@@ -12632,7 +13755,7 @@ static TypeTableEntry *ir_analyze_bin_not(IrAnalyze *ira, IrInstructionUnOp *ins
if (expr_type->id == TypeTableEntryIdInt) {
if (instr_is_comptime(value)) {
ConstExprValue *target_const_val = ir_resolve_const(ira, value, UndefBad);
- if (!target_const_val)
+ if (target_const_val == nullptr)
return ira->codegen->builtin_types.entry_invalid;
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
@@ -12662,7 +13785,7 @@ static TypeTableEntry *ir_analyze_instruction_un_op(IrAnalyze *ira, IrInstructio
return ir_analyze_negation(ira, un_op_instruction);
case IrUnOpDereference:
return ir_analyze_dereference(ira, un_op_instruction);
- case IrUnOpMaybe:
+ case IrUnOpOptional:
return ir_analyze_maybe(ira, un_op_instruction);
}
zig_unreachable();
@@ -12794,15 +13917,15 @@ static TypeTableEntry *ir_analyze_instruction_phi(IrAnalyze *ira, IrInstructionP
return first_value->value.type;
}
- TypeTableEntry *resolved_type = ir_resolve_peer_types(ira, phi_instruction->base.source_node,
+ TypeTableEntry *resolved_type = ir_resolve_peer_types(ira, phi_instruction->base.source_node, nullptr,
new_incoming_values.items, new_incoming_values.length);
if (type_is_invalid(resolved_type))
return resolved_type;
- if (resolved_type->id == TypeTableEntryIdNumLitFloat ||
- resolved_type->id == TypeTableEntryIdNumLitInt ||
- resolved_type->id == TypeTableEntryIdNullLit ||
- resolved_type->id == TypeTableEntryIdUndefLit)
+ if (resolved_type->id == TypeTableEntryIdComptimeFloat ||
+ resolved_type->id == TypeTableEntryIdComptimeInt ||
+ resolved_type->id == TypeTableEntryIdNull ||
+ resolved_type->id == TypeTableEntryIdUndefined)
{
ir_add_error_node(ira, phi_instruction->base.source_node,
buf_sprintf("unable to infer expression type"));
@@ -12847,17 +13970,16 @@ static TypeTableEntry *ir_analyze_instruction_phi(IrAnalyze *ira, IrInstructionP
}
static TypeTableEntry *ir_analyze_var_ptr(IrAnalyze *ira, IrInstruction *instruction,
- VariableTableEntry *var, bool is_const_ptr, bool is_volatile_ptr)
+ VariableTableEntry *var)
{
- IrInstruction *result = ir_get_var_ptr(ira, instruction, var, is_const_ptr, is_volatile_ptr);
+ IrInstruction *result = ir_get_var_ptr(ira, instruction, var);
ir_link_new_instruction(result, instruction);
return result->value.type;
}
static TypeTableEntry *ir_analyze_instruction_var_ptr(IrAnalyze *ira, IrInstructionVarPtr *var_ptr_instruction) {
VariableTableEntry *var = var_ptr_instruction->var;
- return ir_analyze_var_ptr(ira, &var_ptr_instruction->base, var, var_ptr_instruction->is_const,
- var_ptr_instruction->is_volatile);
+ return ir_analyze_var_ptr(ira, &var_ptr_instruction->base, var);
}
static TypeTableEntry *adjust_ptr_align(CodeGen *g, TypeTableEntry *ptr_type, uint32_t new_align) {
@@ -12865,25 +13987,40 @@ static TypeTableEntry *adjust_ptr_align(CodeGen *g, TypeTableEntry *ptr_type, ui
return get_pointer_to_type_extra(g,
ptr_type->data.pointer.child_type,
ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
+ ptr_type->data.pointer.ptr_len,
new_align,
ptr_type->data.pointer.bit_offset, ptr_type->data.pointer.unaligned_bit_count);
}
+static TypeTableEntry *adjust_slice_align(CodeGen *g, TypeTableEntry *slice_type, uint32_t new_align) {
+ assert(is_slice(slice_type));
+ TypeTableEntry *ptr_type = adjust_ptr_align(g, slice_type->data.structure.fields[slice_ptr_index].type_entry,
+ new_align);
+ return get_slice_type(g, ptr_type);
+}
+
+static TypeTableEntry *adjust_ptr_len(CodeGen *g, TypeTableEntry *ptr_type, PtrLen ptr_len) {
+ assert(ptr_type->id == TypeTableEntryIdPointer);
+ return get_pointer_to_type_extra(g,
+ ptr_type->data.pointer.child_type,
+ ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
+ ptr_len,
+ ptr_type->data.pointer.alignment,
+ ptr_type->data.pointer.bit_offset, ptr_type->data.pointer.unaligned_bit_count);
+}
+
static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstructionElemPtr *elem_ptr_instruction) {
IrInstruction *array_ptr = elem_ptr_instruction->array_ptr->other;
if (type_is_invalid(array_ptr->value.type))
return ira->codegen->builtin_types.entry_invalid;
+ ConstExprValue *orig_array_ptr_val = &array_ptr->value;
+
IrInstruction *elem_index = elem_ptr_instruction->elem_index->other;
if (type_is_invalid(elem_index->value.type))
return ira->codegen->builtin_types.entry_invalid;
- TypeTableEntry *ptr_type = array_ptr->value.type;
- if (ptr_type->id == TypeTableEntryIdMetaType) {
- ir_add_error(ira, &elem_ptr_instruction->base,
- buf_sprintf("array access of non-array type '%s'", buf_ptr(&ptr_type->name)));
- return ira->codegen->builtin_types.entry_invalid;
- }
+ TypeTableEntry *ptr_type = orig_array_ptr_val->type;
assert(ptr_type->id == TypeTableEntryIdPointer);
TypeTableEntry *array_type = ptr_type->data.pointer.child_type;
@@ -12894,7 +14031,18 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
if (type_is_invalid(array_type)) {
return array_type;
- } else if (array_type->id == TypeTableEntryIdArray) {
+ } else if (array_type->id == TypeTableEntryIdArray ||
+ (array_type->id == TypeTableEntryIdPointer &&
+ array_type->data.pointer.ptr_len == PtrLenSingle &&
+ array_type->data.pointer.child_type->id == TypeTableEntryIdArray))
+ {
+ if (array_type->id == TypeTableEntryIdPointer) {
+ array_type = array_type->data.pointer.child_type;
+ ptr_type = ptr_type->data.pointer.child_type;
+ if (orig_array_ptr_val->special != ConstValSpecialRuntime) {
+ orig_array_ptr_val = const_ptr_pointee(ira->codegen, orig_array_ptr_val);
+ }
+ }
if (array_type->data.array.len == 0) {
ir_add_error_node(ira, elem_ptr_instruction->base.source_node,
buf_sprintf("index 0 outside array of size 0"));
@@ -12904,6 +14052,7 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
if (ptr_type->data.pointer.unaligned_bit_count == 0) {
return_type = get_pointer_to_type_extra(ira->codegen, child_type,
ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
+ elem_ptr_instruction->ptr_len,
ptr_type->data.pointer.alignment, 0, 0);
} else {
uint64_t elem_val_scalar;
@@ -12915,12 +14064,19 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
return_type = get_pointer_to_type_extra(ira->codegen, child_type,
ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
+ elem_ptr_instruction->ptr_len,
1, (uint32_t)bit_offset, (uint32_t)bit_width);
}
} else if (array_type->id == TypeTableEntryIdPointer) {
- return_type = array_type;
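+ // Only unknown-length pointers support element access; indexing a single-item pointer is an error.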
+ if (array_type->data.pointer.ptr_len == PtrLenSingle) {
+ ir_add_error_node(ira, elem_ptr_instruction->base.source_node,
+ buf_sprintf("index of single-item pointer"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ return_type = adjust_ptr_len(ira->codegen, array_type, elem_ptr_instruction->ptr_len);
} else if (is_slice(array_type)) {
- return_type = array_type->data.structure.fields[slice_ptr_index].type_entry;
+ return_type = adjust_ptr_len(ira->codegen, array_type->data.structure.fields[slice_ptr_index].type_entry,
+ elem_ptr_instruction->ptr_len);
} else if (array_type->id == TypeTableEntryIdArgTuple) {
ConstExprValue *ptr_val = ir_resolve_const(ira, array_ptr, UndefBad);
if (!ptr_val)
@@ -12945,8 +14101,7 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
bool is_const = true;
bool is_volatile = false;
if (var) {
- return ir_analyze_var_ptr(ira, &elem_ptr_instruction->base, var,
- is_const, is_volatile);
+ return ir_analyze_var_ptr(ira, &elem_ptr_instruction->base, var);
} else {
return ir_analyze_const_ptr(ira, &elem_ptr_instruction->base, &ira->codegen->const_void_val,
ira->codegen->builtin_types.entry_void, ConstPtrMutComptimeConst, is_const, is_volatile);
@@ -12964,6 +14119,9 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
bool safety_check_on = elem_ptr_instruction->safety_check_on;
ensure_complete_type(ira->codegen, return_type->data.pointer.child_type);
+ if (type_is_invalid(return_type->data.pointer.child_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
uint64_t elem_size = type_size(ira->codegen, return_type->data.pointer.child_type);
uint64_t abi_align = get_abi_alignment(ira->codegen, return_type->data.pointer.child_type);
uint64_t ptr_align = return_type->data.pointer.alignment;
@@ -13001,9 +14159,9 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
}
ConstExprValue *array_ptr_val;
- if (array_ptr->value.special != ConstValSpecialRuntime &&
- (array_ptr->value.data.x_ptr.mut != ConstPtrMutRuntimeVar || array_type->id == TypeTableEntryIdArray) &&
- (array_ptr_val = const_ptr_pointee(ira->codegen, &array_ptr->value)) &&
+ if (orig_array_ptr_val->special != ConstValSpecialRuntime &&
+ (orig_array_ptr_val->data.x_ptr.mut != ConstPtrMutRuntimeVar || array_type->id == TypeTableEntryIdArray) &&
+ (array_ptr_val = const_ptr_pointee(ira->codegen, orig_array_ptr_val)) &&
array_ptr_val->special != ConstValSpecialRuntime &&
(array_type->id != TypeTableEntryIdPointer ||
array_ptr_val->data.x_ptr.special != ConstPtrSpecialHardCodedAddr))
@@ -13060,8 +14218,10 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
} else if (is_slice(array_type)) {
ConstExprValue *ptr_field = &array_ptr_val->data.x_struct.fields[slice_ptr_index];
if (ptr_field->data.x_ptr.special == ConstPtrSpecialHardCodedAddr) {
- ir_build_elem_ptr_from(&ira->new_irb, &elem_ptr_instruction->base, array_ptr,
- casted_elem_index, false);
+ IrInstruction *result = ir_build_elem_ptr(&ira->new_irb, elem_ptr_instruction->base.scope, elem_ptr_instruction->base.source_node,
+ array_ptr, casted_elem_index, false, elem_ptr_instruction->ptr_len);
+ result->value.type = return_type;
+ ir_link_new_instruction(result, &elem_ptr_instruction->base);
return return_type;
}
ConstExprValue *len_field = &array_ptr_val->data.x_struct.fields[slice_len_index];
@@ -13106,7 +14266,7 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
} else if (array_type->id == TypeTableEntryIdArray) {
ConstExprValue *out_val = ir_build_const_from(ira, &elem_ptr_instruction->base);
out_val->data.x_ptr.special = ConstPtrSpecialBaseArray;
- out_val->data.x_ptr.mut = array_ptr->value.data.x_ptr.mut;
+ out_val->data.x_ptr.mut = orig_array_ptr_val->data.x_ptr.mut;
out_val->data.x_ptr.data.base_array.array_val = array_ptr_val;
out_val->data.x_ptr.data.base_array.elem_index = index;
return return_type;
@@ -13129,8 +14289,10 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
}
}
- ir_build_elem_ptr_from(&ira->new_irb, &elem_ptr_instruction->base, array_ptr,
- casted_elem_index, safety_check_on);
+ IrInstruction *result = ir_build_elem_ptr(&ira->new_irb, elem_ptr_instruction->base.scope, elem_ptr_instruction->base.source_node,
+ array_ptr, casted_elem_index, safety_check_on, elem_ptr_instruction->ptr_len);
+ result->value.type = return_type;
+ ir_link_new_instruction(result, &elem_ptr_instruction->base);
return return_type;
}
@@ -13174,7 +14336,6 @@ static IrInstruction *ir_analyze_container_member_access_inner(IrAnalyze *ira,
return ira->codegen->invalid_instruction;
}
-
static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_name,
IrInstruction *source_instr, IrInstruction *container_ptr, TypeTableEntry *container_type)
{
@@ -13206,7 +14367,7 @@ static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_
return ira->codegen->invalid_instruction;
ConstExprValue *field_val = &struct_val->data.x_struct.fields[field->src_index];
TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, field_val->type,
- is_const, is_volatile, align_bytes,
+ is_const, is_volatile, PtrLenSingle, align_bytes,
(uint32_t)(ptr_bit_offset + field->packed_bits_offset),
(uint32_t)unaligned_bit_count_for_result_type);
IrInstruction *result = ir_get_const(ira, source_instr);
@@ -13222,6 +14383,7 @@ static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_
IrInstruction *result = ir_build_struct_field_ptr(&ira->new_irb, source_instr->scope, source_instr->source_node,
container_ptr, field);
result->value.type = get_pointer_to_type_extra(ira->codegen, field->type_entry, is_const, is_volatile,
+ PtrLenSingle,
align_bytes,
(uint32_t)(ptr_bit_offset + field->packed_bits_offset),
(uint32_t)unaligned_bit_count_for_result_type);
@@ -13236,9 +14398,56 @@ static IrInstruction *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field_
} else if (bare_type->id == TypeTableEntryIdUnion) {
TypeUnionField *field = find_union_type_field(bare_type, field_name);
if (field) {
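+ // At comptime, check that the requested union field is the active one and return a pointer to its payload value.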
+ if (instr_is_comptime(container_ptr)) {
+ ConstExprValue *ptr_val = ir_resolve_const(ira, container_ptr, UndefBad);
+ if (!ptr_val)
+ return ira->codegen->invalid_instruction;
+
+ if (ptr_val->data.x_ptr.special != ConstPtrSpecialHardCodedAddr) {
+ ConstExprValue *union_val = const_ptr_pointee(ira->codegen, ptr_val);
+ if (type_is_invalid(union_val->type))
+ return ira->codegen->invalid_instruction;
+
+ TypeUnionField *actual_field = find_union_field_by_tag(bare_type, &union_val->data.x_union.tag);
+ if (actual_field == nullptr)
+ zig_unreachable();
+
+ if (field != actual_field) {
+ ir_add_error_node(ira, source_instr->source_node,
+ buf_sprintf("accessing union field '%s' while field '%s' is set", buf_ptr(field_name),
+ buf_ptr(actual_field->name)));
+ return ira->codegen->invalid_instruction;
+ }
+
+ ConstExprValue *payload_val = union_val->data.x_union.payload;
+
+ TypeTableEntry *field_type = field->type_entry;
+ if (field_type->id == TypeTableEntryIdVoid)
+ {
+ assert(payload_val == nullptr);
+ payload_val = create_const_vals(1);
+ payload_val->special = ConstValSpecialStatic;
+ payload_val->type = field_type;
+ }
+
+ TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, field_type,
+ is_const, is_volatile,
+ PtrLenSingle,
+ get_abi_alignment(ira->codegen, field_type), 0, 0);
+
+ IrInstruction *result = ir_get_const(ira, source_instr);
+ ConstExprValue *const_val = &result->value;
+ const_val->data.x_ptr.special = ConstPtrSpecialRef;
+ const_val->data.x_ptr.mut = container_ptr->value.data.x_ptr.mut;
+ const_val->data.x_ptr.data.ref.pointee = payload_val;
+ const_val->type = ptr_type;
+ return result;
+ }
+ }
+
IrInstruction *result = ir_build_union_field_ptr(&ira->new_irb, source_instr->scope, source_instr->source_node, container_ptr, field);
result->value.type = get_pointer_to_type_extra(ira->codegen, field->type_entry, is_const, is_volatile,
- get_abi_alignment(ira->codegen, field->type_entry), 0, 0);
+ PtrLenSingle, get_abi_alignment(ira->codegen, field->type_entry), 0, 0);
return result;
} else {
return ir_analyze_container_member_access_inner(ira, bare_type, field_name,
@@ -13286,7 +14495,7 @@ static TypeTableEntry *ir_analyze_decl_ref(IrAnalyze *ira, IrInstruction *source
add_link_lib_symbol(ira, tld_var->extern_lib_name, &var->name, source_instruction->source_node);
}
- return ir_analyze_var_ptr(ira, source_instruction, var, false, false);
+ return ir_analyze_var_ptr(ira, source_instruction, var);
}
case TldIdFn:
{
@@ -13335,16 +14544,18 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
if (type_is_invalid(container_ptr->value.type))
return ira->codegen->builtin_types.entry_invalid;
- TypeTableEntry *container_type;
- if (container_ptr->value.type->id == TypeTableEntryIdPointer) {
- container_type = container_ptr->value.type->data.pointer.child_type;
- } else if (container_ptr->value.type->id == TypeTableEntryIdMetaType) {
- container_type = container_ptr->value.type;
- } else {
- zig_unreachable();
+ TypeTableEntry *container_type = container_ptr->value.type->data.pointer.child_type;
+ assert(container_ptr->value.type->id == TypeTableEntryIdPointer);
+
+ Buf *field_name = field_ptr_instruction->field_name_buffer;
+ if (!field_name) {
+ IrInstruction *field_name_expr = field_ptr_instruction->field_name_expr->other;
+ field_name = ir_resolve_str(ira, field_name_expr);
+ if (!field_name)
+ return ira->codegen->builtin_types.entry_invalid;
}
- Buf *field_name = field_ptr_instruction->field_name;
+
AstNode *source_node = field_ptr_instruction->base.source_node;
if (type_is_invalid(container_type)) {
@@ -13362,10 +14573,14 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
ir_link_new_instruction(result, &field_ptr_instruction->base);
return result->value.type;
}
- } else if (container_type->id == TypeTableEntryIdArray) {
+ } else if (is_array_ref(container_type)) {
if (buf_eql_str(field_name, "len")) {
ConstExprValue *len_val = create_const_vals(1);
- init_const_usize(ira->codegen, len_val, container_type->data.array.len);
+ if (container_type->id == TypeTableEntryIdPointer) {
+ init_const_usize(ira->codegen, len_val, container_type->data.pointer.child_type->data.array.len);
+ } else {
+ init_const_usize(ira->codegen, len_val, container_type->data.array.len);
+ }
TypeTableEntry *usize = ira->codegen->builtin_types.entry_usize;
bool ptr_is_const = true;
@@ -13407,17 +14622,9 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
if (!container_ptr_val)
return ira->codegen->builtin_types.entry_invalid;
- TypeTableEntry *child_type;
- if (container_ptr->value.type->id == TypeTableEntryIdMetaType) {
- TypeTableEntry *ptr_type = container_ptr_val->data.x_type;
- assert(ptr_type->id == TypeTableEntryIdPointer);
- child_type = ptr_type->data.pointer.child_type;
- } else if (container_ptr->value.type->id == TypeTableEntryIdPointer) {
- ConstExprValue *child_val = const_ptr_pointee(ira->codegen, container_ptr_val);
- child_type = child_val->data.x_type;
- } else {
- zig_unreachable();
- }
+ assert(container_ptr->value.type->id == TypeTableEntryIdPointer);
+ ConstExprValue *child_val = const_ptr_pointee(ira->codegen, container_ptr_val);
+ TypeTableEntry *child_type = child_val->data.x_type;
if (type_is_invalid(child_type)) {
return ira->codegen->builtin_types.entry_invalid;
@@ -13435,7 +14642,7 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
}
if (child_type->id == TypeTableEntryIdEnum) {
ensure_complete_type(ira->codegen, child_type);
- if (child_type->data.enumeration.is_invalid)
+ if (type_is_invalid(child_type))
return ira->codegen->builtin_types.entry_invalid;
TypeEnumField *field = find_enum_type_field(child_type, field_name);
@@ -13446,7 +14653,16 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
create_const_enum(child_type, &field->value), child_type,
ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
}
- } else if (child_type->id == TypeTableEntryIdUnion &&
+ }
+ ScopeDecls *container_scope = get_container_scope(child_type);
+ if (container_scope != nullptr) {
+ auto entry = container_scope->decl_table.maybe_get(field_name);
+ Tld *tld = entry ? entry->value : nullptr;
+ if (tld) {
+ return ir_analyze_decl_ref(ira, &field_ptr_instruction->base, tld);
+ }
+ }
+ if (child_type->id == TypeTableEntryIdUnion &&
(child_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr ||
child_type->data.unionation.decl_node->data.container_decl.auto_enum))
{
@@ -13463,14 +14679,6 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
ConstPtrMutComptimeConst, ptr_is_const, ptr_is_volatile);
}
}
- ScopeDecls *container_scope = get_container_scope(child_type);
- if (container_scope != nullptr) {
- auto entry = container_scope->decl_table.maybe_get(field_name);
- Tld *tld = entry ? entry->value : nullptr;
- if (tld) {
- return ir_analyze_decl_ref(ira, &field_ptr_instruction->base, tld);
- }
- }
ir_add_error(ira, &field_ptr_instruction->base,
buf_sprintf("container '%s' has no member called '%s'",
buf_ptr(&child_type->name), buf_ptr(field_name)));
@@ -13501,7 +14709,7 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
}
err_set_type = err_entry->set_with_only_this_in_it;
} else {
- if (!resolve_inferred_error_set(ira, child_type, field_ptr_instruction->base.source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, child_type, field_ptr_instruction->base.source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
err_entry = find_err_table_entry(child_type, field_name);
@@ -13623,7 +14831,7 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
buf_ptr(&child_type->name), buf_ptr(field_name)));
return ira->codegen->builtin_types.entry_invalid;
}
- } else if (child_type->id == TypeTableEntryIdMaybe) {
+ } else if (child_type->id == TypeTableEntryIdOptional) {
if (buf_eql_str(field_name, "Child")) {
bool ptr_is_const = true;
bool ptr_is_volatile = false;
@@ -13639,6 +14847,15 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
}
} else if (child_type->id == TypeTableEntryIdFn) {
if (buf_eql_str(field_name, "ReturnType")) {
+ if (child_type->data.fn.fn_type_id.return_type == nullptr) {
+ // The return type can only ever be null if the function is generic
+ assert(child_type->data.fn.is_generic);
+
+ ir_add_error(ira, &field_ptr_instruction->base,
+ buf_sprintf("ReturnType has not been resolved because '%s' is generic", buf_ptr(&child_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
bool ptr_is_const = true;
bool ptr_is_volatile = false;
return ir_analyze_const_ptr(ira, &field_ptr_instruction->base,
@@ -13707,6 +14924,9 @@ static TypeTableEntry *ir_analyze_instruction_field_ptr(IrAnalyze *ira, IrInstru
static TypeTableEntry *ir_analyze_instruction_load_ptr(IrAnalyze *ira, IrInstructionLoadPtr *load_ptr_instruction) {
IrInstruction *ptr = load_ptr_instruction->ptr->other;
+ if (type_is_invalid(ptr->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
IrInstruction *result = ir_get_deref(ira, &load_ptr_instruction->base, ptr);
ir_link_new_instruction(result, &load_ptr_instruction->base);
assert(result->value.type);
@@ -13779,10 +14999,10 @@ static TypeTableEntry *ir_analyze_instruction_typeof(IrAnalyze *ira, IrInstructi
switch (type_entry->id) {
case TypeTableEntryIdInvalid:
zig_unreachable(); // handled above
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
@@ -13795,7 +15015,7 @@ static TypeTableEntry *ir_analyze_instruction_typeof(IrAnalyze *ira, IrInstructi
case TypeTableEntryIdPointer:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
@@ -13827,7 +15047,7 @@ static TypeTableEntry *ir_analyze_instruction_to_ptr_type(IrAnalyze *ira,
if (type_entry->id == TypeTableEntryIdArray) {
ptr_type = get_pointer_to_type(ira->codegen, type_entry->data.array.child_type, false);
} else if (is_slice(type_entry)) {
- ptr_type = type_entry->data.structure.fields[0].type_entry;
+ ptr_type = adjust_ptr_len(ira->codegen, type_entry->data.structure.fields[0].type_entry, PtrLenSingle);
} else if (type_entry->id == TypeTableEntryIdArgTuple) {
ConstExprValue *arg_tuple_val = ir_resolve_const(ira, value, UndefBad);
if (!arg_tuple_val)
@@ -14045,8 +15265,8 @@ static TypeTableEntry *ir_analyze_instruction_slice_type(IrAnalyze *ira,
case TypeTableEntryIdInvalid: // handled above
zig_unreachable();
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdBlock:
case TypeTableEntryIdArgTuple:
case TypeTableEntryIdOpaque:
@@ -14061,9 +15281,9 @@ static TypeTableEntry *ir_analyze_instruction_slice_type(IrAnalyze *ira,
case TypeTableEntryIdPointer:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
@@ -14075,7 +15295,7 @@ static TypeTableEntry *ir_analyze_instruction_slice_type(IrAnalyze *ira,
{
type_ensure_zero_bits_known(ira->codegen, child_type);
TypeTableEntry *slice_ptr_type = get_pointer_to_type_extra(ira->codegen, child_type,
- is_const, is_volatile, align_bytes, 0, 0);
+ is_const, is_volatile, PtrLenUnknown, align_bytes, 0, 0);
TypeTableEntry *result_type = get_slice_type(ira->codegen, slice_ptr_type);
ConstExprValue *out_val = ir_build_const_from(ira, &slice_type_instruction->base);
out_val->data.x_type = result_type;
@@ -14153,8 +15373,8 @@ static TypeTableEntry *ir_analyze_instruction_array_type(IrAnalyze *ira,
case TypeTableEntryIdInvalid: // handled above
zig_unreachable();
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdBlock:
case TypeTableEntryIdArgTuple:
case TypeTableEntryIdOpaque:
@@ -14169,9 +15389,9 @@ static TypeTableEntry *ir_analyze_instruction_array_type(IrAnalyze *ira,
case TypeTableEntryIdPointer:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
@@ -14222,11 +15442,11 @@ static TypeTableEntry *ir_analyze_instruction_size_of(IrAnalyze *ira,
case TypeTableEntryIdInvalid: // handled above
zig_unreachable();
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdBlock:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
case TypeTableEntryIdBoundFn:
case TypeTableEntryIdMetaType:
case TypeTableEntryIdNamespace:
@@ -14242,7 +15462,7 @@ static TypeTableEntry *ir_analyze_instruction_size_of(IrAnalyze *ira,
case TypeTableEntryIdPointer:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
@@ -14266,20 +15486,20 @@ static TypeTableEntry *ir_analyze_instruction_test_non_null(IrAnalyze *ira, IrIn
TypeTableEntry *type_entry = value->value.type;
- if (type_entry->id == TypeTableEntryIdMaybe) {
+ if (type_entry->id == TypeTableEntryIdOptional) {
if (instr_is_comptime(value)) {
ConstExprValue *maybe_val = ir_resolve_const(ira, value, UndefBad);
if (!maybe_val)
return ira->codegen->builtin_types.entry_invalid;
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
- out_val->data.x_bool = (maybe_val->data.x_maybe != nullptr);
+ out_val->data.x_bool = !optional_value_is_null(maybe_val);
return ira->codegen->builtin_types.entry_bool;
}
ir_build_test_nonnull_from(&ira->new_irb, &instruction->base, value);
return ira->codegen->builtin_types.entry_bool;
- } else if (type_entry->id == TypeTableEntryIdNullLit) {
+ } else if (type_entry->id == TypeTableEntryIdNull) {
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
out_val->data.x_bool = false;
return ira->codegen->builtin_types.entry_bool;
@@ -14291,42 +15511,27 @@ static TypeTableEntry *ir_analyze_instruction_test_non_null(IrAnalyze *ira, IrIn
}
static TypeTableEntry *ir_analyze_instruction_unwrap_maybe(IrAnalyze *ira,
- IrInstructionUnwrapMaybe *unwrap_maybe_instruction)
+ IrInstructionUnwrapOptional *unwrap_maybe_instruction)
{
IrInstruction *value = unwrap_maybe_instruction->value->other;
if (type_is_invalid(value->value.type))
return ira->codegen->builtin_types.entry_invalid;
TypeTableEntry *ptr_type = value->value.type;
- if (ptr_type->id == TypeTableEntryIdMetaType) {
- // surprise! actually this is just ??T not an unwrap maybe instruction
- TypeTableEntry *ptr_type_ptr = ir_resolve_type(ira, value);
- assert(ptr_type_ptr->id == TypeTableEntryIdPointer);
- TypeTableEntry *child_type = ptr_type_ptr->data.pointer.child_type;
- type_ensure_zero_bits_known(ira->codegen, child_type);
- TypeTableEntry *layer1 = get_maybe_type(ira->codegen, child_type);
- TypeTableEntry *layer2 = get_maybe_type(ira->codegen, layer1);
- TypeTableEntry *result_type = get_pointer_to_type(ira->codegen, layer2, true);
-
- IrInstruction *const_instr = ir_build_const_type(&ira->new_irb, unwrap_maybe_instruction->base.scope,
- unwrap_maybe_instruction->base.source_node, result_type);
- ir_link_new_instruction(const_instr, &unwrap_maybe_instruction->base);
- return const_instr->value.type;
- }
-
assert(ptr_type->id == TypeTableEntryIdPointer);
TypeTableEntry *type_entry = ptr_type->data.pointer.child_type;
if (type_is_invalid(type_entry)) {
return ira->codegen->builtin_types.entry_invalid;
- } else if (type_entry->id != TypeTableEntryIdMaybe) {
+ } else if (type_entry->id != TypeTableEntryIdOptional) {
ir_add_error_node(ira, unwrap_maybe_instruction->value->source_node,
- buf_sprintf("expected nullable type, found '%s'", buf_ptr(&type_entry->name)));
+ buf_sprintf("expected optional type, found '%s'", buf_ptr(&type_entry->name)));
return ira->codegen->builtin_types.entry_invalid;
}
TypeTableEntry *child_type = type_entry->data.maybe.child_type;
TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, child_type,
ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
+ PtrLenSingle,
get_abi_alignment(ira->codegen, child_type), 0, 0);
if (instr_is_comptime(value)) {
@@ -14336,13 +15541,18 @@ static TypeTableEntry *ir_analyze_instruction_unwrap_maybe(IrAnalyze *ira,
ConstExprValue *maybe_val = const_ptr_pointee(ira->codegen, val);
if (val->data.x_ptr.mut != ConstPtrMutRuntimeVar) {
- if (!maybe_val->data.x_maybe) {
+ if (optional_value_is_null(maybe_val)) {
ir_add_error(ira, &unwrap_maybe_instruction->base, buf_sprintf("unable to unwrap null"));
return ira->codegen->builtin_types.entry_invalid;
}
ConstExprValue *out_val = ir_build_const_from(ira, &unwrap_maybe_instruction->base);
out_val->data.x_ptr.special = ConstPtrSpecialRef;
- out_val->data.x_ptr.data.ref.pointee = maybe_val->data.x_maybe;
+ out_val->data.x_ptr.mut = val->data.x_ptr.mut;
+ if (type_is_codegen_pointer(child_type)) {
+ out_val->data.x_ptr.data.ref.pointee = maybe_val;
+ } else {
+ out_val->data.x_ptr.data.ref.pointee = maybe_val->data.x_optional;
+ }
return result_type;
}
}
@@ -14400,6 +15610,48 @@ static TypeTableEntry *ir_analyze_instruction_clz(IrAnalyze *ira, IrInstructionC
}
}
+static TypeTableEntry *ir_analyze_instruction_pop_count(IrAnalyze *ira, IrInstructionPopCount *instruction) {
+ IrInstruction *value = instruction->value->other;
+ if (type_is_invalid(value->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (value->value.type->id != TypeTableEntryIdInt && value->value.type->id != TypeTableEntryIdComptimeInt) {
+ ir_add_error(ira, value,
+ buf_sprintf("expected integer type, found '%s'", buf_ptr(&value->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ if (instr_is_comptime(value)) {
+ ConstExprValue *val = ir_resolve_const(ira, value, UndefBad);
+ if (!val)
+ return ira->codegen->builtin_types.entry_invalid;
+ if (bigint_cmp_zero(&val->data.x_bigint) != CmpLT) {
+ size_t result = bigint_popcount_unsigned(&val->data.x_bigint);
+ ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+ bigint_init_unsigned(&out_val->data.x_bigint, result);
+ return ira->codegen->builtin_types.entry_num_lit_int;
+ }
+ if (value->value.type->id == TypeTableEntryIdComptimeInt) {
+ Buf *val_buf = buf_alloc();
+ bigint_append_buf(val_buf, &val->data.x_bigint, 10);
+ ir_add_error(ira, &instruction->base,
+ buf_sprintf("@popCount on negative %s value %s",
+ buf_ptr(&value->value.type->name), buf_ptr(val_buf)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ size_t result = bigint_popcount_signed(&val->data.x_bigint, value->value.type->data.integral.bit_count);
+ ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+ bigint_init_unsigned(&out_val->data.x_bigint, result);
+ return ira->codegen->builtin_types.entry_num_lit_int;
+ }
+
+ IrInstruction *result = ir_build_pop_count(&ira->new_irb, instruction->base.scope,
+ instruction->base.source_node, value);
+ result->value.type = get_smallest_unsigned_int_type(ira->codegen, value->value.type->data.integral.bit_count);
+ ir_link_new_instruction(result, &instruction->base);
+ return result->value.type;
+}
+
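(Illustrative sketch, not from ir.cpp: the signed runtime/comptime handling above assumes the usual two's-complement view of a fixed-width integer, i.e. @popCount of a negative value counts the set bits of its truncated two's-complement representation.)

    #include <cstdint>
    // Count set bits the way bigint_popcount_signed is used above: reinterpret
    // the signed value in two's complement at exactly `bit_count` bits.
    static int popcount_signed_sketch(int64_t v, unsigned bit_count) {
        uint64_t mask = (bit_count >= 64) ? ~UINT64_C(0) : ((UINT64_C(1) << bit_count) - 1);
        uint64_t bits = (uint64_t)v & mask;              // truncated two's-complement bits
        int n = 0;
        while (bits != 0) { bits &= bits - 1; n += 1; }  // clear the lowest set bit
        return n;
    }
    // popcount_signed_sketch(-1, 8) == 8, as for an i8 operand; a negative
    // comptime_int has no fixed width, which is why it is rejected above.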
static IrInstruction *ir_analyze_union_tag(IrAnalyze *ira, IrInstruction *source_instr, IrInstruction *value) {
if (type_is_invalid(value->value.type))
return ira->codegen->invalid_instruction;
@@ -14409,7 +15661,7 @@ static IrInstruction *ir_analyze_union_tag(IrAnalyze *ira, IrInstruction *source
}
if (value->value.type->id != TypeTableEntryIdUnion) {
- ir_add_error(ira, source_instr,
+ ir_add_error(ira, value,
buf_sprintf("expected enum or union type, found '%s'", buf_ptr(&value->value.type->name)));
return ira->codegen->invalid_instruction;
}
@@ -14450,6 +15702,13 @@ static TypeTableEntry *ir_analyze_instruction_switch_br(IrAnalyze *ira,
if (type_is_invalid(target_value->value.type))
return ir_unreach_error(ira);
+ if (switch_br_instruction->switch_prongs_void != nullptr) {
+ if (type_is_invalid(switch_br_instruction->switch_prongs_void->other->value.type)) {
+ return ir_unreach_error(ira);
+ }
+ }
+
+
size_t case_count = switch_br_instruction->case_count;
bool is_comptime;
@@ -14540,7 +15799,7 @@ static TypeTableEntry *ir_analyze_instruction_switch_br(IrAnalyze *ira,
IrBasicBlock *new_else_block = ir_get_new_bb(ira, switch_br_instruction->else_block, &switch_br_instruction->base);
ir_build_switch_br_from(&ira->new_irb, &switch_br_instruction->base,
- target_value, new_else_block, case_count, cases, nullptr);
+ target_value, new_else_block, case_count, cases, nullptr, nullptr);
return ir_finish_anal(ira, ira->codegen->builtin_types.entry_unreachable);
}
@@ -14561,7 +15820,10 @@ static TypeTableEntry *ir_analyze_instruction_switch_target(IrAnalyze *ira,
return out_val->type;
}
- assert(target_value_ptr->value.type->id == TypeTableEntryIdPointer);
+ if (target_value_ptr->value.type->id != TypeTableEntryIdPointer) {
+ ir_add_error(ira, target_value_ptr, buf_sprintf("invalid deref on switch target"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
TypeTableEntry *target_type = target_value_ptr->value.type->data.pointer.child_type;
ConstExprValue *pointee_val = nullptr;
@@ -14582,8 +15844,8 @@ static TypeTableEntry *ir_analyze_instruction_switch_target(IrAnalyze *ira,
case TypeTableEntryIdBool:
case TypeTableEntryIdInt:
case TypeTableEntryIdFloat:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
case TypeTableEntryIdPointer:
case TypeTableEntryIdPromise:
case TypeTableEntryIdFn:
@@ -14661,9 +15923,9 @@ static TypeTableEntry *ir_analyze_instruction_switch_target(IrAnalyze *ira,
case TypeTableEntryIdUnreachable:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
case TypeTableEntryIdArgTuple:
@@ -14841,6 +16103,8 @@ static TypeTableEntry *ir_analyze_container_init_fields_union(IrAnalyze *ira, Ir
assert(container_type->id == TypeTableEntryIdUnion);
ensure_complete_type(ira->codegen, container_type);
+ if (type_is_invalid(container_type))
+ return ira->codegen->builtin_types.entry_invalid;
if (instr_field_count != 1) {
ir_add_error(ira, instruction,
@@ -14868,8 +16132,14 @@ static TypeTableEntry *ir_analyze_container_init_fields_union(IrAnalyze *ira, Ir
if (casted_field_value == ira->codegen->invalid_instruction)
return ira->codegen->builtin_types.entry_invalid;
+ type_ensure_zero_bits_known(ira->codegen, casted_field_value->value.type);
+ if (type_is_invalid(casted_field_value->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
bool is_comptime = ir_should_inline(ira->new_irb.exec, instruction->scope);
- if (is_comptime || casted_field_value->value.special != ConstValSpecialRuntime) {
+ if (is_comptime || casted_field_value->value.special != ConstValSpecialRuntime ||
+ !type_has_bits(casted_field_value->value.type))
+ {
ConstExprValue *field_val = ir_resolve_const(ira, casted_field_value, UndefOk);
if (!field_val)
return ira->codegen->builtin_types.entry_invalid;
@@ -14908,6 +16178,8 @@ static TypeTableEntry *ir_analyze_container_init_fields(IrAnalyze *ira, IrInstru
}
ensure_complete_type(ira->codegen, container_type);
+ if (type_is_invalid(container_type))
+ return ira->codegen->builtin_types.entry_invalid;
size_t actual_field_count = container_type->data.structure.src_field_count;
@@ -15176,11 +16448,11 @@ static TypeTableEntry *ir_analyze_min_max(IrAnalyze *ira, IrInstruction *source_
case TypeTableEntryIdPromise:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdUnion:
@@ -15267,7 +16539,8 @@ static TypeTableEntry *ir_analyze_instruction_err_name(IrAnalyze *ira, IrInstruc
if (type_is_invalid(casted_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
- TypeTableEntry *u8_ptr_type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, true);
+ TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(ira->codegen, ira->codegen->builtin_types.entry_u8,
+ true, false, PtrLenUnknown, get_abi_alignment(ira->codegen, ira->codegen->builtin_types.entry_u8), 0, 0);
TypeTableEntry *str_type = get_slice_type(ira->codegen, u8_ptr_type);
if (casted_value->value.special == ConstValSpecialStatic) {
ErrorTableEntry *err = casted_value->value.data.x_err_set;
@@ -15293,6 +16566,9 @@ static TypeTableEntry *ir_analyze_instruction_enum_tag_name(IrAnalyze *ira, IrIn
assert(target->value.type->id == TypeTableEntryIdEnum);
if (instr_is_comptime(target)) {
+ type_ensure_zero_bits_known(ira->codegen, target->value.type);
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
TypeEnumField *field = find_enum_field_by_tag(target->value.type, &target->value.data.x_bigint);
ConstExprValue *array_val = create_const_str_lit(ira->codegen, field->name);
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
@@ -15300,15 +16576,14 @@ static TypeTableEntry *ir_analyze_instruction_enum_tag_name(IrAnalyze *ira, IrIn
return out_val->type;
}
- if (!target->value.type->data.enumeration.generate_name_table) {
- target->value.type->data.enumeration.generate_name_table = true;
- ira->codegen->name_table_enums.append(target->value.type);
- }
-
IrInstruction *result = ir_build_tag_name(&ira->new_irb, instruction->base.scope,
instruction->base.source_node, target);
ir_link_new_instruction(result, &instruction->base);
- TypeTableEntry *u8_ptr_type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, true);
+ TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(
+ ira->codegen, ira->codegen->builtin_types.entry_u8,
+ true, false, PtrLenUnknown,
+ get_abi_alignment(ira->codegen, ira->codegen->builtin_types.entry_u8),
+ 0, 0);
result->value.type = get_slice_type(ira->codegen, u8_ptr_type);
return result->value.type;
}
@@ -15337,6 +16612,8 @@ static TypeTableEntry *ir_analyze_instruction_field_parent_ptr(IrAnalyze *ira,
}
ensure_complete_type(ira->codegen, container_type);
+ if (type_is_invalid(container_type))
+ return ira->codegen->builtin_types.entry_invalid;
TypeStructField *field = find_struct_type_field(container_type, field_name);
if (field == nullptr) {
@@ -15359,6 +16636,7 @@ static TypeTableEntry *ir_analyze_instruction_field_parent_ptr(IrAnalyze *ira,
TypeTableEntry *field_ptr_type = get_pointer_to_type_extra(ira->codegen, field->type_entry,
field_ptr->value.type->data.pointer.is_const,
field_ptr->value.type->data.pointer.is_volatile,
+ PtrLenSingle,
field_ptr_align, 0, 0);
IrInstruction *casted_field_ptr = ir_implicit_cast(ira, field_ptr, field_ptr_type);
if (type_is_invalid(casted_field_ptr->value.type))
@@ -15367,6 +16645,7 @@ static TypeTableEntry *ir_analyze_instruction_field_parent_ptr(IrAnalyze *ira,
TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, container_type,
casted_field_ptr->value.type->data.pointer.is_const,
casted_field_ptr->value.type->data.pointer.is_volatile,
+ PtrLenSingle,
parent_ptr_align, 0, 0);
if (instr_is_comptime(casted_field_ptr)) {
@@ -15411,6 +16690,8 @@ static TypeTableEntry *ir_analyze_instruction_offset_of(IrAnalyze *ira,
return ira->codegen->builtin_types.entry_invalid;
ensure_complete_type(ira->codegen, container_type);
+ if (type_is_invalid(container_type))
+ return ira->codegen->builtin_types.entry_invalid;
IrInstruction *field_name_value = instruction->field_name->other;
Buf *field_name = ir_resolve_str(ira, field_name_value);
@@ -15443,6 +16724,974 @@ static TypeTableEntry *ir_analyze_instruction_offset_of(IrAnalyze *ira,
return ira->codegen->builtin_types.entry_num_lit_int;
}
+static void ensure_field_index(TypeTableEntry *type, const char *field_name, size_t index)
+{
+ Buf *field_name_buf;
+
+ assert(type != nullptr && !type_is_invalid(type));
+ // Check for our field by creating a buffer in place, then using the comma operator to free it, so that we don't
+ // leak memory in debug mode.
+ assert(find_struct_type_field(type, field_name_buf = buf_create_from_str(field_name))->src_index == index &&
+ (buf_deinit(field_name_buf), true));
+}
+
+static TypeTableEntry *ir_type_info_get_type(IrAnalyze *ira, const char *type_name, TypeTableEntry *root = nullptr)
+{
+ static ConstExprValue *type_info_var = nullptr;
+ static TypeTableEntry *type_info_type = nullptr;
+ if (type_info_var == nullptr)
+ {
+ type_info_var = get_builtin_value(ira->codegen, "TypeInfo");
+ assert(type_info_var->type->id == TypeTableEntryIdMetaType);
+
+ ensure_complete_type(ira->codegen, type_info_var->data.x_type);
+ if (type_is_invalid(type_info_var->data.x_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ type_info_type = type_info_var->data.x_type;
+ assert(type_info_type->id == TypeTableEntryIdUnion);
+ }
+
+ if (type_name == nullptr && root == nullptr)
+ return type_info_type;
+ else if (type_name == nullptr)
+ return root;
+
+ TypeTableEntry *root_type = (root == nullptr) ? type_info_type : root;
+
+ ScopeDecls *type_info_scope = get_container_scope(root_type);
+ assert(type_info_scope != nullptr);
+
+ Buf field_name = BUF_INIT;
+ buf_init_from_str(&field_name, type_name);
+ auto entry = type_info_scope->decl_table.get(&field_name);
+ buf_deinit(&field_name);
+
+ TldVar *tld = (TldVar *)entry;
+ assert(tld->base.id == TldIdVar);
+
+ VariableTableEntry *var = tld->var;
+
+ ensure_complete_type(ira->codegen, var->value->type);
+ if (type_is_invalid(var->value->type))
+ return ira->codegen->builtin_types.entry_invalid;
+ assert(var->value->type->id == TypeTableEntryIdMetaType);
+ return var->value->data.x_type;
+}
+
+static bool ir_make_type_info_defs(IrAnalyze *ira, ConstExprValue *out_val, ScopeDecls *decls_scope)
+{
+ TypeTableEntry *type_info_definition_type = ir_type_info_get_type(ira, "Definition");
+ ensure_complete_type(ira->codegen, type_info_definition_type);
+ if (type_is_invalid(type_info_definition_type))
+ return false;
+
+ ensure_field_index(type_info_definition_type, "name", 0);
+ ensure_field_index(type_info_definition_type, "is_pub", 1);
+ ensure_field_index(type_info_definition_type, "data", 2);
+
+ TypeTableEntry *type_info_definition_data_type = ir_type_info_get_type(ira, "Data", type_info_definition_type);
+ ensure_complete_type(ira->codegen, type_info_definition_data_type);
+ if (type_is_invalid(type_info_definition_data_type))
+ return false;
+
+ TypeTableEntry *type_info_fn_def_type = ir_type_info_get_type(ira, "FnDef", type_info_definition_data_type);
+ ensure_complete_type(ira->codegen, type_info_fn_def_type);
+ if (type_is_invalid(type_info_fn_def_type))
+ return false;
+
+ TypeTableEntry *type_info_fn_def_inline_type = ir_type_info_get_type(ira, "Inline", type_info_fn_def_type);
+ ensure_complete_type(ira->codegen, type_info_fn_def_inline_type);
+ if (type_is_invalid(type_info_fn_def_inline_type))
+ return false;
+
+ // Loop through our definitions once to figure out how many definitions we will generate info for.
+ auto decl_it = decls_scope->decl_table.entry_iterator();
+ decltype(decls_scope->decl_table)::Entry *curr_entry = nullptr;
+ int definition_count = 0;
+
+ while ((curr_entry = decl_it.next()) != nullptr)
+ {
+ // If the definition is unresolved, force it to be resolved again.
+ if (curr_entry->value->resolution == TldResolutionUnresolved)
+ {
+ resolve_top_level_decl(ira->codegen, curr_entry->value, false, curr_entry->value->source_node);
+ if (curr_entry->value->resolution != TldResolutionOk)
+ {
+ return false;
+ }
+ }
+
+ // Skip comptime blocks and test functions.
+ if (curr_entry->value->id != TldIdCompTime)
+ {
+ if (curr_entry->value->id == TldIdFn)
+ {
+ FnTableEntry *fn_entry = ((TldFn *)curr_entry->value)->fn_entry;
+ if (fn_entry->is_test)
+ continue;
+ }
+
+ definition_count += 1;
+ }
+ }
+
+ ConstExprValue *definition_array = create_const_vals(1);
+ definition_array->special = ConstValSpecialStatic;
+ definition_array->type = get_array_type(ira->codegen, type_info_definition_type, definition_count);
+ definition_array->data.x_array.special = ConstArraySpecialNone;
+ definition_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ definition_array->data.x_array.s_none.elements = create_const_vals(definition_count);
+ init_const_slice(ira->codegen, out_val, definition_array, 0, definition_count, false);
+
+ // Loop through the definitions and generate info.
+ decl_it = decls_scope->decl_table.entry_iterator();
+ curr_entry = nullptr;
+ int definition_index = 0;
+ while ((curr_entry = decl_it.next()) != nullptr)
+ {
+ // Skip comptime blocks and test functions.
+ if (curr_entry->value->id == TldIdCompTime)
+ continue;
+ else if (curr_entry->value->id == TldIdFn)
+ {
+ FnTableEntry *fn_entry = ((TldFn *)curr_entry->value)->fn_entry;
+ if (fn_entry->is_test)
+ continue;
+ }
+
+ ConstExprValue *definition_val = &definition_array->data.x_array.s_none.elements[definition_index];
+
+ definition_val->special = ConstValSpecialStatic;
+ definition_val->type = type_info_definition_type;
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ ConstExprValue *name = create_const_str_lit(ira->codegen, curr_entry->key);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(curr_entry->key), true);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_bool;
+ inner_fields[1].data.x_bool = curr_entry->value->visib_mod == VisibModPub;
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = type_info_definition_data_type;
+ inner_fields[2].data.x_union.parent.id = ConstParentIdStruct;
+ inner_fields[2].data.x_union.parent.data.p_struct.struct_val = definition_val;
+ inner_fields[2].data.x_union.parent.data.p_struct.field_index = 1;
+
+ switch (curr_entry->value->id)
+ {
+ case TldIdVar:
+ {
+ VariableTableEntry *var = ((TldVar *)curr_entry->value)->var;
+ ensure_complete_type(ira->codegen, var->value->type);
+ if (type_is_invalid(var->value->type))
+ return false;
+
+ if (var->value->type->id == TypeTableEntryIdMetaType)
+ {
+ // We have a variable of type 'type', so it's actually a type definition.
+ // 0: Data.Type: type
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 0);
+ inner_fields[2].data.x_union.payload = var->value;
+ }
+ else
+ {
+ // We have a variable of another type, so we store the type of the variable.
+ // 1: Data.Var: type
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 1);
+
+ ConstExprValue *payload = create_const_vals(1);
+ payload->type = ira->codegen->builtin_types.entry_type;
+ payload->data.x_type = var->value->type;
+
+ inner_fields[2].data.x_union.payload = payload;
+ }
+
+ break;
+ }
+ case TldIdFn:
+ {
+ // 2: Data.Fn: Data.FnDef
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 2);
+
+ FnTableEntry *fn_entry = ((TldFn *)curr_entry->value)->fn_entry;
+ assert(!fn_entry->is_test);
+
+ AstNodeFnProto *fn_node = (AstNodeFnProto *)(fn_entry->proto_node);
+
+ ConstExprValue *fn_def_val = create_const_vals(1);
+ fn_def_val->special = ConstValSpecialStatic;
+ fn_def_val->type = type_info_fn_def_type;
+ fn_def_val->data.x_struct.parent.id = ConstParentIdUnion;
+ fn_def_val->data.x_struct.parent.data.p_union.union_val = &inner_fields[2];
+
+ ConstExprValue *fn_def_fields = create_const_vals(9);
+ fn_def_val->data.x_struct.fields = fn_def_fields;
+
+ // fn_type: type
+ ensure_field_index(fn_def_val->type, "fn_type", 0);
+ fn_def_fields[0].special = ConstValSpecialStatic;
+ fn_def_fields[0].type = ira->codegen->builtin_types.entry_type;
+ fn_def_fields[0].data.x_type = fn_entry->type_entry;
+ // inline_type: Data.FnDef.Inline
+ ensure_field_index(fn_def_val->type, "inline_type", 1);
+ fn_def_fields[1].special = ConstValSpecialStatic;
+ fn_def_fields[1].type = type_info_fn_def_inline_type;
+ bigint_init_unsigned(&fn_def_fields[1].data.x_enum_tag, fn_entry->fn_inline);
+ // calling_convention: TypeInfo.CallingConvention
+ ensure_field_index(fn_def_val->type, "calling_convention", 2);
+ fn_def_fields[2].special = ConstValSpecialStatic;
+ fn_def_fields[2].type = ir_type_info_get_type(ira, "CallingConvention");
+ bigint_init_unsigned(&fn_def_fields[2].data.x_enum_tag, fn_node->cc);
+ // is_var_args: bool
+ ensure_field_index(fn_def_val->type, "is_var_args", 3);
+ bool is_varargs = fn_node->is_var_args;
+ fn_def_fields[3].special = ConstValSpecialStatic;
+ fn_def_fields[3].type = ira->codegen->builtin_types.entry_bool;
+ fn_def_fields[3].data.x_bool = is_varargs;
+ // is_extern: bool
+ ensure_field_index(fn_def_val->type, "is_extern", 4);
+ fn_def_fields[4].special = ConstValSpecialStatic;
+ fn_def_fields[4].type = ira->codegen->builtin_types.entry_bool;
+ fn_def_fields[4].data.x_bool = fn_node->is_extern;
+ // is_export: bool
+ ensure_field_index(fn_def_val->type, "is_export", 5);
+ fn_def_fields[5].special = ConstValSpecialStatic;
+ fn_def_fields[5].type = ira->codegen->builtin_types.entry_bool;
+ fn_def_fields[5].data.x_bool = fn_node->is_export;
+ // lib_name: ?[]const u8
+ ensure_field_index(fn_def_val->type, "lib_name", 6);
+ fn_def_fields[6].special = ConstValSpecialStatic;
+ TypeTableEntry *u8_ptr = get_pointer_to_type_extra(
+ ira->codegen, ira->codegen->builtin_types.entry_u8,
+ true, false, PtrLenUnknown,
+ get_abi_alignment(ira->codegen, ira->codegen->builtin_types.entry_u8),
+ 0, 0);
+ fn_def_fields[6].type = get_optional_type(ira->codegen, get_slice_type(ira->codegen, u8_ptr));
+ if (fn_node->is_extern && buf_len(fn_node->lib_name) > 0) {
+ fn_def_fields[6].data.x_optional = create_const_vals(1);
+ ConstExprValue *lib_name = create_const_str_lit(ira->codegen, fn_node->lib_name);
+ init_const_slice(ira->codegen, fn_def_fields[6].data.x_optional, lib_name, 0, buf_len(fn_node->lib_name), true);
+ } else {
+ fn_def_fields[6].data.x_optional = nullptr;
+ }
+ // return_type: type
+ ensure_field_index(fn_def_val->type, "return_type", 7);
+ fn_def_fields[7].special = ConstValSpecialStatic;
+ fn_def_fields[7].type = ira->codegen->builtin_types.entry_type;
+ if (fn_entry->src_implicit_return_type != nullptr)
+ fn_def_fields[7].data.x_type = fn_entry->src_implicit_return_type;
+ else if (fn_entry->type_entry->data.fn.gen_return_type != nullptr)
+ fn_def_fields[7].data.x_type = fn_entry->type_entry->data.fn.gen_return_type;
+ else
+ fn_def_fields[7].data.x_type = fn_entry->type_entry->data.fn.fn_type_id.return_type;
+ // arg_names: [][] const u8
+ ensure_field_index(fn_def_val->type, "arg_names", 8);
+ size_t fn_arg_count = fn_entry->variable_list.length;
+ ConstExprValue *fn_arg_name_array = create_const_vals(1);
+ fn_arg_name_array->special = ConstValSpecialStatic;
+ fn_arg_name_array->type = get_array_type(ira->codegen,
+ get_slice_type(ira->codegen, u8_ptr), fn_arg_count);
+ fn_arg_name_array->data.x_array.special = ConstArraySpecialNone;
+ fn_arg_name_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ fn_arg_name_array->data.x_array.s_none.elements = create_const_vals(fn_arg_count);
+
+ init_const_slice(ira->codegen, &fn_def_fields[8], fn_arg_name_array, 0, fn_arg_count, false);
+
+ for (size_t fn_arg_index = 0; fn_arg_index < fn_arg_count; fn_arg_index++)
+ {
+ VariableTableEntry *arg_var = fn_entry->variable_list.at(fn_arg_index);
+ ConstExprValue *fn_arg_name_val = &fn_arg_name_array->data.x_array.s_none.elements[fn_arg_index];
+ ConstExprValue *arg_name = create_const_str_lit(ira->codegen, &arg_var->name);
+ init_const_slice(ira->codegen, fn_arg_name_val, arg_name, 0, buf_len(&arg_var->name), true);
+ fn_arg_name_val->data.x_struct.parent.id = ConstParentIdArray;
+ fn_arg_name_val->data.x_struct.parent.data.p_array.array_val = fn_arg_name_array;
+ fn_arg_name_val->data.x_struct.parent.data.p_array.elem_index = fn_arg_index;
+ }
+
+ inner_fields[2].data.x_union.payload = fn_def_val;
+ break;
+ }
+ case TldIdContainer:
+ {
+ TypeTableEntry *type_entry = ((TldContainer *)curr_entry->value)->type_entry;
+ ensure_complete_type(ira->codegen, type_entry);
+ if (type_is_invalid(type_entry))
+ return false;
+
+ // This is a type.
+ bigint_init_unsigned(&inner_fields[2].data.x_union.tag, 0);
+
+ ConstExprValue *payload = create_const_vals(1);
+ payload->type = ira->codegen->builtin_types.entry_type;
+ payload->data.x_type = type_entry;
+
+ inner_fields[2].data.x_union.payload = payload;
+
+ break;
+ }
+ default:
+ zig_unreachable();
+ }
+
+ definition_val->data.x_struct.fields = inner_fields;
+ definition_index++;
+ }
+
+ assert(definition_index == definition_count);
+ return true;
+}
+
+static ConstExprValue *ir_make_type_info_value(IrAnalyze *ira, TypeTableEntry *type_entry) {
+ assert(type_entry != nullptr);
+ assert(!type_is_invalid(type_entry));
+
+ ensure_complete_type(ira->codegen, type_entry);
+ if (type_is_invalid(type_entry))
+ return nullptr;
+
+ const auto make_enum_field_val = [ira](ConstExprValue *enum_field_val, TypeEnumField *enum_field,
+ TypeTableEntry *type_info_enum_field_type) {
+ enum_field_val->special = ConstValSpecialStatic;
+ enum_field_val->type = type_info_enum_field_type;
+
+ ConstExprValue *inner_fields = create_const_vals(2);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_usize;
+
+ ConstExprValue *name = create_const_str_lit(ira->codegen, enum_field->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(enum_field->name), true);
+
+ bigint_init_bigint(&inner_fields[1].data.x_bigint, &enum_field->value);
+
+ enum_field_val->data.x_struct.fields = inner_fields;
+ };
+
+ const auto create_ptr_like_type_info = [ira](TypeTableEntry *ptr_type_entry) {
+ TypeTableEntry *attrs_type;
+ uint32_t size_enum_index;
+ if (is_slice(ptr_type_entry)) {
+ attrs_type = ptr_type_entry->data.structure.fields[slice_ptr_index].type_entry;
+ size_enum_index = 2;
+ } else if (ptr_type_entry->id == TypeTableEntryIdPointer) {
+ attrs_type = ptr_type_entry;
+ size_enum_index = (ptr_type_entry->data.pointer.ptr_len == PtrLenSingle) ? 0 : 1;
+ } else {
+ zig_unreachable();
+ }
+
+ TypeTableEntry *type_info_pointer_type = ir_type_info_get_type(ira, "Pointer");
+ ensure_complete_type(ira->codegen, type_info_pointer_type);
+ assert(!type_is_invalid(type_info_pointer_type));
+
+ ConstExprValue *result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = type_info_pointer_type;
+
+ ConstExprValue *fields = create_const_vals(5);
+ result->data.x_struct.fields = fields;
+
+ // size: Size
+ ensure_field_index(result->type, "size", 0);
+ TypeTableEntry *type_info_pointer_size_type = ir_type_info_get_type(ira, "Size", type_info_pointer_type);
+ ensure_complete_type(ira->codegen, type_info_pointer_size_type);
+ assert(!type_is_invalid(type_info_pointer_size_type));
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = type_info_pointer_size_type;
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, size_enum_index);
+
+ // is_const: bool
+ ensure_field_index(result->type, "is_const", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_bool;
+ fields[1].data.x_bool = attrs_type->data.pointer.is_const;
+ // is_volatile: bool
+ ensure_field_index(result->type, "is_volatile", 2);
+ fields[2].special = ConstValSpecialStatic;
+ fields[2].type = ira->codegen->builtin_types.entry_bool;
+ fields[2].data.x_bool = attrs_type->data.pointer.is_volatile;
+ // alignment: u32
+ ensure_field_index(result->type, "alignment", 3);
+ fields[3].special = ConstValSpecialStatic;
+ fields[3].type = ira->codegen->builtin_types.entry_u32;
+ bigint_init_unsigned(&fields[3].data.x_bigint, attrs_type->data.pointer.alignment);
+ // child: type
+ ensure_field_index(result->type, "child", 4);
+ fields[4].special = ConstValSpecialStatic;
+ fields[4].type = ira->codegen->builtin_types.entry_type;
+ fields[4].data.x_type = attrs_type->data.pointer.child_type;
+
+ return result;
+ };
+
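(Illustrative sketch, not from ir.cpp: the size_enum_index values chosen above assume the std TypeInfo.Pointer.Size tag order is One, Many, Slice.)

    // Hypothetical mirror of the assumed TypeInfo.Pointer.Size ordering.
    enum class PtrSizeTag { One = 0, Many = 1, Slice = 2 };
    static PtrSizeTag classify_ptr_like_sketch(bool is_slice_type, bool ptr_len_single) {
        if (is_slice_type) return PtrSizeTag::Slice;                  // size_enum_index = 2
        return ptr_len_single ? PtrSizeTag::One : PtrSizeTag::Many;   // 0 or 1
    }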
+ if (type_entry == ira->codegen->builtin_types.entry_global_error_set) {
+ zig_panic("TODO implement @typeInfo for global error set");
+ }
+
+ ConstExprValue *result = nullptr;
+ switch (type_entry->id)
+ {
+ case TypeTableEntryIdInvalid:
+ zig_unreachable();
+ case TypeTableEntryIdMetaType:
+ case TypeTableEntryIdVoid:
+ case TypeTableEntryIdBool:
+ case TypeTableEntryIdUnreachable:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
+ case TypeTableEntryIdNamespace:
+ case TypeTableEntryIdBlock:
+ case TypeTableEntryIdArgTuple:
+ case TypeTableEntryIdOpaque:
+ return nullptr;
+ default:
+ {
+ // Look up an available value in our cache.
+ auto entry = ira->codegen->type_info_cache.maybe_get(type_entry);
+ if (entry != nullptr)
+ return entry->value;
+
+ // Fallthrough if we don't find one.
+ }
+ case TypeTableEntryIdInt:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Int");
+
+ ConstExprValue *fields = create_const_vals(2);
+ result->data.x_struct.fields = fields;
+
+ // is_signed: bool
+ ensure_field_index(result->type, "is_signed", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_bool;
+ fields[0].data.x_bool = type_entry->data.integral.is_signed;
+ // bits: u8
+ ensure_field_index(result->type, "bits", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_u8;
+ bigint_init_unsigned(&fields[1].data.x_bigint, type_entry->data.integral.bit_count);
+
+ break;
+ }
+ case TypeTableEntryIdFloat:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Float");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // bits: u8
+ ensure_field_index(result->type, "bits", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_u8;
+ bigint_init_unsigned(&fields->data.x_bigint, type_entry->data.floating.bit_count);
+
+ break;
+ }
+ case TypeTableEntryIdPointer:
+ {
+ result = create_ptr_like_type_info(type_entry);
+ break;
+ }
+ case TypeTableEntryIdArray:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Array");
+
+ ConstExprValue *fields = create_const_vals(2);
+ result->data.x_struct.fields = fields;
+
+ // len: usize
+ ensure_field_index(result->type, "len", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_usize;
+ bigint_init_unsigned(&fields[0].data.x_bigint, type_entry->data.array.len);
+ // child: type
+ ensure_field_index(result->type, "child", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_type;
+ fields[1].data.x_type = type_entry->data.array.child_type;
+
+ break;
+ }
+ case TypeTableEntryIdOptional:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Optional");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // child: type
+ ensure_field_index(result->type, "child", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_type;
+ fields[0].data.x_type = type_entry->data.maybe.child_type;
+
+ break;
+ }
+ case TypeTableEntryIdPromise:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Promise");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // child: ?type
+ ensure_field_index(result->type, "child", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = get_optional_type(ira->codegen, ira->codegen->builtin_types.entry_type);
+
+ if (type_entry->data.promise.result_type == nullptr)
+ fields[0].data.x_optional = nullptr;
+ else {
+ ConstExprValue *child_type = create_const_vals(1);
+ child_type->special = ConstValSpecialStatic;
+ child_type->type = ira->codegen->builtin_types.entry_type;
+ child_type->data.x_type = type_entry->data.promise.result_type;
+ fields[0].data.x_optional = child_type;
+ }
+
+ break;
+ }
+ case TypeTableEntryIdEnum:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Enum");
+
+ ConstExprValue *fields = create_const_vals(4);
+ result->data.x_struct.fields = fields;
+
+ // layout: ContainerLayout
+ ensure_field_index(result->type, "layout", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "ContainerLayout");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.enumeration.layout);
+ // tag_type: type
+ ensure_field_index(result->type, "tag_type", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_type;
+ fields[1].data.x_type = type_entry->data.enumeration.tag_int_type;
+ // fields: []TypeInfo.EnumField
+ ensure_field_index(result->type, "fields", 2);
+
+ TypeTableEntry *type_info_enum_field_type = ir_type_info_get_type(ira, "EnumField");
+ uint32_t enum_field_count = type_entry->data.enumeration.src_field_count;
+
+ ConstExprValue *enum_field_array = create_const_vals(1);
+ enum_field_array->special = ConstValSpecialStatic;
+ enum_field_array->type = get_array_type(ira->codegen, type_info_enum_field_type, enum_field_count);
+ enum_field_array->data.x_array.special = ConstArraySpecialNone;
+ enum_field_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ enum_field_array->data.x_array.s_none.elements = create_const_vals(enum_field_count);
+
+ init_const_slice(ira->codegen, &fields[2], enum_field_array, 0, enum_field_count, false);
+
+ for (uint32_t enum_field_index = 0; enum_field_index < enum_field_count; enum_field_index++)
+ {
+ TypeEnumField *enum_field = &type_entry->data.enumeration.fields[enum_field_index];
+ ConstExprValue *enum_field_val = &enum_field_array->data.x_array.s_none.elements[enum_field_index];
+ make_enum_field_val(enum_field_val, enum_field, type_info_enum_field_type);
+ enum_field_val->data.x_struct.parent.id = ConstParentIdArray;
+ enum_field_val->data.x_struct.parent.data.p_array.array_val = enum_field_array;
+ enum_field_val->data.x_struct.parent.data.p_array.elem_index = enum_field_index;
+ }
+ // defs: []TypeInfo.Definition
+ ensure_field_index(result->type, "defs", 3);
+ if (!ir_make_type_info_defs(ira, &fields[3], type_entry->data.enumeration.decls_scope))
+ return nullptr;
+
+ break;
+ }
+ case TypeTableEntryIdErrorSet:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "ErrorSet");
+
+ ConstExprValue *fields = create_const_vals(1);
+ result->data.x_struct.fields = fields;
+
+ // errors: []TypeInfo.Error
+ ensure_field_index(result->type, "errors", 0);
+
+ TypeTableEntry *type_info_error_type = ir_type_info_get_type(ira, "Error");
+ uint32_t error_count = type_entry->data.error_set.err_count;
+ ConstExprValue *error_array = create_const_vals(1);
+ error_array->special = ConstValSpecialStatic;
+ error_array->type = get_array_type(ira->codegen, type_info_error_type, error_count);
+ error_array->data.x_array.special = ConstArraySpecialNone;
+ error_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ error_array->data.x_array.s_none.elements = create_const_vals(error_count);
+
+ init_const_slice(ira->codegen, &fields[0], error_array, 0, error_count, false);
+ for (uint32_t error_index = 0; error_index < error_count; error_index++)
+ {
+ ErrorTableEntry *error = type_entry->data.error_set.errors[error_index];
+ ConstExprValue *error_val = &error_array->data.x_array.s_none.elements[error_index];
+
+ error_val->special = ConstValSpecialStatic;
+ error_val->type = type_info_error_type;
+
+ ConstExprValue *inner_fields = create_const_vals(2);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_usize;
+
+ ConstExprValue *name = nullptr;
+ if (error->cached_error_name_val != nullptr)
+ name = error->cached_error_name_val;
+ if (name == nullptr)
+ name = create_const_str_lit(ira->codegen, &error->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(&error->name), true);
+ bigint_init_unsigned(&inner_fields[1].data.x_bigint, error->value);
+
+ error_val->data.x_struct.fields = inner_fields;
+ error_val->data.x_struct.parent.id = ConstParentIdArray;
+ error_val->data.x_struct.parent.data.p_array.array_val = error_array;
+ error_val->data.x_struct.parent.data.p_array.elem_index = error_index;
+ }
+
+ break;
+ }
+ case TypeTableEntryIdErrorUnion:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "ErrorUnion");
+
+ ConstExprValue *fields = create_const_vals(2);
+ result->data.x_struct.fields = fields;
+
+ // error_set: type
+ ensure_field_index(result->type, "error_set", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ira->codegen->builtin_types.entry_type;
+ fields[0].data.x_type = type_entry->data.error_union.err_set_type;
+
+ // payload: type
+ ensure_field_index(result->type, "payload", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_type;
+ fields[1].data.x_type = type_entry->data.error_union.payload_type;
+
+ break;
+ }
+ case TypeTableEntryIdUnion:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Union");
+
+ ConstExprValue *fields = create_const_vals(4);
+ result->data.x_struct.fields = fields;
+
+ // layout: ContainerLayout
+ ensure_field_index(result->type, "layout", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "ContainerLayout");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.unionation.layout);
+ // tag_type: ?type
+ ensure_field_index(result->type, "tag_type", 1);
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = get_optional_type(ira->codegen, ira->codegen->builtin_types.entry_type);
+
+ AstNode *union_decl_node = type_entry->data.unionation.decl_node;
+ if (union_decl_node->data.container_decl.auto_enum ||
+ union_decl_node->data.container_decl.init_arg_expr != nullptr)
+ {
+ ConstExprValue *tag_type = create_const_vals(1);
+ tag_type->special = ConstValSpecialStatic;
+ tag_type->type = ira->codegen->builtin_types.entry_type;
+ tag_type->data.x_type = type_entry->data.unionation.tag_type;
+ fields[1].data.x_optional = tag_type;
+ }
+ else
+ fields[1].data.x_optional = nullptr;
+ // fields: []TypeInfo.UnionField
+ ensure_field_index(result->type, "fields", 2);
+
+ TypeTableEntry *type_info_union_field_type = ir_type_info_get_type(ira, "UnionField");
+ uint32_t union_field_count = type_entry->data.unionation.src_field_count;
+
+ ConstExprValue *union_field_array = create_const_vals(1);
+ union_field_array->special = ConstValSpecialStatic;
+ union_field_array->type = get_array_type(ira->codegen, type_info_union_field_type, union_field_count);
+ union_field_array->data.x_array.special = ConstArraySpecialNone;
+ union_field_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ union_field_array->data.x_array.s_none.elements = create_const_vals(union_field_count);
+
+ init_const_slice(ira->codegen, &fields[2], union_field_array, 0, union_field_count, false);
+
+ TypeTableEntry *type_info_enum_field_type = ir_type_info_get_type(ira, "EnumField");
+
+ for (uint32_t union_field_index = 0; union_field_index < union_field_count; union_field_index++) {
+ TypeUnionField *union_field = &type_entry->data.unionation.fields[union_field_index];
+ ConstExprValue *union_field_val = &union_field_array->data.x_array.s_none.elements[union_field_index];
+
+ union_field_val->special = ConstValSpecialStatic;
+ union_field_val->type = type_info_union_field_type;
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = get_optional_type(ira->codegen, type_info_enum_field_type);
+
+ if (fields[1].data.x_optional == nullptr) {
+ inner_fields[1].data.x_optional = nullptr;
+ } else {
+ inner_fields[1].data.x_optional = create_const_vals(1);
+ make_enum_field_val(inner_fields[1].data.x_optional, union_field->enum_field, type_info_enum_field_type);
+ }
+
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = ira->codegen->builtin_types.entry_type;
+ inner_fields[2].data.x_type = union_field->type_entry;
+
+ ConstExprValue *name = create_const_str_lit(ira->codegen, union_field->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(union_field->name), true);
+
+ union_field_val->data.x_struct.fields = inner_fields;
+ union_field_val->data.x_struct.parent.id = ConstParentIdArray;
+ union_field_val->data.x_struct.parent.data.p_array.array_val = union_field_array;
+ union_field_val->data.x_struct.parent.data.p_array.elem_index = union_field_index;
+ }
+ // defs: []TypeInfo.Definition
+ ensure_field_index(result->type, "defs", 3);
+ if (!ir_make_type_info_defs(ira, &fields[3], type_entry->data.unionation.decls_scope))
+ return nullptr;
+
+ break;
+ }
+ case TypeTableEntryIdStruct:
+ {
+ if (type_entry->data.structure.is_slice) {
+ result = create_ptr_like_type_info(type_entry);
+ break;
+ }
+
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Struct");
+
+ ConstExprValue *fields = create_const_vals(3);
+ result->data.x_struct.fields = fields;
+
+ // layout: ContainerLayout
+ ensure_field_index(result->type, "layout", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "ContainerLayout");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.structure.layout);
+ // fields: []TypeInfo.StructField
+ ensure_field_index(result->type, "fields", 1);
+
+ TypeTableEntry *type_info_struct_field_type = ir_type_info_get_type(ira, "StructField");
+ uint32_t struct_field_count = type_entry->data.structure.src_field_count;
+
+ ConstExprValue *struct_field_array = create_const_vals(1);
+ struct_field_array->special = ConstValSpecialStatic;
+ struct_field_array->type = get_array_type(ira->codegen, type_info_struct_field_type, struct_field_count);
+ struct_field_array->data.x_array.special = ConstArraySpecialNone;
+ struct_field_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ struct_field_array->data.x_array.s_none.elements = create_const_vals(struct_field_count);
+
+ init_const_slice(ira->codegen, &fields[1], struct_field_array, 0, struct_field_count, false);
+
+ for (uint32_t struct_field_index = 0; struct_field_index < struct_field_count; struct_field_index++) {
+ TypeStructField *struct_field = &type_entry->data.structure.fields[struct_field_index];
+ ConstExprValue *struct_field_val = &struct_field_array->data.x_array.s_none.elements[struct_field_index];
+
+ struct_field_val->special = ConstValSpecialStatic;
+ struct_field_val->type = type_info_struct_field_type;
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = get_optional_type(ira->codegen, ira->codegen->builtin_types.entry_usize);
+
+ if (!type_has_bits(struct_field->type_entry)) {
+ inner_fields[1].data.x_optional = nullptr;
+ } else {
+ size_t byte_offset = LLVMOffsetOfElement(ira->codegen->target_data_ref, type_entry->type_ref, struct_field->gen_index);
+ inner_fields[1].data.x_optional = create_const_vals(1);
+ inner_fields[1].data.x_optional->special = ConstValSpecialStatic;
+ inner_fields[1].data.x_optional->type = ira->codegen->builtin_types.entry_usize;
+ bigint_init_unsigned(&inner_fields[1].data.x_optional->data.x_bigint, byte_offset);
+ }
+
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = ira->codegen->builtin_types.entry_type;
+ inner_fields[2].data.x_type = struct_field->type_entry;
+
+ ConstExprValue *name = create_const_str_lit(ira->codegen, struct_field->name);
+ init_const_slice(ira->codegen, &inner_fields[0], name, 0, buf_len(struct_field->name), true);
+
+ struct_field_val->data.x_struct.fields = inner_fields;
+ struct_field_val->data.x_struct.parent.id = ConstParentIdArray;
+ struct_field_val->data.x_struct.parent.data.p_array.array_val = struct_field_array;
+ struct_field_val->data.x_struct.parent.data.p_array.elem_index = struct_field_index;
+ }
+ // defs: []TypeInfo.Definition
+ ensure_field_index(result->type, "defs", 2);
+ if (!ir_make_type_info_defs(ira, &fields[2], type_entry->data.structure.decls_scope))
+ return nullptr;
+
+ break;
+ }
+ case TypeTableEntryIdFn:
+ {
+ result = create_const_vals(1);
+ result->special = ConstValSpecialStatic;
+ result->type = ir_type_info_get_type(ira, "Fn");
+
+ ConstExprValue *fields = create_const_vals(6);
+ result->data.x_struct.fields = fields;
+
+ // calling_convention: TypeInfo.CallingConvention
+ ensure_field_index(result->type, "calling_convention", 0);
+ fields[0].special = ConstValSpecialStatic;
+ fields[0].type = ir_type_info_get_type(ira, "CallingConvention");
+ bigint_init_unsigned(&fields[0].data.x_enum_tag, type_entry->data.fn.fn_type_id.cc);
+ // is_generic: bool
+ ensure_field_index(result->type, "is_generic", 1);
+ bool is_generic = type_entry->data.fn.is_generic;
+ fields[1].special = ConstValSpecialStatic;
+ fields[1].type = ira->codegen->builtin_types.entry_bool;
+ fields[1].data.x_bool = is_generic;
+ // is_varargs: bool
+ ensure_field_index(result->type, "is_var_args", 2);
+ bool is_varargs = type_entry->data.fn.fn_type_id.is_var_args;
+ fields[2].special = ConstValSpecialStatic;
+ fields[2].type = ira->codegen->builtin_types.entry_bool;
+ fields[2].data.x_bool = type_entry->data.fn.fn_type_id.is_var_args;
+ // return_type: ?type
+ ensure_field_index(result->type, "return_type", 3);
+ fields[3].special = ConstValSpecialStatic;
+ fields[3].type = get_optional_type(ira->codegen, ira->codegen->builtin_types.entry_type);
+ if (type_entry->data.fn.fn_type_id.return_type == nullptr)
+ fields[3].data.x_optional = nullptr;
+ else {
+ ConstExprValue *return_type = create_const_vals(1);
+ return_type->special = ConstValSpecialStatic;
+ return_type->type = ira->codegen->builtin_types.entry_type;
+ return_type->data.x_type = type_entry->data.fn.fn_type_id.return_type;
+ fields[3].data.x_optional = return_type;
+ }
+ // async_allocator_type: type
+ ensure_field_index(result->type, "async_allocator_type", 4);
+ fields[4].special = ConstValSpecialStatic;
+ fields[4].type = get_optional_type(ira->codegen, ira->codegen->builtin_types.entry_type);
+ if (type_entry->data.fn.fn_type_id.async_allocator_type == nullptr)
+ fields[4].data.x_optional = nullptr;
+ else {
+ ConstExprValue *async_alloc_type = create_const_vals(1);
+ async_alloc_type->special = ConstValSpecialStatic;
+ async_alloc_type->type = ira->codegen->builtin_types.entry_type;
+ async_alloc_type->data.x_type = type_entry->data.fn.fn_type_id.async_allocator_type;
+ fields[4].data.x_optional = async_alloc_type;
+ }
+ // args: []TypeInfo.FnArg
+ TypeTableEntry *type_info_fn_arg_type = ir_type_info_get_type(ira, "FnArg");
+ size_t fn_arg_count = type_entry->data.fn.fn_type_id.param_count -
+ (is_varargs && type_entry->data.fn.fn_type_id.cc != CallingConventionC);
+
+ ConstExprValue *fn_arg_array = create_const_vals(1);
+ fn_arg_array->special = ConstValSpecialStatic;
+ fn_arg_array->type = get_array_type(ira->codegen, type_info_fn_arg_type, fn_arg_count);
+ fn_arg_array->data.x_array.special = ConstArraySpecialNone;
+ fn_arg_array->data.x_array.s_none.parent.id = ConstParentIdNone;
+ fn_arg_array->data.x_array.s_none.elements = create_const_vals(fn_arg_count);
+
+ init_const_slice(ira->codegen, &fields[5], fn_arg_array, 0, fn_arg_count, false);
+
+ for (size_t fn_arg_index = 0; fn_arg_index < fn_arg_count; fn_arg_index++)
+ {
+ FnTypeParamInfo *fn_param_info = &type_entry->data.fn.fn_type_id.param_info[fn_arg_index];
+ ConstExprValue *fn_arg_val = &fn_arg_array->data.x_array.s_none.elements[fn_arg_index];
+
+ fn_arg_val->special = ConstValSpecialStatic;
+ fn_arg_val->type = type_info_fn_arg_type;
+
+ bool arg_is_generic = fn_param_info->type == nullptr;
+ if (arg_is_generic) assert(is_generic);
+
+ ConstExprValue *inner_fields = create_const_vals(3);
+ inner_fields[0].special = ConstValSpecialStatic;
+ inner_fields[0].type = ira->codegen->builtin_types.entry_bool;
+ inner_fields[0].data.x_bool = arg_is_generic;
+ inner_fields[1].special = ConstValSpecialStatic;
+ inner_fields[1].type = ira->codegen->builtin_types.entry_bool;
+ inner_fields[1].data.x_bool = fn_param_info->is_noalias;
+ inner_fields[2].special = ConstValSpecialStatic;
+ inner_fields[2].type = get_optional_type(ira->codegen, ira->codegen->builtin_types.entry_type);
+
+ if (arg_is_generic)
+ inner_fields[2].data.x_optional = nullptr;
+ else {
+ ConstExprValue *arg_type = create_const_vals(1);
+ arg_type->special = ConstValSpecialStatic;
+ arg_type->type = ira->codegen->builtin_types.entry_type;
+ arg_type->data.x_type = fn_param_info->type;
+ inner_fields[2].data.x_optional = arg_type;
+ }
+
+ fn_arg_val->data.x_struct.fields = inner_fields;
+ fn_arg_val->data.x_struct.parent.id = ConstParentIdArray;
+ fn_arg_val->data.x_struct.parent.data.p_array.array_val = fn_arg_array;
+ fn_arg_val->data.x_struct.parent.data.p_array.elem_index = fn_arg_index;
+ }
+
+ break;
+ }
+ case TypeTableEntryIdBoundFn:
+ {
+ TypeTableEntry *fn_type = type_entry->data.bound_fn.fn_type;
+ assert(fn_type->id == TypeTableEntryIdFn);
+ result = ir_make_type_info_value(ira, fn_type);
+
+ break;
+ }
+ }
+
+ assert(result != nullptr);
+ ira->codegen->type_info_cache.put(type_entry, result);
+ return result;
+}
+
+static TypeTableEntry *ir_analyze_instruction_type_info(IrAnalyze *ira,
+ IrInstructionTypeInfo *instruction)
+{
+ IrInstruction *type_value = instruction->type_value->other;
+ TypeTableEntry *type_entry = ir_resolve_type(ira, type_value);
+ if (type_is_invalid(type_entry))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ TypeTableEntry *result_type = ir_type_info_get_type(ira, nullptr);
+
+ ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+ out_val->type = result_type;
+ bigint_init_unsigned(&out_val->data.x_union.tag, type_id_index(type_entry));
+
+ ConstExprValue *payload = ir_make_type_info_value(ira, type_entry);
+ out_val->data.x_union.payload = payload;
+
+ if (payload != nullptr)
+ {
+ assert(payload->type->id == TypeTableEntryIdStruct);
+ payload->data.x_struct.parent.id = ConstParentIdUnion;
+ payload->data.x_struct.parent.data.p_union.union_val = out_val;
+ }
+
+ return result_type;
+}
+
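The Fn case above is what a user-level @typeInfo call observes for a function type. A minimal sketch in Zig of this era (not part of the patch; the field names follow the TypeInfo.Fn layout populated above and the exact builtin declaration is assumed):

    const assert = @import("std").debug.assert;

    test "typeInfo of a function type" {
        const info = @typeInfo(fn (u8, bool) i32);
        assert(!info.Fn.is_generic);
        assert(!info.Fn.is_var_args);
        assert(info.Fn.return_type.? == i32);
        assert(info.Fn.args.len == 2);
    }
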
static TypeTableEntry *ir_analyze_instruction_type_id(IrAnalyze *ira,
IrInstructionTypeId *instruction)
{
@@ -15456,7 +17705,7 @@ static TypeTableEntry *ir_analyze_instruction_type_id(IrAnalyze *ira,
TypeTableEntry *result_type = var_value->data.x_type;
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
- bigint_init_unsigned(&out_val->data.x_enum_tag, type_id_index(type_entry->id));
+ bigint_init_unsigned(&out_val->data.x_enum_tag, type_id_index(type_entry));
return result_type;
}
@@ -15659,10 +17908,20 @@ static TypeTableEntry *ir_analyze_instruction_embed_file(IrAnalyze *ira, IrInstr
}
static TypeTableEntry *ir_analyze_instruction_cmpxchg(IrAnalyze *ira, IrInstructionCmpxchg *instruction) {
+ TypeTableEntry *operand_type = ir_resolve_atomic_operand_type(ira, instruction->type_value->other);
+ if (type_is_invalid(operand_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
IrInstruction *ptr = instruction->ptr->other;
if (type_is_invalid(ptr->value.type))
return ira->codegen->builtin_types.entry_invalid;
+ // TODO let this be volatile
+ TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, operand_type, false);
+ IrInstruction *casted_ptr = ir_implicit_cast(ira, ptr, ptr_type);
+ if (type_is_invalid(casted_ptr->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
IrInstruction *cmp_value = instruction->cmp_value->other;
if (type_is_invalid(cmp_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
@@ -15687,28 +17946,11 @@ static TypeTableEntry *ir_analyze_instruction_cmpxchg(IrAnalyze *ira, IrInstruct
if (!ir_resolve_atomic_order(ira, failure_order_value, &failure_order))
return ira->codegen->builtin_types.entry_invalid;
- if (ptr->value.type->id != TypeTableEntryIdPointer) {
- ir_add_error(ira, instruction->ptr,
- buf_sprintf("expected pointer argument, found '%s'", buf_ptr(&ptr->value.type->name)));
- return ira->codegen->builtin_types.entry_invalid;
- }
-
- TypeTableEntry *child_type = ptr->value.type->data.pointer.child_type;
-
- uint32_t align_bytes = ptr->value.type->data.pointer.alignment;
- uint64_t size_bytes = type_size(ira->codegen, child_type);
- if (align_bytes < size_bytes) {
- ir_add_error(ira, instruction->ptr,
- buf_sprintf("expected pointer alignment of at least %" ZIG_PRI_u64 ", found %" PRIu32,
- size_bytes, align_bytes));
- return ira->codegen->builtin_types.entry_invalid;
- }
-
- IrInstruction *casted_cmp_value = ir_implicit_cast(ira, cmp_value, child_type);
+ IrInstruction *casted_cmp_value = ir_implicit_cast(ira, cmp_value, operand_type);
if (type_is_invalid(casted_cmp_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
- IrInstruction *casted_new_value = ir_implicit_cast(ira, new_value, child_type);
+ IrInstruction *casted_new_value = ir_implicit_cast(ira, new_value, operand_type);
if (type_is_invalid(casted_new_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
@@ -15733,9 +17975,17 @@ static TypeTableEntry *ir_analyze_instruction_cmpxchg(IrAnalyze *ira, IrInstruct
return ira->codegen->builtin_types.entry_invalid;
}
- ir_build_cmpxchg_from(&ira->new_irb, &instruction->base, ptr, casted_cmp_value, casted_new_value,
- success_order_value, failure_order_value, success_order, failure_order);
- return ira->codegen->builtin_types.entry_bool;
+ if (instr_is_comptime(casted_ptr) && instr_is_comptime(casted_cmp_value) && instr_is_comptime(casted_new_value)) {
+ zig_panic("TODO compile-time execution of cmpxchg");
+ }
+
+ IrInstruction *result = ir_build_cmpxchg(&ira->new_irb, instruction->base.scope, instruction->base.source_node,
+ nullptr, casted_ptr, casted_cmp_value, casted_new_value, nullptr, nullptr, instruction->is_weak,
+ operand_type, success_order, failure_order);
+ result->value.type = get_optional_type(ira->codegen, operand_type);
+ ir_link_new_instruction(result, &instruction->base);
+ ir_add_alloca(ira, result, result->value.type);
+ return result->value.type;
}
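With this rewrite the compare-and-swap builtins take the operand type explicitly and produce an optional: null when the swap happened, the previously stored value when it did not. A usage sketch (illustrative only; the builtin name and AtomicOrder import reflect the std/builtin of this era):

    const assert = @import("std").debug.assert;
    const AtomicOrder = @import("builtin").AtomicOrder;

    test "cmpxchgStrong yields ?T" {
        var x: i32 = 1234;
        const ok = @cmpxchgStrong(i32, &x, 1234, 5678, AtomicOrder.SeqCst, AtomicOrder.SeqCst);
        assert(ok == null);     // swap succeeded
        const fail = @cmpxchgStrong(i32, &x, 1234, 9999, AtomicOrder.SeqCst, AtomicOrder.SeqCst);
        assert(fail.? == 5678); // swap failed; the current value is returned
    }
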
static TypeTableEntry *ir_analyze_instruction_fence(IrAnalyze *ira, IrInstructionFence *instruction) {
@@ -15758,7 +18008,7 @@ static TypeTableEntry *ir_analyze_instruction_truncate(IrAnalyze *ira, IrInstruc
return ira->codegen->builtin_types.entry_invalid;
if (dest_type->id != TypeTableEntryIdInt &&
- dest_type->id != TypeTableEntryIdNumLitInt)
+ dest_type->id != TypeTableEntryIdComptimeInt)
{
ir_add_error(ira, dest_type_value, buf_sprintf("expected integer type, found '%s'", buf_ptr(&dest_type->name)));
return ira->codegen->builtin_types.entry_invalid;
@@ -15770,7 +18020,7 @@ static TypeTableEntry *ir_analyze_instruction_truncate(IrAnalyze *ira, IrInstruc
return ira->codegen->builtin_types.entry_invalid;
if (src_type->id != TypeTableEntryIdInt &&
- src_type->id != TypeTableEntryIdNumLitInt)
+ src_type->id != TypeTableEntryIdComptimeInt)
{
ir_add_error(ira, target, buf_sprintf("expected integer type, found '%s'", buf_ptr(&src_type->name)));
return ira->codegen->builtin_types.entry_invalid;
@@ -15793,10 +18043,326 @@ static TypeTableEntry *ir_analyze_instruction_truncate(IrAnalyze *ira, IrInstruc
return dest_type;
}
- ir_build_truncate_from(&ira->new_irb, &instruction->base, dest_type_value, target);
+ IrInstruction *new_instruction = ir_build_truncate(&ira->new_irb, instruction->base.scope,
+ instruction->base.source_node, dest_type_value, target);
+ ir_link_new_instruction(new_instruction, &instruction->base);
return dest_type;
}
+static TypeTableEntry *ir_analyze_instruction_int_cast(IrAnalyze *ira, IrInstructionIntCast *instruction) {
+ TypeTableEntry *dest_type = ir_resolve_type(ira, instruction->dest_type->other);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (dest_type->id != TypeTableEntryIdInt) {
+ ir_add_error(ira, instruction->dest_type, buf_sprintf("expected integer type, found '%s'", buf_ptr(&dest_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (target->value.type->id == TypeTableEntryIdComptimeInt) {
+ if (ir_num_lit_fits_in_other_type(ira, target, dest_type, true)) {
+ IrInstruction *result = ir_resolve_cast(ira, &instruction->base, target, dest_type,
+ CastOpNumLitToConcrete, false);
+ if (type_is_invalid(result->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_type;
+ } else {
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ }
+
+ if (target->value.type->id != TypeTableEntryIdInt) {
+ ir_add_error(ira, instruction->target, buf_sprintf("expected integer type, found '%s'",
+ buf_ptr(&target->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *result = ir_analyze_widen_or_shorten(ira, &instruction->base, target, dest_type);
+ if (type_is_invalid(result->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_type;
+}
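A short sketch of what the @intCast analysis above accepts: integer-to-integer conversion, with comptime_int literals checked for fit (names are illustrative):

    test "intCast between integer types" {
        var wide: u32 = 300;
        const low = @intCast(u8, wide & 0xff); // runtime widen-or-shorten path
        const lit = @intCast(u8, 250);         // comptime_int, verified to fit in u8
        // @intCast(f32, wide) would be a compile error: the destination must be an integer type.
    }
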
+
+static TypeTableEntry *ir_analyze_instruction_float_cast(IrAnalyze *ira, IrInstructionFloatCast *instruction) {
+ TypeTableEntry *dest_type = ir_resolve_type(ira, instruction->dest_type->other);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (dest_type->id != TypeTableEntryIdFloat) {
+ ir_add_error(ira, instruction->dest_type,
+ buf_sprintf("expected float type, found '%s'", buf_ptr(&dest_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (target->value.type->id == TypeTableEntryIdComptimeInt ||
+ target->value.type->id == TypeTableEntryIdComptimeFloat)
+ {
+ if (ir_num_lit_fits_in_other_type(ira, target, dest_type, true)) {
+ CastOp op;
+ if (target->value.type->id == TypeTableEntryIdComptimeInt) {
+ op = CastOpIntToFloat;
+ } else {
+ op = CastOpNumLitToConcrete;
+ }
+ IrInstruction *result = ir_resolve_cast(ira, &instruction->base, target, dest_type, op, false);
+ if (type_is_invalid(result->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_type;
+ } else {
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ }
+
+ if (target->value.type->id != TypeTableEntryIdFloat) {
+ ir_add_error(ira, instruction->target, buf_sprintf("expected float type, found '%s'",
+ buf_ptr(&target->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *result = ir_analyze_widen_or_shorten(ira, &instruction->base, target, dest_type);
+ if (type_is_invalid(result->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_type;
+}
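The corresponding @floatCast sketch: float-to-float conversion, with comptime literals folded (illustrative):

    test "floatCast between float types" {
        var wide: f64 = 3.5;
        const narrow = @floatCast(f32, wide); // runtime f64 -> f32
        const lit = @floatCast(f32, 1.25);    // comptime_float resolved to f32
    }
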
+
+static TypeTableEntry *ir_analyze_instruction_err_set_cast(IrAnalyze *ira, IrInstructionErrSetCast *instruction) {
+ TypeTableEntry *dest_type = ir_resolve_type(ira, instruction->dest_type->other);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (dest_type->id != TypeTableEntryIdErrorSet) {
+ ir_add_error(ira, instruction->dest_type,
+ buf_sprintf("expected error set type, found '%s'", buf_ptr(&dest_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (target->value.type->id != TypeTableEntryIdErrorSet) {
+ ir_add_error(ira, instruction->target,
+ buf_sprintf("expected error set type, found '%s'", buf_ptr(&target->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *result = ir_analyze_err_set_cast(ira, &instruction->base, target, dest_type);
+ if (type_is_invalid(result->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_type;
+}
+
+static TypeTableEntry *ir_analyze_instruction_from_bytes(IrAnalyze *ira, IrInstructionFromBytes *instruction) {
+ TypeTableEntry *dest_child_type = ir_resolve_type(ira, instruction->dest_child_type->other);
+ if (type_is_invalid(dest_child_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ bool src_ptr_const;
+ bool src_ptr_volatile;
+ uint32_t src_ptr_align;
+ if (target->value.type->id == TypeTableEntryIdPointer) {
+ src_ptr_const = target->value.type->data.pointer.is_const;
+ src_ptr_volatile = target->value.type->data.pointer.is_volatile;
+ src_ptr_align = target->value.type->data.pointer.alignment;
+ } else if (is_slice(target->value.type)) {
+ TypeTableEntry *src_ptr_type = target->value.type->data.structure.fields[slice_ptr_index].type_entry;
+ src_ptr_const = src_ptr_type->data.pointer.is_const;
+ src_ptr_volatile = src_ptr_type->data.pointer.is_volatile;
+ src_ptr_align = src_ptr_type->data.pointer.alignment;
+ } else {
+ src_ptr_const = true;
+ src_ptr_volatile = false;
+ src_ptr_align = get_abi_alignment(ira->codegen, target->value.type);
+ }
+
+ TypeTableEntry *dest_ptr_type = get_pointer_to_type_extra(ira->codegen, dest_child_type,
+ src_ptr_const, src_ptr_volatile, PtrLenUnknown,
+ src_ptr_align, 0, 0);
+ TypeTableEntry *dest_slice_type = get_slice_type(ira->codegen, dest_ptr_type);
+
+ TypeTableEntry *u8_ptr = get_pointer_to_type_extra(ira->codegen, ira->codegen->builtin_types.entry_u8,
+ src_ptr_const, src_ptr_volatile, PtrLenUnknown,
+ src_ptr_align, 0, 0);
+ TypeTableEntry *u8_slice = get_slice_type(ira->codegen, u8_ptr);
+
+ IrInstruction *casted_value = ir_implicit_cast(ira, target, u8_slice);
+ if (type_is_invalid(casted_value->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ bool have_known_len = false;
+ uint64_t known_len;
+
+ if (instr_is_comptime(casted_value)) {
+ ConstExprValue *val = ir_resolve_const(ira, casted_value, UndefBad);
+ if (!val)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *len_val = &val->data.x_struct.fields[slice_len_index];
+ if (value_is_comptime(len_val)) {
+ known_len = bigint_as_unsigned(&len_val->data.x_bigint);
+ have_known_len = true;
+ }
+ }
+
+ if (casted_value->value.data.rh_slice.id == RuntimeHintSliceIdLen) {
+ known_len = casted_value->value.data.rh_slice.len;
+ have_known_len = true;
+ }
+
+ if (have_known_len) {
+ uint64_t child_type_size = type_size(ira->codegen, dest_child_type);
+ uint64_t remainder = known_len % child_type_size;
+ if (remainder != 0) {
+ ErrorMsg *msg = ir_add_error(ira, &instruction->base,
+ buf_sprintf("unable to convert [%" ZIG_PRI_u64 "]u8 to %s: size mismatch",
+ known_len, buf_ptr(&dest_slice_type->name)));
+ add_error_note(ira->codegen, msg, instruction->dest_child_type->source_node,
+ buf_sprintf("%s has size %" ZIG_PRI_u64 "; remaining bytes: %" ZIG_PRI_u64,
+ buf_ptr(&dest_child_type->name), child_type_size, remainder));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ }
+
+ IrInstruction *result = ir_resolve_cast(ira, &instruction->base, casted_value, dest_slice_type, CastOpResizeSlice, true);
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_slice_type;
+}
+
+static TypeTableEntry *ir_analyze_instruction_to_bytes(IrAnalyze *ira, IrInstructionToBytes *instruction) {
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (!is_slice(target->value.type)) {
+ ir_add_error(ira, instruction->target,
+ buf_sprintf("expected slice, found '%s'", buf_ptr(&target->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ TypeTableEntry *src_ptr_type = target->value.type->data.structure.fields[slice_ptr_index].type_entry;
+
+ TypeTableEntry *dest_ptr_type = get_pointer_to_type_extra(ira->codegen, ira->codegen->builtin_types.entry_u8,
+ src_ptr_type->data.pointer.is_const, src_ptr_type->data.pointer.is_volatile, PtrLenUnknown,
+ src_ptr_type->data.pointer.alignment, 0, 0);
+ TypeTableEntry *dest_slice_type = get_slice_type(ira->codegen, dest_ptr_type);
+
+ IrInstruction *result = ir_resolve_cast(ira, &instruction->base, target, dest_slice_type, CastOpResizeSlice, true);
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_slice_type;
+}
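These two analyses implement @bytesToSlice and @sliceToBytes; the first rejects a comptime-known byte length that is not a multiple of the element size. Illustrative sketch (old array-literal syntax of this era):

    test "bytesToSlice and sliceToBytes" {
        var bytes = []u8{ 0x12, 0x34, 0x56, 0x78 };
        const halves = @bytesToSlice(u16, bytes[0..]); // 4 bytes -> 2 x u16; source alignment is carried over
        const raw = @sliceToBytes(halves);             // back to []u8
        // @bytesToSlice(u16, bytes[0..3]) would fail: 3 is not a multiple of @sizeOf(u16).
    }
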
+
+static TypeTableEntry *ir_analyze_instruction_int_to_float(IrAnalyze *ira, IrInstructionIntToFloat *instruction) {
+ TypeTableEntry *dest_type = ir_resolve_type(ira, instruction->dest_type->other);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (target->value.type->id != TypeTableEntryIdInt && target->value.type->id != TypeTableEntryIdComptimeInt) {
+ ir_add_error(ira, instruction->target, buf_sprintf("expected int type, found '%s'",
+ buf_ptr(&target->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *result = ir_resolve_cast(ira, &instruction->base, target, dest_type, CastOpIntToFloat, false);
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_type;
+}
+
+static TypeTableEntry *ir_analyze_instruction_float_to_int(IrAnalyze *ira, IrInstructionFloatToInt *instruction) {
+ TypeTableEntry *dest_type = ir_resolve_type(ira, instruction->dest_type->other);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *result = ir_resolve_cast(ira, &instruction->base, target, dest_type, CastOpFloatToInt, false);
+ ir_link_new_instruction(result, &instruction->base);
+ return dest_type;
+}
+
+static TypeTableEntry *ir_analyze_instruction_err_to_int(IrAnalyze *ira, IrInstructionErrToInt *instruction) {
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *casted_target;
+ if (target->value.type->id == TypeTableEntryIdErrorSet) {
+ casted_target = target;
+ } else {
+ casted_target = ir_implicit_cast(ira, target, ira->codegen->builtin_types.entry_global_error_set);
+ if (type_is_invalid(casted_target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *result = ir_analyze_err_to_int(ira, &instruction->base, casted_target, ira->codegen->err_tag_type);
+ ir_link_new_instruction(result, &instruction->base);
+ return result->value.type;
+}
+
+static TypeTableEntry *ir_analyze_instruction_int_to_err(IrAnalyze *ira, IrInstructionIntToErr *instruction) {
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *casted_target = ir_implicit_cast(ira, target, ira->codegen->err_tag_type);
+ if (type_is_invalid(casted_target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *result = ir_analyze_int_to_err(ira, &instruction->base, casted_target, ira->codegen->builtin_types.entry_global_error_set);
+ ir_link_new_instruction(result, &instruction->base);
+ return result->value.type;
+}
+
+static TypeTableEntry *ir_analyze_instruction_bool_to_int(IrAnalyze *ira, IrInstructionBoolToInt *instruction) {
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (target->value.type->id != TypeTableEntryIdBool) {
+ ir_add_error(ira, instruction->target, buf_sprintf("expected bool, found '%s'",
+ buf_ptr(&target->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ if (instr_is_comptime(target)) {
+ bool is_true;
+ if (!ir_resolve_bool(ira, target, &is_true))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+ bigint_init_unsigned(&out_val->data.x_bigint, is_true ? 1 : 0);
+ return ira->codegen->builtin_types.entry_num_lit_int;
+ }
+
+ TypeTableEntry *u1_type = get_int_type(ira->codegen, false, 1);
+ IrInstruction *result = ir_resolve_cast(ira, &instruction->base, target, u1_type, CastOpBoolToInt, false);
+ ir_link_new_instruction(result, &instruction->base);
+ return u1_type;
+}
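Per the branches above, @boolToInt folds a comptime-known bool to a comptime_int and otherwise produces a u1. Sketch (illustrative):

    const assert = @import("std").debug.assert;

    test "boolToInt" {
        const one = @boolToInt(true);  // comptime_int 1
        var b: bool = false;
        const bit: u1 = @boolToInt(b); // runtime u1
        assert(one == 1 and bit == 0);
    }
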
+
static TypeTableEntry *ir_analyze_instruction_int_type(IrAnalyze *ira, IrInstructionIntType *instruction) {
IrInstruction *is_signed_value = instruction->is_signed->other;
bool is_signed;
@@ -15805,7 +18371,7 @@ static TypeTableEntry *ir_analyze_instruction_int_type(IrAnalyze *ira, IrInstruc
IrInstruction *bit_count_value = instruction->bit_count->other;
uint64_t bit_count;
- if (!ir_resolve_usize(ira, bit_count_value, &bit_count))
+ if (!ir_resolve_unsigned(ira, bit_count_value, ira->codegen->builtin_types.entry_u32, &bit_count))
return ira->codegen->builtin_types.entry_invalid;
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
@@ -15824,9 +18390,13 @@ static TypeTableEntry *ir_analyze_instruction_bool_not(IrAnalyze *ira, IrInstruc
if (type_is_invalid(casted_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
- if (casted_value->value.special != ConstValSpecialRuntime) {
+ if (instr_is_comptime(casted_value)) {
+ ConstExprValue *value = ir_resolve_const(ira, casted_value, UndefBad);
+ if (value == nullptr)
+ return ira->codegen->builtin_types.entry_invalid;
+
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
- out_val->data.x_bool = !casted_value->value.data.x_bool;
+ out_val->data.x_bool = !value->data.x_bool;
return bool_type;
}
@@ -15855,7 +18425,8 @@ static TypeTableEntry *ir_analyze_instruction_memset(IrAnalyze *ira, IrInstructi
TypeTableEntry *u8 = ira->codegen->builtin_types.entry_u8;
uint32_t dest_align = (dest_uncasted_type->id == TypeTableEntryIdPointer) ?
dest_uncasted_type->data.pointer.alignment : get_abi_alignment(ira->codegen, u8);
- TypeTableEntry *u8_ptr = get_pointer_to_type_extra(ira->codegen, u8, false, dest_is_volatile, dest_align, 0, 0);
+ TypeTableEntry *u8_ptr = get_pointer_to_type_extra(ira->codegen, u8, false, dest_is_volatile,
+ PtrLenUnknown, dest_align, 0, 0);
IrInstruction *casted_dest_ptr = ir_implicit_cast(ira, dest_ptr, u8_ptr);
if (type_is_invalid(casted_dest_ptr->value.type))
@@ -15951,8 +18522,10 @@ static TypeTableEntry *ir_analyze_instruction_memcpy(IrAnalyze *ira, IrInstructi
src_uncasted_type->data.pointer.alignment : get_abi_alignment(ira->codegen, u8);
TypeTableEntry *usize = ira->codegen->builtin_types.entry_usize;
- TypeTableEntry *u8_ptr_mut = get_pointer_to_type_extra(ira->codegen, u8, false, dest_is_volatile, dest_align, 0, 0);
- TypeTableEntry *u8_ptr_const = get_pointer_to_type_extra(ira->codegen, u8, true, src_is_volatile, src_align, 0, 0);
+ TypeTableEntry *u8_ptr_mut = get_pointer_to_type_extra(ira->codegen, u8, false, dest_is_volatile,
+ PtrLenUnknown, dest_align, 0, 0);
+ TypeTableEntry *u8_ptr_const = get_pointer_to_type_extra(ira->codegen, u8, true, src_is_volatile,
+ PtrLenUnknown, src_align, 0, 0);
IrInstruction *casted_dest_ptr = ir_implicit_cast(ira, dest_ptr, u8_ptr_mut);
if (type_is_invalid(casted_dest_ptr->value.type))
@@ -16095,18 +18668,38 @@ static TypeTableEntry *ir_analyze_instruction_slice(IrAnalyze *ira, IrInstructio
if (array_type->data.array.len == 0 && byte_alignment == 0) {
byte_alignment = get_abi_alignment(ira->codegen, array_type->data.array.child_type);
}
+ bool is_comptime_const = ptr_ptr->value.special == ConstValSpecialStatic &&
+ ptr_ptr->value.data.x_ptr.mut == ConstPtrMutComptimeConst;
TypeTableEntry *slice_ptr_type = get_pointer_to_type_extra(ira->codegen, array_type->data.array.child_type,
- ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
+ ptr_type->data.pointer.is_const || is_comptime_const,
+ ptr_type->data.pointer.is_volatile,
+ PtrLenUnknown,
byte_alignment, 0, 0);
return_type = get_slice_type(ira->codegen, slice_ptr_type);
} else if (array_type->id == TypeTableEntryIdPointer) {
- TypeTableEntry *slice_ptr_type = get_pointer_to_type_extra(ira->codegen, array_type->data.pointer.child_type,
- array_type->data.pointer.is_const, array_type->data.pointer.is_volatile,
- array_type->data.pointer.alignment, 0, 0);
- return_type = get_slice_type(ira->codegen, slice_ptr_type);
- if (!end) {
- ir_add_error(ira, &instruction->base, buf_sprintf("slice of pointer must include end value"));
- return ira->codegen->builtin_types.entry_invalid;
+ if (array_type->data.pointer.ptr_len == PtrLenSingle) {
+ TypeTableEntry *main_type = array_type->data.pointer.child_type;
+ if (main_type->id == TypeTableEntryIdArray) {
+ TypeTableEntry *slice_ptr_type = get_pointer_to_type_extra(ira->codegen,
+ main_type->data.array.child_type,
+ array_type->data.pointer.is_const, array_type->data.pointer.is_volatile,
+ PtrLenUnknown,
+ array_type->data.pointer.alignment, 0, 0);
+ return_type = get_slice_type(ira->codegen, slice_ptr_type);
+ } else {
+ ir_add_error(ira, &instruction->base, buf_sprintf("slice of single-item pointer"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+ } else {
+ TypeTableEntry *slice_ptr_type = get_pointer_to_type_extra(ira->codegen, array_type->data.pointer.child_type,
+ array_type->data.pointer.is_const, array_type->data.pointer.is_volatile,
+ PtrLenUnknown,
+ array_type->data.pointer.alignment, 0, 0);
+ return_type = get_slice_type(ira->codegen, slice_ptr_type);
+ if (!end) {
+ ir_add_error(ira, &instruction->base, buf_sprintf("slice of pointer must include end value"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
}
} else if (is_slice(array_type)) {
TypeTableEntry *ptr_type = array_type->data.structure.fields[slice_ptr_index].type_entry;
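The new PtrLenSingle branch above permits slicing a single-item pointer when it points to an array, while an unknown-length pointer still needs an explicit end index. A sketch under those assumptions (function names are made up; [*]T is the era's unknown-length pointer syntax):

    fn firstTwo(p: *[4]u8) []u8 {
        return p[0..2]; // now allowed: slicing a single-item pointer to an array
    }

    fn afterFirst(p: [*]u8, len: usize) []u8 {
        return p[1..len]; // unknown-length pointer: the end value is mandatory
    }
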
@@ -16126,12 +18719,24 @@ static TypeTableEntry *ir_analyze_instruction_slice(IrAnalyze *ira, IrInstructio
size_t abs_offset;
size_t rel_end;
bool ptr_is_undef = false;
- if (array_type->id == TypeTableEntryIdArray) {
- array_val = const_ptr_pointee(ira->codegen, &ptr_ptr->value);
- abs_offset = 0;
- rel_end = array_type->data.array.len;
- parent_ptr = nullptr;
+ if (array_type->id == TypeTableEntryIdArray ||
+ (array_type->id == TypeTableEntryIdPointer && array_type->data.pointer.ptr_len == PtrLenSingle))
+ {
+ if (array_type->id == TypeTableEntryIdPointer) {
+ TypeTableEntry *child_array_type = array_type->data.pointer.child_type;
+ assert(child_array_type->id == TypeTableEntryIdArray);
+ parent_ptr = const_ptr_pointee(ira->codegen, &ptr_ptr->value);
+ array_val = const_ptr_pointee(ira->codegen, parent_ptr);
+ rel_end = child_array_type->data.array.len;
+ abs_offset = 0;
+ } else {
+ array_val = const_ptr_pointee(ira->codegen, &ptr_ptr->value);
+ rel_end = array_type->data.array.len;
+ parent_ptr = nullptr;
+ abs_offset = 0;
+ }
} else if (array_type->id == TypeTableEntryIdPointer) {
+ assert(array_type->data.pointer.ptr_len == PtrLenUnknown);
parent_ptr = const_ptr_pointee(ira->codegen, &ptr_ptr->value);
if (parent_ptr->special == ConstValSpecialUndef) {
array_val = nullptr;
@@ -16230,7 +18835,7 @@ static TypeTableEntry *ir_analyze_instruction_slice(IrAnalyze *ira, IrInstructio
if (array_val) {
size_t index = abs_offset + start_scalar;
bool is_const = slice_is_const(return_type);
- init_const_ptr_array(ira->codegen, ptr_val, array_val, index, is_const);
+ init_const_ptr_array(ira->codegen, ptr_val, array_val, index, is_const, PtrLenUnknown);
if (array_type->id == TypeTableEntryIdArray) {
ptr_val->data.x_ptr.mut = ptr_ptr->value.data.x_ptr.mut;
} else if (is_slice(array_type)) {
@@ -16283,6 +18888,10 @@ static TypeTableEntry *ir_analyze_instruction_member_count(IrAnalyze *ira, IrIns
return ira->codegen->builtin_types.entry_invalid;
TypeTableEntry *container_type = ir_resolve_type(ira, container);
+ ensure_complete_type(ira->codegen, container_type);
+ if (type_is_invalid(container_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
uint64_t result;
if (type_is_invalid(container_type)) {
return ira->codegen->builtin_types.entry_invalid;
@@ -16293,7 +18902,7 @@ static TypeTableEntry *ir_analyze_instruction_member_count(IrAnalyze *ira, IrIns
} else if (container_type->id == TypeTableEntryIdUnion) {
result = container_type->data.unionation.src_field_count;
} else if (container_type->id == TypeTableEntryIdErrorSet) {
- if (!resolve_inferred_error_set(ira, container_type, instruction->base.source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, container_type, instruction->base.source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
if (type_is_global_error_set(container_type)) {
@@ -16317,6 +18926,11 @@ static TypeTableEntry *ir_analyze_instruction_member_type(IrAnalyze *ira, IrInst
if (type_is_invalid(container_type))
return ira->codegen->builtin_types.entry_invalid;
+ ensure_complete_type(ira->codegen, container_type);
+ if (type_is_invalid(container_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
uint64_t member_index;
IrInstruction *index_value = instruction->member_index->other;
if (!ir_resolve_usize(ira, index_value, &member_index))
@@ -16359,6 +18973,10 @@ static TypeTableEntry *ir_analyze_instruction_member_name(IrAnalyze *ira, IrInst
if (type_is_invalid(container_type))
return ira->codegen->builtin_types.entry_invalid;
+ ensure_complete_type(ira->codegen, container_type);
+ if (type_is_invalid(container_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
uint64_t member_index;
IrInstruction *index_value = instruction->member_index->other;
if (!ir_resolve_usize(ira, index_value, &member_index))
@@ -16428,6 +19046,14 @@ static TypeTableEntry *ir_analyze_instruction_frame_address(IrAnalyze *ira, IrIn
return u8_ptr_const;
}
+static TypeTableEntry *ir_analyze_instruction_handle(IrAnalyze *ira, IrInstructionHandle *instruction) {
+ ir_build_handle_from(&ira->new_irb, &instruction->base);
+
+ FnTableEntry *fn_entry = exec_fn_entry(ira->new_irb.exec);
+ assert(fn_entry != nullptr);
+ return get_promise_type(ira->codegen, fn_entry->type_entry->data.fn.fn_type_id.return_type);
+}
+
static TypeTableEntry *ir_analyze_instruction_align_of(IrAnalyze *ira, IrInstructionAlignOf *instruction) {
IrInstruction *type_value = instruction->type_value->other;
if (type_is_invalid(type_value->value.type))
@@ -16443,10 +19069,10 @@ static TypeTableEntry *ir_analyze_instruction_align_of(IrAnalyze *ira, IrInstruc
zig_unreachable();
case TypeTableEntryIdMetaType:
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
@@ -16463,7 +19089,7 @@ static TypeTableEntry *ir_analyze_instruction_align_of(IrAnalyze *ira, IrInstruc
case TypeTableEntryIdPromise:
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdOptional:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdErrorSet:
case TypeTableEntryIdEnum:
@@ -16525,6 +19151,7 @@ static TypeTableEntry *ir_analyze_instruction_overflow_op(IrAnalyze *ira, IrInst
if (result_ptr->value.type->id == TypeTableEntryIdPointer) {
expected_ptr_type = get_pointer_to_type_extra(ira->codegen, dest_type,
false, result_ptr->value.type->data.pointer.is_volatile,
+ PtrLenSingle,
result_ptr->value.type->data.pointer.alignment, 0, 0);
} else {
expected_ptr_type = get_pointer_to_type(ira->codegen, dest_type, false);
@@ -16597,7 +19224,7 @@ static TypeTableEntry *ir_analyze_instruction_test_err(IrAnalyze *ira, IrInstruc
}
TypeTableEntry *err_set_type = type_entry->data.error_union.err_set_type;
- if (!resolve_inferred_error_set(ira, err_set_type, instruction->base.source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, err_set_type, instruction->base.source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
if (!type_is_global_error_set(err_set_type) &&
@@ -16678,8 +19305,12 @@ static TypeTableEntry *ir_analyze_instruction_unwrap_err_payload(IrAnalyze *ira,
return ira->codegen->builtin_types.entry_invalid;
} else if (type_entry->id == TypeTableEntryIdErrorUnion) {
TypeTableEntry *payload_type = type_entry->data.error_union.payload_type;
+ if (type_is_invalid(payload_type)) {
+ return ira->codegen->builtin_types.entry_invalid;
+ }
TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, payload_type,
ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile,
+ PtrLenSingle,
get_abi_alignment(ira->codegen, payload_type), 0, 0);
if (instr_is_comptime(value)) {
ConstExprValue *ptr_val = ir_resolve_const(ira, value, UndefBad);
@@ -16769,6 +19400,11 @@ static TypeTableEntry *ir_analyze_instruction_fn_proto(IrAnalyze *ira, IrInstruc
fn_type_id.return_type = ir_resolve_type(ira, return_type_value);
if (type_is_invalid(fn_type_id.return_type))
return ira->codegen->builtin_types.entry_invalid;
+ if (fn_type_id.return_type->id == TypeTableEntryIdOpaque) {
+ ir_add_error(ira, instruction->return_type,
+ buf_sprintf("return type cannot be opaque"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
if (fn_type_id.cc == CallingConventionAsync) {
if (instruction->async_allocator_type_value == nullptr) {
@@ -16820,7 +19456,11 @@ static TypeTableEntry *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira
if (type_is_invalid(end_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
- assert(start_value->value.type->id == TypeTableEntryIdEnum);
+ if (start_value->value.type->id != TypeTableEntryIdEnum) {
+ ir_add_error(ira, range->start, buf_sprintf("not an enum type"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
BigInt start_index;
bigint_init_bigint(&start_index, &start_value->value.data.x_enum_tag);
@@ -16861,7 +19501,7 @@ static TypeTableEntry *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira
}
}
} else if (switch_type->id == TypeTableEntryIdErrorSet) {
- if (!resolve_inferred_error_set(ira, switch_type, target_value->source_node)) {
+ if (!resolve_inferred_error_set(ira->codegen, switch_type, target_value->source_node)) {
return ira->codegen->builtin_types.entry_invalid;
}
@@ -16925,21 +19565,27 @@ static TypeTableEntry *ir_analyze_instruction_check_switch_prongs(IrAnalyze *ira
IrInstruction *start_value = range->start->other;
if (type_is_invalid(start_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
+ IrInstruction *casted_start_value = ir_implicit_cast(ira, start_value, switch_type);
+ if (type_is_invalid(casted_start_value->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
IrInstruction *end_value = range->end->other;
if (type_is_invalid(end_value->value.type))
return ira->codegen->builtin_types.entry_invalid;
+ IrInstruction *casted_end_value = ir_implicit_cast(ira, end_value, switch_type);
+ if (type_is_invalid(casted_end_value->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
- ConstExprValue *start_val = ir_resolve_const(ira, start_value, UndefBad);
+ ConstExprValue *start_val = ir_resolve_const(ira, casted_start_value, UndefBad);
if (!start_val)
return ira->codegen->builtin_types.entry_invalid;
- ConstExprValue *end_val = ir_resolve_const(ira, end_value, UndefBad);
+ ConstExprValue *end_val = ir_resolve_const(ira, casted_end_value, UndefBad);
if (!end_val)
return ira->codegen->builtin_types.entry_invalid;
- assert(start_val->type->id == TypeTableEntryIdInt || start_val->type->id == TypeTableEntryIdNumLitInt);
- assert(end_val->type->id == TypeTableEntryIdInt || end_val->type->id == TypeTableEntryIdNumLitInt);
+ assert(start_val->type->id == TypeTableEntryIdInt || start_val->type->id == TypeTableEntryIdComptimeInt);
+ assert(end_val->type->id == TypeTableEntryIdInt || end_val->type->id == TypeTableEntryIdComptimeInt);
AstNode *prev_node = rangeset_add_range(&rs, &start_val->data.x_bigint, &end_val->data.x_bigint,
start_value->source_node);
if (prev_node != nullptr) {
@@ -16983,30 +19629,6 @@ static TypeTableEntry *ir_analyze_instruction_check_statement_is_void(IrAnalyze
return ira->codegen->builtin_types.entry_void;
}
-static TypeTableEntry *ir_analyze_instruction_can_implicit_cast(IrAnalyze *ira,
- IrInstructionCanImplicitCast *instruction)
-{
- IrInstruction *type_value = instruction->type_value->other;
- TypeTableEntry *type_entry = ir_resolve_type(ira, type_value);
- if (type_is_invalid(type_entry))
- return ira->codegen->builtin_types.entry_invalid;
-
- IrInstruction *target_value = instruction->target_value->other;
- if (type_is_invalid(target_value->value.type))
- return ira->codegen->builtin_types.entry_invalid;
-
- ImplicitCastMatchResult result = ir_types_match_with_implicit_cast(ira, type_entry, target_value->value.type,
- target_value);
-
- if (result == ImplicitCastMatchResultReportedError) {
- zig_panic("TODO refactor implicit cast tester to return bool without reporting errors");
- }
-
- ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
- out_val->data.x_bool = (result == ImplicitCastMatchResultYes);
- return ira->codegen->builtin_types.entry_bool;
-}
-
static TypeTableEntry *ir_analyze_instruction_panic(IrAnalyze *ira, IrInstructionPanic *instruction) {
IrInstruction *msg = instruction->msg->other;
if (type_is_invalid(msg->value.type))
@@ -17017,7 +19639,8 @@ static TypeTableEntry *ir_analyze_instruction_panic(IrAnalyze *ira, IrInstructio
return ir_unreach_error(ira);
}
- TypeTableEntry *u8_ptr_type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, true);
+ TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(ira->codegen, ira->codegen->builtin_types.entry_u8,
+ true, false, PtrLenUnknown, get_abi_alignment(ira->codegen, ira->codegen->builtin_types.entry_u8), 0, 0);
TypeTableEntry *str_type = get_slice_type(ira->codegen, u8_ptr_type);
IrInstruction *casted_msg = ir_implicit_cast(ira, msg, str_type);
if (type_is_invalid(casted_msg->value.type))
@@ -17044,22 +19667,22 @@ static IrInstruction *ir_align_cast(IrAnalyze *ira, IrInstruction *target, uint3
old_align_bytes = fn_type_id.alignment;
fn_type_id.alignment = align_bytes;
result_type = get_fn_type(ira->codegen, &fn_type_id);
- } else if (target_type->id == TypeTableEntryIdMaybe &&
+ } else if (target_type->id == TypeTableEntryIdOptional &&
target_type->data.maybe.child_type->id == TypeTableEntryIdPointer)
{
TypeTableEntry *ptr_type = target_type->data.maybe.child_type;
old_align_bytes = ptr_type->data.pointer.alignment;
TypeTableEntry *better_ptr_type = adjust_ptr_align(ira->codegen, ptr_type, align_bytes);
- result_type = get_maybe_type(ira->codegen, better_ptr_type);
- } else if (target_type->id == TypeTableEntryIdMaybe &&
+ result_type = get_optional_type(ira->codegen, better_ptr_type);
+ } else if (target_type->id == TypeTableEntryIdOptional &&
target_type->data.maybe.child_type->id == TypeTableEntryIdFn)
{
FnTypeId fn_type_id = target_type->data.maybe.child_type->data.fn.fn_type_id;
old_align_bytes = fn_type_id.alignment;
fn_type_id.alignment = align_bytes;
TypeTableEntry *fn_type = get_fn_type(ira->codegen, &fn_type_id);
- result_type = get_maybe_type(ira->codegen, fn_type);
+ result_type = get_optional_type(ira->codegen, fn_type);
} else if (is_slice(target_type)) {
TypeTableEntry *slice_ptr_type = target_type->data.structure.fields[slice_ptr_index].type_entry;
old_align_bytes = slice_ptr_type->data.pointer.alignment;
@@ -17076,6 +19699,15 @@ static IrInstruction *ir_align_cast(IrAnalyze *ira, IrInstruction *target, uint3
if (!val)
return ira->codegen->invalid_instruction;
+ if (val->data.x_ptr.special == ConstPtrSpecialHardCodedAddr &&
+ val->data.x_ptr.data.hard_coded_addr.addr % align_bytes != 0)
+ {
+ ir_add_error(ira, target,
+ buf_sprintf("pointer address 0x%" ZIG_PRI_x64 " is not aligned to %" PRIu32 " bytes",
+ val->data.x_ptr.data.hard_coded_addr.addr, align_bytes));
+ return ira->codegen->invalid_instruction;
+ }
+
IrInstruction *result = ir_create_const(&ira->new_irb, target->scope, target->source_node, result_type);
copy_const_val(&result->value, val, false);
result->value.type = result_type;
@@ -17170,10 +19802,10 @@ static void buf_write_value_bytes(CodeGen *codegen, uint8_t *buf, ConstExprValue
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdPromise:
zig_unreachable();
case TypeTableEntryIdVoid:
@@ -17210,7 +19842,7 @@ static void buf_write_value_bytes(CodeGen *codegen, uint8_t *buf, ConstExprValue
return;
case TypeTableEntryIdStruct:
zig_panic("TODO buf_write_value_bytes struct type");
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdOptional:
zig_panic("TODO buf_write_value_bytes maybe type");
case TypeTableEntryIdErrorUnion:
zig_panic("TODO buf_write_value_bytes error union");
@@ -17237,10 +19869,10 @@ static void buf_read_value_bytes(CodeGen *codegen, uint8_t *buf, ConstExprValue
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
case TypeTableEntryIdPromise:
zig_unreachable();
case TypeTableEntryIdVoid:
@@ -17268,7 +19900,7 @@ static void buf_read_value_bytes(CodeGen *codegen, uint8_t *buf, ConstExprValue
zig_panic("TODO buf_read_value_bytes array type");
case TypeTableEntryIdStruct:
zig_panic("TODO buf_read_value_bytes struct type");
- case TypeTableEntryIdMaybe:
+ case TypeTableEntryIdOptional:
zig_panic("TODO buf_read_value_bytes maybe type");
case TypeTableEntryIdErrorUnion:
zig_panic("TODO buf_read_value_bytes error union");
@@ -17296,7 +19928,12 @@ static TypeTableEntry *ir_analyze_instruction_bit_cast(IrAnalyze *ira, IrInstruc
return ira->codegen->builtin_types.entry_invalid;
ensure_complete_type(ira->codegen, dest_type);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
ensure_complete_type(ira->codegen, src_type);
+ if (type_is_invalid(src_type))
+ return ira->codegen->builtin_types.entry_invalid;
if (get_codegen_ptr_type(src_type) != nullptr) {
ir_add_error(ira, value,
@@ -17313,10 +19950,10 @@ static TypeTableEntry *ir_analyze_instruction_bit_cast(IrAnalyze *ira, IrInstruc
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
ir_add_error(ira, dest_type_value,
buf_sprintf("unable to @bitCast from type '%s'", buf_ptr(&src_type->name)));
return ira->codegen->builtin_types.entry_invalid;
@@ -17339,10 +19976,10 @@ static TypeTableEntry *ir_analyze_instruction_bit_cast(IrAnalyze *ira, IrInstruc
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdUnreachable:
- case TypeTableEntryIdNumLitFloat:
- case TypeTableEntryIdNumLitInt:
- case TypeTableEntryIdUndefLit:
- case TypeTableEntryIdNullLit:
+ case TypeTableEntryIdComptimeFloat:
+ case TypeTableEntryIdComptimeInt:
+ case TypeTableEntryIdUndefined:
+ case TypeTableEntryIdNull:
ir_add_error(ira, dest_type_value,
buf_sprintf("unable to @bitCast to type '%s'", buf_ptr(&dest_type->name)));
return ira->codegen->builtin_types.entry_invalid;
@@ -17429,7 +20066,7 @@ static TypeTableEntry *ir_analyze_instruction_decl_ref(IrAnalyze *ira,
Tld *tld = instruction->tld;
LVal lval = instruction->lval;
- resolve_top_level_decl(ira->codegen, tld, lval.is_ptr, instruction->base.source_node);
+ resolve_top_level_decl(ira->codegen, tld, lval == LValPtr, instruction->base.source_node);
if (tld->resolution == TldResolutionInvalid)
return ira->codegen->builtin_types.entry_invalid;
@@ -17442,8 +20079,7 @@ static TypeTableEntry *ir_analyze_instruction_decl_ref(IrAnalyze *ira,
TldVar *tld_var = (TldVar *)tld;
VariableTableEntry *var = tld_var->var;
- IrInstruction *var_ptr = ir_get_var_ptr(ira, &instruction->base, var,
- !lval.is_ptr || lval.is_const, lval.is_ptr && lval.is_volatile);
+ IrInstruction *var_ptr = ir_get_var_ptr(ira, &instruction->base, var);
if (type_is_invalid(var_ptr->value.type))
return ira->codegen->builtin_types.entry_invalid;
@@ -17451,7 +20087,7 @@ static TypeTableEntry *ir_analyze_instruction_decl_ref(IrAnalyze *ira,
add_link_lib_symbol(ira, tld_var->extern_lib_name, &var->name, instruction->base.source_node);
}
- if (lval.is_ptr) {
+ if (lval == LValPtr) {
ir_link_new_instruction(var_ptr, &instruction->base);
return var_ptr->value.type;
} else {
@@ -17472,7 +20108,7 @@ static TypeTableEntry *ir_analyze_instruction_decl_ref(IrAnalyze *ira,
IrInstruction *ref_instruction = ir_create_const_fn(&ira->new_irb, instruction->base.scope,
instruction->base.source_node, fn_entry);
- if (lval.is_ptr) {
+ if (lval == LValPtr) {
IrInstruction *ptr_instr = ir_get_ref(ira, &instruction->base, ref_instruction, true, false);
ir_link_new_instruction(ptr_instr, &instruction->base);
return ptr_instr->value.type;
@@ -17498,13 +20134,16 @@ static TypeTableEntry *ir_analyze_instruction_ptr_to_int(IrAnalyze *ira, IrInstr
return ira->codegen->builtin_types.entry_invalid;
}
+ if (!type_has_bits(target->value.type)) {
+ ir_add_error(ira, target,
+ buf_sprintf("pointer to size 0 type has no address"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
if (instr_is_comptime(target)) {
ConstExprValue *val = ir_resolve_const(ira, target, UndefBad);
if (!val)
return ira->codegen->builtin_types.entry_invalid;
- if (target->value.type->id == TypeTableEntryIdMaybe) {
- val = val->data.x_maybe;
- }
if (val->type->id == TypeTableEntryIdPointer && val->data.x_ptr.special == ConstPtrSpecialHardCodedAddr) {
IrInstruction *result = ir_create_const(&ira->new_irb, instruction->base.scope,
instruction->base.source_node, usize);
@@ -17521,22 +20160,34 @@ static TypeTableEntry *ir_analyze_instruction_ptr_to_int(IrAnalyze *ira, IrInstr
return usize;
}
-static TypeTableEntry *ir_analyze_instruction_ptr_type_of(IrAnalyze *ira, IrInstructionPtrTypeOf *instruction) {
+static TypeTableEntry *ir_analyze_instruction_ptr_type(IrAnalyze *ira, IrInstructionPtrType *instruction) {
TypeTableEntry *child_type = ir_resolve_type(ira, instruction->child_type->other);
if (type_is_invalid(child_type))
return ira->codegen->builtin_types.entry_invalid;
+ if (child_type->id == TypeTableEntryIdUnreachable) {
+ ir_add_error(ira, &instruction->base, buf_sprintf("pointer to noreturn not allowed"));
+ return ira->codegen->builtin_types.entry_invalid;
+ } else if (child_type->id == TypeTableEntryIdOpaque && instruction->ptr_len == PtrLenUnknown) {
+ ir_add_error(ira, &instruction->base, buf_sprintf("unknown-length pointer to opaque"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
uint32_t align_bytes;
if (instruction->align_value != nullptr) {
if (!ir_resolve_align(ira, instruction->align_value->other, &align_bytes))
return ira->codegen->builtin_types.entry_invalid;
} else {
+ type_ensure_zero_bits_known(ira->codegen, child_type);
+ if (type_is_invalid(child_type))
+ return ira->codegen->builtin_types.entry_invalid;
align_bytes = get_abi_alignment(ira->codegen, child_type);
}
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
out_val->data.x_type = get_pointer_to_type_extra(ira->codegen, child_type,
- instruction->is_const, instruction->is_volatile, align_bytes,
+ instruction->is_const, instruction->is_volatile,
+ instruction->ptr_len, align_bytes,
instruction->bit_offset_start, instruction->bit_offset_end - instruction->bit_offset_start);
return ira->codegen->builtin_types.entry_type;
@@ -17634,6 +20285,16 @@ static TypeTableEntry *ir_analyze_instruction_arg_type(IrAnalyze *ira, IrInstruc
ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
out_val->data.x_type = fn_type_id->param_info[arg_index].type;
+ if (out_val->data.x_type == nullptr) {
+ // Args are only unresolved if our function is generic.
+ assert(fn_type->data.fn.is_generic);
+
+ ir_add_error(ira, arg_index_inst,
+ buf_sprintf("@ArgType could not resolve the type of arg %" ZIG_PRI_u64 " because '%s' is generic",
+ arg_index, buf_ptr(&fn_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
return ira->codegen->builtin_types.entry_type;
}
@@ -17797,7 +20458,7 @@ static TypeTableEntry *ir_analyze_instruction_coro_free(IrAnalyze *ira, IrInstru
instruction->base.source_node, coro_id, coro_handle);
ir_link_new_instruction(result, &instruction->base);
TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, false);
- result->value.type = get_maybe_type(ira->codegen, ptr_type);
+ result->value.type = get_optional_type(ira->codegen, ptr_type);
return result->value.type;
}
@@ -17865,43 +20526,52 @@ static TypeTableEntry *ir_analyze_instruction_coro_alloc_helper(IrAnalyze *ira,
instruction->base.source_node, alloc_fn, coro_size);
ir_link_new_instruction(result, &instruction->base);
TypeTableEntry *u8_ptr_type = get_pointer_to_type(ira->codegen, ira->codegen->builtin_types.entry_u8, false);
- result->value.type = get_maybe_type(ira->codegen, u8_ptr_type);
+ result->value.type = get_optional_type(ira->codegen, u8_ptr_type);
return result->value.type;
}
-static TypeTableEntry *ir_analyze_instruction_atomic_rmw(IrAnalyze *ira, IrInstructionAtomicRmw *instruction) {
- TypeTableEntry *operand_type = ir_resolve_type(ira, instruction->operand_type->other);
- if (type_is_invalid(operand_type)) {
+static TypeTableEntry *ir_resolve_atomic_operand_type(IrAnalyze *ira, IrInstruction *op) {
+ TypeTableEntry *operand_type = ir_resolve_type(ira, op);
+ if (type_is_invalid(operand_type))
return ira->codegen->builtin_types.entry_invalid;
- }
+
if (operand_type->id == TypeTableEntryIdInt) {
if (operand_type->data.integral.bit_count < 8) {
- ir_add_error(ira, &instruction->base,
+ ir_add_error(ira, op,
buf_sprintf("expected integer type 8 bits or larger, found %" PRIu32 "-bit integer type",
operand_type->data.integral.bit_count));
return ira->codegen->builtin_types.entry_invalid;
}
if (operand_type->data.integral.bit_count > ira->codegen->pointer_size_bytes * 8) {
- ir_add_error(ira, &instruction->base,
+ ir_add_error(ira, op,
buf_sprintf("expected integer type pointer size or smaller, found %" PRIu32 "-bit integer type",
operand_type->data.integral.bit_count));
return ira->codegen->builtin_types.entry_invalid;
}
if (!is_power_of_2(operand_type->data.integral.bit_count)) {
- ir_add_error(ira, &instruction->base,
+ ir_add_error(ira, op,
buf_sprintf("%" PRIu32 "-bit integer type is not a power of 2", operand_type->data.integral.bit_count));
return ira->codegen->builtin_types.entry_invalid;
}
} else if (get_codegen_ptr_type(operand_type) == nullptr) {
- ir_add_error(ira, &instruction->base,
+ ir_add_error(ira, op,
buf_sprintf("expected integer or pointer type, found '%s'", buf_ptr(&operand_type->name)));
return ira->codegen->builtin_types.entry_invalid;
}
+ return operand_type;
+}
+
+static TypeTableEntry *ir_analyze_instruction_atomic_rmw(IrAnalyze *ira, IrInstructionAtomicRmw *instruction) {
+ TypeTableEntry *operand_type = ir_resolve_atomic_operand_type(ira, instruction->operand_type->other);
+ if (type_is_invalid(operand_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
IrInstruction *ptr_inst = instruction->ptr->other;
if (type_is_invalid(ptr_inst->value.type))
return ira->codegen->builtin_types.entry_invalid;
+ // TODO let this be volatile
TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, operand_type, false);
IrInstruction *casted_ptr = ir_implicit_cast(ira, ptr_inst, ptr_type);
if (type_is_invalid(casted_ptr->value.type))
@@ -17930,6 +20600,11 @@ static TypeTableEntry *ir_analyze_instruction_atomic_rmw(IrAnalyze *ira, IrInstr
} else {
if (!ir_resolve_atomic_order(ira, instruction->ordering->other, &ordering))
return ira->codegen->builtin_types.entry_invalid;
+ if (ordering == AtomicOrderUnordered) {
+ ir_add_error(ira, instruction->ordering,
+ buf_sprintf("@atomicRmw atomic ordering must not be Unordered"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
}
if (instr_is_comptime(casted_operand) && instr_is_comptime(casted_ptr) && casted_ptr->value.data.x_ptr.mut == ConstPtrMutComptimeVar)
@@ -17945,6 +20620,49 @@ static TypeTableEntry *ir_analyze_instruction_atomic_rmw(IrAnalyze *ira, IrInstr
return result->value.type;
}
+static TypeTableEntry *ir_analyze_instruction_atomic_load(IrAnalyze *ira, IrInstructionAtomicLoad *instruction) {
+ TypeTableEntry *operand_type = ir_resolve_atomic_operand_type(ira, instruction->operand_type->other);
+ if (type_is_invalid(operand_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *ptr_inst = instruction->ptr->other;
+ if (type_is_invalid(ptr_inst->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ TypeTableEntry *ptr_type = get_pointer_to_type(ira->codegen, operand_type, true);
+ IrInstruction *casted_ptr = ir_implicit_cast(ira, ptr_inst, ptr_type);
+ if (type_is_invalid(casted_ptr->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ AtomicOrder ordering;
+ if (instruction->ordering == nullptr) {
+ ordering = instruction->resolved_ordering;
+ } else {
+ if (!ir_resolve_atomic_order(ira, instruction->ordering->other, &ordering))
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ if (ordering == AtomicOrderRelease || ordering == AtomicOrderAcqRel) {
+ assert(instruction->ordering != nullptr);
+ ir_add_error(ira, instruction->ordering,
+ buf_sprintf("@atomicLoad atomic ordering must not be Release or AcqRel"));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ if (instr_is_comptime(casted_ptr)) {
+ IrInstruction *result = ir_get_deref(ira, &instruction->base, casted_ptr);
+ ir_link_new_instruction(result, &instruction->base);
+ assert(result->value.type != nullptr);
+ return result->value.type;
+ }
+
+ IrInstruction *result = ir_build_atomic_load(&ira->new_irb, instruction->base.scope,
+ instruction->base.source_node, nullptr, casted_ptr, nullptr, ordering);
+ ir_link_new_instruction(result, &instruction->base);
+ result->value.type = operand_type;
+ return result->value.type;
+}
+
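@atomicLoad is introduced here: the operand type comes first, followed by a const pointer and an ordering that may not be Release or AcqRel. Sketch (illustrative):

    const AtomicOrder = @import("builtin").AtomicOrder;

    test "atomicLoad" {
        var counter: u32 = 7;
        const seen = @atomicLoad(u32, &counter, AtomicOrder.SeqCst);
        // AtomicOrder.Release or AtomicOrder.AcqRel here would trip the error added above.
    }
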
static TypeTableEntry *ir_analyze_instruction_promise_result_type(IrAnalyze *ira, IrInstructionPromiseResultType *instruction) {
TypeTableEntry *promise_type = ir_resolve_type(ira, instruction->promise_type->other);
if (type_is_invalid(promise_type))
@@ -18031,18 +20749,135 @@ static TypeTableEntry *ir_analyze_instruction_mark_err_ret_trace_ptr(IrAnalyze *
return result->value.type;
}
+static TypeTableEntry *ir_analyze_instruction_sqrt(IrAnalyze *ira, IrInstructionSqrt *instruction) {
+ TypeTableEntry *float_type = ir_resolve_type(ira, instruction->type->other);
+ if (type_is_invalid(float_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *op = instruction->op->other;
+ if (type_is_invalid(op->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ bool ok_type = float_type->id == TypeTableEntryIdComptimeFloat || float_type->id == TypeTableEntryIdFloat;
+ if (!ok_type) {
+ ir_add_error(ira, instruction->type, buf_sprintf("@sqrt does not support type '%s'", buf_ptr(&float_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *casted_op = ir_implicit_cast(ira, op, float_type);
+ if (type_is_invalid(casted_op->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (instr_is_comptime(casted_op)) {
+ ConstExprValue *val = ir_resolve_const(ira, casted_op, UndefBad);
+ if (!val)
+ return ira->codegen->builtin_types.entry_invalid;
+
+ ConstExprValue *out_val = ir_build_const_from(ira, &instruction->base);
+
+ if (float_type->id == TypeTableEntryIdComptimeFloat) {
+ bigfloat_sqrt(&out_val->data.x_bigfloat, &val->data.x_bigfloat);
+ } else if (float_type->id == TypeTableEntryIdFloat) {
+ switch (float_type->data.floating.bit_count) {
+ case 16:
+ out_val->data.x_f16 = f16_sqrt(val->data.x_f16);
+ break;
+ case 32:
+ out_val->data.x_f32 = sqrtf(val->data.x_f32);
+ break;
+ case 64:
+ out_val->data.x_f64 = sqrt(val->data.x_f64);
+ break;
+ case 128:
+ f128M_sqrt(&val->data.x_f128, &out_val->data.x_f128);
+ break;
+ default:
+ zig_unreachable();
+ }
+ } else {
+ zig_unreachable();
+ }
+
+ return float_type;
+ }
+
+ assert(float_type->id == TypeTableEntryIdFloat);
+ if (float_type->data.floating.bit_count != 16 &&
+ float_type->data.floating.bit_count != 32 &&
+ float_type->data.floating.bit_count != 64) {
+ ir_add_error(ira, instruction->type, buf_sprintf("compiler TODO: add implementation of sqrt for '%s'", buf_ptr(&float_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ IrInstruction *result = ir_build_sqrt(&ira->new_irb, instruction->base.scope,
+ instruction->base.source_node, nullptr, casted_op);
+ ir_link_new_instruction(result, &instruction->base);
+ result->value.type = float_type;
+ return result->value.type;
+}
+
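+// Enum-to-integer analysis (presumably backing the @enumToInt builtin): the
+// target must be an enum, and the conversion is performed by
+// ir_analyze_enum_to_int using the enum's integer tag type as the result type.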
+static TypeTableEntry *ir_analyze_instruction_enum_to_int(IrAnalyze *ira, IrInstructionEnumToInt *instruction) {
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (target->value.type->id != TypeTableEntryIdEnum) {
+ ir_add_error(ira, instruction->target,
+ buf_sprintf("expected enum, found type '%s'", buf_ptr(&target->value.type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ type_ensure_zero_bits_known(ira->codegen, target->value.type);
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ TypeTableEntry *tag_type = target->value.type->data.enumeration.tag_int_type;
+
+ IrInstruction *result = ir_analyze_enum_to_int(ira, &instruction->base, target, tag_type);
+ ir_link_new_instruction(result, &instruction->base);
+ return result->value.type;
+}
+
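+// The inverse conversion (presumably backing @intToEnum): the destination type
+// must be an enum, and the integer operand is implicitly cast to the enum's
+// tag type before ir_analyze_int_to_enum performs the conversion.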
+static TypeTableEntry *ir_analyze_instruction_int_to_enum(IrAnalyze *ira, IrInstructionIntToEnum *instruction) {
+ IrInstruction *dest_type_value = instruction->dest_type->other;
+ TypeTableEntry *dest_type = ir_resolve_type(ira, dest_type_value);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ if (dest_type->id != TypeTableEntryIdEnum) {
+ ir_add_error(ira, instruction->dest_type,
+ buf_sprintf("expected enum, found type '%s'", buf_ptr(&dest_type->name)));
+ return ira->codegen->builtin_types.entry_invalid;
+ }
+
+ type_ensure_zero_bits_known(ira->codegen, dest_type);
+ if (type_is_invalid(dest_type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ TypeTableEntry *tag_type = dest_type->data.enumeration.tag_int_type;
+
+ IrInstruction *target = instruction->target->other;
+ if (type_is_invalid(target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *casted_target = ir_implicit_cast(ira, target, tag_type);
+ if (type_is_invalid(casted_target->value.type))
+ return ira->codegen->builtin_types.entry_invalid;
+
+ IrInstruction *result = ir_analyze_int_to_enum(ira, &instruction->base, casted_target, dest_type);
+ ir_link_new_instruction(result, &instruction->base);
+ return result->value.type;
+}
+
static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstruction *instruction) {
switch (instruction->id) {
case IrInstructionIdInvalid:
case IrInstructionIdWidenOrShorten:
- case IrInstructionIdIntToEnum:
- case IrInstructionIdIntToErr:
- case IrInstructionIdErrToInt:
case IrInstructionIdStructInit:
case IrInstructionIdUnionInit:
case IrInstructionIdStructFieldPtr:
case IrInstructionIdUnionFieldPtr:
- case IrInstructionIdMaybeWrap:
+ case IrInstructionIdOptionalWrap:
case IrInstructionIdErrWrapCode:
case IrInstructionIdErrWrapPayload:
case IrInstructionIdCast:
@@ -18102,12 +20937,14 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_size_of(ira, (IrInstructionSizeOf *)instruction);
case IrInstructionIdTestNonNull:
return ir_analyze_instruction_test_non_null(ira, (IrInstructionTestNonNull *)instruction);
- case IrInstructionIdUnwrapMaybe:
- return ir_analyze_instruction_unwrap_maybe(ira, (IrInstructionUnwrapMaybe *)instruction);
+ case IrInstructionIdUnwrapOptional:
+ return ir_analyze_instruction_unwrap_maybe(ira, (IrInstructionUnwrapOptional *)instruction);
case IrInstructionIdClz:
return ir_analyze_instruction_clz(ira, (IrInstructionClz *)instruction);
case IrInstructionIdCtz:
return ir_analyze_instruction_ctz(ira, (IrInstructionCtz *)instruction);
+ case IrInstructionIdPopCount:
+ return ir_analyze_instruction_pop_count(ira, (IrInstructionPopCount *)instruction);
case IrInstructionIdSwitchBr:
return ir_analyze_instruction_switch_br(ira, (IrInstructionSwitchBr *)instruction);
case IrInstructionIdSwitchTarget:
@@ -18154,6 +20991,22 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_fence(ira, (IrInstructionFence *)instruction);
case IrInstructionIdTruncate:
return ir_analyze_instruction_truncate(ira, (IrInstructionTruncate *)instruction);
+ case IrInstructionIdIntCast:
+ return ir_analyze_instruction_int_cast(ira, (IrInstructionIntCast *)instruction);
+ case IrInstructionIdFloatCast:
+ return ir_analyze_instruction_float_cast(ira, (IrInstructionFloatCast *)instruction);
+ case IrInstructionIdErrSetCast:
+ return ir_analyze_instruction_err_set_cast(ira, (IrInstructionErrSetCast *)instruction);
+ case IrInstructionIdFromBytes:
+ return ir_analyze_instruction_from_bytes(ira, (IrInstructionFromBytes *)instruction);
+ case IrInstructionIdToBytes:
+ return ir_analyze_instruction_to_bytes(ira, (IrInstructionToBytes *)instruction);
+ case IrInstructionIdIntToFloat:
+ return ir_analyze_instruction_int_to_float(ira, (IrInstructionIntToFloat *)instruction);
+ case IrInstructionIdFloatToInt:
+ return ir_analyze_instruction_float_to_int(ira, (IrInstructionFloatToInt *)instruction);
+ case IrInstructionIdBoolToInt:
+ return ir_analyze_instruction_bool_to_int(ira, (IrInstructionBoolToInt *)instruction);
case IrInstructionIdIntType:
return ir_analyze_instruction_int_type(ira, (IrInstructionIntType *)instruction);
case IrInstructionIdBoolNot:
@@ -18176,6 +21029,8 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_return_address(ira, (IrInstructionReturnAddress *)instruction);
case IrInstructionIdFrameAddress:
return ir_analyze_instruction_frame_address(ira, (IrInstructionFrameAddress *)instruction);
+ case IrInstructionIdHandle:
+ return ir_analyze_instruction_handle(ira, (IrInstructionHandle *)instruction);
case IrInstructionIdAlignOf:
return ir_analyze_instruction_align_of(ira, (IrInstructionAlignOf *)instruction);
case IrInstructionIdOverflowOp:
@@ -18194,8 +21049,6 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_check_switch_prongs(ira, (IrInstructionCheckSwitchProngs *)instruction);
case IrInstructionIdCheckStatementIsVoid:
return ir_analyze_instruction_check_statement_is_void(ira, (IrInstructionCheckStatementIsVoid *)instruction);
- case IrInstructionIdCanImplicitCast:
- return ir_analyze_instruction_can_implicit_cast(ira, (IrInstructionCanImplicitCast *)instruction);
case IrInstructionIdDeclRef:
return ir_analyze_instruction_decl_ref(ira, (IrInstructionDeclRef *)instruction);
case IrInstructionIdPanic:
@@ -18214,12 +21067,14 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_field_parent_ptr(ira, (IrInstructionFieldParentPtr *)instruction);
case IrInstructionIdOffsetOf:
return ir_analyze_instruction_offset_of(ira, (IrInstructionOffsetOf *)instruction);
+ case IrInstructionIdTypeInfo:
+ return ir_analyze_instruction_type_info(ira, (IrInstructionTypeInfo *) instruction);
case IrInstructionIdTypeId:
return ir_analyze_instruction_type_id(ira, (IrInstructionTypeId *)instruction);
case IrInstructionIdSetEvalBranchQuota:
return ir_analyze_instruction_set_eval_branch_quota(ira, (IrInstructionSetEvalBranchQuota *)instruction);
- case IrInstructionIdPtrTypeOf:
- return ir_analyze_instruction_ptr_type_of(ira, (IrInstructionPtrTypeOf *)instruction);
+ case IrInstructionIdPtrType:
+ return ir_analyze_instruction_ptr_type(ira, (IrInstructionPtrType *)instruction);
case IrInstructionIdAlignCast:
return ir_analyze_instruction_align_cast(ira, (IrInstructionAlignCast *)instruction);
case IrInstructionIdOpaqueType:
@@ -18266,6 +21121,8 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_coro_alloc_helper(ira, (IrInstructionCoroAllocHelper *)instruction);
case IrInstructionIdAtomicRmw:
return ir_analyze_instruction_atomic_rmw(ira, (IrInstructionAtomicRmw *)instruction);
+ case IrInstructionIdAtomicLoad:
+ return ir_analyze_instruction_atomic_load(ira, (IrInstructionAtomicLoad *)instruction);
case IrInstructionIdPromiseResultType:
return ir_analyze_instruction_promise_result_type(ira, (IrInstructionPromiseResultType *)instruction);
case IrInstructionIdAwaitBookkeeping:
@@ -18278,6 +21135,16 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
return ir_analyze_instruction_merge_err_ret_traces(ira, (IrInstructionMergeErrRetTraces *)instruction);
case IrInstructionIdMarkErrRetTracePtr:
return ir_analyze_instruction_mark_err_ret_trace_ptr(ira, (IrInstructionMarkErrRetTracePtr *)instruction);
+ case IrInstructionIdSqrt:
+ return ir_analyze_instruction_sqrt(ira, (IrInstructionSqrt *)instruction);
+ case IrInstructionIdIntToErr:
+ return ir_analyze_instruction_int_to_err(ira, (IrInstructionIntToErr *)instruction);
+ case IrInstructionIdErrToInt:
+ return ir_analyze_instruction_err_to_int(ira, (IrInstructionErrToInt *)instruction);
+ case IrInstructionIdIntToEnum:
+ return ir_analyze_instruction_int_to_enum(ira, (IrInstructionIntToEnum *)instruction);
+ case IrInstructionIdEnumToInt:
+ return ir_analyze_instruction_enum_to_int(ira, (IrInstructionEnumToInt *)instruction);
}
zig_unreachable();
}
@@ -18285,6 +21152,7 @@ static TypeTableEntry *ir_analyze_instruction_nocast(IrAnalyze *ira, IrInstructi
static TypeTableEntry *ir_analyze_instruction(IrAnalyze *ira, IrInstruction *instruction) {
TypeTableEntry *instruction_type = ir_analyze_instruction_nocast(ira, instruction);
instruction->value.type = instruction_type;
+
if (instruction->other) {
instruction->other->value.type = instruction_type;
} else {
@@ -18354,7 +21222,7 @@ TypeTableEntry *ir_analyze(CodeGen *codegen, IrExecutable *old_exec, IrExecutabl
} else if (ira->src_implicit_return_type_list.length == 0) {
return codegen->builtin_types.entry_unreachable;
} else {
- return ir_resolve_peer_types(ira, expected_type_source_node, ira->src_implicit_return_type_list.items,
+ return ir_resolve_peer_types(ira, expected_type_source_node, expected_type, ira->src_implicit_return_type_list.items,
ira->src_implicit_return_type_list.length);
}
}
@@ -18391,7 +21259,7 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdCheckStatementIsVoid:
case IrInstructionIdPanic:
case IrInstructionIdSetEvalBranchQuota:
- case IrInstructionIdPtrTypeOf:
+ case IrInstructionIdPtrType:
case IrInstructionIdSetAlignStack:
case IrInstructionIdExport:
case IrInstructionIdCancel:
@@ -18407,6 +21275,7 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdAddImplicitReturnType:
case IrInstructionIdMergeErrRetTraces:
case IrInstructionIdMarkErrRetTracePtr:
+ case IrInstructionIdAtomicRmw:
return true;
case IrInstructionIdPhi:
@@ -18433,9 +21302,10 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdSliceType:
case IrInstructionIdSizeOf:
case IrInstructionIdTestNonNull:
- case IrInstructionIdUnwrapMaybe:
+ case IrInstructionIdUnwrapOptional:
case IrInstructionIdClz:
case IrInstructionIdCtz:
+ case IrInstructionIdPopCount:
case IrInstructionIdSwitchVar:
case IrInstructionIdSwitchTarget:
case IrInstructionIdUnionTag:
@@ -18453,9 +21323,10 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdAlignOf:
case IrInstructionIdReturnAddress:
case IrInstructionIdFrameAddress:
+ case IrInstructionIdHandle:
case IrInstructionIdTestErr:
case IrInstructionIdUnwrapErrCode:
- case IrInstructionIdMaybeWrap:
+ case IrInstructionIdOptionalWrap:
case IrInstructionIdErrWrapCode:
case IrInstructionIdErrWrapPayload:
case IrInstructionIdFnProto:
@@ -18468,13 +21339,13 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdIntToEnum:
case IrInstructionIdIntToErr:
case IrInstructionIdErrToInt:
- case IrInstructionIdCanImplicitCast:
case IrInstructionIdDeclRef:
case IrInstructionIdErrName:
case IrInstructionIdTypeName:
case IrInstructionIdTagName:
case IrInstructionIdFieldParentPtr:
case IrInstructionIdOffsetOf:
+ case IrInstructionIdTypeInfo:
case IrInstructionIdTypeId:
case IrInstructionIdAlignCast:
case IrInstructionIdOpaqueType:
@@ -18487,9 +21358,19 @@ bool ir_has_side_effects(IrInstruction *instruction) {
case IrInstructionIdCoroSize:
case IrInstructionIdCoroSuspend:
case IrInstructionIdCoroFree:
- case IrInstructionIdAtomicRmw:
case IrInstructionIdCoroPromise:
case IrInstructionIdPromiseResultType:
+ case IrInstructionIdSqrt:
+ case IrInstructionIdAtomicLoad:
+ case IrInstructionIdIntCast:
+ case IrInstructionIdFloatCast:
+ case IrInstructionIdErrSetCast:
+ case IrInstructionIdIntToFloat:
+ case IrInstructionIdFloatToInt:
+ case IrInstructionIdBoolToInt:
+ case IrInstructionIdFromBytes:
+ case IrInstructionIdToBytes:
+ case IrInstructionIdEnumToInt:
return false;
case IrInstructionIdAsm: