| Field | Value | Date |
|---|---|---|
| author | Andrew Kelley <andrew@ziglang.org> | 2022-02-03 20:23:46 -0500 |
| committer | GitHub <noreply@github.com> | 2022-02-03 20:23:46 -0500 |
| commit | 71e0cca7a7957e2f024d2985318e478aa6fb1451 (patch) | |
| tree | 8a85869adb92126d3fbaca52d4b0c0607ef3d7de /src | |
| parent | 4ca9a8d192f4c800f10cdb3bd39c94922b6fb9b8 (diff) | |
| parent | 588b88b98753f02061e562a9c15c2396bcd95dee (diff) | |
| download | zig-71e0cca7a7957e2f024d2985318e478aa6fb1451.tar.gz, zig-71e0cca7a7957e2f024d2985318e478aa6fb1451.zip | |
Merge pull request #10780 from Luukdegram/wasm-behavior-tests
stage2: Wasm - Account for stack alignment
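For context, the heart of this change is the frame-layout arithmetic: `allocStack` bumps a running `stack_size`, aligning each slot's offset to the value's ABI alignment, and the `genFunc` prologue subtracts the total (rounded up to `stack_alignment`, 16 bytes by default per tool-conventions) from the stack-pointer global and masks the result with the negated alignment. The sketch below restates only that arithmetic outside the compiler; the `alignForward` helper, the u32/u64 slot sizes, and the starting stack-pointer value are illustrative assumptions, not code from this commit.

```zig
const std = @import("std");

/// Round `value` up to the next multiple of `alignment` (a power of two).
/// Stand-in for the std.mem.alignForwardGeneric call used in the diff.
fn alignForward(value: u32, alignment: u32) u32 {
    return (value + alignment - 1) & ~(alignment - 1);
}

test "frame layout sketch: allocStack offsets and the genFunc prologue" {
    const stack_alignment: u32 = 16; // default per tool-conventions BasicCABI
    var stack_size: u32 = 0;

    // allocStack: each slot's offset is the running size aligned to the
    // value's ABI alignment, then the size grows past the slot.
    const a_offset = alignForward(stack_size, 4); // hypothetical u32 slot
    stack_size = a_offset + 4;
    const b_offset = alignForward(stack_size, 8); // hypothetical u64 slot
    stack_size = b_offset + 8;

    // genFunc prologue: lower the stack pointer by the aligned total, then
    // mask it (the i32_const -alignment / i32_and pair) so it stays aligned.
    const old_sp: u32 = 0x1_0000; // made-up incoming stack pointer
    const aligned_total = alignForward(stack_size, stack_alignment);
    const new_sp = (old_sp - aligned_total) & ~(stack_alignment - 1);

    try std.testing.expectEqual(@as(u32, 0), a_offset);
    try std.testing.expectEqual(@as(u32, 8), b_offset);
    try std.testing.expectEqual(@as(u32, 0), new_sp % stack_alignment);
}
```

The final `& ~(alignment - 1)` step corresponds to the prologue's `i32_const -alignment` / `i32_and` pair in the diff below.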
Diffstat (limited to 'src')

| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | src/arch/wasm/CodeGen.zig | 349 |
| -rw-r--r-- | src/arch/wasm/Emit.zig | 10 |
| -rw-r--r-- | src/arch/wasm/Mir.zig | 7 |
| -rw-r--r-- | src/link/Wasm.zig | 12 |

4 files changed, 287 insertions, 91 deletions
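Besides the stack frame itself, the CodeGen.zig changes introduce a `memory_offset` `WValue` and a `Mir.Memory{ pointer, offset }` payload so that constants such as `&array[index]` can be lowered as a parent symbol plus a byte offset, which is later emitted as the relocation addend. The toy sketch below restates only that computation; `MemoryOffset`, `lowerElemPtr`, and the symbol index 42 are hypothetical stand-ins, not the compiler's actual API.

```zig
const std = @import("std");

/// Illustrative stand-in for the symbol-plus-addend pair that the new
/// `WValue.memory_offset` / `Mir.Memory` carry through to the relocation.
const MemoryOffset = struct {
    pointer: u32, // symbol index of the parent (e.g. the array)
    offset: u32, // byte offset, later emitted as the relocation addend
};

/// Mirrors the `.elem_ptr` case in `lowerConstant`: the constant
/// `&array[index]` becomes the array's symbol plus `index * elem_size`.
fn lowerElemPtr(array_symbol: u32, index: u32, elem_abi_size: u32) MemoryOffset {
    return .{ .pointer = array_symbol, .offset = index * elem_abi_size };
}

test "constant &array[3] with u64 elements is symbol + 24" {
    const mem = lowerElemPtr(42, 3, @sizeOf(u64));
    try std.testing.expectEqual(@as(u32, 42), mem.pointer);
    try std.testing.expectEqual(@as(u32, 24), mem.offset);
}
```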
diff --git a/src/arch/wasm/CodeGen.zig b/src/arch/wasm/CodeGen.zig index 7d2046b90b..67aa9a6c88 100644 --- a/src/arch/wasm/CodeGen.zig +++ b/src/arch/wasm/CodeGen.zig @@ -39,6 +39,14 @@ const WValue = union(enum) { /// Note: The value contains the symbol index, rather than the actual address /// as we use this to perform the relocation. memory: u32, + /// A value that represents a parent pointer and an offset + /// from that pointer. i.e. when slicing with constant values. + memory_offset: struct { + /// The symbol of the parent pointer + pointer: u32, + /// Offset will be set as addend when relocating + offset: u32, + }, /// Represents a function pointer /// In wasm function pointers are indexes into a function table, /// rather than an address in the data section. @@ -552,6 +560,9 @@ mir_extra: std.ArrayListUnmanaged(u32) = .{}, /// When a function is executing, we store the the current stack pointer's value within this local. /// This value is then used to restore the stack pointer to the original value at the return of the function. initial_stack_value: WValue = .none, +/// The current stack pointer substracted with the stack size. From this value, we will calculate +/// all offsets of the stack values. +bottom_stack_value: WValue = .none, /// Arguments of this function declaration /// This will be set after `resolveCallingConventionValues` args: []WValue = &.{}, @@ -559,6 +570,14 @@ args: []WValue = &.{}, /// When it returns a pointer to the stack, the `.local` tag will be active and must be populated /// before this function returns its execution to the caller. return_value: WValue = .none, +/// The size of the stack this function occupies. In the function prologue +/// we will move the stack pointer by this number, forward aligned with the `stack_alignment`. +stack_size: u32 = 0, +/// The stack alignment, which is 16 bytes by default. This is specified by the +/// tool-conventions: https://github.com/WebAssembly/tool-conventions/blob/main/BasicCABI.md +/// and also what the llvm backend will emit. +/// However, local variables or the usage of `@setAlignStack` can overwrite this default. +stack_alignment: u32 = 16, const InnerError = error{ OutOfMemory, @@ -598,7 +617,10 @@ fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!WValue { // means we must generate it from a constant. const val = self.air.value(ref).?; const ty = self.air.typeOf(ref); - if (!ty.hasRuntimeBits() and !ty.isInt()) return WValue{ .none = {} }; + if (!ty.hasRuntimeBits() and !ty.isInt()) { + gop.value_ptr.* = WValue{ .none = {} }; + return gop.value_ptr.*; + } // When we need to pass the value by reference (such as a struct), we will // leverage `genTypedValue` to lower the constant to bytes and emit it @@ -643,13 +665,6 @@ fn addInst(self: *Self, inst: Mir.Inst) error{OutOfMemory}!void { try self.mir_instructions.append(self.gpa, inst); } -/// Inserts a Mir instruction at the given `offset`. -/// Asserts offset is within bound. 
-fn addInstAt(self: *Self, offset: usize, inst: Mir.Inst) error{OutOfMemory}!void { - try self.mir_instructions.ensureUnusedCapacity(self.gpa, 1); - self.mir_instructions.insertAssumeCapacity(offset, inst); -} - fn addTag(self: *Self, tag: Mir.Inst.Tag) error{OutOfMemory}!void { try self.addInst(.{ .tag = tag, .data = .{ .tag = {} } }); } @@ -754,7 +769,14 @@ fn emitWValue(self: *Self, value: WValue) InnerError!void { .imm64 => |val| try self.addImm64(val), .float32 => |val| try self.addInst(.{ .tag = .f32_const, .data = .{ .float32 = val } }), .float64 => |val| try self.addFloat64(val), - .memory => |ptr| try self.addLabel(.memory_address, ptr), // write sybol address and generate relocation + .memory => |ptr| { + const extra_index = try self.addExtra(Mir.Memory{ .pointer = ptr, .offset = 0 }); + try self.addInst(.{ .tag = .memory_address, .data = .{ .payload = extra_index } }); + }, + .memory_offset => |mem_off| { + const extra_index = try self.addExtra(Mir.Memory{ .pointer = mem_off.pointer, .offset = mem_off.offset }); + try self.addInst(.{ .tag = .memory_address, .data = .{ .payload = extra_index } }); + }, .function_index => |index| try self.addLabel(.function_index, index), // write function index and generate relocation } } @@ -827,10 +849,43 @@ pub fn genFunc(self: *Self) InnerError!void { try self.addTag(.@"unreachable"); } } - // End of function body try self.addTag(.end); + // check if we have to initialize and allocate anything into the stack frame. + // If so, create enough stack space and insert the instructions at the front of the list. + if (self.stack_size > 0) { + var prologue = std.ArrayList(Mir.Inst).init(self.gpa); + defer prologue.deinit(); + + // load stack pointer + try prologue.append(.{ .tag = .global_get, .data = .{ .label = 0 } }); + // store stack pointer so we can restore it when we return from the function + try prologue.append(.{ .tag = .local_tee, .data = .{ .label = self.initial_stack_value.local } }); + // get the total stack size + const aligned_stack = std.mem.alignForwardGeneric(u32, self.stack_size, self.stack_alignment); + try prologue.append(.{ .tag = .i32_const, .data = .{ .imm32 = @intCast(i32, aligned_stack) } }); + // substract it from the current stack pointer + try prologue.append(.{ .tag = .i32_sub, .data = .{ .tag = {} } }); + // Get negative stack aligment + try prologue.append(.{ .tag = .i32_const, .data = .{ .imm32 = @intCast(i32, self.stack_alignment) * -1 } }); + // Bit and the value to get the new stack pointer to ensure the pointers are aligned with the abi alignment + try prologue.append(.{ .tag = .i32_and, .data = .{ .tag = {} } }); + // store the current stack pointer as the bottom, which will be used to calculate all stack pointer offsets + try prologue.append(.{ .tag = .local_tee, .data = .{ .label = self.bottom_stack_value.local } }); + // Store the current stack pointer value into the global stack pointer so other function calls will + // start from this value instead and not overwrite the current stack. + try prologue.append(.{ .tag = .global_set, .data = .{ .label = 0 } }); + + // reserve space and insert all prologue instructions at the front of the instruction list + // We insert them in reserve order as there is no insertSlice in multiArrayList. 
+ try self.mir_instructions.ensureUnusedCapacity(self.gpa, prologue.items.len); + for (prologue.items) |_, index| { + const inst = prologue.items[prologue.items.len - 1 - index]; + self.mir_instructions.insertAssumeCapacity(0, inst); + } + } + var mir: Mir = .{ .instructions = self.mir_instructions.toOwnedSlice(), .extra = self.mir_extra.toOwnedSlice(self.gpa), @@ -927,7 +982,7 @@ pub const DeclGen = struct { .function => val.castTag(.function).?.data.owner_decl, else => unreachable, }; - return try self.lowerDeclRef(ty, val, fn_decl); + return try self.lowerDeclRefValue(ty, val, fn_decl, 0); }, .Optional => { var opt_buf: Type.Payload.ElemType = undefined; @@ -1115,11 +1170,11 @@ pub const DeclGen = struct { .Pointer => switch (val.tag()) { .variable => { const decl = val.castTag(.variable).?.data.owner_decl; - return self.lowerDeclRef(ty, val, decl); + return self.lowerDeclRefValue(ty, val, decl, 0); }, .decl_ref => { const decl = val.castTag(.decl_ref).?.data; - return self.lowerDeclRef(ty, val, decl); + return self.lowerDeclRefValue(ty, val, decl, 0); }, .slice => { const slice = val.castTag(.slice).?.data; @@ -1139,6 +1194,13 @@ pub const DeclGen = struct { try writer.writeByteNTimes(0, @divExact(self.target().cpu.arch.ptrBitWidth(), 8)); return Result{ .appended = {} }; }, + .elem_ptr => { + const elem_ptr = val.castTag(.elem_ptr).?.data; + const elem_size = ty.childType().abiSize(self.target()); + const offset = elem_ptr.index * elem_size; + return self.lowerParentPtr(elem_ptr.array_ptr, @intCast(usize, offset)); + }, + .int_u64 => return self.genTypedValue(Type.usize, val), else => return self.fail("TODO: Implement zig decl gen for pointer type value: '{s}'", .{@tagName(val.tag())}), }, .ErrorUnion => { @@ -1179,7 +1241,36 @@ pub const DeclGen = struct { } } - fn lowerDeclRef(self: *DeclGen, ty: Type, val: Value, decl: *Module.Decl) InnerError!Result { + fn lowerParentPtr(self: *DeclGen, ptr_value: Value, offset: usize) InnerError!Result { + switch (ptr_value.tag()) { + .decl_ref => { + const decl = ptr_value.castTag(.decl_ref).?.data; + return self.lowerParentPtrDecl(ptr_value, decl, offset); + }, + else => |tag| return self.fail("TODO: Implement lowerParentPtr for pointer value tag: {s}", .{tag}), + } + } + + fn lowerParentPtrDecl(self: *DeclGen, ptr_val: Value, decl: *Module.Decl, offset: usize) InnerError!Result { + decl.markAlive(); + var ptr_ty_payload: Type.Payload.ElemType = .{ + .base = .{ .tag = .single_mut_pointer }, + .data = decl.ty, + }; + const ptr_ty = Type.initPayload(&ptr_ty_payload.base); + return self.lowerDeclRefValue(ptr_ty, ptr_val, decl, offset); + } + + fn lowerDeclRefValue( + self: *DeclGen, + ty: Type, + val: Value, + /// The target decl that is being pointed to + decl: *Module.Decl, + /// When lowering to an indexed pointer, we can specify the offset + /// which will then be used as 'addend' to the relocation. 
+ offset: usize, + ) InnerError!Result { const writer = self.code.writer(); if (ty.isSlice()) { var buf: Type.SlicePtrFieldTypeBuffer = undefined; @@ -1202,6 +1293,7 @@ pub const DeclGen = struct { self.symbol_index, // source symbol index decl.link.wasm.sym_index, // target symbol index @intCast(u32, self.code.items.len), // offset + @intCast(u32, offset), // addend )); return Result{ .appended = {} }; } @@ -1254,22 +1346,16 @@ fn resolveCallingConventionValues(self: *Self, fn_ty: Type) InnerError!CallWValu return result; } -/// Retrieves the stack pointer's value from the global variable and stores -/// it in a local +/// Creates a local for the initial stack value /// Asserts `initial_stack_value` is `.none` fn initializeStack(self: *Self) !void { assert(self.initial_stack_value == .none); - // reserve space for immediate value - // get stack pointer global - try self.addLabel(.global_get, 0); - // Reserve a local to store the current stack pointer // We can later use this local to set the stack pointer back to the value // we have stored here. - self.initial_stack_value = try self.allocLocal(Type.initTag(.i32)); - - // save the value to the local - try self.addLabel(.local_set, self.initial_stack_value.local); + self.initial_stack_value = try self.allocLocal(Type.usize); + // Also reserve a local to store the bottom stack value + self.bottom_stack_value = try self.allocLocal(Type.usize); } /// Reads the stack pointer from `Context.initial_stack_value` and writes it @@ -1284,36 +1370,75 @@ fn restoreStackPointer(self: *Self) !void { try self.addLabel(.global_set, 0); } -/// Moves the stack pointer by given `offset` -/// It does this by retrieving the stack pointer, subtracting `offset` and storing -/// the result back into the stack pointer. -fn moveStack(self: *Self, offset: u32, local: u32) !void { - if (offset == 0) return; - try self.addLabel(.global_get, 0); - try self.addImm32(@bitCast(i32, offset)); - try self.addTag(.i32_sub); - try self.addLabel(.local_tee, local); - try self.addLabel(.global_set, 0); +/// Saves the current stack size's stack pointer position into a given local +/// It does this by retrieving the bottom stack pointer, adding `self.stack_size` and storing +/// the result back into the local. +fn saveStack(self: *Self) !WValue { + const local = try self.allocLocal(Type.usize); + try self.addLabel(.local_get, self.bottom_stack_value.local); + try self.addImm32(@intCast(i32, self.stack_size)); + try self.addTag(.i32_add); + try self.addLabel(.local_set, local.local); + return local; } /// From a given type, will create space on the virtual stack to store the value of such type. /// This returns a `WValue` with its active tag set to `local`, containing the index to the local /// that points to the position on the virtual stack. This function should be used instead of -/// moveStack unless a local was already created to store the point. +/// moveStack unless a local was already created to store the pointer. 
/// /// Asserts Type has codegenbits fn allocStack(self: *Self, ty: Type) !WValue { assert(ty.hasRuntimeBits()); + if (self.initial_stack_value == .none) { + try self.initializeStack(); + } - // calculate needed stack space const abi_size = std.math.cast(u32, ty.abiSize(self.target)) catch { - return self.fail("Given type '{}' too big to fit into stack frame", .{ty}); + return self.fail("Type {} with ABI size of {d} exceeds stack frame size", .{ ty, ty.abiSize(self.target) }); }; + const abi_align = ty.abiAlignment(self.target); - // allocate a local using wasm's pointer size - const local = try self.allocLocal(Type.@"usize"); - try self.moveStack(abi_size, local.local); - return local; + if (abi_align > self.stack_alignment) { + self.stack_alignment = abi_align; + } + + const offset = std.mem.alignForwardGeneric(u32, self.stack_size, abi_align); + defer self.stack_size = offset + abi_size; + + // store the stack pointer and return a local to it + return self.saveStack(); +} + +/// From a given AIR instruction generates a pointer to the stack where +/// the value of its type will live. +/// This is different from allocStack where this will use the pointer's alignment +/// if it is set, to ensure the stack alignment will be set correctly. +fn allocStackPtr(self: *Self, inst: Air.Inst.Index) !WValue { + const ptr_ty = self.air.typeOfIndex(inst); + const pointee_ty = ptr_ty.childType(); + + if (self.initial_stack_value == .none) { + try self.initializeStack(); + } + + if (!pointee_ty.hasRuntimeBits()) { + return self.allocStack(Type.usize); // create a value containing just the stack pointer. + } + + const abi_alignment = ptr_ty.ptrAlignment(self.target); + const abi_size = std.math.cast(u32, pointee_ty.abiSize(self.target)) catch { + return self.fail("Type {} with ABI size of {d} exceeds stack frame size", .{ pointee_ty, pointee_ty.abiSize(self.target) }); + }; + if (abi_alignment > self.stack_alignment) { + self.stack_alignment = abi_alignment; + } + + const offset = std.mem.alignForwardGeneric(u32, self.stack_size, abi_alignment); + defer self.stack_size = offset + abi_size; + + // store the stack pointer and return a local to it + return self.saveStack(); } /// From given zig bitsize, returns the wasm bitsize @@ -1592,6 +1717,7 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void { fn airRet(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const un_op = self.air.instructions.items(.data)[inst].un_op; const operand = try self.resolveInst(un_op); + // result must be stored in the stack and we return a pointer // to the stack instead if (self.return_value != .none) { @@ -1601,7 +1727,7 @@ fn airRet(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } try self.restoreStackPointer(); try self.addTag(.@"return"); - return .none; + return WValue{ .none = {} }; } fn airRetPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1611,12 +1737,7 @@ fn airRetPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { if (isByRef(child_type, self.target)) { return self.return_value; } - - // Initialize the stack - if (self.initial_stack_value == .none) { - try self.initializeStack(); - } - return self.allocStack(child_type); + return self.allocStackPtr(inst); } fn airRetLoad(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1708,20 +1829,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } fn airAlloc(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - const pointee_type = self.air.typeOfIndex(inst).childType(); - - // Initialize 
the stack - if (self.initial_stack_value == .none) { - try self.initializeStack(); - } - - if (!pointee_type.hasRuntimeBits()) { - // when the pointee is zero-sized, we still want to create a pointer. - // but instead use a default pointer type as storage. - const zero_ptr = try self.allocStack(Type.usize); - return zero_ptr; - } - return self.allocStack(pointee_type); + return self.allocStackPtr(inst); } fn airStore(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -1741,11 +1849,10 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro const err_ty = ty.errorUnionSet(); const pl_ty = ty.errorUnionPayload(); if (!pl_ty.hasRuntimeBits()) { - const err_val = try self.load(rhs, err_ty, 0); - return self.store(lhs, err_val, err_ty, 0); + return self.store(lhs, rhs, err_ty, 0); } - return try self.memCopy(ty, lhs, rhs); + return self.memCopy(ty, lhs, rhs); }, .Optional => { if (ty.isPtrLikeOptional()) { @@ -1760,7 +1867,7 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro return self.memCopy(ty, lhs, rhs); }, .Struct, .Array, .Union => { - return try self.memCopy(ty, lhs, rhs); + return self.memCopy(ty, lhs, rhs); }, .Pointer => { if (ty.isSlice()) { @@ -1775,7 +1882,7 @@ fn store(self: *Self, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerErro } }, .Int => if (ty.intInfo(self.target).bits > 64) { - return try self.memCopy(ty, lhs, rhs); + return self.memCopy(ty, lhs, rhs); }, else => {}, } @@ -1974,6 +2081,17 @@ fn lowerConstant(self: *Self, val: Value, ty: Type) InnerError!WValue { return WValue{ .function_index = target_sym_index }; } else return WValue{ .memory = target_sym_index }; }, + .elem_ptr => { + const elem_ptr = val.castTag(.elem_ptr).?.data; + const index = elem_ptr.index; + const offset = index * ty.childType().abiSize(self.target); + const array_ptr = try self.lowerConstant(elem_ptr.array_ptr, ty); + + return WValue{ .memory_offset = .{ + .pointer = array_ptr.memory, + .offset = @intCast(u32, offset), + } }; + }, .int_u64, .one => return WValue{ .imm32 = @intCast(u32, val.toUnsignedInt()) }, .zero, .null_value => return WValue{ .imm32 = 0 }, else => return self.fail("Wasm TODO: lowerConstant for other const pointer tag {s}", .{val.tag()}), @@ -2524,11 +2642,11 @@ fn airUnwrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue if (isByRef(payload_ty, self.target)) { return self.buildPointerOffset(operand, offset, .new); } - return try self.load(operand, payload_ty, offset); + return self.load(operand, payload_ty, offset); } fn airUnwrapErrUnionError(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - if (self.liveness.isUnused(inst)) return WValue.none; + if (self.liveness.isUnused(inst)) return WValue{ .none = {} }; const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); @@ -2538,11 +2656,12 @@ fn airUnwrapErrUnionError(self: *Self, inst: Air.Inst.Index) InnerError!WValue { return operand; } - return try self.load(operand, err_ty.errorUnionSet(), 0); + return self.load(operand, err_ty.errorUnionSet(), 0); } fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - if (self.liveness.isUnused(inst)) return WValue.none; + if (self.liveness.isUnused(inst)) return WValue{ .none = {} }; + const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); @@ -2564,11 +2683,14 @@ fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) InnerError!WValue { 
} fn airWrapErrUnionErr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - if (self.liveness.isUnused(inst)) return WValue.none; + if (self.liveness.isUnused(inst)) return WValue{ .none = {} }; + const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); const err_ty = self.air.getRefType(ty_op.ty); + if (!err_ty.errorUnionPayload().hasRuntimeBits()) return operand; + const err_union = try self.allocStack(err_ty); // TODO: Also write 'undefined' to the payload try self.store(err_union, operand, err_ty.errorUnionSet(), 0); @@ -2750,16 +2872,16 @@ fn airSlice(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } fn airSliceLen(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - if (self.liveness.isUnused(inst)) return WValue.none; + if (self.liveness.isUnused(inst)) return WValue{ .none = {} }; const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); - return try self.load(operand, Type.usize, self.ptrSize()); + return self.load(operand, Type.usize, self.ptrSize()); } fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - if (self.liveness.isUnused(inst)) return WValue.none; + if (self.liveness.isUnused(inst)) return WValue{ .none = {} }; const bin_op = self.air.instructions.items(.data)[inst].bin_op; const slice_ty = self.air.typeOf(bin_op.lhs); @@ -2784,7 +2906,7 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue { if (isByRef(elem_ty, self.target)) { return result; } - return try self.load(result, elem_ty, 0); + return self.load(result, elem_ty, 0); } fn airSliceElemPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -2812,10 +2934,10 @@ fn airSliceElemPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { } fn airSlicePtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { - if (self.liveness.isUnused(inst)) return WValue.none; + if (self.liveness.isUnused(inst)) return WValue{ .none = {} }; const ty_op = self.air.instructions.items(.data)[inst].ty_op; const operand = try self.resolveInst(ty_op.operand); - return try self.load(operand, Type.usize, 0); + return self.load(operand, Type.usize, 0); } fn airTrunc(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -2880,7 +3002,7 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) InnerError!WValue { fn airBoolToInt(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const un_op = self.air.instructions.items(.data)[inst].un_op; - return try self.resolveInst(un_op); + return self.resolveInst(un_op); } fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -2912,7 +3034,7 @@ fn airArrayToSlice(self: *Self, inst: Air.Inst.Index) InnerError!WValue { fn airPtrToInt(self: *Self, inst: Air.Inst.Index) InnerError!WValue { if (self.liveness.isUnused(inst)) return WValue{ .none = {} }; const un_op = self.air.instructions.items(.data)[inst].un_op; - return try self.resolveInst(un_op); + return self.resolveInst(un_op); } fn airPtrElemVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -2927,7 +3049,7 @@ fn airPtrElemVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue { // load pointer onto the stack if (ptr_ty.isSlice()) { - const ptr_local = try self.load(pointer, ptr_ty, 0); + const ptr_local = try self.load(pointer, Type.usize, 0); try self.addLabel(.local_get, ptr_local.local); } else { try self.emitWValue(pointer); @@ -2944,7 +3066,7 @@ fn airPtrElemVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue { if 
(isByRef(elem_ty, self.target)) { return result; } - return try self.load(result, elem_ty, 0); + return self.load(result, elem_ty, 0); } fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -2960,7 +3082,7 @@ fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) InnerError!WValue { // load pointer onto the stack if (ptr_ty.isSlice()) { - const ptr_local = try self.load(ptr, ptr_ty, 0); + const ptr_local = try self.load(ptr, Type.usize, 0); try self.addLabel(.local_get, ptr_local.local); } else { try self.emitWValue(ptr); @@ -3094,7 +3216,7 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) InnerError!WValue { if (isByRef(elem_ty, self.target)) { return result; } - return try self.load(result, elem_ty, 0); + return self.load(result, elem_ty, 0); } fn airFloatToInt(self: *Self, inst: Air.Inst.Index) InnerError!WValue { @@ -3138,8 +3260,63 @@ fn airVectorInit(self: *Self, inst: Air.Inst.Index) InnerError!WValue { const ty_pl = self.air.instructions.items(.data)[inst].ty_pl; const elements = @bitCast([]const Air.Inst.Ref, self.air.extra[ty_pl.payload..][0..len]); - _ = elements; - return self.fail("TODO: Wasm backend: implement airVectorInit", .{}); + switch (vector_ty.zigTypeTag()) { + .Vector => return self.fail("TODO: Wasm backend: implement airVectorInit for vectors", .{}), + .Array => { + const result = try self.allocStack(vector_ty); + const elem_ty = vector_ty.childType(); + const elem_size = @intCast(u32, elem_ty.abiSize(self.target)); + + // When the element type is by reference, we must copy the entire + // value. It is therefore safer to move the offset pointer and store + // each value individually, instead of using store offsets. + if (isByRef(elem_ty, self.target)) { + // copy stack pointer into a temporary local, which is + // moved for each element to store each value in the right position. 
+ const offset = try self.allocLocal(Type.usize); + try self.emitWValue(result); + try self.addLabel(.local_set, offset.local); + for (elements) |elem, elem_index| { + const elem_val = try self.resolveInst(elem); + try self.store(offset, elem_val, elem_ty, 0); + + if (elem_index < elements.len - 1) { + _ = try self.buildPointerOffset(offset, elem_size, .modify); + } + } + } else { + var offset: u32 = 0; + for (elements) |elem| { + const elem_val = try self.resolveInst(elem); + try self.store(result, elem_val, elem_ty, offset); + offset += elem_size; + } + } + return result; + }, + .Struct => { + const tuple = vector_ty.castTag(.tuple).?.data; + const result = try self.allocStack(vector_ty); + const offset = try self.allocLocal(Type.usize); // pointer to offset + try self.emitWValue(result); + try self.addLabel(.local_set, offset.local); + for (elements) |elem, elem_index| { + if (tuple.values[elem_index].tag() != .unreachable_value) continue; + + const elem_ty = tuple.types[elem_index]; + const elem_size = @intCast(u32, elem_ty.abiSize(self.target)); + const value = try self.resolveInst(elem); + try self.store(offset, value, elem_ty, 0); + + if (elem_index < elements.len - 1) { + _ = try self.buildPointerOffset(offset, elem_size, .modify); + } + } + + return result; + }, + else => unreachable, + } } fn airPrefetch(self: *Self, inst: Air.Inst.Index) InnerError!WValue { diff --git a/src/arch/wasm/Emit.zig b/src/arch/wasm/Emit.zig index 9283a0e0b7..8cae78caf1 100644 --- a/src/arch/wasm/Emit.zig +++ b/src/arch/wasm/Emit.zig @@ -326,25 +326,27 @@ fn emitFunctionIndex(emit: *Emit, inst: Mir.Inst.Index) !void { } fn emitMemAddress(emit: *Emit, inst: Mir.Inst.Index) !void { - const symbol_index = emit.mir.instructions.items(.data)[inst].label; + const extra_index = emit.mir.instructions.items(.data)[inst].payload; + const mem = emit.mir.extraData(Mir.Memory, extra_index).data; const mem_offset = emit.offset() + 1; const is_wasm32 = emit.bin_file.options.target.cpu.arch == .wasm32; if (is_wasm32) { try emit.code.append(std.wasm.opcode(.i32_const)); var buf: [5]u8 = undefined; - leb128.writeUnsignedFixed(5, &buf, symbol_index); + leb128.writeUnsignedFixed(5, &buf, mem.pointer); try emit.code.appendSlice(&buf); } else { try emit.code.append(std.wasm.opcode(.i64_const)); var buf: [10]u8 = undefined; - leb128.writeUnsignedFixed(10, &buf, symbol_index); + leb128.writeUnsignedFixed(10, &buf, mem.pointer); try emit.code.appendSlice(&buf); } try emit.decl.link.wasm.relocs.append(emit.bin_file.allocator, .{ .offset = mem_offset, - .index = symbol_index, + .index = mem.pointer, .relocation_type = if (is_wasm32) .R_WASM_MEMORY_ADDR_LEB else .R_WASM_MEMORY_ADDR_LEB64, + .addend = mem.offset, }); } diff --git a/src/arch/wasm/Mir.zig b/src/arch/wasm/Mir.zig index 07696f0dd3..ed0867e583 100644 --- a/src/arch/wasm/Mir.zig +++ b/src/arch/wasm/Mir.zig @@ -546,3 +546,10 @@ pub const MemArg = struct { offset: u32, alignment: u32, }; + +/// Represents a memory address, which holds both the pointer +/// or the parent pointer and the offset to it. +pub const Memory = struct { + pointer: u32, + offset: u32, +}; diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig index a5d4630378..b047e4b68a 100644 --- a/src/link/Wasm.zig +++ b/src/link/Wasm.zig @@ -345,10 +345,19 @@ pub fn updateLocalSymbolCode(self: *Wasm, decl: *Module.Decl, symbol_index: u32, /// For a given decl, find the given symbol index's atom, and create a relocation for the type. 
/// Returns the given pointer address -pub fn getDeclVAddr(self: *Wasm, decl: *Module.Decl, ty: Type, symbol_index: u32, target_symbol_index: u32, offset: u32) !u32 { +pub fn getDeclVAddr( + self: *Wasm, + decl: *Module.Decl, + ty: Type, + symbol_index: u32, + target_symbol_index: u32, + offset: u32, + addend: u32, +) !u32 { const atom = decl.link.wasm.symbolAtom(symbol_index); const is_wasm32 = self.base.options.target.cpu.arch == .wasm32; if (ty.zigTypeTag() == .Fn) { + std.debug.assert(addend == 0); // addend not allowed for function relocations // We found a function pointer, so add it to our table, // as function pointers are not allowed to be stored inside the data section. // They are instead stored in a function table which are called by index. @@ -363,6 +372,7 @@ pub fn getDeclVAddr(self: *Wasm, decl: *Module.Decl, ty: Type, symbol_index: u32 .index = target_symbol_index, .offset = offset, .relocation_type = if (is_wasm32) .R_WASM_MEMORY_ADDR_I32 else .R_WASM_MEMORY_ADDR_I64, + .addend = addend, }); } // we do not know the final address at this point, |
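Emit.zig and link/Wasm.zig then thread that offset through as the relocation `addend` (with an assert that function-pointer relocations keep a zero addend). As a reminder of what the addend means once the relocation is applied, here is a minimal, hypothetical sketch; `applyMemoryReloc` and the example addresses are made up and not part of the linker.

```zig
const std = @import("std");

/// Sketch of resolving a memory-address relocation with an addend, in the
/// spirit of the R_WASM_MEMORY_ADDR_* relocations this diff tags with
/// `.addend`: the final address is the target symbol's virtual address plus
/// the byte offset recorded in `Mir.Memory.offset`.
fn applyMemoryReloc(symbol_vaddr: u32, addend: u32) u32 {
    return symbol_vaddr + addend;
}

test "symbol + addend, e.g. a constant &array[3] of u64 elements" {
    // Hypothetical numbers: symbol placed at 0x400, element offset 3 * 8.
    try std.testing.expectEqual(@as(u32, 0x418), applyMemoryReloc(0x400, 24));
}
```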
