| author | Andrew Kelley <andrew@ziglang.org> | 2020-08-21 14:50:37 -0700 |
|---|---|---|
| committer | Andrew Kelley <andrew@ziglang.org> | 2020-08-21 14:50:37 -0700 |
| commit | f9bd049c89e4d2b4d3f51a937ec2114c3cac9176 (patch) | |
| tree | 81bf283c8ae2e06fd3a48d18e3b4196e194392d0 /src-self-hosted/Module.zig | |
| parent | 3aeeb21b82915a3d03e75a88ded93049a5c1730c (diff) | |
| parent | dad7af0b37f352c8c390438877ef1552a316ffde (diff) | |
| download | zig-f9bd049c89e4d2b4d3f51a937ec2114c3cac9176.tar.gz zig-f9bd049c89e4d2b4d3f51a937ec2114c3cac9176.zip | |
Merge remote-tracking branch 'origin/master' into llvm11
Diffstat (limited to 'src-self-hosted/Module.zig')
| -rw-r--r-- | src-self-hosted/Module.zig | 426 |
1 file changed, 412 insertions, 14 deletions
diff --git a/src-self-hosted/Module.zig b/src-self-hosted/Module.zig
index 6e33101e76..c0d1d0d654 100644
--- a/src-self-hosted/Module.zig
+++ b/src-self-hosted/Module.zig
@@ -170,6 +170,9 @@ pub const Decl = struct {
     /// This flag is set when this Decl is added to a check_for_deletion set, and cleared
     /// when removed.
     deletion_flag: bool,
+    /// Whether the corresponding AST decl has a `pub` keyword.
+    is_pub: bool,
+
     /// An integer that can be checked against the corresponding incrementing
     /// generation field of Module. This is used to determine whether `complete` status
     /// represents pre- or post- re-analysis.
@@ -320,6 +323,16 @@ pub const Fn = struct {
     }
 };
 
+pub const Var = struct {
+    /// if is_extern == true this is undefined
+    init: Value,
+    owner_decl: *Decl,
+
+    is_extern: bool,
+    is_mutable: bool,
+    is_threadlocal: bool,
+};
+
 pub const Scope = struct {
     tag: Tag,
 
@@ -1235,6 +1248,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
             };
             defer fn_type_scope.instructions.deinit(self.gpa);
 
+            decl.is_pub = fn_proto.getTrailer("visib_token") != null;
             const body_node = fn_proto.getTrailer("body_node") orelse
                 return self.failTok(&fn_type_scope.base, fn_proto.fn_token, "TODO implement extern functions", .{});
 
@@ -1419,8 +1433,213 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
             }
             return type_changed;
         },
-        .VarDecl => @panic("TODO var decl"),
-        .Comptime => @panic("TODO comptime decl"),
+        .VarDecl => {
+            const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", ast_node);
+
+            decl.analysis = .in_progress;
+
+            // We need the memory for the Type to go into the arena for the Decl
+            var decl_arena = std.heap.ArenaAllocator.init(self.gpa);
+            errdefer decl_arena.deinit();
+            const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+
+            var block_scope: Scope.Block = .{
+                .parent = null,
+                .func = null,
+                .decl = decl,
+                .instructions = .{},
+                .arena = &decl_arena.allocator,
+            };
+            defer block_scope.instructions.deinit(self.gpa);
+
+            decl.is_pub = var_decl.getTrailer("visib_token") != null;
+            const is_extern = blk: {
+                const maybe_extern_token = var_decl.getTrailer("extern_export_token") orelse
+                    break :blk false;
+                if (tree.token_ids[maybe_extern_token] != .Keyword_extern) break :blk false;
+                if (var_decl.getTrailer("init_node")) |some| {
+                    return self.failNode(&block_scope.base, some, "extern variables have no initializers", .{});
+                }
+                break :blk true;
+            };
+            if (var_decl.getTrailer("lib_name")) |lib_name| {
+                assert(is_extern);
+                return self.failNode(&block_scope.base, lib_name, "TODO implement function library name", .{});
+            }
+            const is_mutable = tree.token_ids[var_decl.mut_token] == .Keyword_var;
+            const is_threadlocal = if (var_decl.getTrailer("thread_local_token")) |some| blk: {
+                if (!is_mutable) {
+                    return self.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{});
+                }
+                break :blk true;
+            } else false;
+            assert(var_decl.getTrailer("comptime_token") == null);
+            if (var_decl.getTrailer("align_node")) |align_expr| {
+                return self.failNode(&block_scope.base, align_expr, "TODO implement function align expression", .{});
+            }
+            if (var_decl.getTrailer("section_node")) |sect_expr| {
+                return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{});
+            }
+
+            const explicit_type = blk: {
+                const type_node = var_decl.getTrailer("type_node") orelse
+                    break :blk null;
+
+                // Temporary arena for the zir instructions.
+                var type_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
+                defer type_scope_arena.deinit();
+                var type_scope: Scope.GenZIR = .{
+                    .decl = decl,
+                    .arena = &type_scope_arena.allocator,
+                    .parent = decl.scope,
+                };
+                defer type_scope.instructions.deinit(self.gpa);
+
+                const src = tree.token_locs[type_node.firstToken()].start;
+                const type_type = try astgen.addZIRInstConst(self, &type_scope.base, src, .{
+                    .ty = Type.initTag(.type),
+                    .val = Value.initTag(.type_type),
+                });
+                const var_type = try astgen.expr(self, &type_scope.base, .{ .ty = type_type }, type_node);
+                _ = try astgen.addZIRUnOp(self, &type_scope.base, src, .@"return", var_type);
+
+                break :blk try zir_sema.analyzeBodyValueAsType(self, &block_scope, .{
+                    .instructions = type_scope.instructions.items,
+                });
+            };
+
+            var var_type: Type = undefined;
+            const value: ?Value = if (var_decl.getTrailer("init_node")) |init_node| blk: {
+                var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
+                defer gen_scope_arena.deinit();
+                var gen_scope: Scope.GenZIR = .{
+                    .decl = decl,
+                    .arena = &gen_scope_arena.allocator,
+                    .parent = decl.scope,
+                };
+                defer gen_scope.instructions.deinit(self.gpa);
+                const src = tree.token_locs[init_node.firstToken()].start;
+
+                // TODO comptime scope here
+                const init_inst = try astgen.expr(self, &gen_scope.base, .none, init_node);
+                _ = try astgen.addZIRUnOp(self, &gen_scope.base, src, .@"return", init_inst);
+
+                var inner_block: Scope.Block = .{
+                    .parent = null,
+                    .func = null,
+                    .decl = decl,
+                    .instructions = .{},
+                    .arena = &gen_scope_arena.allocator,
+                };
+                defer inner_block.instructions.deinit(self.gpa);
+                try zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items });
+
+                for (inner_block.instructions.items) |inst| {
+                    if (inst.castTag(.ret)) |ret| {
+                        const coerced = if (explicit_type) |some|
+                            try self.coerce(&inner_block.base, some, ret.operand)
+                        else
+                            ret.operand;
+                        const val = coerced.value() orelse
+                            return self.fail(&block_scope.base, inst.src, "unable to resolve comptime value", .{});
+
+                        var_type = explicit_type orelse try ret.operand.ty.copy(block_scope.arena);
+                        break :blk try val.copy(block_scope.arena);
+                    } else {
+                        return self.fail(&block_scope.base, inst.src, "unable to resolve comptime value", .{});
+                    }
+                }
+                unreachable;
+            } else if (!is_extern) {
+                return self.failTok(&block_scope.base, var_decl.firstToken(), "variables must be initialized", .{});
+            } else if (explicit_type) |some| blk: {
+                var_type = some;
+                break :blk null;
+            } else {
+                return self.failTok(&block_scope.base, var_decl.firstToken(), "unable to infer variable type", .{});
+            };
+
+            if (is_mutable and !var_type.isValidVarType(is_extern)) {
+                return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_type});
+            }
+
+            var type_changed = true;
+            if (decl.typedValueManaged()) |tvm| {
+                type_changed = !tvm.typed_value.ty.eql(var_type);
+
+                tvm.deinit(self.gpa);
+            }
+
+            const new_variable = try decl_arena.allocator.create(Var);
+            const var_payload = try decl_arena.allocator.create(Value.Payload.Variable);
+            new_variable.* = .{
+                .owner_decl = decl,
+                .init = value orelse undefined,
+                .is_extern = is_extern,
+                .is_mutable = is_mutable,
+                .is_threadlocal = is_threadlocal,
+            };
+            var_payload.* = .{ .variable = new_variable };
+
+            decl_arena_state.* = decl_arena.state;
+            decl.typed_value = .{
+                .most_recent = .{
+                    .typed_value = .{
+                        .ty = var_type,
+                        .val = Value.initPayload(&var_payload.base),
+                    },
+                    .arena = decl_arena_state,
+                },
+            };
+            decl.analysis = .complete;
+            decl.generation = self.generation;
+
+            if (var_decl.getTrailer("extern_export_token")) |maybe_export_token| {
+                if (tree.token_ids[maybe_export_token] == .Keyword_export) {
+                    const export_src = tree.token_locs[maybe_export_token].start;
+                    const name_loc = tree.token_locs[var_decl.name_token];
+                    const name = tree.tokenSliceLoc(name_loc);
+                    // The scope needs to have the decl in it.
+                    try self.analyzeExport(&block_scope.base, export_src, name, decl);
+                }
+            }
+            return type_changed;
+        },
+        .Comptime => {
+            const comptime_decl = @fieldParentPtr(ast.Node.Comptime, "base", ast_node);
+
+            decl.analysis = .in_progress;
+
+            // A comptime decl does not store any value so we can just deinit this arena after analysis is done.
+            var analysis_arena = std.heap.ArenaAllocator.init(self.gpa);
+            defer analysis_arena.deinit();
+            var gen_scope: Scope.GenZIR = .{
+                .decl = decl,
+                .arena = &analysis_arena.allocator,
+                .parent = decl.scope,
+            };
+            defer gen_scope.instructions.deinit(self.gpa);
+
+            // TODO comptime scope here
+            _ = try astgen.expr(self, &gen_scope.base, .none, comptime_decl.expr);
+
+            var block_scope: Scope.Block = .{
+                .parent = null,
+                .func = null,
+                .decl = decl,
+                .instructions = .{},
+                .arena = &analysis_arena.allocator,
+            };
+            defer block_scope.instructions.deinit(self.gpa);
+
+            _ = try zir_sema.analyzeBody(self, &block_scope.base, .{
+                .instructions = gen_scope.instructions.items,
+            });
+
+            decl.analysis = .complete;
+            decl.generation = self.generation;
+            return true;
+        },
         .Use => @panic("TODO usingnamespace decl"),
         else => unreachable,
     }
@@ -1583,11 +1802,53 @@ fn analyzeRootSrcFile(self: *Module, root_scope: *Scope.File) !void {
                     }
                 }
             }
+        } else if (src_decl.castTag(.VarDecl)) |var_decl| {
+            const name_loc = tree.token_locs[var_decl.name_token];
+            const name = tree.tokenSliceLoc(name_loc);
+            const name_hash = root_scope.fullyQualifiedNameHash(name);
+            const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
+            if (self.decl_table.get(name_hash)) |decl| {
+                // Update the AST Node index of the decl, even if its contents are unchanged, it may
+                // have been re-ordered.
+                decl.src_index = decl_i;
+                if (deleted_decls.remove(decl) == null) {
+                    decl.analysis = .sema_failure;
+                    const err_msg = try ErrorMsg.create(self.gpa, name_loc.start, "redefinition of '{}'", .{decl.name});
+                    errdefer err_msg.destroy(self.gpa);
+                    try self.failed_decls.putNoClobber(self.gpa, decl, err_msg);
+                } else if (!srcHashEql(decl.contents_hash, contents_hash)) {
+                    try self.markOutdatedDecl(decl);
+                    decl.contents_hash = contents_hash;
+                }
+            } else {
+                const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash);
+                root_scope.decls.appendAssumeCapacity(new_decl);
+                if (var_decl.getTrailer("extern_export_token")) |maybe_export_token| {
+                    if (tree.token_ids[maybe_export_token] == .Keyword_export) {
+                        self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+                    }
+                }
+            }
+        } else if (src_decl.castTag(.Comptime)) |comptime_node| {
+            const name_index = self.getNextAnonNameIndex();
+            const name = try std.fmt.allocPrint(self.gpa, "__comptime_{}", .{name_index});
+            defer self.gpa.free(name);
+
+            const name_hash = root_scope.fullyQualifiedNameHash(name);
+            const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
+
+            const new_decl = try self.createNewDecl(&root_scope.base, name, decl_i, name_hash, contents_hash);
+            root_scope.decls.appendAssumeCapacity(new_decl);
+            self.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+        } else if (src_decl.castTag(.ContainerField)) |container_field| {
+            log.err("TODO: analyze container field", .{});
+        } else if (src_decl.castTag(.TestDecl)) |test_decl| {
+            log.err("TODO: analyze test decl", .{});
+        } else if (src_decl.castTag(.Use)) |use_decl| {
+            log.err("TODO: analyze usingnamespace decl", .{});
         } else {
-            std.debug.panic("TODO: analyzeRootSrcFile {}", .{src_decl.tag});
+            unreachable;
         }
-        // TODO also look for global variable declarations
-        // TODO also look for comptime blocks and exported globals
     }
     // Handle explicitly deleted decls from the source code. Not to be confused
     // with when we delete decls because they are no longer referenced.
@@ -1790,6 +2051,7 @@ fn allocateNewDecl(
             .wasm => .{ .wasm = null },
         },
         .generation = 0,
+        .is_pub = false,
     };
     return new_decl;
 }
@@ -2209,20 +2471,46 @@ pub fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) InnerError!*Inst {
     };
 
     const decl_tv = try decl.typedValue();
 
-    const ty_payload = try scope.arena().create(Type.Payload.Pointer);
-    ty_payload.* = .{
-        .base = .{ .tag = .single_const_pointer },
-        .pointee_type = decl_tv.ty,
-    };
+    if (decl_tv.val.tag() == .variable) {
+        return self.analyzeVarRef(scope, src, decl_tv);
+    }
+    const ty = try self.simplePtrType(scope, src, decl_tv.ty, false, .One);
     const val_payload = try scope.arena().create(Value.Payload.DeclRef);
     val_payload.* = .{ .decl = decl };
     return self.constInst(scope, src, .{
-        .ty = Type.initPayload(&ty_payload.base),
+        .ty = ty,
        .val = Value.initPayload(&val_payload.base),
     });
 }
 
+fn analyzeVarRef(self: *Module, scope: *Scope, src: usize, tv: TypedValue) InnerError!*Inst {
+    const variable = tv.val.cast(Value.Payload.Variable).?.variable;
+
+    const ty = try self.simplePtrType(scope, src, tv.ty, variable.is_mutable, .One);
+    if (!variable.is_mutable and !variable.is_extern) {
+        const val_payload = try scope.arena().create(Value.Payload.RefVal);
+        val_payload.* = .{ .val = variable.init };
+        return self.constInst(scope, src, .{
+            .ty = ty,
+            .val = Value.initPayload(&val_payload.base),
+        });
+    }
+
+    const b = try self.requireRuntimeBlock(scope, src);
+    const inst = try b.arena.create(Inst.VarPtr);
+    inst.* = .{
+        .base = .{
+            .tag = .varptr,
+            .ty = ty,
+            .src = src,
+        },
+        .variable = variable,
+    };
+    try b.instructions.append(self.gpa, &inst.base);
+    return &inst.base;
+}
+
 pub fn analyzeDeref(self: *Module, scope: *Scope, src: usize, ptr: *Inst, ptr_src: usize) InnerError!*Inst {
     const elem_ty = switch (ptr.ty.zigTypeTag()) {
         .Pointer => ptr.ty.elemType(),
@@ -2523,7 +2811,7 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst {
 
     // T to ?T
     if (dest_type.zigTypeTag() == .Optional) {
-        var buf: Type.Payload.Pointer = undefined;
+        var buf: Type.Payload.PointerSimple = undefined;
         const child_type = dest_type.optionalChild(&buf);
         if (child_type.eql(inst.ty)) {
             return self.wrapOptional(scope, dest_type, inst);
@@ -2902,15 +3190,125 @@ pub fn floatSub(self: *Module, scope: *Scope, float_type: Type, src: usize, lhs:
     return Value.initPayload(val_payload);
 }
 
-pub fn singlePtrType(self: *Module, scope: *Scope, src: usize, mutable: bool, elem_ty: Type) error{OutOfMemory}!Type {
+pub fn simplePtrType(self: *Module, scope: *Scope, src: usize, elem_ty: Type, mutable: bool, size: std.builtin.TypeInfo.Pointer.Size) Allocator.Error!Type {
+    if (!mutable and size == .Slice and elem_ty.eql(Type.initTag(.u8))) {
+        return Type.initTag(.const_slice_u8);
+    }
+    // TODO stage1 type inference bug
+    const T = Type.Tag;
+
+    const type_payload = try scope.arena().create(Type.Payload.PointerSimple);
+    type_payload.* = .{
+        .base = .{
+            .tag = switch (size) {
+                .One => if (mutable) T.single_mut_pointer else T.single_const_pointer,
+                .Many => if (mutable) T.many_mut_pointer else T.many_const_pointer,
+                .C => if (mutable) T.c_mut_pointer else T.c_const_pointer,
+                .Slice => if (mutable) T.mut_slice else T.const_slice,
+            },
+        },
+        .pointee_type = elem_ty,
+    };
+    return Type.initPayload(&type_payload.base);
+}
+
+pub fn ptrType(
+    self: *Module,
+    scope: *Scope,
+    src: usize,
+    elem_ty: Type,
+    sentinel: ?Value,
+    @"align": u32,
+    bit_offset: u16,
+    host_size: u16,
+    mutable: bool,
+    @"allowzero": bool,
+    @"volatile": bool,
+    size: std.builtin.TypeInfo.Pointer.Size,
+) Allocator.Error!Type {
+    assert(host_size == 0 or bit_offset < host_size * 8);
+
+    // TODO check if type can be represented by simplePtrType
     const type_payload = try scope.arena().create(Type.Payload.Pointer);
     type_payload.* = .{
-        .base = .{ .tag = if (mutable) .single_mut_pointer else .single_const_pointer },
         .pointee_type = elem_ty,
+        .sentinel = sentinel,
+        .@"align" = @"align",
+        .bit_offset = bit_offset,
+        .host_size = host_size,
+        .@"allowzero" = @"allowzero",
+        .mutable = mutable,
+        .@"volatile" = @"volatile",
+        .size = size,
     };
     return Type.initPayload(&type_payload.base);
 }
 
+pub fn optionalType(self: *Module, scope: *Scope, child_type: Type) Allocator.Error!Type {
+    return Type.initPayload(switch (child_type.tag()) {
+        .single_const_pointer => blk: {
+            const payload = try scope.arena().create(Type.Payload.PointerSimple);
+            payload.* = .{
+                .base = .{ .tag = .optional_single_const_pointer },
+                .pointee_type = child_type.elemType(),
+            };
+            break :blk &payload.base;
+        },
+        .single_mut_pointer => blk: {
+            const payload = try scope.arena().create(Type.Payload.PointerSimple);
+            payload.* = .{
+                .base = .{ .tag = .optional_single_mut_pointer },
+                .pointee_type = child_type.elemType(),
+            };
+            break :blk &payload.base;
+        },
+        else => blk: {
+            const payload = try scope.arena().create(Type.Payload.Optional);
+            payload.* = .{
+                .child_type = child_type,
+            };
+            break :blk &payload.base;
+        },
+    });
+}
+
+pub fn arrayType(self: *Module, scope: *Scope, len: u64, sentinel: ?Value, elem_type: Type) Allocator.Error!Type {
+    if (elem_type.eql(Type.initTag(.u8))) {
+        if (sentinel) |some| {
+            if (some.eql(Value.initTag(.zero))) {
+                const payload = try scope.arena().create(Type.Payload.Array_u8_Sentinel0);
+                payload.* = .{
+                    .len = len,
+                };
+                return Type.initPayload(&payload.base);
+            }
+        } else {
+            const payload = try scope.arena().create(Type.Payload.Array_u8);
+            payload.* = .{
+                .len = len,
+            };
+            return Type.initPayload(&payload.base);
+        }
+    }
+
+    if (sentinel) |some| {
+        const payload = try scope.arena().create(Type.Payload.ArraySentinel);
+        payload.* = .{
+            .len = len,
+            .sentinel = some,
+            .elem_type = elem_type,
+        };
+        return Type.initPayload(&payload.base);
+    }
+
+    const payload = try scope.arena().create(Type.Payload.Array);
+    payload.* = .{
+        .len = len,
+        .elem_type = elem_type,
+    };
+    return Type.initPayload(&payload.base);
+}
+
 pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {
     const zir_module = scope.namespace();
     const source = zir_module.getSource(self) catch @panic("dumpInst failed to get source");
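As a reading aid, the snippet below sketches the kinds of top-level Zig declarations that the new `.VarDecl` and `.Comptime` analysis branches above are written to handle (visibility, extern/export, threadlocal, mutability, explicit vs. inferred types, and anonymous comptime decls). The declarations are illustrative only and are not taken from this commit.

```zig
// Forms exercised by the new .VarDecl branch in astGenAndAnalyzeDecl:
pub const answer: u32 = 42;            // `pub` sets decl.is_pub; explicit type, comptime-known init
var counter = @as(usize, 0);           // mutable; type inferred from the initializer
pub export var status: u8 = 0;         // `export` triggers analyzeExport for the decl
extern threadlocal var tls_flag: bool; // extern: an initializer would be a compile error

// Form exercised by the new .Comptime branch: the block becomes an
// anonymous decl (named "__comptime_<index>") and its analysis arena is
// freed afterwards since no value is stored.
comptime {
    _ = answer;
}
```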
