
improve tuple literal code analysis
- resolve destructured variable declarations according to the initialization expression
- semantic tokens will highlight destructured variable declarations
- don't treat tuple literals as array literals (see #2065)
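
For illustration, a minimal snippet (hypothetical, not part of this commit) of the kind of code these changes target:

```zig
const std = @import("std");

test "tuple destructure" {
    // `.{ ... }` is a tuple literal: its elements may have different types,
    // so it must not be analysed as an array with a single element type.
    var foo, const bar = .{ @as(u32, 42), @as(u64, 7) };
    foo += 1;
    try std.testing.expectEqual(@as(u32, 43), foo);
    try std.testing.expectEqual(@as(u64, 7), bar);
    // With this change, `foo` should resolve to `u32` and `bar` to `u64`,
    // and both declarations get semantic token highlighting.
}
```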
Techatrix committed Nov 7, 2024
1 parent 6144e77 commit 8e1fc42
Showing 5 changed files with 85 additions and 55 deletions.
30 changes: 14 additions & 16 deletions src/analysis.zig
@@ -1108,9 +1108,18 @@ fn resolveBracketAccessType(analyser: *Analyser, lhs: Type, rhs: BracketAccessKi
}
}

fn resolveTupleFieldType(analyser: *Analyser, tuple: Type, index: usize) error{OutOfMemory}!?Type {
pub fn resolveTupleFieldType(analyser: *Analyser, tuple: Type, index: usize) error{OutOfMemory}!?Type {
const scope_handle = switch (tuple.data) {
.container => |s| s,
.other => |node| {
var buffer: [2]Ast.Node.Index = undefined;
const array_init_info = node.handle.tree.fullArrayInit(&buffer, node.node) orelse return null;

const elements = array_init_info.ast.elements;
if (index >= elements.len) return null;

return try analyser.resolveTypeOfNode(.{ .handle = node.handle, .node = elements[index] });
},
else => return null,
};
const node = scope_handle.toNode();
@@ -1692,26 +1701,13 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, node_handle: NodeWithHandle) e
var buffer: [2]Ast.Node.Index = undefined;
const array_init_info = tree.fullArrayInit(&buffer, node).?;

std.debug.assert(array_init_info.ast.elements.len != 0);

if (array_init_info.ast.type_expr != 0) blk: {
const array_ty = try analyser.resolveTypeOfNode(.{ .node = array_init_info.ast.type_expr, .handle = handle }) orelse break :blk;
return try array_ty.instanceTypeVal(analyser);
}

// try to infer the array type
const maybe_elem_ty = try analyser.resolveTypeOfNodeInternal(.{ .node = array_init_info.ast.elements[0], .handle = handle });
const elem_ty = if (maybe_elem_ty) |elem_ty| elem_ty.typeOf(analyser) else try Type.typeValFromIP(analyser, .type_type);

const elem_ty_ptr = try analyser.arena.allocator().create(Type);
elem_ty_ptr.* = elem_ty;

return Type{
.data = .{ .array = .{
.elem_count = @intCast(array_init_info.ast.elements.len),
.sentinel = .none,
.elem_ty = elem_ty_ptr,
} },
.data = .{ .other = node_handle },
.is_type_val = false,
};
},
@@ -2373,6 +2369,8 @@ pub const Type = struct {

/// - Error type: `Foo || Bar`, `Foo!Bar`
/// - Function: `fn () Foo`, `fn foo() Foo`
/// - `.{a,b}`
/// - `start..end`
other: NodeWithHandle,

/// - `@compileError("")`
@@ -4012,7 +4010,7 @@ pub const DeclWithHandle = struct {
}) orelse return null;
break :blk switch (node.data) {
.array => |array_info| try array_info.elem_ty.instanceTypeVal(analyser),
.container => try analyser.resolveTupleFieldType(node, pay.index),
.container, .other => try analyser.resolveTupleFieldType(node, pay.index),
else => null,
};
},
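
To spell out the effect of the analysis.zig hunks above: an untyped `.{ ... }` literal is no longer modelled as an array whose element type is inferred from the first element; it is kept as an `.other` node, and `resolveTupleFieldType` resolves each field from the matching initializer element. A rough illustration (the concrete results are my reading of the diff, not quoted from the commit):

```zig
// Hypothetical snippet illustrating why tuple literals must not be treated
// as array literals (the problem referenced by #2065).
const pair = .{ @as(u8, 1), true };

// Old behaviour per the removed code: the whole literal got an array type
// whose element type came from the first element (`u8`), misrepresenting
// `pair[1]`. With the `.other` branch, field 1 is resolved from its own
// element expression, i.e. as `bool`.
const flag: bool = pair[1];
```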
9 changes: 1 addition & 8 deletions src/features/completions.zig
@@ -121,14 +121,7 @@ fn typeToCompletion(builder: *Builder, ty: Analyser.Type) error{OutOfMemory}!voi
try typeToCompletion(builder, rhs_ty);
}
},

.fn_proto,
.fn_proto_multi,
.fn_proto_one,
.fn_proto_simple,
.fn_decl,
=> {},
else => unreachable,
else => {},
},
.ip_index => |payload| try analyser_completions.dotCompletions(
builder.arena,
64 changes: 38 additions & 26 deletions src/features/semantic_tokens.zig
@@ -312,30 +312,8 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
.aligned_var_decl,
=> {
const var_decl = tree.fullVarDecl(node).?;
try writeToken(builder, var_decl.visib_token, .keyword);
try writeToken(builder, var_decl.extern_export_token, .keyword);
try writeToken(builder, var_decl.threadlocal_token, .keyword);
try writeToken(builder, var_decl.comptime_token, .keyword);
try writeToken(builder, var_decl.ast.mut_token, .keyword);

if (try builder.analyser.resolveTypeOfNode(.{ .node = node, .handle = handle })) |decl_type| {
try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, false, .{ .declaration = true });
} else {
try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true });
}

try writeNodeTokens(builder, var_decl.ast.type_node);
try writeNodeTokens(builder, var_decl.ast.align_node);
try writeNodeTokens(builder, var_decl.ast.section_node);

if (var_decl.ast.init_node != 0) {
const equal_token = tree.firstToken(var_decl.ast.init_node) - 1;
if (token_tags[equal_token] == .equal) {
try writeToken(builder, equal_token, .operator);
}
}

try writeNodeTokens(builder, var_decl.ast.init_node);
const resolved_type = try builder.analyser.resolveTypeOfNode(.{ .node = node, .handle = handle });
try writeVarDecl(builder, var_decl, resolved_type);
},
.@"usingnamespace" => {
const first_token = tree.firstToken(node);
@@ -829,9 +807,14 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
.assign_destructure => {
const lhs_count = tree.extra_data[node_data[node].lhs];
const lhs_exprs = tree.extra_data[node_data[node].lhs + 1 ..][0..lhs_count];
const init_expr = node_data[node].rhs;

for (lhs_exprs) |lhs_node| {
try writeNodeTokens(builder, lhs_node);
const resolved_type = try builder.analyser.resolveTypeOfNode(.{ .node = init_expr, .handle = handle });

for (lhs_exprs, 0..) |lhs_node, index| {
const var_decl = tree.fullVarDecl(lhs_node).?;
const field_type = if (resolved_type) |ty| try builder.analyser.resolveTupleFieldType(ty, index) else null;
try writeVarDecl(builder, var_decl, field_type);
}

try writeToken(builder, main_token, .operator);
@@ -989,6 +972,35 @@ fn writeContainerField(builder: *Builder, node: Ast.Node.Index, container_decl:
}
}

fn writeVarDecl(builder: *Builder, var_decl: Ast.full.VarDecl, resolved_type: ?Analyser.Type) error{OutOfMemory}!void {
const tree = builder.handle.tree;
const token_tags = tree.tokens.items(.tag);

try writeToken(builder, var_decl.visib_token, .keyword);
try writeToken(builder, var_decl.extern_export_token, .keyword);
try writeToken(builder, var_decl.threadlocal_token, .keyword);
try writeToken(builder, var_decl.comptime_token, .keyword);
try writeToken(builder, var_decl.ast.mut_token, .keyword);

if (resolved_type) |decl_type| {
try colorIdentifierBasedOnType(builder, decl_type, var_decl.ast.mut_token + 1, false, .{ .declaration = true });
} else {
try writeTokenMod(builder, var_decl.ast.mut_token + 1, .variable, .{ .declaration = true });
}

try writeNodeTokens(builder, var_decl.ast.type_node);
try writeNodeTokens(builder, var_decl.ast.align_node);
try writeNodeTokens(builder, var_decl.ast.section_node);

if (var_decl.ast.init_node != 0) {
const equal_token = tree.firstToken(var_decl.ast.init_node) - 1;
if (token_tags[equal_token] == .equal) {
try writeToken(builder, equal_token, .operator);
}
try writeNodeTokens(builder, var_decl.ast.init_node);
}
}

fn writeIdentifier(builder: *Builder, name_token: Ast.Node.Index) error{OutOfMemory}!void {
const handle = builder.handle;
const tree = handle.tree;
10 changes: 9 additions & 1 deletion tests/lsp_features/completion.zig
@@ -191,7 +191,15 @@ test "assign destructure" {
.{ .label = "foo", .kind = .Constant, .detail = "comptime_int" },
.{ .label = "bar", .kind = .Variable, .detail = "u32" },
});
if (true) return error.SkipZigTest; // TODO
try testCompletion(
\\test {
\\ var foo, const bar = .{@as(u32, 42), @as(u64, 7)};
\\ <cursor>
\\}
, &.{
.{ .label = "foo", .kind = .Variable, .detail = "u32" },
.{ .label = "bar", .kind = .Constant, .detail = "u64" },
});
try testCompletion(
\\test {
\\ const S, const E = .{struct{}, enum{}};
27 changes: 23 additions & 4 deletions tests/lsp_features/semantic_tokens.zig
@@ -185,22 +185,41 @@ test "var decl" {

test "var decl destructure" {
try testSemanticTokens(
\\const foo = {
\\test {
\\ var alpha: bool, var beta = .{ 1, 2 };
\\};
, &.{
.{ "const", .keyword, .{} },
.{ "foo", .variable, .{ .declaration = true } },
.{ "=", .operator, .{} },
.{ "test", .keyword, .{} },

.{ "var", .keyword, .{} },
.{ "alpha", .variable, .{ .declaration = true } },
.{ "bool", .type, .{} },

.{ "var", .keyword, .{} },
.{ "beta", .variable, .{ .declaration = true } },

.{ "=", .operator, .{} },
.{ "1", .number, .{} },
.{ "2", .number, .{} },
});
try testSemanticTokens(
\\test {
\\ const S, const E = .{ struct {}, enum {} };
\\};
, &.{
.{ "test", .keyword, .{} },

.{ "const", .keyword, .{} },
.{ "S", .namespace, .{ .declaration = true } },

.{ "const", .keyword, .{} },
.{ "E", .@"enum", .{ .declaration = true } },

.{ "=", .operator, .{} },

.{ "struct", .keyword, .{} },
.{ "enum", .keyword, .{} },
});
}

test "local var decl" {
