Skip to content

Commit

Permalink
refactor: embrace RLS, anonymous struct literals and decl literals (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
Techatrix authored Jan 1, 2025
1 parent b36f7c4 commit 19421e0
Show file tree
Hide file tree
Showing 52 changed files with 494 additions and 473 deletions.
18 changes: 9 additions & 9 deletions build.zig
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ const builtin = @import("builtin");

/// Must match the `version` in `build.zig.zon`.
/// Remove `.pre` when tagging a new ZLS release and add it back on the next development cycle.
const zls_version = std.SemanticVersion{ .major = 0, .minor = 14, .patch = 0, .pre = "dev" };
const zls_version: std.SemanticVersion = .{ .major = 0, .minor = 14, .patch = 0, .pre = "dev" };

/// Specify the minimum Zig version that is required to compile and test ZLS:
/// std.Build: add new functions to create artifacts/Step.Compile from existing module
Expand Down Expand Up @@ -313,7 +313,7 @@ fn getVersion(b: *Build) std.SemanticVersion {
std.debug.assert(zls_version.order(ancestor_ver) == .gt); // ZLS version must be greater than its previous version
std.debug.assert(std.mem.startsWith(u8, commit_id, "g")); // commit hash is prefixed with a 'g'

return std.SemanticVersion{
return .{
.major = zls_version.major,
.minor = zls_version.minor,
.patch = zls_version.patch,
Expand Down Expand Up @@ -436,7 +436,7 @@ fn release(b: *Build, target_queries: []const std.Target.Query, release_artifact
"--form", b.fmt("minimum-runtime-zig-version={s}", .{minimum_runtime_zig_version}),
});

var compressed_artifacts = std.StringArrayHashMap(std.Build.LazyPath).init(b.allocator);
var compressed_artifacts: std.StringArrayHashMapUnmanaged(std.Build.LazyPath) = .empty;

for (target_queries, release_artifacts) |target_query, exe| {
const resolved_target = exe.root_module.resolved_target.?.result;
Expand All @@ -462,7 +462,7 @@ fn release(b: *Build, target_queries: []const std.Target.Query, release_artifact
switch (extension) {
.zip => {
compress_cmd.addArgs(&.{ "7z", "a", "-mx=9" });
compressed_artifacts.putNoClobber(file_name, compress_cmd.addOutputFileArg(file_name)) catch @panic("OOM");
compressed_artifacts.putNoClobber(b.allocator, file_name, compress_cmd.addOutputFileArg(file_name)) catch @panic("OOM");
compress_cmd.addArtifactArg(exe);
compress_cmd.addFileArg(exe.getEmittedPdb());
compress_cmd.addFileArg(b.path("LICENSE"));
Expand All @@ -473,7 +473,7 @@ fn release(b: *Build, target_queries: []const std.Target.Query, release_artifact
=> {
compress_cmd.setEnvironmentVariable("XZ_OPT", "-9");
compress_cmd.addArgs(&.{ "tar", "caf" });
compressed_artifacts.putNoClobber(file_name, compress_cmd.addOutputFileArg(file_name)) catch @panic("OOM");
compressed_artifacts.putNoClobber(b.allocator, file_name, compress_cmd.addOutputFileArg(file_name)) catch @panic("OOM");
compress_cmd.addPrefixedDirectoryArg("-C", exe.getEmittedBinDirectory());
compress_cmd.addArg(exe_name);

Expand Down Expand Up @@ -554,8 +554,8 @@ const Build = blk: {
@compileError(message);
}
} else {
const min_build_zig_simple = std.SemanticVersion{ .major = min_build_zig.major, .minor = min_build_zig.minor, .patch = 0 };
const zls_version_simple = std.SemanticVersion{ .major = zls_version.major, .minor = zls_version.minor, .patch = 0 };
const min_build_zig_simple: std.SemanticVersion = .{ .major = min_build_zig.major, .minor = min_build_zig.minor, .patch = 0 };
const zls_version_simple: std.SemanticVersion = .{ .major = zls_version.major, .minor = zls_version.minor, .patch = 0 };
const min_zig_is_tagged = min_build_zig.build == null and min_build_zig.pre == null;
if (!min_zig_is_tagged and zls_version_simple.order(min_build_zig_simple) != .eq) {
const message = std.fmt.comptimePrint(
Expand All @@ -574,8 +574,8 @@ const Build = blk: {
// check minimum build version
const is_current_zig_tagged_release = builtin.zig_version.pre == null and builtin.zig_version.build == null;
const is_min_build_zig_tagged_release = min_build_zig.pre == null and min_build_zig.build == null;
const min_build_zig_simple = std.SemanticVersion{ .major = min_build_zig.major, .minor = min_build_zig.minor, .patch = 0 };
const current_zig_simple = std.SemanticVersion{ .major = builtin.zig_version.major, .minor = builtin.zig_version.minor, .patch = 0 };
const min_build_zig_simple: std.SemanticVersion = .{ .major = min_build_zig.major, .minor = min_build_zig.minor, .patch = 0 };
const current_zig_simple: std.SemanticVersion = .{ .major = builtin.zig_version.major, .minor = builtin.zig_version.minor, .patch = 0 };
if (switch (builtin.zig_version.order(min_build_zig)) {
.lt => true,
.eq => false,
Expand Down
16 changes: 8 additions & 8 deletions src/DiagnosticsCollection.zig
Original file line number Diff line number Diff line change
Expand Up @@ -226,7 +226,7 @@ fn pathToUri(allocator: std.mem.Allocator, base_path: ?[]const u8, src_path: []c
pub fn publishDiagnostics(collection: *DiagnosticsCollection) (std.mem.Allocator.Error || lsp.AnyTransport.WriteError)!void {
const transport = collection.transport orelse return;

var arena_allocator = std.heap.ArenaAllocator.init(collection.allocator);
var arena_allocator: std.heap.ArenaAllocator = .init(collection.allocator);
defer arena_allocator.deinit();

while (true) {
Expand All @@ -240,7 +240,7 @@ pub fn publishDiagnostics(collection: *DiagnosticsCollection) (std.mem.Allocator

_ = arena_allocator.reset(.retain_capacity);

var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .{};
var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .empty;
try collection.collectLspDiagnosticsForDocument(document_uri, collection.offset_encoding, arena_allocator.allocator(), &diagnostics);

const params: lsp.types.PublishDiagnosticsParams = .{
Expand Down Expand Up @@ -429,7 +429,7 @@ test errorBundleSourceLocationToRange {
}

test DiagnosticsCollection {
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
var arena_allocator: std.heap.ArenaAllocator = .init(std.testing.allocator);
defer arena_allocator.deinit();

const arena = arena_allocator.allocator();
Expand All @@ -454,7 +454,7 @@ test DiagnosticsCollection {
try std.testing.expectEqual(1, collection.outdated_files.count());
try std.testing.expectEqualStrings(uri, collection.outdated_files.keys()[0]);

var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .{};
var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .empty;
try collection.collectLspDiagnosticsForDocument(uri, .@"utf-8", arena, &diagnostics);

try std.testing.expectEqual(1, diagnostics.items.len);
Expand All @@ -466,7 +466,7 @@ test DiagnosticsCollection {
{
try collection.pushErrorBundle(.parse, 0, null, eb2);

var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .{};
var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .empty;
try collection.collectLspDiagnosticsForDocument(uri, .@"utf-8", arena, &diagnostics);

try std.testing.expectEqual(1, diagnostics.items.len);
Expand All @@ -476,7 +476,7 @@ test DiagnosticsCollection {
{
try collection.pushErrorBundle(.parse, 2, null, eb2);

var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .{};
var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .empty;
try collection.collectLspDiagnosticsForDocument(uri, .@"utf-8", arena, &diagnostics);

try std.testing.expectEqual(1, diagnostics.items.len);
Expand All @@ -486,7 +486,7 @@ test DiagnosticsCollection {
{
try collection.pushErrorBundle(.parse, 3, null, .empty);

var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .{};
var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .empty;
try collection.collectLspDiagnosticsForDocument(uri, .@"utf-8", arena, &diagnostics);

try std.testing.expectEqual(0, diagnostics.items.len);
Expand All @@ -496,7 +496,7 @@ test DiagnosticsCollection {
try collection.pushErrorBundle(@enumFromInt(16), 4, null, eb2);
try collection.pushErrorBundle(@enumFromInt(17), 4, null, eb3);

var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .{};
var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .empty;
try collection.collectLspDiagnosticsForDocument(uri, .@"utf-8", arena, &diagnostics);

try std.testing.expectEqual(2, diagnostics.items.len);
Expand Down
33 changes: 20 additions & 13 deletions src/DocumentScope.zig
Original file line number Diff line number Diff line change
Expand Up @@ -8,21 +8,21 @@ const offsets = @import("offsets.zig");

const DocumentScope = @This();

scopes: std.MultiArrayList(Scope) = .{},
declarations: std.MultiArrayList(Declaration) = .{},
scopes: std.MultiArrayList(Scope),
declarations: std.MultiArrayList(Declaration),
/// used for looking up a child declaration in a given scope
declaration_lookup_map: DeclarationLookupMap = .{},
extra: std.ArrayListUnmanaged(u32) = .{},
declaration_lookup_map: DeclarationLookupMap,
extra: std.ArrayListUnmanaged(u32),
/// All identifier tokens that are in error sets.
/// When there are multiple error sets that contain the same error, only one of them is stored.
/// A token that has a doc comment takes priority.
/// This means that if there are multiple error sets with the same name, only one of them is included.
global_error_set: IdentifierSet = .{},
global_error_set: IdentifierSet,
/// All identifier tokens that are in enums.
/// When there are multiple enums that contain the field name, only one of them is stored.
/// A token that has a doc comment takes priority.
/// This means that if there are multiple enums with the same name, only one of them is included.
global_enum_set: IdentifierSet = .{},
global_enum_set: IdentifierSet,

/// Stores a set of identifier tokens with unique names
pub const IdentifierSet = std.ArrayHashMapUnmanaged(Ast.TokenIndex, void, IdentifierTokenContext, true);
Expand Down Expand Up @@ -63,7 +63,7 @@ pub const DeclarationLookup = struct {
pub const DeclarationLookupContext = struct {
pub fn hash(self: @This(), s: DeclarationLookup) u32 {
_ = self;
var hasher = std.hash.Wyhash.init(0);
var hasher: std.hash.Wyhash = .init(0);
std.hash.autoHash(&hasher, s.scope);
hasher.update(s.name);
std.hash.autoHash(&hasher, s.kind);
Expand Down Expand Up @@ -372,8 +372,8 @@ const ScopeContext = struct {
doc_scope: *DocumentScope,

current_scope: Scope.OptionalIndex = .none,
child_scopes_scratch: std.ArrayListUnmanaged(Scope.Index) = .{},
child_declarations_scratch: std.ArrayListUnmanaged(Declaration.Index) = .{},
child_scopes_scratch: std.ArrayListUnmanaged(Scope.Index) = .empty,
child_declarations_scratch: std.ArrayListUnmanaged(Declaration.Index) = .empty,

fn deinit(context: *ScopeContext) void {
context.child_scopes_scratch.deinit(context.allocator);
Expand Down Expand Up @@ -548,10 +548,17 @@ pub fn init(allocator: std.mem.Allocator, tree: Ast) error{OutOfMemory}!Document
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();

var document_scope = DocumentScope{};
var document_scope: DocumentScope = .{
.scopes = .empty,
.declarations = .empty,
.declaration_lookup_map = .empty,
.extra = .empty,
.global_error_set = .empty,
.global_enum_set = .empty,
};
errdefer document_scope.deinit(allocator);

var context = ScopeContext{
var context: ScopeContext = .{
.allocator = allocator,
.tree = tree,
.doc_scope = &document_scope,
Expand Down Expand Up @@ -855,7 +862,7 @@ noinline fn walkContainerDecl(
locToSmallLoc(offsets.nodeToLoc(tree, node_idx)),
);

var uses = std.ArrayListUnmanaged(Ast.Node.Index){};
var uses: std.ArrayListUnmanaged(Ast.Node.Index) = .empty;
defer uses.deinit(allocator);

for (container_decl.ast.members) |decl| {
Expand Down Expand Up @@ -949,7 +956,7 @@ noinline fn walkErrorSetNode(
locToSmallLoc(offsets.nodeToLoc(tree, node_idx)),
);

var it = ast.ErrorSetIterator.init(tree, node_idx);
var it: ast.ErrorSetIterator = .init(tree, node_idx);

while (it.next()) |identifier_token| {
try scope.pushDeclaration(identifier_token, .{ .error_token = identifier_token }, .other);
Expand Down
Loading

0 comments on commit 19421e0

Please sign in to comment.