From 0e5e90cebea1743bd5b694c0740ad9be3d0aa0d9 Mon Sep 17 00:00:00 2001 From: Techatrix Date: Fri, 24 Oct 2025 03:01:03 +0200 Subject: [PATCH] rewrite uri logic to always perform normalization --- src/DiagnosticsCollection.zig | 64 ++-- src/DocumentStore.zig | 197 +++++----- src/Server.zig | 204 ++++++---- src/Uri.zig | 491 +++++++++++++++++++++++++ src/analysis.zig | 42 +-- src/features/code_actions.zig | 4 +- src/features/completions.zig | 20 +- src/features/diagnostics.zig | 6 +- src/features/goto.zig | 55 +-- src/features/hover.zig | 2 +- src/features/references.zig | 38 +- src/translate_c.zig | 10 +- src/uri.zig | 149 -------- src/zls.zig | 2 +- tests/analysis_check.zig | 2 +- tests/context.zig | 26 +- tests/language_features/cimport.zig | 2 +- tests/lsp_features/code_actions.zig | 6 +- tests/lsp_features/completion.zig | 26 +- tests/lsp_features/definition.zig | 46 +-- tests/lsp_features/diagnostics.zig | 3 +- tests/lsp_features/document_symbol.zig | 2 +- tests/lsp_features/folding_range.zig | 2 +- tests/lsp_features/hover.zig | 37 +- tests/lsp_features/inlay_hints.zig | 16 +- tests/lsp_features/references.zig | 25 +- tests/lsp_features/selection_range.zig | 2 +- tests/lsp_features/semantic_tokens.zig | 14 +- tests/lsp_features/signature_help.zig | 2 +- 29 files changed, 964 insertions(+), 531 deletions(-) create mode 100644 src/Uri.zig delete mode 100644 src/uri.zig diff --git a/src/DiagnosticsCollection.zig b/src/DiagnosticsCollection.zig index 126eda30b..f7f8bfa9e 100644 --- a/src/DiagnosticsCollection.zig +++ b/src/DiagnosticsCollection.zig @@ -2,7 +2,7 @@ const std = @import("std"); const lsp = @import("lsp"); const tracy = @import("tracy"); const offsets = @import("offsets.zig"); -const URI = @import("uri.zig"); +const Uri = @import("Uri.zig"); allocator: std.mem.Allocator, mutex: std.Thread.Mutex = .{}, @@ -12,13 +12,13 @@ tag_set: std.AutoArrayHashMapUnmanaged(Tag, struct { /// Used to store diagnostics from `pushErrorBundle` error_bundle: 
std.zig.ErrorBundle = .empty, /// Used to store diagnostics from `pushSingleDocumentDiagnostics` - diagnostics_set: std.StringArrayHashMapUnmanaged(struct { + diagnostics_set: Uri.ArrayHashMap(struct { arena: std.heap.ArenaAllocator.State = .{}, diagnostics: []lsp.types.Diagnostic = &.{}, error_bundle: std.zig.ErrorBundle = .empty, }) = .empty, }) = .empty, -outdated_files: std.StringArrayHashMapUnmanaged(void) = .empty, +outdated_files: Uri.ArrayHashMap(void) = .empty, transport: ?*lsp.Transport = null, offset_encoding: offsets.Encoding = .@"utf-16", @@ -44,14 +44,14 @@ pub fn deinit(collection: *DiagnosticsCollection) void { entry.error_bundle.deinit(collection.allocator); if (entry.error_bundle_src_base_path) |src_path| collection.allocator.free(src_path); for (entry.diagnostics_set.keys(), entry.diagnostics_set.values()) |uri, *lsp_diagnostic| { - collection.allocator.free(uri); + uri.deinit(collection.allocator); lsp_diagnostic.arena.promote(collection.allocator).deinit(); lsp_diagnostic.error_bundle.deinit(collection.allocator); } entry.diagnostics_set.deinit(collection.allocator); } collection.tag_set.deinit(collection.allocator); - for (collection.outdated_files.keys()) |uri| collection.allocator.free(uri); + for (collection.outdated_files.keys()) |uri| uri.deinit(collection.allocator); collection.outdated_files.deinit(collection.allocator); collection.* = undefined; } @@ -59,7 +59,7 @@ pub fn deinit(collection: *DiagnosticsCollection) void { pub fn pushSingleDocumentDiagnostics( collection: *DiagnosticsCollection, tag: Tag, - document_uri: []const u8, + document_uri: Uri, /// LSP and ErrorBundle will not override each other. /// /// Takes ownership on success. 
@@ -81,15 +81,15 @@ pub fn pushSingleDocumentDiagnostics( { try collection.outdated_files.ensureUnusedCapacity(collection.allocator, 1); - const duped_uri = try collection.allocator.dupe(u8, document_uri); - if (collection.outdated_files.fetchPutAssumeCapacity(duped_uri, {})) |_| collection.allocator.free(duped_uri); + const duped_uri = try document_uri.dupe(collection.allocator); + if (collection.outdated_files.fetchPutAssumeCapacity(duped_uri, {})) |_| duped_uri.deinit(collection.allocator); } try gop_tag.value_ptr.diagnostics_set.ensureUnusedCapacity(collection.allocator, 1); - const duped_uri = try collection.allocator.dupe(u8, document_uri); + const duped_uri = try document_uri.dupe(collection.allocator); const gop_file = gop_tag.value_ptr.diagnostics_set.getOrPutAssumeCapacity(duped_uri); if (gop_file.found_existing) { - collection.allocator.free(duped_uri); + duped_uri.deinit(collection.allocator); } else { gop_file.value_ptr.* = .{}; } @@ -214,7 +214,7 @@ fn collectUrisFromErrorBundle( allocator: std.mem.Allocator, error_bundle: std.zig.ErrorBundle, src_base_path: ?[]const u8, - uri_set: *std.StringArrayHashMapUnmanaged(void), + uri_set: *Uri.ArrayHashMap(void), ) error{OutOfMemory}!void { if (error_bundle.errorMessageCount() == 0) return; for (error_bundle.getMessages()) |msg_index| { @@ -226,20 +226,20 @@ fn collectUrisFromErrorBundle( try uri_set.ensureUnusedCapacity(allocator, 1); const uri = try pathToUri(allocator, src_base_path, src_path) orelse continue; if (uri_set.fetchPutAssumeCapacity(uri, {})) |_| { - allocator.free(uri); + uri.deinit(allocator); } } } -fn pathToUri(allocator: std.mem.Allocator, base_path: ?[]const u8, src_path: []const u8) error{OutOfMemory}!?[]const u8 { +fn pathToUri(allocator: std.mem.Allocator, base_path: ?[]const u8, src_path: []const u8) error{OutOfMemory}!?Uri { if (std.fs.path.isAbsolute(src_path)) { - return try URI.fromPath(allocator, src_path); + return try .fromPath(allocator, src_path); } const base = base_path 
orelse return null; const absolute_src_path = try std.fs.path.join(allocator, &.{ base, src_path }); defer allocator.free(absolute_src_path); - return try URI.fromPath(allocator, absolute_src_path); + return try .fromPath(allocator, absolute_src_path); } pub fn publishDiagnostics(collection: *DiagnosticsCollection) (std.mem.Allocator.Error || std.posix.WriteError)!void { @@ -254,8 +254,8 @@ pub fn publishDiagnostics(collection: *DiagnosticsCollection) (std.mem.Allocator defer collection.mutex.unlock(); const entry = collection.outdated_files.pop() orelse break; - defer collection.allocator.free(entry.key); - const document_uri = entry.key; + defer entry.key.deinit(collection.allocator); + const document_uri: Uri = entry.key; _ = arena_allocator.reset(.retain_capacity); @@ -265,7 +265,7 @@ pub fn publishDiagnostics(collection: *DiagnosticsCollection) (std.mem.Allocator const notification: lsp.TypedJsonRPCNotification(lsp.types.PublishDiagnosticsParams) = .{ .method = "textDocument/publishDiagnostics", .params = .{ - .uri = document_uri, + .uri = document_uri.raw, .diagnostics = diagnostics.items, }, }; @@ -281,7 +281,7 @@ pub fn publishDiagnostics(collection: *DiagnosticsCollection) (std.mem.Allocator fn collectLspDiagnosticsForDocument( collection: *DiagnosticsCollection, - document_uri: []const u8, + document_uri: Uri, offset_encoding: offsets.Encoding, arena: std.mem.Allocator, diagnostics: *std.ArrayList(lsp.types.Diagnostic), @@ -318,7 +318,7 @@ pub const collectLspDiagnosticsForDocumentTesting = if (@import("builtin").is_te fn convertErrorBundleToLSPDiangostics( eb: std.zig.ErrorBundle, error_bundle_src_base_path: ?[]const u8, - document_uri: []const u8, + document_uri: Uri, offset_encoding: offsets.Encoding, arena: std.mem.Allocator, diagnostics: *std.ArrayList(lsp.types.Diagnostic), @@ -333,8 +333,8 @@ fn convertErrorBundleToLSPDiangostics( const src_path = eb.nullTerminatedString(src_loc.src_path); if (!is_single_document) { - const uri = try 
pathToUri(arena, error_bundle_src_base_path, src_path) orelse continue; - if (!std.mem.eql(u8, document_uri, uri)) continue; + const src_uri = try pathToUri(arena, error_bundle_src_base_path, src_path) orelse continue; + if (!document_uri.eql(src_uri)) continue; } const src_range = errorBundleSourceLocationToRange(eb, src_loc, offset_encoding); @@ -350,14 +350,14 @@ fn convertErrorBundleToLSPDiangostics( const note_src_path = eb.nullTerminatedString(note_src_loc.src_path); const note_src_range = errorBundleSourceLocationToRange(eb, note_src_loc, offset_encoding); - const note_uri = if (is_single_document) + const note_uri: Uri = if (is_single_document) document_uri else try pathToUri(arena, error_bundle_src_base_path, note_src_path) orelse continue; lsp_note.* = .{ .location = .{ - .uri = note_uri, + .uri = note_uri.raw, .range = note_src_range, }, .message = eb.nullTerminatedString(eb_note.msg), @@ -478,13 +478,13 @@ test DiagnosticsCollection { var eb3 = try createTestingErrorBundle(&.{.{ .message = "As" }}, ""); defer eb3.deinit(std.testing.allocator); - const uri = try URI.fromPath(std.testing.allocator, testing_src_path); - defer std.testing.allocator.free(uri); + const uri: Uri = try .fromPath(std.testing.allocator, testing_src_path); + defer uri.deinit(std.testing.allocator); { try collection.pushErrorBundle(.parse, 1, null, eb1); try std.testing.expectEqual(1, collection.outdated_files.count()); - try std.testing.expectEqualStrings(uri, collection.outdated_files.keys()[0]); + try std.testing.expect(uri.eql(collection.outdated_files.keys()[0])); var diagnostics: std.ArrayList(lsp.types.Diagnostic) = .empty; try collection.collectLspDiagnosticsForDocument(uri, .@"utf-8", arena, &diagnostics); @@ -544,20 +544,20 @@ test "DiagnosticsCollection - compile_log_text" { var eb = try createTestingErrorBundle(&.{.{ .message = "found compile log statement" }}, "@as(comptime_int, 7)\n@as(comptime_int, 13)"); defer eb.deinit(std.testing.allocator); - const uri = try 
URI.fromPath(std.testing.allocator, testing_src_path); - defer std.testing.allocator.free(uri); + const src_uri: Uri = try .fromPath(std.testing.allocator, testing_src_path); + defer src_uri.deinit(std.testing.allocator); try collection.pushErrorBundle(.parse, 1, null, eb); try std.testing.expectEqual(1, collection.outdated_files.count()); - try std.testing.expectEqualStrings(uri, collection.outdated_files.keys()[0]); + try std.testing.expect(src_uri.eql(collection.outdated_files.keys()[0])); var arena_allocator: std.heap.ArenaAllocator = .init(std.testing.allocator); defer arena_allocator.deinit(); const arena = arena_allocator.allocator(); - var diagnostics: std.ArrayListUnmanaged(lsp.types.Diagnostic) = .empty; - try collection.collectLspDiagnosticsForDocument(uri, .@"utf-8", arena, &diagnostics); + var diagnostics: std.ArrayList(lsp.types.Diagnostic) = .empty; + try collection.collectLspDiagnosticsForDocument(src_uri, .@"utf-8", arena, &diagnostics); try std.testing.expectEqual(1, diagnostics.items.len); try std.testing.expectEqual(lsp.types.DiagnosticSeverity.Error, diagnostics.items[0].severity); diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig index 5830e4cb2..e076caf76 100644 --- a/src/DocumentStore.zig +++ b/src/DocumentStore.zig @@ -2,7 +2,7 @@ const std = @import("std"); const builtin = @import("builtin"); -const URI = @import("uri.zig"); +const Uri = @import("Uri.zig"); const analysis = @import("analysis.zig"); const offsets = @import("offsets.zig"); const log = std.log.scoped(.store); @@ -22,8 +22,8 @@ allocator: std.mem.Allocator, config: Config, lock: std.Thread.RwLock = .{}, thread_pool: *std.Thread.Pool, -handles: std.StringArrayHashMapUnmanaged(*Handle) = .empty, -build_files: if (supports_build_system) std.StringArrayHashMapUnmanaged(*BuildFile) else void = if (supports_build_system) .empty else {}, +handles: Uri.ArrayHashMap(*Handle) = .empty, +build_files: if (supports_build_system) Uri.ArrayHashMap(*BuildFile) else void = if 
(supports_build_system) .empty else {}, cimports: if (supports_build_system) std.AutoArrayHashMapUnmanaged(Hash, translate_c.Result) else void = if (supports_build_system) .empty else {}, diagnostics_collection: *DiagnosticsCollection, builds_in_progress: std.atomic.Value(i32) = .init(0), @@ -34,8 +34,6 @@ lsp_capabilities: struct { supports_inlay_hints_refresh: bool = false, } = .{}, -pub const Uri = []const u8; - pub const Hasher = std.crypto.auth.siphash.SipHash128(1, 3); pub const Hash = [Hasher.mac_length]u8; @@ -125,7 +123,7 @@ pub const BuildFile = struct { try package_uris.ensureUnusedCapacity(allocator, build_config.packages.len); for (build_config.packages) |package| { - package_uris.appendAssumeCapacity(try URI.fromPath(allocator, package.path)); + package_uris.appendAssumeCapacity(try .fromPath(allocator, package.path)); } return true; } @@ -155,10 +153,12 @@ pub const BuildFile = struct { const absolute_path = if (std.fs.path.isAbsolute(include_path)) try allocator.dupe(u8, include_path) else blk: { - const build_file_dir = std.fs.path.dirname(self.uri).?; - const build_file_path = try URI.toFsPath(allocator, build_file_dir); - defer allocator.free(build_file_path); - break :blk try std.fs.path.join(allocator, &.{ build_file_path, include_path }); + const build_file_path = self.uri.toFsPath(allocator) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.UnsupportedScheme => continue, + }; + const build_file_dirname = std.fs.path.dirname(build_file_path) orelse continue; + break :blk try std.fs.path.join(allocator, &.{ build_file_dirname, include_path }); }; include_paths.appendAssumeCapacity(absolute_path); @@ -167,9 +167,9 @@ pub const BuildFile = struct { } fn deinit(self: *BuildFile, allocator: std.mem.Allocator) void { - allocator.free(self.uri); + self.uri.deinit(allocator); if (self.impl.config) |cfg| cfg.deinit(); - if (self.builtin_uri) |builtin_uri| allocator.free(builtin_uri); + if (self.builtin_uri) 
|builtin_uri| builtin_uri.deinit(allocator); if (self.build_associated_config) |cfg| cfg.deinit(); } }; @@ -251,7 +251,7 @@ pub const Handle = struct { text: [:0]const u8, lsp_synced: bool, ) error{OutOfMemory}!Handle { - const mode: Ast.Mode = if (std.mem.eql(u8, std.fs.path.extension(uri), ".zon")) .zon else .zig; + const mode: Ast.Mode = if (std.mem.eql(u8, std.fs.path.extension(uri.raw), ".zon")) .zon else .zig; var tree = try parseTree(allocator, text, mode); errdefer tree.deinit(allocator); @@ -290,7 +290,7 @@ pub const Handle = struct { self.tree.deinit(allocator); if (self.impl.import_uris) |import_uris| { - for (import_uris) |uri| allocator.free(uri); + for (import_uris) |uri| uri.deinit(allocator); allocator.free(import_uris); } @@ -314,30 +314,29 @@ pub const Handle = struct { if (self.impl.import_uris) |import_uris| return import_uris; var imports = try analysis.collectImports(allocator, self.tree); + defer imports.deinit(allocator); + + const base_path = self.uri.toFsPath(allocator) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.UnsupportedScheme => { + self.impl.import_uris = &.{}; + return self.impl.import_uris.?; + }, + }; + defer allocator.free(base_path); - var i: usize = 0; + var uris: std.ArrayList(Uri) = try .initCapacity(allocator, imports.items.len); errdefer { - // only free the uris - for (imports.items[0..i]) |uri| allocator.free(uri); - imports.deinit(allocator); + for (uris.items) |uri| uri.deinit(allocator); + uris.deinit(allocator); } - // Convert to URIs - while (i < imports.items.len) { - const import_str = imports.items[i]; - if (!std.mem.endsWith(u8, import_str, ".zig")) { - _ = imports.swapRemove(i); - continue; - } - // The raw import strings are owned by the document and do not need to be freed here. 
- imports.items[i] = try uriFromFileImportStr(allocator, self, import_str) orelse { - _ = imports.swapRemove(i); - continue; - }; - i += 1; + for (imports.items) |import_str| { + if (!std.mem.endsWith(u8, import_str, ".zig")) continue; + uris.appendAssumeCapacity(try resolveFileImportString(allocator, base_path, import_str) orelse continue); } - self.impl.import_uris = try imports.toOwnedSlice(allocator); + self.impl.import_uris = try uris.toOwnedSlice(allocator); return self.impl.import_uris.?; } @@ -444,7 +443,7 @@ pub const Handle = struct { const unresolved = switch (self.impl.associated_build_file) { .init => blk: { const potential_build_files = document_store.collectPotentialBuildFiles(self.uri) catch { - log.err("failed to collect potential build files of '{s}'", .{self.uri}); + log.err("failed to collect potential build files of '{s}'", .{self.uri.raw}); self.impl.associated_build_file = .none; return .none; }; @@ -473,7 +472,7 @@ pub const Handle = struct { // special case when there is only one potential build file if (unresolved.potential_build_files.len == 1) { const build_file = unresolved.potential_build_files[0]; - log.debug("Resolved build file of '{s}' as '{s}'", .{ self.uri, build_file.uri }); + log.debug("Resolved build file of '{s}' as '{s}'", .{ self.uri.raw, build_file.uri.raw }); unresolved.deinit(document_store.allocator); self.impl.associated_build_file = .{ .resolved = build_file }; return .{ .resolved = build_file }; @@ -498,7 +497,7 @@ pub const Handle = struct { continue; } - log.debug("Resolved build file of '{s}' as '{s}'", .{ self.uri, build_file.uri }); + log.debug("Resolved build file of '{s}' as '{s}'", .{ self.uri.raw, build_file.uri.raw }); unresolved.deinit(document_store.allocator); self.impl.associated_build_file = .{ .resolved = build_file }; return .{ .resolved = build_file }; @@ -599,7 +598,7 @@ pub fn deinit(self: *DocumentStore) void { for (self.handles.keys(), self.handles.values()) |uri, handle| { handle.deinit(); 
self.allocator.destroy(handle); - self.allocator.free(uri); + uri.deinit(self.allocator); } self.handles.deinit(self.allocator); @@ -632,9 +631,12 @@ fn readFile(self: *DocumentStore, uri: Uri) ?[:0]u8 { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const file_path = URI.toFsPath(self.allocator, uri) catch |err| { - log.err("failed to parse URI '{s}': {}", .{ uri, err }); - return null; + const file_path = uri.toFsPath(self.allocator) catch |err| switch (err) { + error.UnsupportedScheme => return null, // https://github.com/microsoft/language-server-protocol/issues/1264 + error.OutOfMemory => |e| { + log.err("failed to parse Uri '{s}': {}", .{ uri.raw, e }); + return null; + }, }; defer self.allocator.free(file_path); @@ -686,7 +688,7 @@ pub fn getOrLoadHandle(self: *DocumentStore, uri: Uri) ?*Handle { if (self.getHandle(uri)) |handle| return handle; const file_contents = self.readFile(uri) orelse return null; return self.createAndStoreDocument(uri, file_contents, false) catch |err| { - log.err("failed to store document '{s}': {}", .{ uri, err }); + log.err("failed to store document '{s}': {}", .{ uri.raw, err }); return null; }; } @@ -717,14 +719,14 @@ fn getOrLoadBuildFile(self: *DocumentStore, uri: Uri) ?*BuildFile { gop.value_ptr.* = self.allocator.create(BuildFile) catch |err| { self.build_files.swapRemoveAt(gop.index); - log.debug("Failed to load build file {s}: {}", .{ uri, err }); + log.debug("Failed to load build file {s}: {}", .{ uri.raw, err }); return null; }; gop.value_ptr.*.* = self.createBuildFile(uri) catch |err| { self.allocator.destroy(gop.value_ptr.*); self.build_files.swapRemoveAt(gop.index); - log.debug("Failed to load build file {s}: {}", .{ uri, err }); + log.debug("Failed to load build file {s}: {}", .{ uri.raw, err }); return null; }; gop.key_ptr.* = gop.value_ptr.*.uri; @@ -746,7 +748,7 @@ pub fn openLspSyncedDocument(self: *DocumentStore, uri: Uri, text: []const u8) e if (self.handles.get(uri)) |handle| { if 
(handle.isLspSynced()) { - log.warn("Document already open: {s}", .{uri}); + log.warn("Document already open: {s}", .{uri.raw}); } } @@ -761,14 +763,14 @@ pub fn closeLspSyncedDocument(self: *DocumentStore, uri: Uri) void { defer tracy_zone.end(); const kv = self.handles.fetchSwapRemove(uri) orelse { - log.warn("Document not found: {s}", .{uri}); + log.warn("Document not found: {s}", .{uri.raw}); return; }; if (!kv.value.isLspSynced()) { - log.warn("Document already closed: {s}", .{uri}); + log.warn("Document already closed: {s}", .{uri.raw}); } - self.allocator.free(kv.key); + kv.key.deinit(self.allocator); kv.value.deinit(); self.allocator.destroy(kv.value); } @@ -782,10 +784,10 @@ pub fn refreshLspSyncedDocument(self: *DocumentStore, uri: Uri, new_text: [:0]co if (self.handles.get(uri)) |old_handle| { if (!old_handle.isLspSynced()) { - log.warn("Document modified without being opened: {s}", .{uri}); + log.warn("Document modified without being opened: {s}", .{uri.raw}); } } else { - log.warn("Document modified without being opened: {s}", .{uri}); + log.warn("Document modified without being opened: {s}", .{uri.raw}); } _ = try self.createAndStoreDocument(uri, new_text, true); @@ -806,7 +808,7 @@ pub fn refreshDocumentFromFileSystem(self: *DocumentStore, uri: Uri, should_dele const handle_uri = handle.uri; handle.deinit(); self.allocator.destroy(handle); - self.allocator.free(handle_uri); + handle_uri.deinit(self.allocator); } else { if (self.handles.get(uri)) |handle| { if (handle.isLspSynced()) return false; @@ -947,7 +949,7 @@ fn invalidateBuildFileWorker(self: *DocumentStore, build_file: *BuildFile) void const build_config = loadBuildConfiguration(self, build_file.uri, new_version) catch |err| { if (err != error.RunFailed) { // already logged - log.err("Failed to load build configuration for {s} (error: {})", .{ build_file.uri, err }); + log.err("Failed to load build configuration for {s} (error: {})", .{ build_file.uri.raw, err }); } 
self.notifyBuildEnd(.failed); build_file.impl.mutex.lock(); @@ -1006,16 +1008,16 @@ fn invalidateBuildFileWorker(self: *DocumentStore, build_file: *BuildFile) void } pub fn isBuildFile(uri: Uri) bool { - return std.mem.endsWith(u8, uri, "/build.zig"); + return std.mem.endsWith(u8, uri.raw, "/build.zig"); } pub fn isBuiltinFile(uri: Uri) bool { - return std.mem.endsWith(u8, uri, "/builtin.zig"); + return std.mem.endsWith(u8, uri.raw, "/builtin.zig"); } pub fn isInStd(uri: Uri) bool { // TODO: Better logic for detecting std or subdirectories? - return std.mem.indexOf(u8, uri, "/std/") != null; + return std.mem.indexOf(u8, uri.raw, "/std/") != null; } /// looks for a `zls.build.json` file in the build file directory @@ -1024,7 +1026,7 @@ fn loadBuildAssociatedConfiguration(allocator: std.mem.Allocator, build_file: Bu const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const build_file_path = try URI.toFsPath(allocator, build_file.uri); + const build_file_path = try build_file.uri.toFsPath(allocator); defer allocator.free(build_file_path); const config_file_path = try std.fs.path.resolve(allocator, &.{ build_file_path, "..", "zls.build.json" }); defer allocator.free(config_file_path); @@ -1044,7 +1046,7 @@ fn loadBuildAssociatedConfiguration(allocator: std.mem.Allocator, build_file: Bu ); } -fn prepareBuildRunnerArgs(self: *DocumentStore, build_file_uri: []const u8) ![][]const u8 { +fn prepareBuildRunnerArgs(self: *DocumentStore, build_file_uri: Uri) ![][]const u8 { const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); @@ -1090,7 +1092,7 @@ fn loadBuildConfiguration(self: *DocumentStore, build_file_uri: Uri, build_file_ std.debug.assert(self.config.global_cache_dir != null); std.debug.assert(self.config.zig_lib_dir != null); - const build_file_path = try URI.toFsPath(self.allocator, build_file_uri); + const build_file_path = try build_file_uri.toFsPath(self.allocator); defer self.allocator.free(build_file_path); const cwd = 
std.fs.path.dirname(build_file_path).?; @@ -1121,7 +1123,7 @@ fn loadBuildConfiguration(self: *DocumentStore, build_file_uri: Uri, build_file_ const diagnostic_tag: DiagnosticsCollection.Tag = tag: { var hasher: std.hash.Wyhash = .init(47); // Chosen by the following prompt: Pwease give a wandom nyumbew - hasher.update(build_file_uri); + hasher.update(build_file_uri.raw); break :tag @enumFromInt(@as(u32, @truncate(hasher.final()))); }; @@ -1186,17 +1188,29 @@ fn buildDotZigExists(dir_path: []const u8) bool { /// `build.zig` files higher in the filesystem have precedence. /// See `Handle.getAssociatedBuildFileUri`. /// Caller owns returned memory. -fn collectPotentialBuildFiles(self: *DocumentStore, uri: Uri) ![]*BuildFile { +fn collectPotentialBuildFiles(self: *DocumentStore, uri: Uri) error{OutOfMemory}![]*BuildFile { if (isInStd(uri)) return &.{}; var potential_build_files: std.ArrayList(*BuildFile) = .empty; errdefer potential_build_files.deinit(self.allocator); - const path = try URI.toFsPath(self.allocator, uri); + const path = uri.toFsPath(self.allocator) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.UnsupportedScheme => return &.{}, + }; defer self.allocator.free(path); + // Zig's filesystem API does not handle `OBJECT_PATH_INVALID` being returned when dealing with invalid UNC paths on Windows. 
+ // https://github.com/ziglang/zig/issues/15607 + const root_end_index: usize = root_end_index: { + if (builtin.target.os.tag != .windows) break :root_end_index 0; + const component_iterator = std.fs.path.componentIterator(path) catch return &.{}; + break :root_end_index component_iterator.root_end_index; + }; + var current_path: []const u8 = path; while (std.fs.path.dirname(current_path)) |potential_root_path| : (current_path = potential_root_path) { + if (potential_root_path.len < root_end_index) break; if (!buildDotZigExists(potential_root_path)) continue; const build_path = try std.fs.path.join(self.allocator, &.{ potential_root_path, "build.zig" }); @@ -1204,8 +1218,8 @@ fn collectPotentialBuildFiles(self: *DocumentStore, uri: Uri) ![]*BuildFile { try potential_build_files.ensureUnusedCapacity(self.allocator, 1); - const build_file_uri = try URI.fromPath(self.allocator, build_path); - defer self.allocator.free(build_file_uri); + const build_file_uri: Uri = try .fromPath(self.allocator, build_path); + defer build_file_uri.deinit(self.allocator); const build_file = self.getOrLoadBuildFile(build_file_uri) orelse continue; potential_build_files.appendAssumeCapacity(build_file); @@ -1225,7 +1239,7 @@ fn createBuildFile(self: *DocumentStore, uri: Uri) error{OutOfMemory}!BuildFile defer tracy_zone.end(); var build_file: BuildFile = .{ - .uri = try self.allocator.dupe(u8, uri), + .uri = try uri.dupe(self.allocator), }; errdefer build_file.deinit(self.allocator); @@ -1234,18 +1248,18 @@ fn createBuildFile(self: *DocumentStore, uri: Uri) error{OutOfMemory}!BuildFile build_file.build_associated_config = cfg; if (cfg.value.relative_builtin_path) |relative_builtin_path| blk: { - const build_file_path = URI.toFsPath(self.allocator, build_file.uri) catch break :blk; + const build_file_path = build_file.uri.toFsPath(self.allocator) catch break :blk; const absolute_builtin_path = std.fs.path.resolve(self.allocator, &.{ build_file_path, "..", relative_builtin_path }) catch 
break :blk; defer self.allocator.free(absolute_builtin_path); - build_file.builtin_uri = try URI.fromPath(self.allocator, absolute_builtin_path); + build_file.builtin_uri = try .fromPath(self.allocator, absolute_builtin_path); } } else |err| { if (err != error.FileNotFound) { - log.debug("Failed to load config associated with build file {s} (error: {})", .{ build_file.uri, err }); + log.debug("Failed to load config associated with build file {s} (error: {})", .{ build_file.uri.raw, err }); } } - log.info("Loaded build file '{s}'", .{build_file.uri}); + log.info("Loaded build file '{s}'", .{build_file.uri.raw}); return build_file; } @@ -1264,12 +1278,12 @@ fn uriAssociatedWithBuild( const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - var checked_uris: std.StringHashMapUnmanaged(void) = .empty; + var checked_uris: Uri.ArrayHashMap(void) = .empty; defer checked_uris.deinit(self.allocator); var package_uris: std.ArrayList(Uri) = .empty; defer { - for (package_uris.items) |package_uri| self.allocator.free(package_uri); + for (package_uris.items) |package_uri| package_uri.deinit(self.allocator); package_uris.deinit(self.allocator); } const success = try build_file.collectBuildConfigPackageUris(self.allocator, &package_uris); @@ -1287,26 +1301,26 @@ fn uriAssociatedWithBuild( /// **Thread safe** takes an exclusive lock fn uriInImports( self: *DocumentStore, - checked_uris: *std.StringHashMapUnmanaged(void), + checked_uris: *Uri.ArrayHashMap(void), build_file_uri: Uri, source_uri: Uri, uri: Uri, ) error{OutOfMemory}!bool { - if (std.mem.eql(u8, uri, source_uri)) return true; + if (uri.eql(source_uri)) return true; if (isInStd(source_uri)) return false; const gop = try checked_uris.getOrPut(self.allocator, source_uri); if (gop.found_existing) return false; const handle = self.getOrLoadHandle(source_uri) orelse { - errdefer std.debug.assert(checked_uris.remove(source_uri)); - gop.key_ptr.* = try self.allocator.dupe(u8, source_uri); + errdefer 
std.debug.assert(checked_uris.swapRemove(source_uri)); + gop.key_ptr.* = try source_uri.dupe(self.allocator); return false; }; gop.key_ptr.* = handle.uri; if (try handle.getAssociatedBuildFileUri(self)) |associated_build_file_uri| { - return std.mem.eql(u8, associated_build_file_uri, build_file_uri); + return associated_build_file_uri.eql(build_file_uri); } for (try handle.getImportUris()) |import_uri| { @@ -1357,8 +1371,8 @@ fn createAndStoreDocument( new_handle.deinit(); } } else { - gop.key_ptr.* = try self.allocator.dupe(u8, uri); - errdefer self.allocator.free(gop.key_ptr.*); + gop.key_ptr.* = try uri.dupe(self.allocator); + errdefer gop.key_ptr.*.deinit(self.allocator); gop.value_ptr.* = try self.allocator.create(Handle); errdefer self.allocator.destroy(gop.value_ptr.*); @@ -1429,7 +1443,7 @@ pub fn collectDependencies( try dependencies.ensureUnusedCapacity(allocator, import_uris.len + handle.cimports.len); for (import_uris) |uri| { - dependencies.appendAssumeCapacity(try allocator.dupe(u8, uri)); + dependencies.appendAssumeCapacity(try uri.dupe(allocator)); } if (supports_build_system) { @@ -1438,7 +1452,7 @@ pub fn collectDependencies( for (handle.cimports.items(.hash)) |hash| { const result = store.cimports.get(hash) orelse continue; switch (result) { - .success => |uri| dependencies.appendAssumeCapacity(try allocator.dupe(u8, uri)), + .success => |uri| dependencies.appendAssumeCapacity(try uri.dupe(allocator)), .failure => continue, } } @@ -1618,7 +1632,7 @@ pub fn resolveCImport(self: *DocumentStore, handle: *Handle, node: Ast.Node.Inde switch (result) { .success => |uri| { - log.debug("Translated cImport into {s}", .{uri}); + log.debug("Translated cImport into {s}", .{uri.raw}); return uri; }, .failure => return null, @@ -1698,18 +1712,18 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand const std_path = try zig_lib_dir.join(allocator, &.{ "std", "std.zig" }); defer allocator.free(std_path); - return try 
URI.fromPath(allocator, std_path); + return try .fromPath(allocator, std_path); } else if (std.mem.eql(u8, import_str, "builtin")) { if (supports_build_system) { if (try handle.getAssociatedBuildFileUri(self)) |build_file_uri| { const build_file = self.getBuildFile(build_file_uri).?; if (build_file.builtin_uri) |builtin_uri| { - return try allocator.dupe(u8, builtin_uri); + return try builtin_uri.dupe(allocator); } } } if (self.config.builtin_path) |builtin_path| { - return try URI.fromPath(allocator, builtin_path); + return try .fromPath(allocator, builtin_path); } return null; } else if (!std.mem.endsWith(u8, import_str, ".zig")) { @@ -1722,7 +1736,7 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand for (build_config.deps_build_roots) |dep_build_root| { if (std.mem.eql(u8, import_str, dep_build_root.name)) { - return try URI.fromPath(allocator, dep_build_root.path); + return try .fromPath(allocator, dep_build_root.path); } } } else if (try handle.getAssociatedBuildFileUri(self)) |build_file_uri| blk: { @@ -1732,28 +1746,27 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand for (build_config.packages) |pkg| { if (std.mem.eql(u8, import_str, pkg.name)) { - return try URI.fromPath(allocator, pkg.path); + return try .fromPath(allocator, pkg.path); } } } return null; } else { - return try uriFromFileImportStr(allocator, handle, import_str); + const base_path = handle.uri.toFsPath(allocator) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.UnsupportedScheme => return null, + }; + defer allocator.free(base_path); + return try resolveFileImportString(allocator, base_path, import_str); } } -fn uriFromFileImportStr(allocator: std.mem.Allocator, handle: *Handle, import_str: []const u8) error{OutOfMemory}!?Uri { - const base_path = URI.toFsPath(allocator, handle.uri) catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - else => return null, - }; - 
defer allocator.free(base_path); - +fn resolveFileImportString(allocator: std.mem.Allocator, base_path: []const u8, import_str: []const u8) error{OutOfMemory}!?Uri { const joined_path = std.fs.path.resolve(allocator, &.{ base_path, "..", import_str }) catch |err| switch (err) { error.OutOfMemory => return error.OutOfMemory, else => return null, }; defer allocator.free(joined_path); - return try URI.fromPath(allocator, joined_path); + return try .fromPath(allocator, joined_path); } diff --git a/src/Server.zig b/src/Server.zig index 497ccb90c..301f1cdf1 100644 --- a/src/Server.zig +++ b/src/Server.zig @@ -17,7 +17,7 @@ const Analyser = @import("analysis.zig"); const offsets = @import("offsets.zig"); const tracy = @import("tracy"); const diff = @import("diff.zig"); -const Uri = @import("uri.zig"); +const Uri = @import("Uri.zig"); const InternPool = @import("analyser/analyser.zig").InternPool; const DiagnosticsCollection = @import("DiagnosticsCollection.zig"); const build_runner_shared = @import("build_runner/shared.zig"); @@ -480,7 +480,11 @@ fn initializeHandler(server: *Server, arena: std.mem.Allocator, request: types.I if (request.workspaceFolders) |workspace_folders| { for (workspace_folders) |src| { - try server.addWorkspace(src.uri); + const uri = Uri.parse(arena, src.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + try server.addWorkspace(uri); } } @@ -662,7 +666,7 @@ fn requestConfiguration(server: *Server) Error!void { const configuration_items: [1]types.ConfigurationItem = .{ .{ .section = "zls", - .scopeUri = if (server.workspaces.items.len == 1) server.workspaces.items[0].uri else null, + .scopeUri = if (server.workspaces.items.len == 1) server.workspaces.items[0].uri.raw else null, }, }; @@ -722,10 +726,9 @@ fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory} const maybe_root_dir: ?[]const u8 = dir: { if (server.workspaces.items.len != 1) break :dir 
null; - break :dir Uri.toFsPath(arena, server.workspaces.items[0].uri) catch |err| { - log.err("failed to parse root uri for workspace {s}: {}", .{ - server.workspaces.items[0].uri, err, - }); + const workspace = server.workspaces.items[0]; + break :dir workspace.uri.toFsPath(arena) catch |err| { + log.err("failed to parse root uri for workspace {s}: {}", .{ workspace.uri.raw, err }); break :dir null; }; }; @@ -754,13 +757,13 @@ fn handleConfiguration(server: *Server, json: std.json.Value) error{OutOfMemory} } const Workspace = struct { - uri: types.URI, + uri: Uri, build_on_save: if (BuildOnSaveSupport.isSupportedComptime()) ?BuildOnSave else void, build_on_save_mode: if (BuildOnSaveSupport.isSupportedComptime()) ?enum { watch, manual } else void, - fn init(server: *Server, uri: types.URI) error{OutOfMemory}!Workspace { - const duped_uri = try server.allocator.dupe(u8, uri); - errdefer server.allocator.free(duped_uri); + fn init(server: *Server, uri: Uri) error{OutOfMemory}!Workspace { + const duped_uri = try uri.dupe(server.allocator); + errdefer duped_uri.deinit(server.allocator); return .{ .uri = duped_uri, @@ -773,7 +776,7 @@ const Workspace = struct { if (BuildOnSaveSupport.isSupportedComptime()) { if (workspace.build_on_save) |*build_on_save| build_on_save.deinit(); } - allocator.free(workspace.uri); + workspace.uri.deinit(allocator); } fn sendManualWatchUpdate(workspace: *Workspace) void { @@ -811,7 +814,7 @@ const Workspace = struct { if (workspace.build_on_save) |*build_on_save| { if (enable and !args.restart) return; - log.debug("stopped Build-On-Save for '{s}'", .{workspace.uri}); + log.debug("stopped Build-On-Save for '{s}'", .{workspace.uri.raw}); build_on_save.deinit(); workspace.build_on_save = null; } @@ -822,9 +825,9 @@ const Workspace = struct { const zig_lib_path = config.zig_lib_path orelse return; const build_runner_path = config.build_runner_path orelse return; - const workspace_path = Uri.toFsPath(args.server.allocator, workspace.uri) catch 
|err| { - log.err("failed to parse URI '{s}': {}", .{ workspace.uri, err }); - return; + const workspace_path = workspace.uri.toFsPath(args.server.allocator) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.UnsupportedScheme => return, }; defer args.server.allocator.free(workspace_path); @@ -839,18 +842,18 @@ const Workspace = struct { .build_runner_path = build_runner_path, .collection = &args.server.diagnostics_collection, }) catch |err| { - log.err("failed to initilize Build-On-Save for '{s}': {}", .{ workspace.uri, err }); + log.err("failed to initilize Build-On-Save for '{s}': {}", .{ workspace.uri.raw, err }); return; }; - log.info("trying to start Build-On-Save for '{s}'", .{workspace.uri}); + log.info("trying to start Build-On-Save for '{s}'", .{workspace.uri.raw}); } }; -fn addWorkspace(server: *Server, uri: types.URI) error{OutOfMemory}!void { +fn addWorkspace(server: *Server, uri: Uri) error{OutOfMemory}!void { try server.workspaces.ensureUnusedCapacity(server.allocator, 1); server.workspaces.appendAssumeCapacity(try Workspace.init(server, uri)); - log.info("added Workspace Folder: {s}", .{uri}); + log.info("added Workspace Folder: {s}", .{uri.raw}); if (BuildOnSaveSupport.isSupportedComptime() and // Don't initialize build on save until initialization finished. @@ -865,35 +868,29 @@ fn addWorkspace(server: *Server, uri: types.URI) error{OutOfMemory}!void { } } -fn removeWorkspace(server: *Server, uri: types.URI) void { +fn removeWorkspace(server: *Server, uri: Uri) void { for (server.workspaces.items, 0..) 
|workspace, i| { - if (std.mem.eql(u8, workspace.uri, uri)) { + if (workspace.uri.eql(uri)) { var removed_workspace = server.workspaces.swapRemove(i); removed_workspace.deinit(server.allocator); - log.info("removed Workspace Folder: {s}", .{uri}); + log.info("removed Workspace Folder: {s}", .{uri.raw}); break; } } else { - log.warn("could not remove Workspace Folder: {s}", .{uri}); + log.warn("could not remove Workspace Folder: {s}", .{uri.raw}); } } fn didChangeWatchedFilesHandler(server: *Server, arena: std.mem.Allocator, notification: types.DidChangeWatchedFilesParams) Error!void { var updated_files: usize = 0; for (notification.changes) |change| { - const file_path = Uri.toFsPath(arena, change.uri) catch |err| switch (err) { - error.UnsupportedScheme => continue, - else => { - log.err("failed to parse URI '{s}': {}", .{ change.uri, err }); - continue; - }, + const uri = Uri.parse(arena, change.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, }; - const file_extension = std.fs.path.extension(file_path); + const file_extension = std.fs.path.extension(uri.raw); if (!std.mem.eql(u8, file_extension, ".zig") and !std.mem.eql(u8, file_extension, ".zon")) continue; - // very inefficient way of achieving some basic URI normalization - const uri = try Uri.fromPath(arena, file_path); - switch (change.type) { .Created, .Changed, .Deleted => |kind| { const did_update_file = try server.document_store.refreshDocumentFromFileSystem(uri, kind == .Deleted); @@ -908,14 +905,20 @@ fn didChangeWatchedFilesHandler(server: *Server, arena: std.mem.Allocator, notif } fn didChangeWorkspaceFoldersHandler(server: *Server, arena: std.mem.Allocator, notification: types.DidChangeWorkspaceFoldersParams) Error!void { - _ = arena; - for (notification.event.added) |folder| { - try server.addWorkspace(folder.uri); + const uri = Uri.parse(arena, folder.uri) catch |err| switch (err) { + error.OutOfMemory => return 
error.OutOfMemory, + else => return error.InvalidParams, + }; + try server.addWorkspace(uri); } for (notification.event.removed) |folder| { - server.removeWorkspace(folder.uri); + const uri = Uri.parse(arena, folder.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + server.removeWorkspace(uri); } } @@ -1123,7 +1126,7 @@ fn createDocumentStoreConfig(config_manager: *const configuration.Manager) Docum }; } -fn openDocumentHandler(server: *Server, _: std.mem.Allocator, notification: types.DidOpenTextDocumentParams) Error!void { +fn openDocumentHandler(server: *Server, arena: std.mem.Allocator, notification: types.DidOpenTextDocumentParams) Error!void { if (notification.textDocument.text.len > DocumentStore.max_document_size) { log.err("open document '{s}' failed: text size ({d}) is above maximum length ({d})", .{ notification.textDocument.uri, @@ -1133,19 +1136,27 @@ fn openDocumentHandler(server: *Server, _: std.mem.Allocator, notification: type return error.InternalError; } - try server.document_store.openLspSyncedDocument(notification.textDocument.uri, notification.textDocument.text); - server.generateDiagnostics(server.document_store.getHandle(notification.textDocument.uri).?); + const document_uri = Uri.parse(arena, notification.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + try server.document_store.openLspSyncedDocument(document_uri, notification.textDocument.text); + server.generateDiagnostics(server.document_store.getHandle(document_uri).?); } -fn changeDocumentHandler(server: *Server, _: std.mem.Allocator, notification: types.DidChangeTextDocumentParams) Error!void { +fn changeDocumentHandler(server: *Server, arena: std.mem.Allocator, notification: types.DidChangeTextDocumentParams) Error!void { if (notification.contentChanges.len == 0) return; - const handle = 
server.document_store.getHandle(notification.textDocument.uri) orelse return; + const document_uri = Uri.parse(arena, notification.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return; const new_text = try diff.applyContentChanges(server.allocator, handle.tree.source, notification.contentChanges, server.offset_encoding); if (new_text.len > DocumentStore.max_document_size) { log.err("change document '{s}' failed: text size ({d}) is above maximum length ({d})", .{ - notification.textDocument.uri, + document_uri.raw, new_text.len, DocumentStore.max_document_size, }); @@ -1158,18 +1169,21 @@ fn changeDocumentHandler(server: *Server, _: std.mem.Allocator, notification: ty } fn saveDocumentHandler(server: *Server, arena: std.mem.Allocator, notification: types.DidSaveTextDocumentParams) Error!void { - const uri = notification.textDocument.uri; + const document_uri = Uri.parse(arena, notification.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; - if (std.process.can_spawn and DocumentStore.isBuildFile(uri)) { - server.document_store.invalidateBuildFile(uri); + if (std.process.can_spawn and DocumentStore.isBuildFile(document_uri)) { + server.document_store.invalidateBuildFile(document_uri); } if (server.autofixWorkaround() == .on_save) { - const handle = server.document_store.getHandle(uri) orelse return; + const handle = server.document_store.getHandle(document_uri) orelse return; var text_edits = try server.autofix(arena, handle); var workspace_edit: types.WorkspaceEdit = .{ .changes = .{} }; - try workspace_edit.changes.?.map.putNoClobber(arena, uri, try text_edits.toOwnedSlice(arena)); + try workspace_edit.changes.?.map.putNoClobber(arena, document_uri.raw, try text_edits.toOwnedSlice(arena)); const json_message = try 
server.sendToClientRequest( .{ .string = "apply_edit" }, @@ -1189,13 +1203,17 @@ fn saveDocumentHandler(server: *Server, arena: std.mem.Allocator, notification: } } -fn closeDocumentHandler(server: *Server, _: std.mem.Allocator, notification: types.DidCloseTextDocumentParams) error{}!void { - server.document_store.closeLspSyncedDocument(notification.textDocument.uri); +fn closeDocumentHandler(server: *Server, arena: std.mem.Allocator, notification: types.DidCloseTextDocumentParams) Error!void { + const document_uri = Uri.parse(arena, notification.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + server.document_store.closeLspSyncedDocument(document_uri); if (server.client_capabilities.supports_publish_diagnostics) { // clear diagnostics on closed file const json_message = server.sendToClientNotification("textDocument/publishDiagnostics", .{ - .uri = notification.textDocument.uri, + .uri = document_uri, .diagnostics = &.{}, }) catch return; server.allocator.free(json_message); @@ -1213,7 +1231,11 @@ fn willSaveWaitUntilHandler(server: *Server, arena: std.mem.Allocator, request: _ => return null, } - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; var text_edits = try server.autofix(arena, handle); @@ -1223,7 +1245,11 @@ fn willSaveWaitUntilHandler(server: *Server, arena: std.mem.Allocator, request: fn semanticTokensFullHandler(server: *Server, arena: std.mem.Allocator, request: types.SemanticTokensParams) Error!?types.SemanticTokens { if (server.config_manager.config.semantic_tokens == .none) return null; - const handle = 
server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; // Workaround: The Ast on .zon files is unusable when an error occured on the root expr if (handle.tree.mode == .zon and handle.tree.errors.len > 0) return null; @@ -1248,7 +1274,11 @@ fn semanticTokensFullHandler(server: *Server, arena: std.mem.Allocator, request: fn semanticTokensRangeHandler(server: *Server, arena: std.mem.Allocator, request: types.SemanticTokensRangeParams) Error!?types.SemanticTokens { if (server.config_manager.config.semantic_tokens == .none) return null; - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; // Workaround: The Ast on .zon files is unusable when an error occured on the root expr if (handle.tree.mode == .zon and handle.tree.errors.len > 0) return null; @@ -1273,7 +1303,11 @@ fn semanticTokensRangeHandler(server: *Server, arena: std.mem.Allocator, request } fn completionHandler(server: *Server, arena: std.mem.Allocator, request: types.CompletionParams) Error!lsp.ResultType("textDocument/completion") { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; if (handle.tree.mode == .zon) 
return null; const source_index = offsets.positionToIndex(handle.tree.source, request.position, server.offset_encoding); @@ -1287,7 +1321,11 @@ fn completionHandler(server: *Server, arena: std.mem.Allocator, request: types.C } fn signatureHelpHandler(server: *Server, arena: std.mem.Allocator, request: types.SignatureHelpParams) Error!?types.SignatureHelp { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; if (handle.tree.mode == .zon) return null; const source_index = offsets.positionToIndex(handle.tree.source, request.position, server.offset_encoding); @@ -1363,7 +1401,11 @@ fn gotoDeclarationHandler(server: *Server, arena: std.mem.Allocator, request: ty } fn hoverHandler(server: *Server, arena: std.mem.Allocator, request: types.HoverParams) Error!?types.Hover { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; if (handle.tree.mode == .zon) return null; const source_index = offsets.positionToIndex(handle.tree.source, request.position, server.offset_encoding); @@ -1383,7 +1425,11 @@ fn hoverHandler(server: *Server, arena: std.mem.Allocator, request: types.HoverP } fn documentSymbolsHandler(server: *Server, arena: std.mem.Allocator, request: types.DocumentSymbolParams) Error!lsp.ResultType("textDocument/documentSymbol") { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = 
Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; if (handle.tree.mode == .zon) return null; return .{ .array_of_DocumentSymbol = try document_symbol.getDocumentSymbols(arena, handle.tree, server.offset_encoding), @@ -1391,7 +1437,11 @@ fn documentSymbolsHandler(server: *Server, arena: std.mem.Allocator, request: ty } fn formattingHandler(server: *Server, arena: std.mem.Allocator, request: types.DocumentFormattingParams) Error!?[]types.TextEdit { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; if (handle.tree.errors.len != 0) return null; @@ -1408,8 +1458,12 @@ fn renameHandler(server: *Server, arena: std.mem.Allocator, request: types.Renam return if (response) |rep| rep.rename else null; } -fn prepareRenameHandler(server: *Server, request: types.PrepareRenameParams) ?types.PrepareRenameResult { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; +fn prepareRenameHandler(server: *Server, arena: std.mem.Allocator, request: types.PrepareRenameParams) Error!?types.PrepareRenameResult { + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; const source_index = offsets.positionToIndex(handle.tree.source, request.position, server.offset_encoding); const name_loc = Analyser.identifierLocFromIndex(handle.tree, 
source_index) orelse return null; const name = offsets.locToSlice(handle.tree.source, name_loc); @@ -1432,7 +1486,11 @@ fn documentHighlightHandler(server: *Server, arena: std.mem.Allocator, request: } fn inlayHintHandler(server: *Server, arena: std.mem.Allocator, request: types.InlayHintParams) Error!?[]types.InlayHint { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; if (handle.tree.mode == .zon) return null; // The Language Server Specification does not provide a client capabilities that allows the client to specify the MarkupKind of inlay hints. @@ -1454,7 +1512,11 @@ fn inlayHintHandler(server: *Server, arena: std.mem.Allocator, request: types.In } fn codeActionHandler(server: *Server, arena: std.mem.Allocator, request: types.CodeActionParams) Error!lsp.ResultType("textDocument/codeAction") { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; // as of right now, only ast-check errors may get a code action if (handle.tree.errors.len != 0) return null; @@ -1495,13 +1557,21 @@ fn codeActionHandler(server: *Server, arena: std.mem.Allocator, request: types.C } fn foldingRangeHandler(server: *Server, arena: std.mem.Allocator, request: types.FoldingRangeParams) Error!?[]types.FoldingRange { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) 
catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; return try folding_range.generateFoldingRanges(arena, handle.tree, server.offset_encoding); } fn selectionRangeHandler(server: *Server, arena: std.mem.Allocator, request: types.SelectionRangeParams) Error!?[]types.SelectionRange { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; return try selection_range.generateSelectionRanges(arena, handle, request.positions, server.offset_encoding); } @@ -1746,7 +1816,7 @@ pub fn sendRequestSync(server: *Server, arena: std.mem.Allocator, comptime metho .@"textDocument/documentSymbol" => try server.documentSymbolsHandler(arena, params), .@"textDocument/formatting" => try server.formattingHandler(arena, params), .@"textDocument/rename" => try server.renameHandler(arena, params), - .@"textDocument/prepareRename" => server.prepareRenameHandler(params), + .@"textDocument/prepareRename" => try server.prepareRenameHandler(arena, params), .@"textDocument/references" => try server.referencesHandler(arena, params), .@"textDocument/documentHighlight" => try server.documentHighlightHandler(arena, params), .@"textDocument/codeAction" => try server.codeActionHandler(arena, params), diff --git a/src/Uri.zig b/src/Uri.zig new file mode 100644 index 000000000..3546705d6 --- /dev/null +++ b/src/Uri.zig @@ -0,0 +1,491 @@ +const std = @import("std"); +const builtin = @import("builtin"); + +const Uri = @This(); + +/// The raw Uri string is guaranteed to have been normalized with the following rules: +/// - consistent percent encoding 
(implementations may escape differently)
+/// - consistent casing of the Windows drive letter
+/// - consistent path separator on Windows (convert '\\' to '/')
+raw: []const u8,
+
+pub fn parse(allocator: std.mem.Allocator, text: []const u8) (std.Uri.ParseError || error{OutOfMemory})!Uri {
+    return try parseWithOs(allocator, text, builtin.os.tag == .windows);
+}
+
+fn parseWithOs(
+    allocator: std.mem.Allocator,
+    text: []const u8,
+    comptime is_windows: bool,
+) (std.Uri.ParseError || error{OutOfMemory})!Uri {
+    var uri: std.Uri = try .parse(text);
+
+    const capacity = capacity: {
+        var capacity: usize = 0;
+        capacity += uri.scheme.len + ":".len;
+        if (uri.host) |host| {
+            capacity += "//".len;
+            if (uri.user) |user| {
+                capacity += user.percent_encoded.len;
+                if (uri.password) |password| {
+                    capacity += ":".len;
+                    capacity += password.percent_encoded.len;
+                }
+                capacity += "@".len;
+            }
+            capacity += host.percent_encoded.len;
+        }
+        if (uri.port != null) capacity += comptime ":".len + std.math.log10_int(@as(usize, std.math.maxInt(u16))); // TODO check this
+        if (!std.mem.startsWith(u8, uri.path.percent_encoded, "/")) {
+            capacity += "/".len;
+        }
+        capacity += uri.path.percent_encoded.len;
+        break :capacity capacity;
+    };
+
+    var result: std.ArrayList(u8) = try .initCapacity(allocator, capacity);
+    errdefer result.deinit(allocator);
+
+    result.appendSliceAssumeCapacity(uri.scheme);
+    result.appendAssumeCapacity(':');
+    if (uri.host) |host| {
+        result.appendSliceAssumeCapacity("//");
+        if (uri.user) |user| {
+            normalizePercentEncoded(&result, user.percent_encoded, &isUserChar);
+            if (uri.password) |password| {
+                result.appendAssumeCapacity(':');
+                normalizePercentEncoded(&result, password.percent_encoded, &isPasswordChar);
+            }
+            result.appendAssumeCapacity('@');
+        }
+        normalizePercentEncoded(&result, host.percent_encoded, &isHostChar);
+    }
+    if (uri.port) |port| result.printAssumeCapacity(":{d}", .{port});
+
+    if (!std.mem.startsWith(u8, uri.path.percent_encoded, 
"/")) {
+        result.appendAssumeCapacity('/');
+    }
+    if (!is_windows) {
+        normalizePercentEncoded(&result, uri.path.percent_encoded, &isPathChar);
+    } else {
+        const path_start = result.items.len;
+        // do not percent encode '\\' so that we can then convert it to '/'
+        normalizePercentEncoded(&result, uri.path.percent_encoded, &isPathCharWithBackslash);
+        const path = result.items[path_start..];
+
+        // normalize windows path separator ('\\' -> '/')
+        for (path) |*c| {
+            if (c.* == '\\') c.* = '/';
+        }
+
+        // convert windows drive letter to lower case
+        if (path.len >= 3 and
+            path[0] == '/' and
+            std.ascii.isUpper(path[1]) and
+            path[2] == ':')
+        {
+            path[1] = std.ascii.toLower(path[1]);
+        }
+    }
+
+    return .{ .raw = try result.toOwnedSlice(allocator) };
+}
+
+test "parse (posix)" {
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/foo/main.zig", false);
+    defer uri.deinit(std.testing.allocator);
+    try std.testing.expectEqualStrings("file:/foo/main.zig", uri.raw);
+}
+
+test "parse (windows)" {
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/C:/foo\\main.zig", true);
+    defer uri.deinit(std.testing.allocator);
+    try std.testing.expectEqualStrings("file:/c:/foo/main.zig", uri.raw);
+}
+
+test "parse - UNC (windows)" {
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file://wsl.localhost/foo\\main.zig", true);
+    defer uri.deinit(std.testing.allocator);
+    try std.testing.expectEqualStrings("file://wsl.localhost/foo/main.zig", uri.raw);
+}
+
+test "parse - normalize percent encoding (posix)" {
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/foo%5cmain%2ezig", false);
+    defer uri.deinit(std.testing.allocator);
+    try std.testing.expectEqualStrings("file:/foo%5Cmain.zig", uri.raw);
+}
+
+test "parse - convert percent encoded '\\' to '/' (windows)" {
+    const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/C:%5Cmain.zig", true);
+    defer uri.deinit(std.testing.allocator);
+    try 
std.testing.expectEqualStrings("file:/c:/main.zig", uri.raw); +} + +test "parse - preserve percent encoded '\\' (posix)" { + const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/foo%5Cmain.zig", false); + defer uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings("file:/foo%5Cmain.zig", uri.raw); +} + +test "parse - percent encoded drive letter (windows)" { + const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/%43%3a%5Cfoo\\main.zig", true); + defer uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings("file:/c:/foo/main.zig", uri.raw); +} + +test "parse - windows like path on posix" { + const uri: Uri = try .parseWithOs(std.testing.allocator, "file:///C:%5Cmain.zig", false); + defer uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings("file:/C:%5Cmain.zig", uri.raw); +} + +pub fn deinit(uri: Uri, allocator: std.mem.Allocator) void { + allocator.free(uri.raw); +} + +pub fn dupe(uri: Uri, allocator: std.mem.Allocator) error{OutOfMemory}!Uri { + return .{ .raw = try allocator.dupe(u8, uri.raw) }; +} + +pub fn eql(a: Uri, b: Uri) bool { + return std.mem.eql(u8, a.raw, b.raw); +} + +pub fn format(_: Uri, _: *std.Io.Writer) std.Io.Writer.Error!void { + @compileError("Cannot format @import(\"Uri.zig\") directly!. Access the underlying raw string field instead."); +} + +pub fn ArrayHashMap(comptime V: type) type { + return std.ArrayHashMapUnmanaged(Uri, V, Context, true); +} + +const Context = struct { + pub fn hash(self: @This(), s: Uri) u32 { + _ = self; + return std.array_hash_map.hashString(s.raw); + } + pub fn eql(self: @This(), a: Uri, b: Uri, b_index: usize) bool { + _ = self; + _ = b_index; + return std.array_hash_map.eqlString(a.raw, b.raw); + } +}; + +/// Converts a file system path to a Uri. 
+/// Caller owns the returned memory +pub fn fromPath(allocator: std.mem.Allocator, path: []const u8) error{OutOfMemory}!Uri { + return try fromPathWithOs(allocator, path, builtin.os.tag == .windows); +} + +fn fromPathWithOs( + allocator: std.mem.Allocator, + path: []const u8, + comptime is_windows: bool, +) error{OutOfMemory}!Uri { + var buf: std.ArrayList(u8) = try .initCapacity(allocator, path.len + 6); + errdefer buf.deinit(allocator); + + buf.appendSliceAssumeCapacity("file:"); + if (is_windows and + path.len >= 2 and + (path[0] == std.fs.path.sep_windows or path[0] == std.fs.path.sep_posix) and + path[0] == path[1]) + { + // UNC path + } else if (!std.mem.startsWith(u8, path, "/")) { + buf.appendAssumeCapacity('/'); + } + + var value = path; + + if (is_windows and + path.len >= 2 and + std.ascii.isAlphabetic(path[0]) and + path[1] == ':') + { + // convert windows drive letter to lower case + buf.appendAssumeCapacity(std.ascii.toLower(path[0])); + value = value[1..]; + } + + for (value) |c| { + if (is_windows and c == '\\') { + try buf.append(allocator, '/'); + continue; + } + if (isPathChar(c)) { + try buf.append(allocator, c); + } else { + try buf.print(allocator, "%{X:0>2}", .{c}); + } + } + + return .{ .raw = try buf.toOwnedSlice(allocator) }; +} + +test "fromPath (posix)" { + const uri = try fromPathWithOs(std.testing.allocator, "/home/main.zig", false); + defer uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings("file:/home/main.zig", uri.raw); + + const reparsed_uri: Uri = try .parseWithOs(std.testing.allocator, uri.raw, false); + defer reparsed_uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw); +} + +test "fromPath (windows)" { + const uri = try fromPathWithOs(std.testing.allocator, "C:/main.zig", true); + defer uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings("file:/c:/main.zig", uri.raw); + + const reparsed_uri: Uri = try 
.parseWithOs(std.testing.allocator, uri.raw, true); + defer reparsed_uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw); +} + +test "fromPath - UNC (windows)" { + const uri = try fromPathWithOs(std.testing.allocator, "\\\\wsl.localhost\\foo\\main.zig", true); + defer uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings("file://wsl.localhost/foo/main.zig", uri.raw); + + const reparsed_uri: Uri = try .parseWithOs(std.testing.allocator, uri.raw, true); + defer reparsed_uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw); +} + +test "fromPath - preserve '\\' (posix)" { + const uri = try fromPathWithOs(std.testing.allocator, "/home\\main.zig", false); + defer uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings("file:/home%5Cmain.zig", uri.raw); + + const reparsed_uri: Uri = try .parseWithOs(std.testing.allocator, uri.raw, false); + defer reparsed_uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw); +} + +test "fromPath - convert '\\' to '/' (windows)" { + const uri = try fromPathWithOs(std.testing.allocator, "C:\\main.zig", true); + defer uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings("file:/c:/main.zig", uri.raw); + + const reparsed_uri: Uri = try .parseWithOs(std.testing.allocator, uri.raw, true); + defer reparsed_uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw); +} + +test "fromPath - root directory (posix)" { + const uri = try fromPathWithOs(std.testing.allocator, "/", false); + defer uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings("file:/", uri.raw); + + const reparsed_uri: Uri = try .parseWithOs(std.testing.allocator, uri.raw, false); + defer reparsed_uri.deinit(std.testing.allocator); + try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw); +} + +test "fromPath - root 
directory (windows)" {
+    const uri = try fromPathWithOs(std.testing.allocator, "C:/", true);
+    defer uri.deinit(std.testing.allocator);
+
+    try std.testing.expectEqualStrings("file:/c:/", uri.raw);
+
+    const reparsed_uri: Uri = try .parseWithOs(std.testing.allocator, uri.raw, true);
+    defer reparsed_uri.deinit(std.testing.allocator);
+    try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw);
+}
+
+test "fromPath - windows like path on posix" {
+    const uri = try fromPathWithOs(std.testing.allocator, "/C:\\main.zig", false);
+    defer uri.deinit(std.testing.allocator);
+
+    try std.testing.expectEqualStrings("file:/C:%5Cmain.zig", uri.raw);
+
+    const reparsed_uri: Uri = try .parseWithOs(std.testing.allocator, uri.raw, false);
+    defer reparsed_uri.deinit(std.testing.allocator);
+    try std.testing.expectEqualStrings(reparsed_uri.raw, uri.raw);
+}
+
+/// Converts a Uri to a file system path.
+/// Caller owns the returned memory
+pub fn toFsPath(
+    uri: Uri,
+    allocator: std.mem.Allocator,
+) error{ UnsupportedScheme, OutOfMemory }![]u8 {
+    return try toFsPathWithOs(uri, allocator, builtin.os.tag == .windows);
+}
+
+fn toFsPathWithOs(
+    uri: Uri,
+    allocator: std.mem.Allocator,
+    comptime is_windows: bool,
+) error{ UnsupportedScheme, OutOfMemory }![]u8 {
+    const parsed_uri = std.Uri.parse(uri.raw) catch unreachable; // The Uri is guaranteed to be valid
+    if (!std.mem.eql(u8, parsed_uri.scheme, "file")) return error.UnsupportedScheme;
+
+    var aw: std.Io.Writer.Allocating = try .initCapacity(allocator, uri.raw.len);
+    if (is_windows and parsed_uri.host != null) {
+        const host = parsed_uri.host.?;
+        aw.writer.writeAll("\\\\") catch unreachable;
+        if (parsed_uri.user) |user| {
+            user.formatRaw(&aw.writer) catch unreachable;
+            if (parsed_uri.password) |password| {
+                aw.writer.writeByte(':') catch unreachable;
+                password.formatRaw(&aw.writer) catch unreachable;
+            }
+            aw.writer.writeByte('@') catch unreachable;
+        }
+        host.formatRaw(&aw.writer) catch unreachable;
+        if
(parsed_uri.port) |port| aw.writer.print(":{d}", .{port}) catch unreachable; + } + parsed_uri.path.formatRaw(&aw.writer) catch unreachable; // capacity has already been reserved + var buf = aw.toArrayList(); + errdefer buf.deinit(allocator); + + if (is_windows and + buf.items.len >= 3 and + buf.items[0] == '/' and + std.ascii.isAlphabetic(buf.items[1]) and + buf.items[2] == ':') + { + // remove the extra slash + @memmove(buf.items[0 .. buf.items.len - 1], buf.items[1..]); + buf.items.len -= 1; + } + + return try buf.toOwnedSlice(allocator); +} + +test "toFsPath (posix)" { + const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/foo/main.zig", false); + defer uri.deinit(std.testing.allocator); + + const path = try uri.toFsPath(std.testing.allocator); + defer std.testing.allocator.free(path); + + try std.testing.expectEqualStrings("/foo/main.zig", path); + + var round_trip_uri: Uri = try .fromPathWithOs(std.testing.allocator, path, false); + defer round_trip_uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings(uri.raw, round_trip_uri.raw); +} + +test "toFsPath (windows)" { + const uri: Uri = try .parseWithOs(std.testing.allocator, "file:/c:/foo/main.zig", true); + defer uri.deinit(std.testing.allocator); + + const path = try uri.toFsPathWithOs(std.testing.allocator, true); + defer std.testing.allocator.free(path); + + try std.testing.expectEqualStrings("c:/foo/main.zig", path); + + var round_trip_uri: Uri = try .fromPathWithOs(std.testing.allocator, path, true); + defer round_trip_uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings(uri.raw, round_trip_uri.raw); +} + +test "toFsPath - UNC (windows)" { + const uri: Uri = try .parseWithOs(std.testing.allocator, "file://wsl.localhost/foo/main.zig", true); + defer uri.deinit(std.testing.allocator); + + const path = try uri.toFsPathWithOs(std.testing.allocator, true); + defer std.testing.allocator.free(path); + + try 
std.testing.expectEqualStrings("\\\\wsl.localhost/foo/main.zig", path); + + var round_trip_uri: Uri = try .fromPathWithOs(std.testing.allocator, path, true); + defer round_trip_uri.deinit(std.testing.allocator); + + try std.testing.expectEqualStrings(uri.raw, round_trip_uri.raw); +} + +fn normalizePercentEncoded( + result: *std.ArrayList(u8), + percent_encoded: []const u8, + isValidChar: *const fn (u8) bool, +) void { + var start: usize = 0; + var index: usize = 0; + while (std.mem.findScalarPos(u8, percent_encoded, index, '%')) |percent| { + index = percent + 1; + if (percent_encoded.len - index < 2) continue; + + const upper_hex, const lower_hex = percent_encoded[index..][0..2].*; + const upper_value = std.fmt.charToDigit(upper_hex, 16) catch continue; + const lower_value = std.fmt.charToDigit(lower_hex, 16) catch continue; + const percent_encoded_char = upper_value * 16 + lower_value; + + if (!isValidChar(percent_encoded_char)) { + if (std.ascii.isUpper(upper_hex) or std.ascii.isUpper(lower_hex)) continue; + + // convert percent encoded character to upper case + result.appendSliceAssumeCapacity(percent_encoded[start..percent]); + result.appendAssumeCapacity('%'); + result.appendAssumeCapacity(std.ascii.toUpper(upper_hex)); + result.appendAssumeCapacity(std.ascii.toUpper(lower_hex)); + } else { + // a character has been unnecessarily escaped + result.appendSliceAssumeCapacity(percent_encoded[start..percent]); + result.appendAssumeCapacity(percent_encoded_char); + } + + start = percent + 3; + index = percent + 3; + } + result.appendSliceAssumeCapacity(percent_encoded[start..]); +} + +/// Taken from `std.Uri` +fn isSchemeChar(c: u8) bool { + return switch (c) { + 'A'...'Z', 'a'...'z', '0'...'9', '+', '-', '.' 
=> true, + else => false, + }; +} + +/// Taken from `std.Uri` +fn isSubLimit(c: u8) bool { + return switch (c) { + '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=' => true, + else => false, + }; +} + +/// Taken from `std.Uri` +fn isUnreserved(c: u8) bool { + return switch (c) { + 'A'...'Z', 'a'...'z', '0'...'9', '-', '.', '_', '~' => true, + else => false, + }; +} + +/// Taken from `std.Uri` +fn isUserChar(c: u8) bool { + return isUnreserved(c) or isSubLimit(c); +} + +/// Taken from `std.Uri` +fn isPasswordChar(c: u8) bool { + return isUserChar(c) or c == ':'; +} + +/// Taken from `std.Uri` +fn isHostChar(c: u8) bool { + return isPasswordChar(c) or c == '[' or c == ']'; +} + +/// Taken from `std.Uri` +fn isPathChar(c: u8) bool { + return isUserChar(c) or c == '/' or c == ':' or c == '@'; +} + +fn isPathCharWithBackslash(c: u8) bool { + return isUserChar(c) or c == '/' or c == ':' or c == '@' or c == '\\'; +} diff --git a/src/analysis.zig b/src/analysis.zig index 07558c1aa..483bd9fb1 100644 --- a/src/analysis.zig +++ b/src/analysis.zig @@ -12,7 +12,7 @@ const std = @import("std"); const DocumentStore = @import("DocumentStore.zig"); const Ast = std.zig.Ast; const offsets = @import("offsets.zig"); -const URI = @import("uri.zig"); +const Uri = @import("Uri.zig"); const log = std.log.scoped(.analysis); const ast = @import("ast.zig"); const tracy = @import("tracy"); @@ -1666,7 +1666,7 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) ! }; const tree = decl_handle.handle.tree; - const is_cimport = std.mem.eql(u8, std.fs.path.basename(decl_handle.handle.uri), "cimport.zig"); + const is_cimport = std.mem.eql(u8, std.fs.path.basename(decl_handle.handle.uri.raw), "cimport.zig"); if (is_cimport or !analyser.collect_callsite_references) return null; @@ -1735,7 +1735,7 @@ fn resolveCallsiteReferences(analyser: *Analyser, decl_handle: DeclWithHandle) ! 
const loc = offsets.tokenToPosition(tree, tree.nodeMainToken(call.ast.params[real_param_idx]), .@"utf-8"); try possible.append(analyser.arena, .{ .type = ty, - .descriptor = try std.fmt.allocPrint(analyser.arena, "{s}:{d}:{d}", .{ handle.uri, loc.line + 1, loc.character + 1 }), + .descriptor = try std.fmt.allocPrint(analyser.arena, "{s}:{d}:{d}", .{ handle.uri.raw, loc.line + 1, loc.character + 1 }), }); } @@ -3211,7 +3211,7 @@ pub const Type = struct { }, .function => |info| { std.hash.autoHash(hasher, info.fn_token); - hasher.update(info.handle.uri); + hasher.update(info.handle.uri.raw); info.container_type.hashWithHasher(hasher); for (info.parameters) |param| { param.type.hashWithHasher(hasher); @@ -3220,7 +3220,7 @@ pub const Type = struct { }, .compile_error => |node_handle| { std.hash.autoHash(hasher, node_handle.node); - hasher.update(node_handle.handle.uri); + hasher.update(node_handle.handle.uri.raw); }, .type_parameter => |token_handle| token_handle.hashWithHasher(hasher), .anytype_parameter => |info| { @@ -3291,7 +3291,7 @@ pub const Type = struct { .function => |a_info| { const b_info = b.function; if (a_info.fn_token != b_info.fn_token) return false; - if (!std.mem.eql(u8, a_info.handle.uri, b_info.handle.uri)) return false; + if (!a_info.handle.uri.eql(b_info.handle.uri)) return false; if (!a_info.container_type.eql(b_info.container_type.*)) return false; if (a_info.parameters.len != b_info.parameters.len) return false; for (a_info.parameters, b_info.parameters) |a_param, b_param| { @@ -4253,7 +4253,7 @@ pub const Type = struct { switch (handle.tree.nodeTag(node)) { .root => { - const path = URI.toFsPath(analyser.arena, handle.uri) catch handle.uri; + const path = handle.uri.toFsPath(analyser.arena) catch handle.uri.raw; const str = std.fs.path.stem(path); try writer.writeAll(str); if (referenced) |r| try r.put(analyser.arena, .of(str, handle, tree.firstToken(node)), {}); @@ -4407,13 +4407,13 @@ pub const ScopeWithHandle = struct { } pub fn 
hashWithHasher(scope_handle: ScopeWithHandle, hasher: anytype) void { - hasher.update(scope_handle.handle.uri); + hasher.update(scope_handle.handle.uri.raw); std.hash.autoHash(hasher, scope_handle.scope); } pub fn eql(a: ScopeWithHandle, b: ScopeWithHandle) bool { if (a.scope != b.scope) return false; - if (!std.mem.eql(u8, a.handle.uri, b.handle.uri)) return false; + if (!a.handle.uri.eql(b.handle.uri)) return false; return true; } }; @@ -4423,7 +4423,7 @@ pub const ScopeWithHandle = struct { pub fn instanceStdBuiltinType(analyser: *Analyser, type_name: []const u8) error{OutOfMemory}!?Type { const zig_lib_dir = analyser.store.config.zig_lib_dir orelse return null; const builtin_path = try zig_lib_dir.join(analyser.arena, &.{ "std", "builtin.zig" }); - const builtin_uri = try URI.fromPath(analyser.arena, builtin_path); + const builtin_uri: Uri = try .fromPath(analyser.arena, builtin_path); const builtin_handle = analyser.store.getOrLoadHandle(builtin_uri) orelse return null; const builtin_root_struct_type: Type = .{ @@ -4490,21 +4490,21 @@ pub fn collectCImportNodes(allocator: std.mem.Allocator, tree: Ast) error{OutOfM pub const NodeWithUri = struct { node: Ast.Node.Index, - uri: []const u8, + uri: Uri, const Context = struct { pub fn hash(self: Context, item: NodeWithUri) u64 { _ = self; var hasher: std.hash.Wyhash = .init(0); std.hash.autoHash(&hasher, item.node); - hasher.update(item.uri); + hasher.update(item.uri.raw); return hasher.final(); } pub fn eql(self: Context, a: NodeWithUri, b: NodeWithUri) bool { _ = self; if (a.node != b.node) return false; - return std.mem.eql(u8, a.uri, b.uri); + return a.uri.eql(b.uri); } }; }; @@ -4519,7 +4519,7 @@ pub const NodeWithHandle = struct { pub fn eql(a: NodeWithHandle, b: NodeWithHandle) bool { if (a.node != b.node) return false; - return std.mem.eql(u8, a.handle.uri, b.handle.uri); + return a.handle.uri.eql(b.handle.uri); } }; @@ -4685,8 +4685,8 @@ pub fn getFieldAccessType( .start = import_str_tok.loc.start + 1, 
.end = import_str_tok.loc.end - 1, }); - const uri = try analyser.store.uriFromImportStr(analyser.arena, handle, import_str) orelse return null; - const node_handle = analyser.store.getOrLoadHandle(uri) orelse return null; + const import_uri = try analyser.store.uriFromImportStr(analyser.arena, handle, import_str) orelse return null; + const node_handle = analyser.store.getOrLoadHandle(import_uri) orelse return null; current_type = .{ .data = .{ .container = .root(node_handle) }, .is_type_val = true, @@ -5113,12 +5113,12 @@ pub const TokenWithHandle = struct { pub fn hashWithHasher(token_handle: TokenWithHandle, hasher: anytype) void { std.hash.autoHash(hasher, token_handle.token); - hasher.update(token_handle.handle.uri); + hasher.update(token_handle.handle.uri.raw); } pub fn eql(a: TokenWithHandle, b: TokenWithHandle) bool { if (a.token != b.token) return false; - if (!std.mem.eql(u8, a.handle.uri, b.handle.uri)) return false; + if (!a.handle.uri.eql(b.handle.uri)) return false; return true; } @@ -5144,7 +5144,7 @@ pub const DeclWithHandle = struct { container_type: ?Type = null, pub fn eql(a: DeclWithHandle, b: DeclWithHandle) bool { - return a.decl.eql(b.decl) and std.mem.eql(u8, a.handle.uri, b.handle.uri); + return a.decl.eql(b.decl) and a.handle.uri.eql(b.handle.uri); } /// Returns a `.identifier` or `.builtin` token. 
@@ -6373,7 +6373,7 @@ pub const ReferencedType = struct { _ = self; var hasher: std.hash.Wyhash = .init(0); hasher.update(item.str); - hasher.update(item.handle.uri); + hasher.update(item.handle.uri.raw); hasher.update(&std.mem.toBytes(item.token)); return @truncate(hasher.final()); } @@ -6382,7 +6382,7 @@ pub const ReferencedType = struct { _ = self; _ = b_index; return std.mem.eql(u8, a.str, b.str) and - std.mem.eql(u8, a.handle.uri, b.handle.uri) and + a.handle.uri.eql(b.handle.uri) and a.token == b.token; } }; diff --git a/src/features/code_actions.zig b/src/features/code_actions.zig index 34c9025d4..f93563921 100644 --- a/src/features/code_actions.zig +++ b/src/features/code_actions.zig @@ -127,7 +127,7 @@ pub const Builder = struct { pub fn createWorkspaceEdit(self: *Builder, edits: []const types.TextEdit) error{OutOfMemory}!types.WorkspaceEdit { var workspace_edit: types.WorkspaceEdit = .{ .changes = .{} }; - try workspace_edit.changes.?.map.putNoClobber(self.arena, self.handle.uri, try self.arena.dupe(types.TextEdit, edits)); + try workspace_edit.changes.?.map.putNoClobber(self.arena, self.handle.uri.raw, try self.arena.dupe(types.TextEdit, edits)); return workspace_edit; } @@ -288,7 +288,7 @@ pub fn collectAutoDiscardDiagnostics( const range = offsets.tokenToRange(tree, def.token, offset_encoding); break :blk try arena.dupe(types.DiagnosticRelatedInformation, &.{.{ .location = .{ - .uri = handle.uri, + .uri = handle.uri.raw, .range = range, }, .message = "variable declared here", diff --git a/src/features/completions.zig b/src/features/completions.zig index 4139c742a..987fad14d 100644 --- a/src/features/completions.zig +++ b/src/features/completions.zig @@ -11,7 +11,7 @@ const Analyser = @import("../analysis.zig"); const ast = @import("../ast.zig"); const offsets = @import("../offsets.zig"); const tracy = @import("tracy"); -const URI = @import("../uri.zig"); +const Uri = @import("../Uri.zig"); const DocumentScope = @import("../DocumentScope.zig"); const 
analyser_completions = @import("../analyser/completions.zig"); @@ -145,7 +145,7 @@ fn typeToCompletion(builder: *Builder, ty: Analyser.Type) error{OutOfMemory}!voi fn declToCompletion(builder: *Builder, decl_handle: Analyser.DeclWithHandle) error{OutOfMemory}!void { const name = decl_handle.handle.tree.tokenSlice(decl_handle.nameToken()); - const is_cimport = std.mem.eql(u8, std.fs.path.basename(decl_handle.handle.uri), "cimport.zig"); + const is_cimport = std.mem.eql(u8, std.fs.path.basename(decl_handle.handle.uri.raw), "cimport.zig"); if (is_cimport) { if (std.mem.startsWith(u8, name, "_")) return; // TODO figuring out which declarations should be excluded could be made more complete and accurate @@ -684,7 +684,7 @@ fn completeDot(builder: *Builder, loc: offsets.Loc) error{OutOfMemory}!void { /// - `.cinclude_string_literal` /// - `.embedfile_string_literal` /// - `.string_literal` -fn completeFileSystemStringLiteral(builder: *Builder, pos_context: Analyser.PositionContext) !void { +fn completeFileSystemStringLiteral(builder: *Builder, pos_context: Analyser.PositionContext) error{OutOfMemory}!void { var completions: CompletionSet = .empty; const store = &builder.server.document_store; const source = builder.orig_handle.tree.source; @@ -720,8 +720,11 @@ fn completeFileSystemStringLiteral(builder: *Builder, pos_context: Analyser.Posi log.err("failed to resolve include paths: {}", .{err}); return; }; - } else { - const document_path = try URI.toFsPath(builder.arena, builder.orig_handle.uri); + } else blk: { + const document_path = builder.orig_handle.uri.toFsPath(builder.arena) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + error.UnsupportedScheme => break :blk, + }; try search_paths.append(builder.arena, std.fs.path.dirname(document_path).?); } @@ -879,10 +882,7 @@ pub fn completionAtIndex( .cinclude_string_literal, .embedfile_string_literal, .string_literal, - => completeFileSystemStringLiteral(&builder, pos_context) catch |err| { - 
log.err("failed to get file system completions: {}", .{err}); - return null; - }, + => try completeFileSystemStringLiteral(&builder, pos_context), else => return null, } @@ -966,7 +966,7 @@ fn globalSetCompletions(builder: *Builder, kind: enum { error_set, enum_set }) e const store = &builder.server.document_store; - var dependencies: std.ArrayList(DocumentStore.Uri) = .empty; + var dependencies: std.ArrayList(Uri) = .empty; try dependencies.append(builder.arena, builder.orig_handle.uri); try store.collectDependencies(builder.arena, builder.orig_handle, &dependencies); diff --git a/src/features/diagnostics.zig b/src/features/diagnostics.zig index 3a6abbb0a..9b47fe164 100644 --- a/src/features/diagnostics.zig +++ b/src/features/diagnostics.zig @@ -12,7 +12,7 @@ const types = lsp.types; const Analyser = @import("../analysis.zig"); const ast = @import("../ast.zig"); const offsets = @import("../offsets.zig"); -const URI = @import("../uri.zig"); +const Uri = @import("../Uri.zig"); const code_actions = @import("code_actions.zig"); const tracy = @import("tracy"); const DiagnosticsCollection = @import("../DiagnosticsCollection.zig"); @@ -403,8 +403,8 @@ pub fn getErrorBundleFromStderr( .dynamic => |dynamic| source: { const file_path = try std.fs.path.resolve(allocator, &.{ dynamic.base_path, src_path }); defer allocator.free(file_path); - const file_uri = try URI.fromPath(allocator, file_path); - defer allocator.free(file_uri); + const file_uri: Uri = try .fromPath(allocator, file_path); + defer file_uri.deinit(allocator); const handle = dynamic.document_store.getOrLoadHandle(file_uri) orelse break :source null; break :source handle.tree.source; }, diff --git a/src/features/goto.zig b/src/features/goto.zig index 63a01be4b..1e0e45985 100644 --- a/src/features/goto.zig +++ b/src/features/goto.zig @@ -11,7 +11,7 @@ const Server = @import("../Server.zig"); const lsp = @import("lsp"); const types = lsp.types; const offsets = @import("../offsets.zig"); -const URI = 
@import("../uri.zig"); +const Uri = @import("../Uri.zig"); const tracy = @import("tracy"); const Analyser = @import("../analysis.zig"); @@ -52,7 +52,7 @@ fn gotoDefinitionSymbol( const target_range = offsets.nodeToRange(type_declaration.handle.tree, type_declaration.node, offset_encoding); return .{ .originSelectionRange = name_range, - .targetUri = type_declaration.handle.uri, + .targetUri = type_declaration.handle.uri.raw, .targetRange = target_range, .targetSelectionRange = target_range, }; @@ -62,7 +62,7 @@ fn gotoDefinitionSymbol( return .{ .originSelectionRange = name_range, - .targetUri = token_handle.handle.uri, + .targetUri = token_handle.handle.uri.raw, .targetRange = target_range, .targetSelectionRange = target_range, }; @@ -122,7 +122,7 @@ fn gotoDefinitionStructInit( const target_range = offsets.tokenToRange(token_handle.handle.tree, token_handle.token, offset_encoding); return .{ .originSelectionRange = offsets.tokenToRange(handle.tree, token, offset_encoding), - .targetUri = token_handle.handle.uri, + .targetUri = token_handle.handle.uri.raw, .targetRange = target_range, .targetSelectionRange = target_range, }; @@ -176,7 +176,7 @@ fn gotoDefinitionBuiltin( .failure => return null, .success => |uri| return .{ .originSelectionRange = offsets.locToRange(tree.source, name_loc, offset_encoding), - .targetUri = uri, + .targetUri = uri.raw, .targetRange = target_range, .targetSelectionRange = target_range, }, @@ -188,7 +188,7 @@ fn gotoDefinitionBuiltin( const target_range = offsets.locToRange(tree.source, token_loc, offset_encoding); return .{ .originSelectionRange = offsets.locToRange(tree.source, name_loc, offset_encoding), - .targetUri = handle.uri, + .targetUri = handle.uri.raw, .targetRange = target_range, .targetSelectionRange = target_range, }; @@ -240,29 +240,26 @@ fn gotoDefinitionString( if (loc.start == loc.end) return null; const import_str = offsets.locToSlice(handle.tree.source, loc); - const uri = switch (pos_context) { + const uri: ?Uri = 
switch (pos_context) { .import_string_literal, .embedfile_string_literal, => try document_store.uriFromImportStr(arena, handle, import_str), - .cinclude_string_literal => try URI.fromPath( - arena, - blk: { - if (!DocumentStore.supports_build_system) return null; - - if (std.fs.path.isAbsolute(import_str)) break :blk import_str; - var include_dirs: std.ArrayList([]const u8) = .empty; - _ = document_store.collectIncludeDirs(arena, handle, &include_dirs) catch |err| { - log.err("failed to resolve include paths: {}", .{err}); - return null; - }; - for (include_dirs.items) |dir| { - const path = try std.fs.path.join(arena, &.{ dir, import_str }); - std.fs.accessAbsolute(path, .{}) catch continue; - break :blk path; - } + .cinclude_string_literal => blk: { + if (!DocumentStore.supports_build_system) return null; + + if (std.fs.path.isAbsolute(import_str)) break :blk try Uri.fromPath(arena, import_str); + var include_dirs: std.ArrayList([]const u8) = .empty; + _ = document_store.collectIncludeDirs(arena, handle, &include_dirs) catch |err| { + log.err("failed to resolve include paths: {}", .{err}); return null; - }, - ), + }; + for (include_dirs.items) |dir| { + const path = try std.fs.path.join(arena, &.{ dir, import_str }); + std.fs.accessAbsolute(path, .{}) catch continue; + break :blk try Uri.fromPath(arena, path); + } + return null; + }, else => unreachable, }; @@ -272,7 +269,7 @@ fn gotoDefinitionString( }; return .{ .originSelectionRange = offsets.locToRange(handle.tree.source, loc, offset_encoding), - .targetUri = uri orelse return null, + .targetUri = if (uri) |u| u.raw else return null, .targetRange = target_range, .targetSelectionRange = target_range, }; @@ -284,7 +281,11 @@ pub fn gotoHandler( kind: GotoKind, request: types.DefinitionParams, ) Server.Error!lsp.ResultType("textDocument/definition") { - const handle = server.document_store.getHandle(request.textDocument.uri) orelse return null; + const document_uri = Uri.parse(arena, request.textDocument.uri) 
catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(document_uri) orelse return null; if (handle.tree.mode == .zon) return null; var analyser = server.initAnalyser(arena, handle); diff --git a/src/features/hover.zig b/src/features/hover.zig index 51dcaf475..0c93278a0 100644 --- a/src/features/hover.zig +++ b/src/features/hover.zig @@ -156,7 +156,7 @@ fn hoverSymbolResolved( try output.print(arena, " | ", .{}); const source_index = ref.handle.tree.tokenStart(ref.token); const line = 1 + std.mem.count(u8, ref.handle.tree.source[0..source_index], "\n"); - try output.print(arena, "[{s}]({s}#L{d})", .{ ref.str, ref.handle.uri, line }); + try output.print(arena, "[{s}]({s}#L{d})", .{ ref.str, ref.handle.uri.raw, line }); } } else { try output.print(arena, "{s}", .{def_str}); diff --git a/src/features/references.zig b/src/features/references.zig index e8f605bf9..25c777b7a 100644 --- a/src/features/references.zig +++ b/src/features/references.zig @@ -8,6 +8,7 @@ const DocumentStore = @import("../DocumentStore.zig"); const Analyser = @import("../analysis.zig"); const lsp = @import("lsp"); const types = lsp.types; +const Uri = @import("../Uri.zig"); const offsets = @import("../offsets.zig"); const ast = @import("../ast.zig"); const tracy = @import("tracy"); @@ -36,7 +37,7 @@ fn labelReferences( if (include_decl) { // The first token is always going to be the label try locations.append(allocator, .{ - .uri = handle.uri, + .uri = handle.uri.raw, .range = offsets.tokenToRange(handle.tree, first_tok, encoding), }); } @@ -52,7 +53,7 @@ fn labelReferences( if (!std.mem.eql(u8, tree.tokenSlice(curr_tok + 2), tree.tokenSlice(first_tok))) continue; try locations.append(allocator, .{ - .uri = handle.uri, + .uri = handle.uri.raw, .range = offsets.tokenToRange(handle.tree, curr_tok + 2, encoding), }); } @@ -89,7 +90,7 @@ const Builder = struct { self.did_add_decl_handle = 
true; } try self.locations.append(self.allocator, .{ - .uri = handle.uri, + .uri = handle.uri.raw, .range = offsets.tokenToRange(handle.tree, token_index, self.encoding), }); } @@ -228,21 +229,21 @@ fn gatherReferences( builder: anytype, handle_behavior: enum { get, get_or_load }, ) !void { - var dependencies: std.StringArrayHashMapUnmanaged(void) = .empty; + var dependencies: Uri.ArrayHashMap(void) = .empty; defer { for (dependencies.keys()) |uri| { - allocator.free(uri); + uri.deinit(allocator); } dependencies.deinit(allocator); } for (analyser.store.handles.values()) |handle| { - if (skip_std_references and std.mem.indexOf(u8, handle.uri, "std") != null) { - if (!include_decl or !std.mem.eql(u8, handle.uri, curr_handle.uri)) + if (skip_std_references and DocumentStore.isInStd(handle.uri)) { + if (!include_decl or !handle.uri.eql(curr_handle.uri)) continue; } - var handle_dependencies: std.ArrayList([]const u8) = .empty; + var handle_dependencies: std.ArrayList(Uri) = .empty; defer handle_dependencies.deinit(allocator); try analyser.store.collectDependencies(allocator, handle, &handle_dependencies); @@ -250,13 +251,13 @@ fn gatherReferences( for (handle_dependencies.items) |uri| { const gop = dependencies.getOrPutAssumeCapacity(uri); if (gop.found_existing) { - allocator.free(uri); + uri.deinit(allocator); } } } for (dependencies.keys()) |uri| { - if (std.mem.eql(u8, uri, curr_handle.uri)) continue; + if (uri.eql(curr_handle.uri)) continue; const handle = switch (handle_behavior) { .get => analyser.store.getHandle(uri), .get_or_load => analyser.store.getOrLoadHandle(uri), @@ -397,7 +398,7 @@ const ControlFlowBuilder = struct { fn add(builder: *ControlFlowBuilder, token_index: Ast.TokenIndex) Error!void { const handle = builder.token_handle.handle; try builder.locations.append(builder.allocator, .{ - .uri = handle.uri, + .uri = handle.uri.raw, .range = offsets.tokenToRange(handle.tree, token_index, builder.encoding), }); } @@ -497,7 +498,7 @@ fn 
controlFlowReferences( } pub const Callsite = struct { - uri: []const u8, + uri: Uri, call_node: Ast.Node.Index, }; @@ -645,7 +646,11 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen const tracy_zone = tracy.trace(@src()); defer tracy_zone.end(); - const handle = server.document_store.getHandle(request.uri()) orelse return null; + const uri = Uri.parse(arena, request.uri()) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + const handle = server.document_store.getHandle(uri) orelse return null; if (handle.tree.mode == .zon) return null; const source_index = offsets.positionToIndex(handle.tree.source, request.position(), server.offset_encoding); @@ -732,9 +737,12 @@ pub fn referencesHandler(server: *Server, arena: std.mem.Allocator, request: Gen .references => return .{ .references = locations.items }, .highlight => { var highlights: std.ArrayList(types.DocumentHighlight) = try .initCapacity(arena, locations.items.len); - const uri = handle.uri; for (locations.items) |loc| { - if (!std.mem.eql(u8, loc.uri, uri)) continue; + const loc_uri = Uri.parse(arena, loc.uri) catch |err| switch (err) { + error.OutOfMemory => return error.OutOfMemory, + else => return error.InvalidParams, + }; + if (!loc_uri.eql(handle.uri)) continue; highlights.appendAssumeCapacity(.{ .range = loc.range, .kind = .Text, diff --git a/src/translate_c.zig b/src/translate_c.zig index 207a62b4c..ea1dab3ee 100644 --- a/src/translate_c.zig +++ b/src/translate_c.zig @@ -6,7 +6,7 @@ const DocumentStore = @import("DocumentStore.zig"); const ast = @import("ast.zig"); const tracy = @import("tracy"); const Ast = std.zig.Ast; -const URI = @import("uri.zig"); +const Uri = @import("Uri.zig"); const log = std.log.scoped(.translate_c); const OutMessage = std.zig.Client.Message; @@ -90,20 +90,20 @@ fn convertCIncludeInternal( pub const Result = union(enum) { // uri to the generated zig file - success: []const 
u8, + success: Uri, // zig translate-c failed with the given error messages failure: std.zig.ErrorBundle, pub fn deinit(self: *Result, allocator: std.mem.Allocator) void { switch (self.*) { - .success => |path| allocator.free(path), + .success => |uri| uri.deinit(allocator), .failure => |*bundle| bundle.deinit(allocator), } } }; /// takes a c header file and returns the result from calling `zig translate-c` -/// returns a URI to the generated zig file on success or the content of stderr on failure +/// returns a Uri to the generated zig file on success or the content of stderr on failure /// null indicates a failure which is automatically logged /// Caller owns returned memory. pub fn translate( @@ -234,7 +234,7 @@ pub fn translate( const result_path = try global_cache_dir.join(allocator, &.{ "o", &hex_result_path, "cimport.zig" }); defer allocator.free(result_path); - return .{ .success = try URI.fromPath(allocator, std.mem.sliceTo(result_path, '\n')) }; + return .{ .success = try .fromPath(allocator, std.mem.sliceTo(result_path, '\n')) }; }, .error_bundle => { const error_bundle_header = reader.takeStruct(InMessage.ErrorBundle, .little) catch return error.InvalidMessage; diff --git a/src/uri.zig b/src/uri.zig deleted file mode 100644 index 21a7d828a..000000000 --- a/src/uri.zig +++ /dev/null @@ -1,149 +0,0 @@ -const std = @import("std"); -const builtin = @import("builtin"); - -/// Converts a file system path to a Uri. 
-/// Caller owns the returned memory -pub fn fromPath(allocator: std.mem.Allocator, path: []const u8) error{OutOfMemory}![]u8 { - return try fromPathWithOs(allocator, path, builtin.os.tag == .windows); -} - -fn fromPathWithOs( - allocator: std.mem.Allocator, - path: []const u8, - comptime is_windows: bool, -) error{OutOfMemory}![]u8 { - var buf: std.ArrayList(u8) = try .initCapacity(allocator, path.len + 8); - errdefer buf.deinit(allocator); - - buf.appendSliceAssumeCapacity("file://"); - if (!std.mem.startsWith(u8, path, "/")) { - buf.appendAssumeCapacity('/'); - } - - var value = path; - - if (is_windows and - path.len >= 2 and - std.ascii.isAlphabetic(path[0]) and - path[1] == ':') - { - // convert windows drive letter to lower case - try buf.append(allocator, std.ascii.toLower(path[0])); - value = value[1..]; - } - - for (value) |c| { - if (is_windows and c == '\\') { - try buf.append(allocator, '/'); - continue; - } - switch (c) { - // zig fmt: off - 'A'...'Z', 'a'...'z', '0'...'9', - '-', '.', '_', '~', - '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', - '/', ':', '@', - // zig fmt: on - => try buf.append(allocator, c), - else => try buf.print(allocator, "%{X:0>2}", .{c}), - } - } - - return try buf.toOwnedSlice(allocator); -} - -test "fromPath (posix)" { - const uri = try fromPathWithOs(std.testing.allocator, "/home/main.zig", false); - defer std.testing.allocator.free(uri); - try std.testing.expectEqualStrings("file:///home/main.zig", uri); -} - -test "fromPath (windows)" { - const uri = try fromPathWithOs(std.testing.allocator, "C:/main.zig", true); - defer std.testing.allocator.free(uri); - try std.testing.expectEqualStrings("file:///c:/main.zig", uri); -} - -test "fromPath - preserve '\\' (posix)" { - const uri = try fromPathWithOs(std.testing.allocator, "/home\\main.zig", false); - defer std.testing.allocator.free(uri); - try std.testing.expectEqualStrings("file:///home%5Cmain.zig", uri); -} - -test "fromPath - convert '\\' to '/' (windows)" { - 
const uri = try fromPathWithOs(std.testing.allocator, "C:\\main.zig", true); - defer std.testing.allocator.free(uri); - try std.testing.expectEqualStrings("file:///c:/main.zig", uri); -} - -test "fromPath - windows like path on posix" { - const uri = try fromPathWithOs(std.testing.allocator, "/C:\\main.zig", false); - defer std.testing.allocator.free(uri); - try std.testing.expectEqualStrings("file:///C:%5Cmain.zig", uri); -} - -/// Converts a Uri to a file system path. -/// Caller owns the returned memory -pub fn toFsPath(allocator: std.mem.Allocator, raw_uri: []const u8) (std.Uri.ParseError || error{ UnsupportedScheme, OutOfMemory })![]u8 { - return try toFsPathWithOs(allocator, raw_uri, builtin.os.tag == .windows); -} - -fn toFsPathWithOs( - allocator: std.mem.Allocator, - raw_uri: []const u8, - comptime is_windows: bool, -) (std.Uri.ParseError || error{ UnsupportedScheme, OutOfMemory })![]u8 { - const uri: std.Uri = try .parse(raw_uri); - if (!std.mem.eql(u8, uri.scheme, "file")) return error.UnsupportedScheme; - - var aw: std.Io.Writer.Allocating = try .initCapacity(allocator, raw_uri.len); - uri.path.formatRaw(&aw.writer) catch unreachable; - var buf = aw.toArrayList(); - defer buf.deinit(allocator); - - if (is_windows and - buf.items.len >= 3 and - buf.items[0] == '/' and - std.ascii.isAlphabetic(buf.items[1]) and - buf.items[2] == ':') - { - // convert windows drive letter to lower case - buf.items[1] = std.ascii.toLower(buf.items[1]); - - // remove the extra slash - @memmove(buf.items[0 .. 
buf.items.len - 1], buf.items[1..]); - buf.items.len -= 1; - } - - if (is_windows) { - for (buf.items) |*c| { - if (c.* == '\\') c.* = '/'; - } - } - - return try buf.toOwnedSlice(allocator); -} - -test "toFsPath - convert percent encoded '\\' to '/' (windows)" { - const path = try toFsPathWithOs(std.testing.allocator, "file:///C:%5Cmain.zig", true); - defer std.testing.allocator.free(path); - try std.testing.expectEqualStrings("c:/main.zig", path); -} - -test "toFsPath - preserve percent encoded '\\' (posix)" { - const path = try toFsPathWithOs(std.testing.allocator, "file:///foo%5Cmain.zig", false); - defer std.testing.allocator.free(path); - try std.testing.expectEqualStrings("/foo\\main.zig", path); -} - -test "toFsPath - percent encoded drive letter (windows)" { - const path = try toFsPathWithOs(std.testing.allocator, "file:///%43%3a%5Cfoo\\main.zig", true); - defer std.testing.allocator.free(path); - try std.testing.expectEqualStrings("c:/foo/main.zig", path); -} - -test "toFsPath - windows like path on posix" { - const path = try toFsPathWithOs(std.testing.allocator, "file:///C:%5Cmain.zig", false); - defer std.testing.allocator.free(path); - try std.testing.expectEqualStrings("/C:\\main.zig", path); -} diff --git a/src/zls.zig b/src/zls.zig index 4ecc96665..864bcb37f 100644 --- a/src/zls.zig +++ b/src/zls.zig @@ -18,7 +18,7 @@ pub const Server = @import("Server.zig"); pub const snippets = @import("snippets.zig"); pub const testing = @import("testing.zig"); pub const translate_c = @import("translate_c.zig"); -pub const URI = @import("uri.zig"); +pub const Uri = @import("Uri.zig"); pub const code_actions = @import("features/code_actions.zig"); pub const completions = @import("features/completions.zig"); diff --git a/tests/analysis_check.zig b/tests/analysis_check.zig index f50c7bf58..720eab992 100644 --- a/tests/analysis_check.zig +++ b/tests/analysis_check.zig @@ -130,7 +130,7 @@ pub fn main() Error!void { std.debug.panic("failed to read from {s}: {}", .{ 
file_path, err }); defer gpa.free(source); - const handle_uri = try zls.URI.fromPath(arena, file_path); + const handle_uri: zls.Uri = try .fromPath(arena, file_path); try document_store.openLspSyncedDocument(handle_uri, source); const handle: *zls.DocumentStore.Handle = document_store.handles.get(handle_uri).?; diff --git a/tests/context.zig b/tests/context.zig index d3902f602..c13665606 100644 --- a/tests/context.zig +++ b/tests/context.zig @@ -82,34 +82,32 @@ pub const Context = struct { // helper pub fn addDocument(self: *Context, options: struct { - uri: ?[]const u8 = null, + use_file_scheme: bool = false, source: []const u8, mode: std.zig.Ast.Mode = .zig, - }) ![]const u8 { + }) !zls.Uri { const fmt = switch (builtin.os.tag) { - .windows => "c:/nonexistent/test-{d}.{t}", - else => "/nonexistent/test-{d}.{t}", + .windows => "file:/c:/Untitled-{d}.{t}", + else => "file:/Untitled-{d}.{t}", }; - const uri = options.uri orelse uri: { - const path = try std.fmt.allocPrint( - self.arena.allocator(), - fmt, - .{ self.file_id, options.mode }, - ); - break :uri try zls.URI.fromPath(self.arena.allocator(), path); - }; + const arena = self.arena.allocator(); + const path = if (options.use_file_scheme) + try std.fmt.allocPrint(arena, fmt, .{ self.file_id, options.mode }) + else + try std.fmt.allocPrint(arena, "untitled:/Untitled-{d}.{t}", .{ self.file_id, options.mode }); + const uri: zls.Uri = try .parse(arena, path); const params: types.DidOpenTextDocumentParams = .{ .textDocument = .{ - .uri = uri, + .uri = uri.raw, .languageId = "zig", .version = 420, .text = options.source, }, }; - _ = try self.server.sendNotificationSync(self.arena.allocator(), "textDocument/didOpen", params); + _ = try self.server.sendNotificationSync(arena, "textDocument/didOpen", params); self.file_id += 1; return uri; diff --git a/tests/language_features/cimport.zig b/tests/language_features/cimport.zig index 55c3af2fd..f27499cba 100644 --- a/tests/language_features/cimport.zig +++ 
b/tests/language_features/cimport.zig @@ -120,7 +120,7 @@ fn testTranslate(c_source: []const u8) !translate_c.Result { switch (result) { .success => |uri| { - const path = try zls.URI.toFsPath(allocator, uri); + const path = try uri.toFsPath(allocator); defer allocator.free(path); try std.testing.expect(std.fs.path.isAbsolute(path)); try std.fs.accessAbsolute(path, .{}); diff --git a/tests/lsp_features/code_actions.zig b/tests/lsp_features/code_actions.zig index 44b3bd59b..1f84ed751 100644 --- a/tests/lsp_features/code_actions.zig +++ b/tests/lsp_features/code_actions.zig @@ -998,7 +998,7 @@ fn testDiagnostic( const handle = ctx.server.document_store.getHandle(uri).?; const params: types.CodeActionParams = .{ - .textDocument = .{ .uri = uri }, + .textDocument = .{ .uri = uri.raw }, .range = range, .context = .{ .diagnostics = &.{}, @@ -1029,9 +1029,9 @@ fn testDiagnostic( const workspace_edit = code_action.edit.?; const changes = workspace_edit.changes.?.map; try std.testing.expectEqual(@as(usize, 1), changes.count()); - try std.testing.expect(changes.contains(uri)); + try std.testing.expect(changes.contains(uri.raw)); - try text_edits.appendSlice(allocator, changes.get(uri).?); + try text_edits.appendSlice(allocator, changes.get(uri.raw).?); } const actual = try zls.diff.applyTextEdits(allocator, source, text_edits.items, ctx.server.offset_encoding); diff --git a/tests/lsp_features/completion.zig b/tests/lsp_features/completion.zig index 5e7425de0..f82a9dcd9 100644 --- a/tests/lsp_features/completion.zig +++ b/tests/lsp_features/completion.zig @@ -1262,7 +1262,7 @@ test "namespace" { \\const bar = foo.; , &.{ .{ .label = "beta", .kind = .Function, .detail = "fn (_: anytype) void" }, - .{ .label = "gamma", .kind = .Function, .detail = "fn (_: test-0) void" }, + .{ .label = "gamma", .kind = .Function, .detail = "fn (_: Untitled-0) void" }, }); } @@ -1362,7 +1362,7 @@ test "struct" { , &.{ .{ .label = "alpha", .kind = .Function, .detail = "fn () void" }, .{ .label = 
"beta", .kind = .Function, .detail = "fn (_: anytype) void" }, - .{ .label = "gamma", .kind = .Function, .detail = "fn (_: test-0) void" }, + .{ .label = "gamma", .kind = .Function, .detail = "fn (_: Untitled-0) void" }, .{ .label = "Self", .kind = .Struct }, .{ .label = "bar", .kind = .Struct }, }); @@ -4282,7 +4282,7 @@ fn testCompletionWithOptions( const test_uri = try ctx.addDocument(.{ .source = text }); const params: types.CompletionParams = .{ - .textDocument = .{ .uri = test_uri }, + .textDocument = .{ .uri = test_uri.raw }, .position = offsets.indexToPosition(source, cursor_idx, ctx.server.offset_encoding), }; @@ -4314,7 +4314,7 @@ fn testCompletionWithOptions( defer error_builder.deinit(); errdefer error_builder.writeDebug(); - try error_builder.addFile(test_uri, text); + try error_builder.addFile(test_uri.raw, text); for (found.keys()) |label| { const actual_completion: types.CompletionItem = blk: { @@ -4332,7 +4332,7 @@ fn testCompletionWithOptions( }; if (actual_completion.kind == null or expected_completion.kind != actual_completion.kind.?) 
{ - try error_builder.msgAtIndex("completion item '{s}' should be of kind '{t}' but was '{?t}'!", test_uri, cursor_idx, .err, .{ + try error_builder.msgAtIndex("completion item '{s}' should be of kind '{t}' but was '{?t}'!", test_uri.raw, cursor_idx, .err, .{ label, expected_completion.kind, if (actual_completion.kind) |kind| kind else null, @@ -4349,7 +4349,7 @@ fn testCompletionWithOptions( if (actual_doc != null and std.mem.eql(u8, expected_doc, actual_doc.?)) break :doc_blk; - try error_builder.msgAtIndex("completion item '{s}' should have doc '{f}' but was '{?f}'!", test_uri, cursor_idx, .err, .{ + try error_builder.msgAtIndex("completion item '{s}' should have doc '{f}' but was '{?f}'!", test_uri.raw, cursor_idx, .err, .{ label, std.zig.fmtString(expected_doc), if (actual_doc) |str| std.zig.fmtString(str) else null, @@ -4366,7 +4366,7 @@ fn testCompletionWithOptions( if (expected_completion.detail) |expected_detail| blk: { if (actual_completion.detail != null and std.mem.eql(u8, expected_detail, actual_completion.detail.?)) break :blk; - try error_builder.msgAtIndex("completion item '{s}' should have detail '{s}' but was '{?s}'!", test_uri, cursor_idx, .err, .{ + try error_builder.msgAtIndex("completion item '{s}' should have detail '{s}' but was '{?s}'!", test_uri.raw, cursor_idx, .err, .{ label, expected_detail, actual_completion.detail, @@ -4376,14 +4376,14 @@ fn testCompletionWithOptions( if (expected_completion.labelDetails) |expected_label_details| { const actual_label_details = actual_completion.labelDetails orelse { - try error_builder.msgAtIndex("expected label details on completion item '{s}'!", test_uri, cursor_idx, .err, .{label}); + try error_builder.msgAtIndex("expected label details on completion item '{s}'!", test_uri.raw, cursor_idx, .err, .{label}); return error.InvalidCompletionLabelDetails; }; const detail_ok = (expected_label_details.detail == null and actual_label_details.detail == null) or (expected_label_details.detail != null and 
actual_label_details.detail != null and std.mem.eql(u8, expected_label_details.detail.?, actual_label_details.detail.?)); if (!detail_ok) { - try error_builder.msgAtIndex("completion item '{s}' should have label detail '{?s}' but was '{?s}'!", test_uri, cursor_idx, .err, .{ + try error_builder.msgAtIndex("completion item '{s}' should have label detail '{?s}' but was '{?s}'!", test_uri.raw, cursor_idx, .err, .{ label, expected_label_details.detail, actual_label_details.detail, @@ -4395,7 +4395,7 @@ fn testCompletionWithOptions( (expected_label_details.description != null and actual_label_details.description != null and std.mem.eql(u8, expected_label_details.description.?, actual_label_details.description.?)); if (!description_ok) { - try error_builder.msgAtIndex("completion item '{s}' should have label detail description '{?s}' but was '{?s}'!", test_uri, cursor_idx, .err, .{ + try error_builder.msgAtIndex("completion item '{s}' should have label detail description '{?s}' but was '{?s}'!", test_uri.raw, cursor_idx, .err, .{ label, expected_label_details.description, actual_label_details.description, @@ -4412,7 +4412,7 @@ fn testCompletionWithOptions( std.debug.assert(actual_deprecated == (actual_completion.deprecated orelse false)); if (expected_completion.deprecated == actual_deprecated) break :blk; - try error_builder.msgAtIndex("completion item '{s}' should {s} be marked as deprecated but {s}!", test_uri, cursor_idx, .err, .{ + try error_builder.msgAtIndex("completion item '{s}' should {s} be marked as deprecated but {s}!", test_uri.raw, cursor_idx, .err, .{ label, if (expected_completion.deprecated) "" else "not", if (actual_deprecated) "was" else "wasn't", @@ -4428,7 +4428,7 @@ fn testCompletionWithOptions( try printLabels(&buffer, found, "found"); try printLabels(&buffer, missing, "missing"); try printLabels(&buffer, unexpected, "unexpected"); - try error_builder.msgAtIndex("invalid completions\n{s}", test_uri, cursor_idx, .err, .{buffer.items}); + try 
error_builder.msgAtIndex("invalid completions\n{s}", test_uri.raw, cursor_idx, .err, .{buffer.items}); return error.MissingOrUnexpectedCompletions; } } @@ -4520,7 +4520,7 @@ fn testCompletionTextEdit( const cursor_position = offsets.indexToPosition(options.source, cursor_idx, ctx.server.offset_encoding); const params: types.CompletionParams = .{ - .textDocument = .{ .uri = test_uri }, + .textDocument = .{ .uri = test_uri.raw }, .position = cursor_position, }; diff --git a/tests/lsp_features/definition.zig b/tests/lsp_features/definition.zig index a6a768e5d..92d4e7671 100644 --- a/tests/lsp_features/definition.zig +++ b/tests/lsp_features/definition.zig @@ -354,7 +354,7 @@ fn testDefinition(source: []const u8) !void { defer error_builder.deinit(); errdefer error_builder.writeDebug(); - try error_builder.addFile(test_uri, phr.new_source); + try error_builder.addFile(test_uri.raw, phr.new_source); try error_builder.addFile("old_source", source); try error_builder.addFile("new_source", phr.new_source); @@ -410,9 +410,9 @@ fn testDefinition(source: []const u8) !void { const cursor_position = offsets.indexToPosition(phr.new_source, cursor_index, ctx.server.offset_encoding); - const declaration_params: types.DeclarationParams = .{ .textDocument = .{ .uri = test_uri }, .position = cursor_position }; - const definition_params: types.DefinitionParams = .{ .textDocument = .{ .uri = test_uri }, .position = cursor_position }; - const type_definition_params: types.TypeDefinitionParams = .{ .textDocument = .{ .uri = test_uri }, .position = cursor_position }; + const declaration_params: types.DeclarationParams = .{ .textDocument = .{ .uri = test_uri.raw }, .position = cursor_position }; + const definition_params: types.DefinitionParams = .{ .textDocument = .{ .uri = test_uri.raw }, .position = cursor_position }; + const type_definition_params: types.TypeDefinitionParams = .{ .textDocument = .{ .uri = test_uri.raw }, .position = cursor_position }; const maybe_declaration_response = 
if (declaration_loc != null) try ctx.server.sendRequestSync(ctx.arena.allocator(), "textDocument/declaration", declaration_params) @@ -432,71 +432,71 @@ fn testDefinition(source: []const u8) !void { if (maybe_declaration_response) |response| { try std.testing.expect(response == .array_of_DeclarationLink); try std.testing.expect(response.array_of_DeclarationLink.len == 1); - try std.testing.expectEqualStrings(test_uri, response.array_of_DeclarationLink[0].targetUri); + try std.testing.expectEqualStrings(test_uri.raw, response.array_of_DeclarationLink[0].targetUri); const actual_loc = offsets.rangeToLoc(phr.new_source, response.array_of_DeclarationLink[0].targetSelectionRange, ctx.server.offset_encoding); if (declaration_loc) |expected_loc| { if (!std.meta.eql(expected_loc, actual_loc)) { - try error_builder.msgAtLoc("expected declaration here!", test_uri, expected_loc, .err, .{}); - try error_builder.msgAtLoc("actual declaration here", test_uri, actual_loc, .err, .{}); + try error_builder.msgAtLoc("expected declaration here!", test_uri.raw, expected_loc, .err, .{}); + try error_builder.msgAtLoc("actual declaration here", test_uri.raw, actual_loc, .err, .{}); } } const actual_origin_loc = offsets.rangeToLoc(phr.new_source, response.array_of_DeclarationLink[0].originSelectionRange.?, ctx.server.offset_encoding); if (origin_loc) |expected_origin_loc| { if (!std.meta.eql(expected_origin_loc, actual_origin_loc)) { - try error_builder.msgAtLoc("expected declaration origin here!", test_uri, expected_origin_loc, .err, .{}); - try error_builder.msgAtLoc("actual declaration origin here", test_uri, actual_origin_loc, .err, .{}); + try error_builder.msgAtLoc("expected declaration origin here!", test_uri.raw, expected_origin_loc, .err, .{}); + try error_builder.msgAtLoc("actual declaration origin here", test_uri.raw, actual_origin_loc, .err, .{}); } } } else if (declaration_loc) |expected_loc| { - try error_builder.msgAtLoc("expected declaration here but got no result instead!", 
test_uri, expected_loc, .err, .{}); + try error_builder.msgAtLoc("expected declaration here but got no result instead!", test_uri.raw, expected_loc, .err, .{}); } if (maybe_definition_response) |response| { try std.testing.expect(response == .array_of_DefinitionLink); try std.testing.expect(response.array_of_DefinitionLink.len == 1); - try std.testing.expectEqualStrings(test_uri, response.array_of_DefinitionLink[0].targetUri); + try std.testing.expectEqualStrings(test_uri.raw, response.array_of_DefinitionLink[0].targetUri); const actual_loc = offsets.rangeToLoc(phr.new_source, response.array_of_DefinitionLink[0].targetSelectionRange, ctx.server.offset_encoding); if (definition_loc) |expected_loc| { if (!std.meta.eql(expected_loc, actual_loc)) { - try error_builder.msgAtLoc("expected definition here!", test_uri, expected_loc, .err, .{}); - try error_builder.msgAtLoc("actual definition here", test_uri, actual_loc, .err, .{}); + try error_builder.msgAtLoc("expected definition here!", test_uri.raw, expected_loc, .err, .{}); + try error_builder.msgAtLoc("actual definition here", test_uri.raw, actual_loc, .err, .{}); } } const actual_origin_loc = offsets.rangeToLoc(phr.new_source, response.array_of_DefinitionLink[0].originSelectionRange.?, ctx.server.offset_encoding); if (origin_loc) |expected_origin_loc| { if (!std.meta.eql(expected_origin_loc, actual_origin_loc)) { - try error_builder.msgAtLoc("expected definition origin here!", test_uri, expected_origin_loc, .err, .{}); - try error_builder.msgAtLoc("actual definition origin here", test_uri, actual_origin_loc, .err, .{}); + try error_builder.msgAtLoc("expected definition origin here!", test_uri.raw, expected_origin_loc, .err, .{}); + try error_builder.msgAtLoc("actual definition origin here", test_uri.raw, actual_origin_loc, .err, .{}); } } } else if (definition_loc) |expected_loc| { - try error_builder.msgAtLoc("expected definition here but got no result instead!", test_uri, expected_loc, .err, .{}); + try 
error_builder.msgAtLoc("expected definition here but got no result instead!", test_uri.raw, expected_loc, .err, .{}); } if (maybe_type_definition_response) |response| { try std.testing.expect(response == .array_of_DefinitionLink); try std.testing.expect(response.array_of_DefinitionLink.len == 1); - try std.testing.expectEqualStrings(test_uri, response.array_of_DefinitionLink[0].targetUri); + try std.testing.expectEqualStrings(test_uri.raw, response.array_of_DefinitionLink[0].targetUri); const actual_loc = offsets.rangeToLoc(phr.new_source, response.array_of_DefinitionLink[0].targetSelectionRange, ctx.server.offset_encoding); if (type_definition_loc) |expected_loc| { if (!std.meta.eql(expected_loc, actual_loc)) { - try error_builder.msgAtLoc("expected type definition here!", test_uri, expected_loc, .err, .{}); - try error_builder.msgAtLoc("actual type definition here", test_uri, actual_loc, .err, .{}); + try error_builder.msgAtLoc("expected type definition here!", test_uri.raw, expected_loc, .err, .{}); + try error_builder.msgAtLoc("actual type definition here", test_uri.raw, actual_loc, .err, .{}); } } const actual_origin_loc = offsets.rangeToLoc(phr.new_source, response.array_of_DefinitionLink[0].originSelectionRange.?, ctx.server.offset_encoding); if (origin_loc) |expected_origin_loc| { if (!std.meta.eql(expected_origin_loc, actual_origin_loc)) { - try error_builder.msgAtLoc("expected type definition origin here!", test_uri, expected_origin_loc, .err, .{}); - try error_builder.msgAtLoc("actual type definition origin here", test_uri, actual_origin_loc, .err, .{}); + try error_builder.msgAtLoc("expected type definition origin here!", test_uri.raw, expected_origin_loc, .err, .{}); + try error_builder.msgAtLoc("actual type definition origin here", test_uri.raw, actual_origin_loc, .err, .{}); } } } else if (type_definition_loc) |expected_loc| { - try error_builder.msgAtLoc("expected type definition here but got no result instead!", test_uri, expected_loc, .err, .{}); 
+ try error_builder.msgAtLoc("expected type definition here but got no result instead!", test_uri.raw, expected_loc, .err, .{}); } if (error_builder.hasMessages()) { - try error_builder.msgAtIndex("cursor position here", test_uri, cursor_index, .info, .{}); + try error_builder.msgAtIndex("cursor position here", test_uri.raw, cursor_index, .info, .{}); return error.InvalidResponse; } } diff --git a/tests/lsp_features/diagnostics.zig b/tests/lsp_features/diagnostics.zig index 49396553f..9aba5f1e5 100644 --- a/tests/lsp_features/diagnostics.zig +++ b/tests/lsp_features/diagnostics.zig @@ -143,7 +143,7 @@ test "autofix comment" { .relatedInformation = &.{ .{ .location = .{ - .uri = "file:///test.zig", + .uri = "untitled:/Untitled-0.zig", .range = .{ .start = .{ .line = 1, .character = 10 }, .end = .{ .line = 1, .character = 13 }, @@ -169,7 +169,6 @@ fn testDiagnostics( defer context.deinit(); const uri = try context.addDocument(.{ - .uri = "file:///test.zig", .source = source, }); diff --git a/tests/lsp_features/document_symbol.zig b/tests/lsp_features/document_symbol.zig index abee529fa..04985231a 100644 --- a/tests/lsp_features/document_symbol.zig +++ b/tests/lsp_features/document_symbol.zig @@ -100,7 +100,7 @@ fn testDocumentSymbol(source: []const u8, expected: []const u8) !void { const test_uri = try ctx.addDocument(.{ .source = source }); const params: types.DocumentSymbolParams = .{ - .textDocument = .{ .uri = test_uri }, + .textDocument = .{ .uri = test_uri.raw }, }; const response = try ctx.server.sendRequestSync(ctx.arena.allocator(), "textDocument/documentSymbol", params) orelse { diff --git a/tests/lsp_features/folding_range.zig b/tests/lsp_features/folding_range.zig index b8308f6a8..440a54b7d 100644 --- a/tests/lsp_features/folding_range.zig +++ b/tests/lsp_features/folding_range.zig @@ -304,7 +304,7 @@ fn testFoldingRange(source: []const u8, expect: []const types.FoldingRange) !voi const test_uri = try ctx.addDocument(.{ .source = source }); - const 
params: types.FoldingRangeParams = .{ .textDocument = .{ .uri = test_uri } }; + const params: types.FoldingRangeParams = .{ .textDocument = .{ .uri = test_uri.raw } }; const response = try ctx.server.sendRequestSync(ctx.arena.allocator(), "textDocument/foldingRange", params) orelse { std.debug.print("Server returned `null` as the result\n", .{}); diff --git a/tests/lsp_features/hover.zig b/tests/lsp_features/hover.zig index 4d2b17696..95ff7a6df 100644 --- a/tests/lsp_features/hover.zig +++ b/tests/lsp_features/hover.zig @@ -334,10 +334,10 @@ test "root struct" { \\const foo: @This() = .{} \\``` \\```zig - \\(test) + \\(Untitled-0) \\``` \\ - \\Go to [test](file:///test.zig#L1) + \\Go to [Untitled-0](untitled:/Untitled-0.zig#L1) ); } @@ -353,7 +353,7 @@ test "inferred struct init" { \\(type) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); try testHover( \\const S = struct { foo: u32 }; @@ -367,7 +367,7 @@ test "inferred struct init" { \\(type) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); } @@ -385,7 +385,7 @@ test "decl literal" { \\(S) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); try testHover( \\const S = struct { @@ -410,7 +410,7 @@ test "decl literal function" { \\(fn () S) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); try testHover( @@ -428,7 +428,7 @@ test "decl literal function" { \\(fn () !S) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); try testHover( \\const Inner = struct { @@ -448,7 +448,7 @@ test "decl literal function" { \\(fn () Inner) \\``` \\ - \\Go to [Inner](file:///test.zig#L1) + \\Go to [Inner](untitled:/Untitled-0.zig#L1) ); } @@ -471,7 +471,7 @@ test "decl literal on generic type" { \\(Box(u8)) \\``` \\ - \\Go to [Box](file:///test.zig#L1) + \\Go to [Box](untitled:/Untitled-0.zig#L1) ); } @@ -495,7 +495,7 @@ test "decl literal 
on generic type - alias" { \\(Box(u8)) \\``` \\ - \\Go to [Box](file:///test.zig#L1) + \\Go to [Box](untitled:/Untitled-0.zig#L1) ); } @@ -585,7 +585,7 @@ test "enum member" { \\(Enum) \\``` \\ - \\Go to [Enum](file:///test.zig#L1) + \\Go to [Enum](untitled:/Untitled-0.zig#L1) ); } @@ -607,7 +607,7 @@ test "generic type" { \\(GenericType(StructType,EnumType)) \\``` \\ - \\Go to [GenericType](file:///test.zig#L3) | [StructType](file:///test.zig#L1) | [EnumType](file:///test.zig#L2) + \\Go to [GenericType](untitled:/Untitled-0.zig#L3) | [StructType](untitled:/Untitled-0.zig#L1) | [EnumType](untitled:/Untitled-0.zig#L2) ); } @@ -652,7 +652,7 @@ test "enum literal" { \\(E) \\``` \\ - \\Go to [E](file:///test.zig#L1) + \\Go to [E](untitled:/Untitled-0.zig#L1) ); } @@ -671,7 +671,7 @@ test "function" { \\(fn (A, B) error{A,B}!C) \\``` \\ - \\Go to [A](file:///test.zig#L1) | [B](file:///test.zig#L2) | [C](file:///test.zig#L3) + \\Go to [A](untitled:/Untitled-0.zig#L1) | [B](untitled:/Untitled-0.zig#L2) | [C](untitled:/Untitled-0.zig#L3) ); try testHover( \\const S = struct { a: i32 }; @@ -685,7 +685,7 @@ test "function" { \\(fn (S, S) error{A,B}!S) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); try testHover( \\const E = error { A, B, C }; @@ -768,7 +768,7 @@ test "optional" { \\(?S) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); } @@ -785,7 +785,7 @@ test "error union" { \\(error{A,B}!S) \\``` \\ - \\Go to [S](file:///test.zig#L1) + \\Go to [S](untitled:/Untitled-0.zig#L1) ); } @@ -1130,7 +1130,7 @@ test "escaped identifier in enum literal" { \\(E) \\``` \\ - \\Go to [E](file:///test.zig#L1) + \\Go to [E](untitled:/Untitled-0.zig#L1) , .{ .highlight = "@\"hello world\"", .markup_kind = .markdown, @@ -1369,7 +1369,6 @@ fn testHoverWithOptions( const arena = ctx.arena.allocator(); const uri = try ctx.addDocument(.{ - .uri = "file:///test.zig", .source = text, }); const handle = 
server.document_store.getHandle(uri).?; diff --git a/tests/lsp_features/inlay_hints.zig b/tests/lsp_features/inlay_hints.zig index d3b378f0b..b14058b97 100644 --- a/tests/lsp_features/inlay_hints.zig +++ b/tests/lsp_features/inlay_hints.zig @@ -614,7 +614,7 @@ fn testInlayHints(source: []const u8, options: Options) !void { }; const params: types.InlayHintParams = .{ - .textDocument = .{ .uri = test_uri }, + .textDocument = .{ .uri = test_uri.raw }, .range = range, }; const response = try ctx.server.sendRequestSync(ctx.arena.allocator(), "textDocument/inlayHint", params); @@ -631,7 +631,7 @@ fn testInlayHints(source: []const u8, options: Options) !void { defer error_builder.deinit(); errdefer error_builder.writeDebug(); - try error_builder.addFile(test_uri, phr.new_source); + try error_builder.addFile(test_uri.raw, phr.new_source); outer: for (phr.locations.items(.old), phr.locations.items(.new)) |old_loc, new_loc| { const expected_name = offsets.locToSlice(source, old_loc); @@ -644,7 +644,7 @@ fn testInlayHints(source: []const u8, options: Options) !void { try std.testing.expectEqual(options.kind, hint.kind.?); if (visited.isSet(i)) { - try error_builder.msgAtIndex("duplicate inlay hint here!", test_uri, new_loc.start, .err, .{}); + try error_builder.msgAtIndex("duplicate inlay hint here!", test_uri.raw, new_loc.start, .err, .{}); continue :outer; } else { visited.set(i); @@ -653,14 +653,14 @@ fn testInlayHints(source: []const u8, options: Options) !void { const actual_label = switch (options.kind) { .Parameter => blk: { if (!std.mem.endsWith(u8, hint.label.string, ":")) { - try error_builder.msgAtLoc("label `{s}` must end with a colon!", test_uri, new_loc, .err, .{hint.label.string}); + try error_builder.msgAtLoc("label `{s}` must end with a colon!", test_uri.raw, new_loc, .err, .{hint.label.string}); continue :outer; } break :blk hint.label.string[0 .. 
hint.label.string.len - 1]; }, .Type => blk: { if (!std.mem.startsWith(u8, hint.label.string, ": ")) { - try error_builder.msgAtLoc("label `{s}` must start with \": \"!", test_uri, new_loc, .err, .{hint.label.string}); + try error_builder.msgAtLoc("label `{s}` must start with \": \"!", test_uri.raw, new_loc, .err, .{hint.label.string}); continue :outer; } break :blk hint.label.string[2..hint.label.string.len]; @@ -669,12 +669,12 @@ fn testInlayHints(source: []const u8, options: Options) !void { }; if (!std.mem.eql(u8, expected_label, actual_label)) { - try error_builder.msgAtLoc("expected label `{s}` here but got `{s}`!", test_uri, new_loc, .err, .{ expected_label, actual_label }); + try error_builder.msgAtLoc("expected label `{s}` here but got `{s}`!", test_uri.raw, new_loc, .err, .{ expected_label, actual_label }); } continue :outer; } - try error_builder.msgAtLoc("expected hint `{s}` here", test_uri, new_loc, .err, .{expected_label}); + try error_builder.msgAtLoc("expected hint `{s}` here", test_uri.raw, new_loc, .err, .{expected_label}); } var it = visited.iterator(.{ .kind = .unset }); @@ -682,7 +682,7 @@ fn testInlayHints(source: []const u8, options: Options) !void { const hint = hints[index]; try std.testing.expectEqual(options.kind, hint.kind.?); const source_index = offsets.positionToIndex(phr.new_source, hint.position, ctx.server.offset_encoding); - try error_builder.msgAtIndex("unexpected inlay hint `{s}` here!", test_uri, source_index, .err, .{hint.label.string}); + try error_builder.msgAtIndex("unexpected inlay hint `{s}` here!", test_uri.raw, source_index, .err, .{hint.label.string}); } if (error_builder.hasMessages()) return error.InvalidResponse; diff --git a/tests/lsp_features/references.zig b/tests/lsp_features/references.zig index 51296688a..a375921d6 100644 --- a/tests/lsp_features/references.zig +++ b/tests/lsp_features/references.zig @@ -307,7 +307,7 @@ test "cross-file reference" { // for now this only tests the ability to find references 
within a file to a decl from another file \\pub const placeholder = struct {}; , - \\const file = @import("test-0.zig"); + \\const file = @import("Untitled-0.zig"); \\const first = file.<0>; \\const second = file.<0>; , @@ -318,7 +318,7 @@ fn testSymbolReferences(source: []const u8) !void { return testMultiFileSymbolReferences(&.{source}, true); } -/// source files have the following name pattern: `test-{d}.zig` +/// source files have the following name pattern: `untitled-{d}.zig` fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool) !void { const placeholder_name = "placeholder"; @@ -345,8 +345,11 @@ fn testMultiFileSymbolReferences(sources: []const []const u8, include_decl: bool var phr = try helper.collectReplacePlaceholders(allocator, source, placeholder_name); defer phr.deinit(allocator); - const uri = try ctx.addDocument(.{ .source = phr.new_source }); - files.putAssumeCapacityNoClobber(uri, .{ .source = source, .new_source = phr.new_source }); + const uri = try ctx.addDocument(.{ + .use_file_scheme = sources.len > 1, // use 'file:/' scheme when testing with multiple files so that they can import each other + .source = phr.new_source, + }); + files.putAssumeCapacityNoClobber(uri.raw, .{ .source = source, .new_source = phr.new_source }); phr.new_source = ""; // `files` takes ownership of `new_source` from `phr` for (phr.locations.items(.old), phr.locations.items(.new)) |old, new| { @@ -638,11 +641,11 @@ fn testSimpleReferences(source: []const u8) !void { defer error_builder.deinit(); errdefer error_builder.writeDebug(); - try error_builder.addFile(file_uri, phr.new_source); - try error_builder.msgAtIndex("requested references here", file_uri, cursor_index, .info, .{}); + try error_builder.addFile(file_uri.raw, phr.new_source); + try error_builder.msgAtIndex("requested references here", file_uri.raw, cursor_index, .info, .{}); const params: types.ReferenceParams = .{ - .textDocument = .{ .uri = file_uri }, + .textDocument = .{ .uri = 
file_uri.raw }, .position = offsets.indexToPosition(phr.new_source, cursor_index, ctx.server.offset_encoding), .context = .{ .includeDeclaration = false }, }; @@ -657,7 +660,7 @@ fn testSimpleReferences(source: []const u8) !void { defer visited.deinit(allocator); for (actual_locations) |response_location| { - std.debug.assert(std.mem.eql(u8, response_location.uri, file_uri)); + std.debug.assert(std.mem.eql(u8, response_location.uri, file_uri.raw)); const actual_loc = offsets.rangeToLoc(phr.new_source, response_location.range, ctx.server.offset_encoding); const index = found_index: { @@ -666,12 +669,12 @@ fn testSimpleReferences(source: []const u8) !void { if (expected_loc.end != actual_loc.end) continue; break :found_index idx; } - try error_builder.msgAtLoc("server returned unexpected reference!", file_uri, actual_loc, .err, .{}); + try error_builder.msgAtLoc("server returned unexpected reference!", file_uri.raw, actual_loc, .err, .{}); return error.UnexpectedReference; }; if (visited.isSet(index)) { - try error_builder.msgAtLoc("server returned duplicate reference!", file_uri, actual_loc, .err, .{}); + try error_builder.msgAtLoc("server returned duplicate reference!", file_uri.raw, actual_loc, .err, .{}); return error.DuplicateReference; } else { visited.set(index); @@ -682,7 +685,7 @@ fn testSimpleReferences(source: []const u8) !void { var unvisited_it = visited.iterator(.{ .kind = .unset }); while (unvisited_it.next()) |index| { const unvisited_loc = expected_locations.items[index]; - try error_builder.msgAtLoc("expected reference here!", file_uri, unvisited_loc, .err, .{}); + try error_builder.msgAtLoc("expected reference here!", file_uri.raw, unvisited_loc, .err, .{}); has_unvisited = true; } diff --git a/tests/lsp_features/selection_range.zig b/tests/lsp_features/selection_range.zig index eba752f53..4331c78eb 100644 --- a/tests/lsp_features/selection_range.zig +++ b/tests/lsp_features/selection_range.zig @@ -41,7 +41,7 @@ fn testSelectionRange(source: 
[]const u8, want: []const []const u8) !void { const position = offsets.locToRange(phr.new_source, phr.locations.items(.new)[0], .@"utf-16").start; const params: types.SelectionRangeParams = .{ - .textDocument = .{ .uri = test_uri }, + .textDocument = .{ .uri = test_uri.raw }, .positions = &.{position}, }; const response = try ctx.server.sendRequestSync(ctx.arena.allocator(), "textDocument/selectionRange", params); diff --git a/tests/lsp_features/semantic_tokens.zig b/tests/lsp_features/semantic_tokens.zig index 6004d79bb..8981cdc00 100644 --- a/tests/lsp_features/semantic_tokens.zig +++ b/tests/lsp_features/semantic_tokens.zig @@ -2111,7 +2111,7 @@ fn testSemanticTokensOptions( }); const params: types.SemanticTokensParams = .{ - .textDocument = .{ .uri = uri }, + .textDocument = .{ .uri = uri.raw }, }; const response = try ctx.server.sendRequestSync(ctx.arena.allocator(), "textDocument/semanticTokens/full", params) orelse { std.debug.print("Server returned `null` as the result\n", .{}); @@ -2125,14 +2125,14 @@ fn testSemanticTokensOptions( defer error_builder.deinit(); errdefer error_builder.writeDebug(); - try error_builder.addFile(uri, source); + try error_builder.addFile(uri.raw, source); var token_it: TokenIterator = .init(source, actual); var last_token_index: usize = 0; // should only be used for error messages for (expected_tokens) |expected_token| { const token = token_it.next() orelse { - try error_builder.msgAtIndex("expected a `{s}` token here", uri, last_token_index, .err, .{expected_token.@"0"}); + try error_builder.msgAtIndex("expected a `{s}` token here", uri.raw, last_token_index, .err, .{expected_token.@"0"}); return error.ExpectedToken; }; last_token_index = if (options.overlapping_token_support) token.loc.start else token.loc.end; @@ -2144,19 +2144,19 @@ fn testSemanticTokensOptions( const expected_token_modifiers = expected_token.@"2"; if (!std.mem.eql(u8, expected_token_source, token_source)) { - try error_builder.msgAtLoc("expected `{s}` as 
the next token but got `{s}` here", uri, token.loc, .err, .{ expected_token_source, token_source }); + try error_builder.msgAtLoc("expected `{s}` as the next token but got `{s}` here", uri.raw, token.loc, .err, .{ expected_token_source, token_source }); return error.UnexpectedTokenContent; } else if (expected_token_type != token.type) { - try error_builder.msgAtLoc("expected token type `{t}` but got `{t}`", uri, token.loc, .err, .{ expected_token_type, token.type }); + try error_builder.msgAtLoc("expected token type `{t}` but got `{t}`", uri.raw, token.loc, .err, .{ expected_token_type, token.type }); return error.UnexpectedTokenType; } else if (!std.meta.eql(expected_token_modifiers, token.modifiers)) { - try error_builder.msgAtLoc("expected token modifiers `{f}` but got `{f}`", uri, token.loc, .err, .{ expected_token_modifiers, token.modifiers }); + try error_builder.msgAtLoc("expected token modifiers `{f}` but got `{f}`", uri.raw, token.loc, .err, .{ expected_token_modifiers, token.modifiers }); return error.UnexpectedTokenModifiers; } } if (token_it.next()) |unexpected_token| { - try error_builder.msgAtLoc("unexpected `{}` token here", uri, unexpected_token.loc, .err, .{unexpected_token.type}); + try error_builder.msgAtLoc("unexpected `{}` token here", uri.raw, unexpected_token.loc, .err, .{unexpected_token.type}); return error.UnexpectedToken; } } diff --git a/tests/lsp_features/signature_help.zig b/tests/lsp_features/signature_help.zig index b9375c184..af0f365c8 100644 --- a/tests/lsp_features/signature_help.zig +++ b/tests/lsp_features/signature_help.zig @@ -362,7 +362,7 @@ fn testSignatureHelp(source: []const u8, expected_label: []const u8, expected_ac const test_uri = try ctx.addDocument(.{ .source = text }); const params: types.SignatureHelpParams = .{ - .textDocument = .{ .uri = test_uri }, + .textDocument = .{ .uri = test_uri.raw }, .position = offsets.indexToPosition(text, cursor_idx, ctx.server.offset_encoding), };