
Commit d6b1890

remove document store garbage collection
1 parent 43c234c

2 files changed (+29, -188 lines)

src/DocumentStore.zig

Lines changed: 27 additions & 186 deletions
@@ -230,7 +230,7 @@ pub const Handle = struct {
     const Status = packed struct(u32) {
         /// `true` if the document has been directly opened by the client i.e. with `textDocument/didOpen`
         /// `false` indicates the document only exists because it is a dependency of another document
-        /// or has been closed with `textDocument/didClose` and is awaiting cleanup through `garbageCollection`
+        /// or has been closed with `textDocument/didClose`.
         lsp_synced: bool = false,
         /// true if a thread has acquired the permission to compute the `DocumentScope`
         /// all other threads will wait until the given thread has computed the `DocumentScope` before reading it.
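
For illustration, a minimal sketch (not the actual zls `Handle`; the field layout, helper names, and atomic orderings here are assumptions) of the packed-status pattern described by the doc comment above: boolean flags packed into a `u32` so the whole status word can be read and updated atomically.

const std = @import("std");

const Status = packed struct(u32) {
    lsp_synced: bool = false,
    has_document_scope_lock: bool = false,
    _padding: u30 = 0,
};

const Handle = struct {
    status: std.atomic.Value(u32) = .init(0),

    fn getStatus(handle: *const Handle) Status {
        return @bitCast(handle.status.load(.acquire));
    }

    /// Returns the previous value of `lsp_synced`.
    fn setLspSynced(handle: *Handle, lsp_synced: bool) bool {
        const mask: u32 = @bitCast(Status{ .lsp_synced = true });
        const old: u32 = if (lsp_synced)
            handle.status.fetchOr(mask, .acq_rel)
        else
            handle.status.fetchAnd(~mask, .acq_rel);
        const old_status: Status = @bitCast(old);
        return old_status.lsp_synced;
    }
};

test Handle {
    var handle: Handle = .{};
    try std.testing.expect(!handle.setLspSynced(true));
    try std.testing.expect(handle.getStatus().lsp_synced);
}
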
@@ -743,22 +743,17 @@ pub fn closeLspSyncedDocument(self: *DocumentStore, uri: Uri) void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    // instead of destroying the handle here we just mark it not lsp synced
-    // and let it be destroy by the garbage collection code
-
-    const handle = self.handles.get(uri) orelse {
+    const kv = self.handles.fetchSwapRemove(uri) orelse {
         log.warn("Document not found: {s}", .{uri});
         return;
     };
-    if (!handle.setLspSynced(false)) {
+    if (!kv.value.isLspSynced()) {
         log.warn("Document already closed: {s}", .{uri});
     }
 
-    self.garbageCollectionImports() catch {};
-    if (supports_build_system) {
-        self.garbageCollectionCImports() catch {};
-        self.garbageCollectionBuildFiles() catch {};
-    }
+    self.allocator.free(kv.key);
+    kv.value.deinit();
+    self.allocator.destroy(kv.value);
 }
 
 /// Updates a document that is synced over the LSP protocol (`textDocument/didChange`).
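
For illustration, a minimal sketch (hypothetical `Document` type and map layout, not zls code) of the ownership pattern the new `closeLspSyncedDocument` relies on: `fetchSwapRemove` returns the stored key/value pair, so the duplicated URI key and the heap-allocated handle can be freed immediately instead of being left for a later garbage-collection pass.

const std = @import("std");

/// Stand-in for the real zls handle; it only owns its source text here.
const Document = struct {
    text: []u8,

    fn deinit(doc: *Document, allocator: std.mem.Allocator) void {
        allocator.free(doc.text);
    }
};

fn closeDocument(
    allocator: std.mem.Allocator,
    handles: *std.StringArrayHashMapUnmanaged(*Document),
    uri: []const u8,
) void {
    const kv = handles.fetchSwapRemove(uri) orelse return; // nothing to close
    allocator.free(kv.key); // the map owned a duplicate of the URI
    kv.value.deinit(allocator); // free the document's own buffers
    allocator.destroy(kv.value); // free the handle allocation itself
}

test closeDocument {
    const allocator = std.testing.allocator;
    var handles: std.StringArrayHashMapUnmanaged(*Document) = .empty;
    defer handles.deinit(allocator);

    const doc = try allocator.create(Document);
    doc.* = .{ .text = try allocator.dupe(u8, "const x = 1;") };
    try handles.put(allocator, try allocator.dupe(u8, "file:///a.zig"), doc);

    closeDocument(allocator, &handles, "file:///a.zig");
    try std.testing.expectEqual(@as(usize, 0), handles.count());
}
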
@@ -781,19 +776,27 @@ pub fn refreshLspSyncedDocument(self: *DocumentStore, uri: Uri, new_text: [:0]co
 
 /// Refreshes a document from the file system, unless said document is synced over the LSP protocol.
 /// **Not thread safe**
-pub fn refreshDocumentFromFileSystem(self: *DocumentStore, uri: Uri) !bool {
+pub fn refreshDocumentFromFileSystem(self: *DocumentStore, uri: Uri, should_delete: bool) !bool {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    const index = self.handles.getIndex(uri) orelse return false;
-    const handle = self.handles.values()[index];
-    if (handle.isLspSynced()) return false;
+    if (should_delete) {
+        const index = self.handles.getIndex(uri) orelse return false;
+        const handle = self.handles.values()[index];
+        if (handle.isLspSynced()) return false;
+
+        self.handles.swapRemoveAt(index);
+        handle.deinit();
+        self.allocator.destroy(handle);
+        self.allocator.free(handle.uri);
+    } else {
+        if (self.handles.get(uri)) |handle| {
+            if (handle.isLspSynced()) return false;
+        }
+        const file_contents = self.readFile(uri) orelse return false;
+        _ = try self.createAndStoreDocument(uri, file_contents, false);
+    }
 
-    log.debug("Closing document {s}", .{handle.uri});
-    self.handles.swapRemoveAt(index);
-    handle.deinit();
-    self.allocator.destroy(handle);
-    self.allocator.free(handle.uri);
     return true;
 }
 

@@ -984,150 +987,6 @@ fn invalidateBuildFileWorker(self: *DocumentStore, build_file: *BuildFile) void
     }
 }
 
-/// The `DocumentStore` represents a graph structure where every
-/// handle/document is a node and every `@import` and `@cImport` represent
-/// a directed edge.
-/// We can remove every document which cannot be reached from
-/// another document that is `lsp_sycned` (see `Handle.Status.lsp_sycned`)
-/// **Not thread safe** requires access to `DocumentStore.handles`, `DocumentStore.cimports` and `DocumentStore.build_files`
-fn garbageCollectionImports(self: *DocumentStore) error{OutOfMemory}!void {
-    const tracy_zone = tracy.trace(@src());
-    defer tracy_zone.end();
-
-    var arena: std.heap.ArenaAllocator = .init(self.allocator);
-    defer arena.deinit();
-
-    var reachable: std.DynamicBitSetUnmanaged = try .initEmpty(arena.allocator(), self.handles.count());
-
-    var queue: std.ArrayListUnmanaged(Uri) = .empty;
-
-    for (self.handles.values(), 0..) |handle, handle_index| {
-        if (!handle.getStatus().lsp_synced) continue;
-        reachable.set(handle_index);
-
-        try self.collectDependenciesInternal(arena.allocator(), handle, &queue, false);
-    }
-
-    while (queue.pop()) |uri| {
-        const handle_index = self.handles.getIndex(uri) orelse continue;
-        if (reachable.isSet(handle_index)) continue;
-        reachable.set(handle_index);
-
-        const handle = self.handles.values()[handle_index];
-
-        try self.collectDependenciesInternal(arena.allocator(), handle, &queue, false);
-    }
-
-    var it = reachable.iterator(.{
-        .kind = .unset,
-        .direction = .reverse,
-    });
-
-    while (it.next()) |handle_index| {
-        const handle = self.handles.values()[handle_index];
-        log.debug("Closing document {s}", .{handle.uri});
-        self.handles.swapRemoveAt(handle_index);
-        self.allocator.free(handle.uri);
-        handle.deinit();
-        self.allocator.destroy(handle);
-    }
-}
-
-/// see `garbageCollectionImports`
-/// **Not thread safe** requires access to `DocumentStore.handles` and `DocumentStore.cimports`
-fn garbageCollectionCImports(self: *DocumentStore) error{OutOfMemory}!void {
-    const tracy_zone = tracy.trace(@src());
-    defer tracy_zone.end();
-
-    if (self.cimports.count() == 0) return;
-
-    var reachable: std.DynamicBitSetUnmanaged = try .initEmpty(self.allocator, self.cimports.count());
-    defer reachable.deinit(self.allocator);
-
-    for (self.handles.values()) |handle| {
-        for (handle.cimports.items(.hash)) |hash| {
-            const index = self.cimports.getIndex(hash) orelse continue;
-            reachable.set(index);
-        }
-    }
-
-    var it = reachable.iterator(.{
-        .kind = .unset,
-        .direction = .reverse,
-    });
-
-    while (it.next()) |cimport_index| {
-        var result = self.cimports.values()[cimport_index];
-        const message = switch (result) {
-            .failure => "",
-            .success => |uri| uri,
-        };
-        log.debug("Destroying cimport {s}", .{message});
-        self.cimports.swapRemoveAt(cimport_index);
-        result.deinit(self.allocator);
-    }
-}
-
-/// see `garbageCollectionImports`
-/// **Not thread safe** requires access to `DocumentStore.handles` and `DocumentStore.build_files`
-fn garbageCollectionBuildFiles(self: *DocumentStore) error{OutOfMemory}!void {
-    const tracy_zone = tracy.trace(@src());
-    defer tracy_zone.end();
-
-    if (self.build_files.count() == 0) return;
-
-    var reachable: std.DynamicBitSetUnmanaged = try .initEmpty(self.allocator, self.build_files.count());
-    defer reachable.deinit(self.allocator);
-
-    var queue: std.ArrayListUnmanaged(*BuildFile) = .empty;
-    defer queue.deinit(self.allocator);
-
-    for (self.handles.values()) |handle| {
-        for (handle.getReferencedBuildFiles()) |build_file| {
-            const build_file_index = self.build_files.getIndex(build_file.uri).?;
-            if (reachable.isSet(build_file_index)) continue;
-            try queue.append(self.allocator, build_file);
-        }
-
-        if (isBuildFile(handle.uri)) blk: {
-            const build_file_index = self.build_files.getIndex(handle.uri) orelse break :blk;
-            const build_file = self.build_files.values()[build_file_index];
-            if (reachable.isSet(build_file_index)) break :blk;
-            try queue.append(self.allocator, build_file);
-        }
-    }
-
-    while (queue.pop()) |build_file| {
-        const build_file_index = self.build_files.getIndex(build_file.uri).?;
-        if (reachable.isSet(build_file_index)) continue;
-        reachable.set(build_file_index);
-
-        const build_config = build_file.tryLockConfig() orelse continue;
-        defer build_file.unlockConfig();
-
-        try queue.ensureUnusedCapacity(self.allocator, build_config.deps_build_roots.len);
-        for (build_config.deps_build_roots) |dep_build_root| {
-            const dep_build_file_uri = try URI.fromPath(self.allocator, dep_build_root.path);
-            defer self.allocator.free(dep_build_file_uri);
-            const dep_build_file = self.build_files.get(dep_build_file_uri) orelse continue;
-            queue.appendAssumeCapacity(dep_build_file);
-        }
-    }
-
-    var it = reachable.iterator(.{
-        .kind = .unset,
-        .direction = .reverse,
-    });
-
-    while (it.next()) |build_file_index| {
-        const build_file = self.build_files.values()[build_file_index];
-        log.debug("Destroying build file {s}", .{build_file.uri});
-        self.build_files.swapRemoveAt(build_file_index);
-        build_file.deinit(self.allocator);
-        self.allocator.destroy(build_file);
-    }
-}
-
 pub fn isBuildFile(uri: Uri) bool {
     return std.mem.endsWith(u8, uri, "/build.zig");
 }
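
For reference, a simplified sketch of the reachability pass the removed functions performed (plain node indices stand in for the zls handle/URI types; the helper below is hypothetical): mark everything reachable from an lsp-synced root through a work queue, then treat the unmarked remainder as garbage.

const std = @import("std");

/// `edges[i]` lists the node indices that node `i` imports; `roots` marks the
/// lsp-synced documents. Returns a bit set of the nodes that are *unreachable*
/// from any root, i.e. the ones the old code would have destroyed.
fn findUnreachable(
    allocator: std.mem.Allocator,
    edges: []const []const usize,
    roots: []const bool,
) !std.DynamicBitSetUnmanaged {
    var reachable = try std.DynamicBitSetUnmanaged.initEmpty(allocator, edges.len);
    errdefer reachable.deinit(allocator);

    var queue: std.ArrayListUnmanaged(usize) = .empty;
    defer queue.deinit(allocator);

    // Seed with every lsp-synced document and its direct dependencies.
    for (roots, 0..) |is_root, i| {
        if (!is_root) continue;
        reachable.set(i);
        try queue.appendSlice(allocator, edges[i]);
    }

    // Worklist traversal: pop, mark, enqueue the node's dependencies.
    while (queue.pop()) |i| {
        if (reachable.isSet(i)) continue;
        reachable.set(i);
        try queue.appendSlice(allocator, edges[i]);
    }

    reachable.toggleAll(); // flip so set bits now mean "unreachable"
    return reachable;
}

test findUnreachable {
    const allocator = std.testing.allocator;
    // 0 -> 1, node 2 is an orphan; only node 0 is lsp-synced.
    const edges = [_][]const usize{ &.{1}, &.{}, &.{} };
    var garbage = try findUnreachable(allocator, &edges, &.{ true, false, false });
    defer garbage.deinit(allocator);
    try std.testing.expect(!garbage.isSet(0));
    try std.testing.expect(!garbage.isSet(1));
    try std.testing.expect(garbage.isSet(2));
}
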
@@ -1569,25 +1428,15 @@ pub fn collectDependencies(
     allocator: std.mem.Allocator,
     handle: *Handle,
     dependencies: *std.ArrayListUnmanaged(Uri),
-) error{OutOfMemory}!void {
-    return store.collectDependenciesInternal(allocator, handle, dependencies, true);
-}
-
-fn collectDependenciesInternal(
-    store: *DocumentStore,
-    allocator: std.mem.Allocator,
-    handle: *Handle,
-    dependencies: *std.ArrayListUnmanaged(Uri),
-    lock: bool,
 ) error{OutOfMemory}!void {
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
     if (!supports_build_system) return;
 
     {
-        if (lock) store.lock.lockShared();
-        defer if (lock) store.lock.unlockShared();
+        store.lock.lockShared();
+        defer store.lock.unlockShared();
 
         try dependencies.ensureUnusedCapacity(allocator, handle.import_uris.items.len + handle.cimports.len);
         for (handle.import_uris.items) |uri| {
@@ -1604,16 +1453,8 @@ fn collectDependenciesInternal(
     }
 
     no_build_file: {
-        const build_file_uri = if (lock)
-            try handle.getAssociatedBuildFileUri(store) orelse break :no_build_file
-        else
-            handle.getAssociatedBuildFileUriDontResolve() orelse break :no_build_file;
-
-        const build_file = if (lock)
-            store.getBuildFile(build_file_uri) orelse break :no_build_file
-        else
-            store.build_files.get(build_file_uri) orelse break :no_build_file;
-
+        const build_file_uri = try handle.getAssociatedBuildFileUri(store) orelse break :no_build_file;
+        const build_file = store.getBuildFile(build_file_uri) orelse break :no_build_file;
        _ = try build_file.collectBuildConfigPackageUris(allocator, dependencies);
     }
 }

src/Server.zig

Lines changed: 2 additions & 2 deletions
@@ -899,8 +899,8 @@ fn didChangeWatchedFilesHandler(server: *Server, arena: std.mem.Allocator, notif
         const uri = try Uri.fromPath(arena, file_path);
 
         switch (change.type) {
-            .Created, .Changed, .Deleted => {
-                const did_update_file = try server.document_store.refreshDocumentFromFileSystem(uri);
+            .Created, .Changed, .Deleted => |kind| {
+                const did_update_file = try server.document_store.refreshDocumentFromFileSystem(uri, kind == .Deleted);
                 updated_files += @intFromBool(did_update_file);
             },
             else => {},
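
For illustration, a toy sketch of the end-to-end flow after this change (hypothetical store and types, not the zls `DocumentStore` or `Server`): only a `Deleted` watch event maps to `should_delete = true`, which drops the entry and frees its memory, while `Created`/`Changed` replace the stored contents.

const std = @import("std");

const FileChangeType = enum { Created, Changed, Deleted };

/// Toy stand-in for `refreshDocumentFromFileSystem`: `new_text` plays the role
/// of the freshly read file contents (real code would read from disk).
fn refreshDocument(
    allocator: std.mem.Allocator,
    store: *std.StringArrayHashMapUnmanaged([]u8),
    path: []const u8,
    should_delete: bool,
    new_text: []const u8,
) !bool {
    if (should_delete) {
        const kv = store.fetchSwapRemove(path) orelse return false;
        allocator.free(kv.key);
        allocator.free(kv.value);
        return true;
    }
    const contents = try allocator.dupe(u8, new_text);
    errdefer allocator.free(contents);
    if (store.getPtr(path)) |existing| {
        allocator.free(existing.*);
        existing.* = contents;
    } else {
        const owned_path = try allocator.dupe(u8, path);
        errdefer allocator.free(owned_path);
        try store.put(allocator, owned_path, contents);
    }
    return true;
}

test refreshDocument {
    const allocator = std.testing.allocator;
    var store: std.StringArrayHashMapUnmanaged([]u8) = .empty;
    defer store.deinit(allocator);

    // A `Created`/`Changed` event stores or replaces the contents.
    _ = try refreshDocument(allocator, &store, "file:///a.zig", false, "const a = 1;");
    // A `Deleted` event drops the entry entirely, mirroring `kind == .Deleted`.
    const kind: FileChangeType = .Deleted;
    _ = try refreshDocument(allocator, &store, "file:///a.zig", kind == .Deleted, "");
    try std.testing.expectEqual(@as(usize, 0), store.count());
}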
