Skip to content

Commit adfb68b

Browse files
q2veng authored and gregkh committed
af_unix: Avoid Tarjan's algorithm if unnecessary.
commit ad081928a8b0f57f269df999a28087fce6f2b6ce upstream. Once a cyclic reference is formed, we need to run GC to check whether there is a dead SCC. However, we do not need to run Tarjan's algorithm if we know that the shape of the inflight graph has not changed. If an edge is added/updated/deleted and the edge's successor is inflight, we set unix_graph_grouped to false, which means we need to re-classify the SCCs. Once we finalise the SCCs, we set unix_graph_grouped to true. While unix_graph_grouped is true, we can iterate the grouped SCCs using vertex->scc_entry in unix_walk_scc_fast(). The list_add() and list_for_each_entry_reverse() uses may seem odd, but they keep the vertex order consistent and make writing tests easier. Signed-off-by: Kuniyuki Iwashima <[email protected]> Acked-by: Paolo Abeni <[email protected]> Link: https://lore.kernel.org/r/[email protected] Signed-off-by: Jakub Kicinski <[email protected]> Signed-off-by: Lee Jones <[email protected]> Signed-off-by: Greg Kroah-Hartman <[email protected]>
1 parent b5b54a3 commit adfb68b

File tree

1 file changed

+27
-1
lines changed

1 file changed

+27
-1
lines changed

net/unix/garbage.c

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,6 +113,7 @@ static struct unix_vertex *unix_edge_successor(struct unix_edge *edge)
113113
}
114114

115115
static bool unix_graph_maybe_cyclic;
116+
static bool unix_graph_grouped;
116117

117118
static void unix_update_graph(struct unix_vertex *vertex)
118119
{
@@ -123,6 +124,7 @@ static void unix_update_graph(struct unix_vertex *vertex)
123124
return;
124125

125126
unix_graph_maybe_cyclic = true;
127+
unix_graph_grouped = false;
126128
}
127129

128130
static LIST_HEAD(unix_unvisited_vertices);
@@ -144,6 +146,7 @@ static void unix_add_edge(struct scm_fp_list *fpl, struct unix_edge *edge)
144146
vertex->index = unix_vertex_unvisited_index;
145147
vertex->out_degree = 0;
146148
INIT_LIST_HEAD(&vertex->edges);
149+
INIT_LIST_HEAD(&vertex->scc_entry);
147150

148151
list_move_tail(&vertex->entry, &unix_unvisited_vertices);
149152
edge->predecessor->vertex = vertex;
@@ -418,6 +421,26 @@ static void unix_walk_scc(void)
418421

419422
list_replace_init(&unix_visited_vertices, &unix_unvisited_vertices);
420423
swap(unix_vertex_unvisited_index, unix_vertex_grouped_index);
424+
425+
unix_graph_grouped = true;
426+
}
427+
428+
static void unix_walk_scc_fast(void)
429+
{
430+
while (!list_empty(&unix_unvisited_vertices)) {
431+
struct unix_vertex *vertex;
432+
struct list_head scc;
433+
434+
vertex = list_first_entry(&unix_unvisited_vertices, typeof(*vertex), entry);
435+
list_add(&scc, &vertex->scc_entry);
436+
437+
list_for_each_entry_reverse(vertex, &scc, scc_entry)
438+
list_move_tail(&vertex->entry, &unix_visited_vertices);
439+
440+
list_del(&scc);
441+
}
442+
443+
list_replace_init(&unix_visited_vertices, &unix_unvisited_vertices);
421444
}
422445

423446
static LIST_HEAD(gc_candidates);
@@ -570,7 +593,10 @@ static void __unix_gc(struct work_struct *work)
570593
if (!unix_graph_maybe_cyclic)
571594
goto skip_gc;
572595

573-
unix_walk_scc();
596+
if (unix_graph_grouped)
597+
unix_walk_scc_fast();
598+
else
599+
unix_walk_scc();
574600

575601
/* First, select candidates for garbage collection. Only
576602
* in-flight sockets are considered, and from those only ones

0 commit comments

Comments
 (0)