Skip to content
4 changes: 4 additions & 0 deletions DIRECTORY.md
Original file line number Diff line number Diff line change
Expand Up @@ -723,6 +723,8 @@
* [WordLadder](https://github.com/TheAlgorithms/Java/blob/master/src/main/java/com/thealgorithms/strings/WordLadder.java)
* zigZagPattern
* [ZigZagPattern](https://github.com/TheAlgorithms/Java/blob/master/src/main/java/com/thealgorithms/strings/zigZagPattern/ZigZagPattern.java)
* tree
* [HeavyLightDecomposition](https://github.com/TheAlgorithms/Java/blob/master/src/main/java/com/thealgorithms/tree/HeavyLightDecomposition.java)
* test
* java
* com
Expand Down Expand Up @@ -1367,3 +1369,5 @@
* [WordLadderTest](https://github.com/TheAlgorithms/Java/blob/master/src/test/java/com/thealgorithms/strings/WordLadderTest.java)
* zigZagPattern
* [ZigZagPatternTest](https://github.com/TheAlgorithms/Java/blob/master/src/test/java/com/thealgorithms/strings/zigZagPattern/ZigZagPatternTest.java)
* tree
* [HeavyLightDecompositionTest](https://github.com/TheAlgorithms/Java/blob/master/src/test/java/com/thealgorithms/tree/HeavyLightDecompositionTest.java)
196 changes: 196 additions & 0 deletions src/main/java/com/thealgorithms/tree/HeavyLightDecomposition.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,196 @@
package com.thealgorithms.tree;

/**
* Heavy-Light Decomposition (HLD) implementation in Java.
*
* HLD is used to efficiently handle path queries on trees, such as maximum, sum, or updates.
* It decomposes the tree into heavy and light chains, enabling queries in O(log N) time.
*
* Wikipedia Reference: https://en.wikipedia.org/wiki/Heavy-light_decomposition
*
* Author: Nithin U.
* Github: https://github.com/NithinU2802
*
*/

import java.util.ArrayList;
import java.util.List;

public class HeavyLightDecomposition {

    /** Adjacency list of the tree; nodes are expected to be 1-indexed, index 0 unused. */
    private final List<Integer>[] tree;
    /** parent[v] = parent of v in the rooted tree (-1 for the root). */
    private final int[] parent;
    /** depth[v] = distance (in edges) from the root to v. */
    private final int[] depth;
    /** subtreeSize[v] = number of nodes in the subtree rooted at v. */
    private final int[] subtreeSize;
    /** chainHead[v] = topmost node of the chain containing v; -1 means "not decomposed". */
    private final int[] chainHead;
    /** position[v] = index of v in the linearized (segment-tree) order. */
    private final int[] position;
    /** Node values re-ordered by position; source data for the segment tree. */
    private final int[] nodeValue;
    /** Max segment tree over nodeValue, 1-indexed, children of i at 2i and 2i+1. */
    private final int[] segmentTree;
    /** Next free slot in the linearized order; equals the number of visited nodes. */
    private int positionIndex;

    /**
     * Returns the linearized position of the given node.
     *
     * @param index node id
     * @return position of the node in segment-tree order
     */
    public int getPosition(int index) {
        return position[index];
    }

    /**
     * Returns the number of nodes placed by the decomposition; the valid
     * segment-tree range is {@code [0, getPositionIndex() - 1]}.
     *
     * @return count of decomposed nodes
     */
    public int getPositionIndex() {
        return positionIndex;
    }

    /**
     * Creates an empty structure capable of holding a tree with nodes {@code 1..n}.
     *
     * @param n maximum node id
     */
    @SuppressWarnings("unchecked")
    public HeavyLightDecomposition(int n) {
        // Java forbids generic array creation, so a raw ArrayList[] is used;
        // the constructor-level @SuppressWarnings covers the unchecked assignment.
        tree = new ArrayList[n + 1];
        parent = new int[n + 1];
        depth = new int[n + 1];
        subtreeSize = new int[n + 1];
        chainHead = new int[n + 1];
        position = new int[n + 1];
        nodeValue = new int[n + 1];
        segmentTree = new int[4 * (n + 1)];

        for (int i = 0; i <= n; i++) {
            tree[i] = new ArrayList<>();
            chainHead[i] = -1; // -1 marks a node not yet assigned to a chain
        }
        positionIndex = 0;
    }

    /**
     * Adds an undirected edge between nodes {@code u} and {@code v}.
     *
     * @param u one endpoint
     * @param v the other endpoint
     */
    public void addEdge(int u, int v) {
        tree[u].add(v);
        tree[v].add(u);
    }

    /**
     * First DFS: records parent and depth of every node and computes subtree
     * sizes, which later determine each node's heavy child.
     *
     * @param node       current node
     * @param parentNode parent of {@code node} (-1 for the root)
     */
    private void dfsSize(int node, int parentNode) {
        parent[node] = parentNode;
        subtreeSize[node] = 1;

        for (int child : tree[node]) {
            if (child != parentNode) {
                depth[child] = depth[node] + 1;
                dfsSize(child, node);
                subtreeSize[node] += subtreeSize[child];
            }
        }
    }

    /**
     * Second DFS: assigns every node to a chain and a position in the
     * linearized order. The heavy child (largest subtree) continues the
     * current chain so that heavy-chain nodes occupy contiguous positions;
     * every light child starts a new chain headed by itself.
     *
     * @param node current node
     * @param head head of the chain {@code node} belongs to
     */
    private void decompose(int node, int head) {
        chainHead[node] = head;
        position[node] = positionIndex++;

        // Pick the heavy child: the child with the largest subtree.
        int heavyChild = -1;
        int maxSubtreeSize = -1;
        for (int child : tree[node]) {
            if (child != parent[node] && subtreeSize[child] > maxSubtreeSize) {
                heavyChild = child;
                maxSubtreeSize = subtreeSize[child];
            }
        }

        // Heavy child first, so the chain is contiguous in position order.
        if (heavyChild != -1) {
            decompose(heavyChild, head);
        }

        for (int child : tree[node]) {
            if (child != parent[node] && child != heavyChild) {
                decompose(child, child); // light child heads its own chain
            }
        }
    }

    /**
     * Builds the max segment tree over {@code nodeValue[start..end]}.
     *
     * @param node  segment-tree node index (root is 1)
     * @param start left boundary of the segment (inclusive)
     * @param end   right boundary of the segment (inclusive)
     */
    private void buildSegmentTree(int node, int start, int end) {
        if (start == end) {
            segmentTree[node] = nodeValue[start];
            return;
        }

        int mid = (start + end) >>> 1; // overflow-safe midpoint
        buildSegmentTree(2 * node, start, mid);
        buildSegmentTree(2 * node + 1, mid + 1, end);

        segmentTree[node] = Math.max(segmentTree[2 * node], segmentTree[2 * node + 1]);
    }

    /**
     * Point-updates the value at linearized position {@code index}.
     *
     * @param node  segment-tree node index (pass 1 for the root)
     * @param start left boundary of this node's segment (inclusive)
     * @param end   right boundary of this node's segment (inclusive)
     * @param index linearized position to update (see {@link #getPosition(int)})
     * @param value new value
     */
    public void updateSegmentTree(int node, int start, int end, int index, int value) {
        if (start == end) {
            segmentTree[node] = value;
            return;
        }

        int mid = (start + end) >>> 1; // overflow-safe midpoint
        if (index <= mid) {
            updateSegmentTree(2 * node, start, mid, index, value);
        } else {
            updateSegmentTree(2 * node + 1, mid + 1, end, index, value);
        }

        segmentTree[node] = Math.max(segmentTree[2 * node], segmentTree[2 * node + 1]);
    }

    /**
     * Queries the maximum value in the linearized range {@code [left, right]}.
     *
     * @param node  segment-tree node index (pass 1 for the root)
     * @param start left boundary of this node's segment (inclusive)
     * @param end   right boundary of this node's segment (inclusive)
     * @param left  query range start (inclusive)
     * @param right query range end (inclusive)
     * @return maximum value in the range, or {@link Integer#MIN_VALUE} for an empty overlap
     */
    public int querySegmentTree(int node, int start, int end, int left, int right) {
        if (left > end || right < start) {
            return Integer.MIN_VALUE; // no overlap: neutral element for max
        }

        if (left <= start && end <= right) {
            return segmentTree[node]; // fully covered segment
        }

        int mid = (start + end) >>> 1; // overflow-safe midpoint
        int leftQuery = querySegmentTree(2 * node, start, mid, left, right);
        int rightQuery = querySegmentTree(2 * node + 1, mid + 1, end, left, right);

        return Math.max(leftQuery, rightQuery);
    }

    /**
     * Returns the maximum node value on the path between {@code u} and {@code v}.
     * Repeatedly lifts the endpoint whose chain head is deeper, querying one
     * whole chain segment per step, until both endpoints share a chain.
     *
     * @param u one endpoint of the path
     * @param v the other endpoint of the path
     * @return maximum value on the u-v path
     */
    public int queryMaxInPath(int u, int v) {
        int result = Integer.MIN_VALUE;

        while (chainHead[u] != chainHead[v]) {
            // Ensure u's chain head is the deeper one, then jump above it.
            if (depth[chainHead[u]] < depth[chainHead[v]]) {
                int temp = u;
                u = v;
                v = temp;
            }

            result = Math.max(result, querySegmentTree(1, 0, positionIndex - 1, position[chainHead[u]], position[u]));
            u = parent[chainHead[u]];
        }

        // Same chain now: make u the shallower endpoint so positions are ordered.
        if (depth[u] > depth[v]) {
            int temp = u;
            u = v;
            v = temp;
        }

        result = Math.max(result, querySegmentTree(1, 0, positionIndex - 1, position[u], position[v]));
        return result;
    }

    /**
     * Runs both DFS passes, maps node values to their linearized positions,
     * and builds the segment tree. Call after all edges have been added.
     *
     * @param root   root node of the tree
     * @param values values indexed by node id ({@code values[v]} is the value of node v)
     */
    public void initialize(int root, int[] values) {
        dfsSize(root, -1);
        decompose(root, root);
        // Only map nodes actually visited by the decomposition. Unvisited
        // indices (e.g. 0 in a 1-indexed tree) keep their default
        // position == 0, which aliases the root's position and would
        // otherwise overwrite the root's value.
        for (int i = 0; i < values.length; i++) {
            if (chainHead[i] != -1) {
                nodeValue[position[i]] = values[i];
            }
        }
        buildSegmentTree(1, 0, positionIndex - 1);
    }
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
package com.thealgorithms.tree;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class HeavyLightDecompositionTest {

    private HeavyLightDecomposition hld;
    // values[v] is the value of node v; index 0 is unused filler.
    private final int[] values = new int[] {0, 10, 20, 30, 40, 50};

    /**
     * Initializes the test environment with a predefined tree structure and values.
     */
    @BeforeEach
    void setUp() {
        hld = new HeavyLightDecomposition(5);
        hld.addEdge(1, 2);
        hld.addEdge(1, 3);
        hld.addEdge(2, 4);
        hld.addEdge(2, 5);
        hld.initialize(1, values);
    }

    /**
     * Tests the basic initialization of the tree structure.
     * Expected: all five nodes are placed by the decomposition.
     */
    @Test
    void testBasicTreeInitialization() {
        // A vacuous assertTrue(true) cannot fail; assert real structure instead.
        assertEquals(5, hld.getPositionIndex(), "All 5 nodes should be decomposed");
        assertTrue(hld.getPosition(1) >= 0, "Root must have a valid position");
    }

    /**
     * Tests the maximum value query in a path between nodes.
     * Expected: The max value in the path (4,5) should be 50.
     * Expected: The max value in the path (3,2) should be 30.
     */
    @Test
    void testQueryMaxInPath() {
        assertEquals(50, hld.queryMaxInPath(4, 5), "Max value in path should be 50");
        assertEquals(30, hld.queryMaxInPath(3, 2), "Max value in path should be 30");
    }

    /**
     * Tests updating a node's value and ensuring it's reflected in queries.
     * Expected: The updated node's value should affect query results.
     */
    @Test
    void testUpdateNodeValue() {
        hld.updateSegmentTree(1, 0, hld.getPositionIndex() - 1, hld.getPosition(4), 100);
        assertEquals(100, hld.queryMaxInPath(4, 5), "Updated value should be reflected in query");
    }

    /**
     * Tests a path query along a root-to-leaf chain.
     * Expected: The max value in the path (1,4) should be 40.
     */
    @Test
    void testSkewedTreeMaxQuery() {
        assertEquals(40, hld.queryMaxInPath(1, 4), "Max value in skewed tree should be 40");
    }

    /**
     * Tests that queries are symmetric in their arguments.
     * Expected: When called with u as a deeper node, it should swap correctly.
     */
    @Test
    void testDepthSwapInPathQuery() {
        assertEquals(50, hld.queryMaxInPath(5, 2), "Query should handle depth swap correctly");
        assertEquals(40, hld.queryMaxInPath(4, 1), "Query handle swap nodes and return max value");
    }
}
Loading