diff --git a/Algorithms.Tests/Crypto/Digests/AsconDigestTests.cs b/Algorithms.Tests/Crypto/Digests/AsconDigestTests.cs index 11d5fe5f..08f62758 100644 --- a/Algorithms.Tests/Crypto/Digests/AsconDigestTests.cs +++ b/Algorithms.Tests/Crypto/Digests/AsconDigestTests.cs @@ -159,7 +159,7 @@ public void AsconHash_WhenGetByteLengthIsCalled_ReturnsCorrectValue() public void Update_ShouldProcessByte_WhenBufferIsFull() { // Arrange - byte[] inputBytes = { 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77 }; // 8 bytes to fill the buffer + byte[] inputBytes = [0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77]; // 8 bytes to fill the buffer // Act foreach (var input in inputBytes) @@ -178,7 +178,7 @@ public void Update_ShouldProcessByte_WhenBufferIsFull() public void Update_ShouldNotProcess_WhenBufferIsNotFull() { // Arrange - byte[] inputBytes = { 0x00, 0x11, 0x22, 0x33 }; // Only 4 bytes (buffer is not full) + byte[] inputBytes = [0x00, 0x11, 0x22, 0x33]; // Only 4 bytes (buffer is not full) // Act foreach (var input in inputBytes) @@ -233,7 +233,7 @@ public void Update_ShouldHandleSingleByteCorrectly() public void Update_ShouldAccumulateStateWithMultipleUpdates() { // Arrange - byte[] inputBytes = { 0x00, 0x11, 0x22 }; // Partial input + byte[] inputBytes = [0x00, 0x11, 0x22]; // Partial input // Act foreach (var input in inputBytes) @@ -242,7 +242,7 @@ public void Update_ShouldAccumulateStateWithMultipleUpdates() } // Add more data to fill the buffer. - byte[] additionalBytes = { 0x33, 0x44, 0x55, 0x66, 0x77 }; + byte[] additionalBytes = [0x33, 0x44, 0x55, 0x66, 0x77]; foreach (var input in additionalBytes) { asconHashA.Update(input); diff --git a/Algorithms.Tests/Crypto/Paddings/Iso7816D4PaddingTests.cs b/Algorithms.Tests/Crypto/Paddings/Iso7816D4PaddingTests.cs index 57e9c74a..ef07cefc 100644 --- a/Algorithms.Tests/Crypto/Paddings/Iso7816D4PaddingTests.cs +++ b/Algorithms.Tests/Crypto/Paddings/Iso7816D4PaddingTests.cs @@ -1,4 +1,4 @@ -using Algorithms.Crypto.Paddings; +using Algorithms.Crypto.Paddings; namespace Algorithms.Tests.Crypto.Paddings; @@ -75,7 +75,7 @@ public void RemovePadding_WhenCalledWithValidInput_shouldReturnCorrectData() var result = padding.RemovePadding(inputData); - result.Should().Equal(new byte[] { 1, 2, 3, 4, 5 }); + result.Should().Equal([1, 2, 3, 4, 5]); } [Test] diff --git a/Algorithms.Tests/Crypto/Utils/ByteEncodingUtils.cs b/Algorithms.Tests/Crypto/Utils/ByteEncodingUtils.cs index c8d5081b..fb90b25a 100644 --- a/Algorithms.Tests/Crypto/Utils/ByteEncodingUtils.cs +++ b/Algorithms.Tests/Crypto/Utils/ByteEncodingUtils.cs @@ -9,7 +9,7 @@ public class ByteEncodingUtilsTests public void BigEndianToUint64_ByteArray_ShouldConvertCorrectly() { // Arrange - byte[] input = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF }; + byte[] input = [0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF]; var expected = 0x0123456789ABCDEFUL; // Act @@ -23,7 +23,7 @@ public void BigEndianToUint64_ByteArray_ShouldConvertCorrectly() public void BigEndianToUint64_ByteArray_WithOffset_ShouldConvertCorrectly() { // Arrange - byte[] input = { 0x00, 0x00, 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF }; + byte[] input = [0x00, 0x00, 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF]; var expected = 0x0123456789ABCDEFUL; // Act @@ -53,7 +53,7 @@ public void UInt64ToBigEndian_ShouldWriteCorrectly() // Arrange var value = 0x0123456789ABCDEFUL; Span output = stackalloc byte[8]; - byte[] expected = { 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF }; + byte[] expected = [0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 
0xEF]; // Act ByteEncodingUtils.UInt64ToBigEndian(value, output); @@ -66,7 +66,7 @@ public void UInt64ToBigEndian_ShouldWriteCorrectly() public void BigEndianToUint64_InvalidOffset_ShouldThrowException() { // Arrange - byte[] input = { 0x01, 0x23 }; + byte[] input = [0x01, 0x23]; // Act Action act = () => ByteEncodingUtils.BigEndianToUint64(input, 1); diff --git a/Algorithms.Tests/Encoders/NysiisEncoderTests.cs b/Algorithms.Tests/Encoders/NysiisEncoderTests.cs index f1ff9b78..6d3d7f22 100644 --- a/Algorithms.Tests/Encoders/NysiisEncoderTests.cs +++ b/Algorithms.Tests/Encoders/NysiisEncoderTests.cs @@ -5,16 +5,16 @@ namespace Algorithms.Tests.Encoders; public class NysiisEncoderTests { private static readonly string[] Names = - { + [ "Jay", "John", "Jane", "Zayne", "Guerra", "Iga", "Cowan", "Louisa", "Arnie", "Olsen", "Corban", "Nava", "Cynthia Malone", "Amiee MacKee", "MacGyver", "Yasmin Edge", - }; + ]; private static readonly string[] Expected = - { + [ "JY", "JAN", "JAN", "ZAYN", "GAR", "IG", "CAN", "LAS", "ARNY", "OLSAN", "CARBAN", "NAV", "CYNTANALAN", "ANANACY", "MCGYVAR", "YASNANADG", - }; + ]; private static IEnumerable TestData => Names.Zip(Expected, (l, r) => new[] { l, r }); diff --git a/Algorithms.Tests/Encoders/SoundexEncoderTest.cs b/Algorithms.Tests/Encoders/SoundexEncoderTest.cs index d8f534fd..c84503ca 100644 --- a/Algorithms.Tests/Encoders/SoundexEncoderTest.cs +++ b/Algorithms.Tests/Encoders/SoundexEncoderTest.cs @@ -5,11 +5,11 @@ namespace Algorithms.Tests.Encoders; public static class SoundexEncoderTest { private static readonly string[] Names = - { + [ "Robert", "Rupert", "Rubin", "Ashcraft", "Ashcroft", "Tymczak", "Pfister", "Honeyman", - }; + ]; - private static readonly string[] Expected = { "R163", "R163", "R150", "A261", "A261", "T522", "P236", "H555" }; + private static readonly string[] Expected = ["R163", "R163", "R150", "A261", "A261", "T522", "P236", "H555"]; private static IEnumerable TestData => Names.Zip(Expected, (l, r) => new[] { l, r }); diff --git a/Algorithms.Tests/Graph/BellmanFordTests.cs b/Algorithms.Tests/Graph/BellmanFordTests.cs index 592639db..1483abe7 100644 --- a/Algorithms.Tests/Graph/BellmanFordTests.cs +++ b/Algorithms.Tests/Graph/BellmanFordTests.cs @@ -35,7 +35,7 @@ public void CorrectDistancesTest() { vertex5, -4 } }; - var bellmanFord = new BellmanFord(graph, new Dictionary, double>(), new Dictionary, Vertex?>()); + var bellmanFord = new BellmanFord(graph, [], []); var calculatedDistances = bellmanFord.Run(vertex1); @@ -61,7 +61,7 @@ public void NegativeWeightCycleTest() graph.AddEdge(vertex2, vertex3, -2); graph.AddEdge(vertex3, vertex1, -3); - var bellmanFord = new BellmanFord(graph, new Dictionary, double>(), new Dictionary, Vertex?>()); + var bellmanFord = new BellmanFord(graph, [], []); Action action = () => bellmanFord.Run(vertex1); diff --git a/Algorithms.Tests/Graph/BreadthFirstTreeTraversalTests.cs b/Algorithms.Tests/Graph/BreadthFirstTreeTraversalTests.cs index 26b32bc0..048336bb 100644 --- a/Algorithms.Tests/Graph/BreadthFirstTreeTraversalTests.cs +++ b/Algorithms.Tests/Graph/BreadthFirstTreeTraversalTests.cs @@ -9,8 +9,8 @@ public static class BreadthFirstTreeTraversalTests public static void CorrectLevelOrderTraversal() { // Arrange - int[] correctPath = { 7, 4, 13, 2, 5, 11, 15, 14, 16 }; - int[] insertionOrder = { 7, 13, 11, 15, 14, 4, 5, 16, 2 }; + int[] correctPath = [7, 4, 13, 2, 5, 11, 15, 14, 16]; + int[] insertionOrder = [7, 13, 11, 15, 14, 4, 5, 16, 2]; BinarySearchTree testTree = new BinarySearchTree(); foreach 
(int data in insertionOrder) { @@ -60,7 +60,7 @@ public static void DeepestNodeInTree() { // Arrange BinarySearchTree testTree = new BinarySearchTree(); - int[] insertion = { 7, 13, 11, 15, 4, 5, 12, 2, 9 }; + int[] insertion = [7, 13, 11, 15, 4, 5, 12, 2, 9]; foreach (int data in insertion) { testTree.Add(data); diff --git a/Algorithms.Tests/LinearAlgebra/Eigenvalue/PowerIterationTests.cs b/Algorithms.Tests/LinearAlgebra/Eigenvalue/PowerIterationTests.cs index a325a336..a0da77ae 100644 --- a/Algorithms.Tests/LinearAlgebra/Eigenvalue/PowerIterationTests.cs +++ b/Algorithms.Tests/LinearAlgebra/Eigenvalue/PowerIterationTests.cs @@ -5,7 +5,7 @@ namespace Algorithms.Tests.LinearAlgebra.Eigenvalue; public class PowerIterationTests { private static readonly object[] DominantVectorTestCases = - { + [ new object[] { 3.0, @@ -18,7 +18,7 @@ public class PowerIterationTests new[] { 0.91287093, 0.40824829 }, new[,] { { 2.0, 5.0 }, { 1.0, 2.0 } }, }, - }; + ]; private readonly double epsilon = Math.Pow(10, -5); diff --git a/Algorithms.Tests/Numeric/AutomorphicNumberTests.cs b/Algorithms.Tests/Numeric/AutomorphicNumberTests.cs index 895514e5..23483ec1 100644 --- a/Algorithms.Tests/Numeric/AutomorphicNumberTests.cs +++ b/Algorithms.Tests/Numeric/AutomorphicNumberTests.cs @@ -55,21 +55,21 @@ public void TestInvalidAutomorphicNumbers(int number) [TestCase(1, 100)] public void TestAutomorphicNumberSequence(int lower, int upper) { - List automorphicList = new() { 1, 5, 6, 25, 76 }; + List automorphicList = [1, 5, 6, 25, 76]; Assert.That(AutomorphicNumber.GetAutomorphicNumbers(lower, upper), Is.EqualTo(automorphicList)); } [TestCase(8, 12)] public void TestNoAutomorphicNumberInTheSequence(int lower, int upper) { - List automorphicList = new(); + List automorphicList = []; Assert.That(AutomorphicNumber.GetAutomorphicNumbers(lower, upper), Is.EqualTo(automorphicList)); } [TestCase(25, 25)] public void TestAutomorphicNumberSequenceSameBounds(int lower, int upper) { - List automorphicList = new() { 25 }; + List automorphicList = [25]; Assert.That(AutomorphicNumber.GetAutomorphicNumbers(lower, upper), Is.EqualTo(automorphicList)); } diff --git a/Algorithms.Tests/Numeric/NewtonSquareRootTests.cs b/Algorithms.Tests/Numeric/NewtonSquareRootTests.cs index 33070e2a..28e00661 100644 --- a/Algorithms.Tests/Numeric/NewtonSquareRootTests.cs +++ b/Algorithms.Tests/Numeric/NewtonSquareRootTests.cs @@ -1,9 +1,9 @@ -namespace Algorithms.Tests.Numeric; +namespace Algorithms.Tests.Numeric; public class NewtonSquareRootTests { private static readonly object[] CalculateSquareRootInput = - { + [ new object[] {BigInteger.One, BigInteger.One}, new object[] {new BigInteger(221295376), new BigInteger(14876)}, new object[] {new BigInteger(2530995481), new BigInteger(50309)}, @@ -13,7 +13,7 @@ public class NewtonSquareRootTests new object[] {new BigInteger(5551442064), new BigInteger(74508)}, new object[] {new BigInteger(6980435401), new BigInteger(83549)}, new object[] {new BigInteger(8036226025), new BigInteger(89645)}, - }; + ]; [TestCaseSource(nameof(CalculateSquareRootInput))] public void CalculateSquareRootTest(BigInteger number, BigInteger result) diff --git a/Algorithms.Tests/Other/JulianEasterTests.cs b/Algorithms.Tests/Other/JulianEasterTests.cs index 22305196..152fe4fd 100644 --- a/Algorithms.Tests/Other/JulianEasterTests.cs +++ b/Algorithms.Tests/Other/JulianEasterTests.cs @@ -16,12 +16,12 @@ public static void CalculateTest(int year, DateTime expected) } private static readonly object[] CalculateCases = - { + [ new object[] 
{ 1800, new DateTime(1800, 04, 08, 00, 00, 00, DateTimeKind.Utc) }, new object[] { 1950, new DateTime(1950, 03, 27, 00, 00, 00, DateTimeKind.Utc) }, new object[] { 1991, new DateTime(1991, 03, 25, 00, 00, 00, DateTimeKind.Utc) }, new object[] { 2000, new DateTime(2000, 04, 17, 00, 00, 00, DateTimeKind.Utc) }, new object[] { 2199, new DateTime(2199, 04, 07, 00, 00, 00, DateTimeKind.Utc) } - }; + ]; } diff --git a/Algorithms.Tests/Other/KochSnowflakeTest.cs b/Algorithms.Tests/Other/KochSnowflakeTest.cs index 96f5b39d..61638efd 100644 --- a/Algorithms.Tests/Other/KochSnowflakeTest.cs +++ b/Algorithms.Tests/Other/KochSnowflakeTest.cs @@ -8,7 +8,7 @@ public static class KochSnowflakeTest [Test] public static void TestIterateMethod() { - List vectors = new() { new Vector2(0, 0), new Vector2(1, 0) }; + List vectors = [new Vector2(0, 0), new Vector2(1, 0)]; List result = KochSnowflake.Iterate(vectors, 1); result[0].Should().Be(new Vector2(0, 0)); result[1].Should().Be(new Vector2((float)1 / 3, 0)); diff --git a/Algorithms.Tests/Other/SieveOfEratosthenesTests.cs b/Algorithms.Tests/Other/SieveOfEratosthenesTests.cs index 276a59a3..331584cb 100644 --- a/Algorithms.Tests/Other/SieveOfEratosthenesTests.cs +++ b/Algorithms.Tests/Other/SieveOfEratosthenesTests.cs @@ -5,7 +5,7 @@ namespace Algorithms.Tests.Other; public static class SieveOfEratosthenesTests { private static readonly long[] First10000PrimeNumbers = - { + [ 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331, 337, 347, @@ -662,7 +662,7 @@ public static class SieveOfEratosthenesTests 104383, 104393, 104399, 104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543, 104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659, 104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729, - }; + ]; [Test] public static void First10_000PrimesCorrect() => diff --git a/Algorithms.Tests/Other/WelfordsVarianceTest.cs b/Algorithms.Tests/Other/WelfordsVarianceTest.cs index c99ceb8f..baaccdf0 100644 --- a/Algorithms.Tests/Other/WelfordsVarianceTest.cs +++ b/Algorithms.Tests/Other/WelfordsVarianceTest.cs @@ -64,7 +64,7 @@ public void WelfordVariance_Example4() [Test] public void WelfordVariance_Example5() { - var stats = new WelfordsVariance(new double[] { 2, 2, 5, 7 }); + var stats = new WelfordsVariance([2, 2, 5, 7]); Assert.That(stats.Count, Is.EqualTo(4)); Assert.That(stats.Mean, Is.EqualTo(4).Within(0.0000001)); Assert.That(stats.Variance, Is.EqualTo(4.5).Within(0.0000001)); @@ -75,7 +75,7 @@ public void WelfordVariance_Example5() public void WelfordVariance_Example6() { var stats = new WelfordsVariance(); - stats.AddRange(new double[] { 2, 4, 4, 4, 5, 5, 7, 9 }); + stats.AddRange([2, 4, 4, 4, 5, 5, 7, 9]); Assert.That(stats.Count, Is.EqualTo(8)); Assert.That(stats.Mean, Is.EqualTo(5).Within(0.0000001)); Assert.That(stats.Variance, Is.EqualTo(4).Within(0.0000001)); @@ -86,7 +86,7 @@ public void WelfordVariance_Example6() public void WelfordVariance_Example7() { var stats = new WelfordsVariance(); - stats.AddRange(new double[] { 9, 2, 5, 4, 12, 7, 8, 11, 9, 3, 7, 4, 12, 5, 4, 10, 9, 6, 9, 4 }); + stats.AddRange([9, 2, 5, 4, 12, 7, 8, 11, 9, 3, 7, 4, 12, 5, 4, 10, 9, 6, 9, 4]); Assert.That(stats.Count, Is.EqualTo(20)); Assert.That(stats.Mean, 
Is.EqualTo(7).Within(0.0000001)); Assert.That(stats.Variance, Is.EqualTo(8.9).Within(0.0000001)); @@ -97,7 +97,7 @@ public void WelfordVariance_Example7() public void WelfordVariance_Example8() { var stats = new WelfordsVariance(); - stats.AddRange(new[] { 51.3, 55.6, 49.9, 52.0 }); + stats.AddRange([51.3, 55.6, 49.9, 52.0]); Assert.That(stats.Count, Is.EqualTo(4)); Assert.That(stats.Mean, Is.EqualTo(52.2).Within(0.0000001)); Assert.That(stats.Variance, Is.EqualTo(4.4250000).Within(0.0000001)); @@ -108,7 +108,7 @@ public void WelfordVariance_Example8() public void WelfordVariance_Example9() { var stats = new WelfordsVariance(); - stats.AddRange(new double[] { -5, -3, -1, 1, 3 }); + stats.AddRange([-5, -3, -1, 1, 3]); Assert.That(stats.Count, Is.EqualTo(5)); Assert.That(stats.Mean, Is.EqualTo(-1).Within(0.0000001)); Assert.That(stats.Variance, Is.EqualTo(8).Within(0.0000001)); @@ -119,7 +119,7 @@ public void WelfordVariance_Example9() public void WelfordVariance_Example10() { var stats = new WelfordsVariance(); - stats.AddRange(new double[] { -1, 0, 1 }); + stats.AddRange([-1, 0, 1]); Assert.That(stats.Count, Is.EqualTo(3)); Assert.That(stats.Mean, Is.EqualTo(0).Within(0.0000001)); Assert.That(stats.Variance, Is.EqualTo(0.6666667).Within(0.0000001)); diff --git a/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/GenerateSingleCoinChangesTests.cs b/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/GenerateSingleCoinChangesTests.cs index 94f1d0a3..179f86ab 100644 --- a/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/GenerateSingleCoinChangesTests.cs +++ b/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/GenerateSingleCoinChangesTests.cs @@ -9,22 +9,22 @@ public class GenerateSingleCoinChangesTests public void GenerateSingleCoinChangesTests_Success() { DynamicCoinChangeSolver - .GenerateSingleCoinChanges(6, new[] { 1, 2, 3 }) + .GenerateSingleCoinChanges(6, [1, 2, 3]) .SequenceEqual(new[] { 3, 4, 5 }) .Should().BeTrue(); DynamicCoinChangeSolver - .GenerateSingleCoinChanges(10, new[] { 1, 2, 3, 7, 12, 15, 14 }) + .GenerateSingleCoinChanges(10, [1, 2, 3, 7, 12, 15, 14]) .SequenceEqual(new[] { 3, 7, 8, 9 }) .Should().BeTrue(); DynamicCoinChangeSolver - .GenerateSingleCoinChanges(1, new[] { 1, 2, 3, 7, 12, 15, 14 }) + .GenerateSingleCoinChanges(1, [1, 2, 3, 7, 12, 15, 14]) .SequenceEqual(new[] { 0 }) .Should().BeTrue(); DynamicCoinChangeSolver - .GenerateSingleCoinChanges(2, new[] { 1, 2, 3, 7, 12, 15, 14 }) + .GenerateSingleCoinChanges(2, [1, 2, 3, 7, 12, 15, 14]) .SequenceEqual(new[] { 0, 1 }) .Should().BeTrue(); } diff --git a/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/MakeCoinChangeDynamicTests.cs b/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/MakeCoinChangeDynamicTests.cs index 0003c806..b89db370 100644 --- a/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/MakeCoinChangeDynamicTests.cs +++ b/Algorithms.Tests/Problems/DynamicProgramming/CoinChange/MakeCoinChangeDynamicTests.cs @@ -9,22 +9,22 @@ public class MakeCoinChangeDynamicTests public void MakeCoinChangeDynamicTest_Success() { DynamicCoinChangeSolver - .MakeCoinChangeDynamic(6, new[] { 1, 3, 4 }) + .MakeCoinChangeDynamic(6, [1, 3, 4]) .SequenceEqual(new[] { 3, 3 }) .Should().BeTrue(); DynamicCoinChangeSolver - .MakeCoinChangeDynamic(8, new[] { 1, 3, 4 }) + .MakeCoinChangeDynamic(8, [1, 3, 4]) .SequenceEqual(new[] { 4, 4 }) .Should().BeTrue(); DynamicCoinChangeSolver - .MakeCoinChangeDynamic(25, new[] { 1, 3, 4, 12, 13, 14 }) + .MakeCoinChangeDynamic(25, [1, 3, 4, 12, 13, 14]) 
.SequenceEqual(new[] { 13, 12 }) .Should().BeTrue(); DynamicCoinChangeSolver - .MakeCoinChangeDynamic(26, new[] { 1, 3, 4, 12, 13, 14 }) + .MakeCoinChangeDynamic(26, [1, 3, 4, 12, 13, 14]) .SequenceEqual(new[] { 14, 12 }) .Should().BeTrue(); } diff --git a/Algorithms.Tests/Search/AStarTests.cs b/Algorithms.Tests/Search/AStarTests.cs new file mode 100644 index 00000000..873a56bc --- /dev/null +++ b/Algorithms.Tests/Search/AStarTests.cs @@ -0,0 +1,106 @@ + +using System.Reflection; + +using Algorithms.Search.AStar; + +namespace Algorithms.Tests.Search; + +public static class AStarTests +{ + [Test] + public static void ResetNodes_ResetsAllNodeProperties() + { + var node = new Node(new VecN(0, 0), true, 1.0) + { + CurrentCost = 5, + EstimatedCost = 10, + Parent = new Node(new VecN(1, 1), true, 1.0), + State = NodeState.Closed + }; + var nodes = new List { node }; + + AStar.ResetNodes(nodes); + + node.CurrentCost.Should().Be(0); + node.EstimatedCost.Should().Be(0); + node.Parent.Should().BeNull(); + node.State.Should().Be(NodeState.Unconsidered); + } + + [Test] + public static void GeneratePath_ReturnsPathFromTargetToRoot() + { + var start = new Node(new VecN(0, 0), true, 1.0); + var mid = new Node(new VecN(1, 0), true, 1.0) { Parent = start }; + var end = new Node(new VecN(2, 0), true, 1.0) { Parent = mid }; + + var path = AStar.GeneratePath(end); + + path.Should().HaveCount(3); + path[0].Should().BeSameAs(start); + path[1].Should().BeSameAs(mid); + path[2].Should().BeSameAs(end); + } + + [Test] + public static void Compute_ReturnsEmptyList_WhenNoPathExists() + { + var start = new Node(new VecN(0, 0), true, 1.0); + var end = new Node(new VecN(1, 0), true, 1.0); + start.ConnectedNodes = []; + end.ConnectedNodes = []; + + var path = AStar.Compute(start, end); + + path.Should().BeEmpty(); + } + + [Test] + public static void Compute_ReturnsPath_WhenPathExists() + { + var start = new Node(new VecN(0, 0), true, 1.0); + var mid = new Node(new VecN(1, 0), true, 1.0); + var end = new Node(new VecN(2, 0), true, 1.0); + + start.ConnectedNodes = [mid]; + mid.ConnectedNodes = [end]; + end.ConnectedNodes = []; + + var path = AStar.Compute(start, end); + + path.Should().NotBeEmpty(); + path[0].Should().Be(start); + path[^1].Should().Be(end); + } + + [Test] + public static void VecN_Equality_WorksAsExpected() + { + var a = new VecN(1, 2); + var b = new VecN(1, 2); + var c = new VecN(2, 1); + + a.Equals(b).Should().BeTrue(); + a.Equals(c).Should().BeFalse(); + } + + [Test] + public static void AddOrUpdateConnected_ThrowsPathfindingException_OnSelfReference() + { + var node = new Node(new VecN(0, 0), true, 1.0); + node.ConnectedNodes = [node]; + node.State = NodeState.Open; + + var queue = new PriorityQueue(); + + Action act = () => { + // Directly call the private method using reflection, otherwise we can't test this case + var method = typeof(AStar).GetMethod("AddOrUpdateConnected", BindingFlags.NonPublic | BindingFlags.Static); + method!.Invoke(null, [node, node, queue]); + }; + + act.Should().Throw() + .WithInnerException() + .WithMessage("*same node twice*"); + } +} diff --git a/Algorithms.Tests/Sequences/MatchstickTriangleSequenceTests.cs b/Algorithms.Tests/Sequences/MatchstickTriangleSequenceTests.cs index 31959112..d4627135 100644 --- a/Algorithms.Tests/Sequences/MatchstickTriangleSequenceTests.cs +++ b/Algorithms.Tests/Sequences/MatchstickTriangleSequenceTests.cs @@ -5,13 +5,13 @@ namespace Algorithms.Tests.Sequences; [TestFixture] public static class MatchstickTriangleSequenceTests { - private static 
BigInteger[] _testList = { + private static BigInteger[] _testList = [ 0, 1, 5, 13, 27, 48, 78, 118, 170, 235, 315, 411, 525, 658, 812, 988, 1188, 1413, 1665, 1945, 2255, 2596, 2970, 3378, 3822, 4303, 4823, 5383, 5985, 6630, 7320, 8056, 8840, 9673, 10557, 11493, 12483, 13528, 14630, 15790, 17010, 18291, 19635, 21043, 22517, - }; + ]; /// /// This test uses the list values provided from http://oeis.org/A002717/list. /// diff --git a/Algorithms.Tests/Sequences/OnesCountingSequenceTest.cs b/Algorithms.Tests/Sequences/OnesCountingSequenceTest.cs index 6b70790d..bfcc1c96 100644 --- a/Algorithms.Tests/Sequences/OnesCountingSequenceTest.cs +++ b/Algorithms.Tests/Sequences/OnesCountingSequenceTest.cs @@ -1,4 +1,4 @@ -using Algorithms.Sequences; +using Algorithms.Sequences; namespace Algorithms.Tests.Sequences; @@ -13,7 +13,7 @@ public class OnesCountingSequenceTest /// While the file contains 10,000 values, this only tests 1000. /// /// - private readonly BigInteger[] oeisValues = { + private readonly BigInteger[] oeisValues = [ 0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 4, 4, 5, 2, 3, 3, 4, 3, 4, 4, 5, 3, 4, 4, 5, 4, 5, 5, 6, 1, 2, 2, 3, 2, 3, 3, 4, 2, 3, 3, @@ -54,7 +54,7 @@ public class OnesCountingSequenceTest 7, 7, 8, 4, 5, 5, 6, 5, 6, 6, 7, 5, 6, 6, 7, 6, 7, 7, 8, 5, 6, 6, 7, 6, 7, 7, 8, 6, 7, 7, 8, 7, 8, 8, 9, 4, 5, 5, 6, 5, 6, 6, 7, 5, 6, 6, 7, 6, 7, 7, 8, 5, 6, 6, 7, 6, 7, 7, 8, 6, 7, 7, 8, 7, 8, 8, 9, 5, 6, 6, 7, 6, 7, 7, 8, - }; + ]; /// /// diff --git a/Algorithms.Tests/Sequences/TetrahedralSequenceTests.cs b/Algorithms.Tests/Sequences/TetrahedralSequenceTests.cs index 165368ad..4444980f 100644 --- a/Algorithms.Tests/Sequences/TetrahedralSequenceTests.cs +++ b/Algorithms.Tests/Sequences/TetrahedralSequenceTests.cs @@ -1,17 +1,17 @@ -using Algorithms.Sequences; +using Algorithms.Sequences; namespace Algorithms.Tests.Sequences; [TestFixture] public class TetrahedralSequenceTests { - private static readonly BigInteger[] TestList = { + private static readonly BigInteger[] TestList = [ 0, 1, 4, 10, 20, 35, 56, 84, 120, 165, 220, 286, 364, 455, 560, 680, 816, 969, 1140, 1330, 1540, 1771, 2024, 2300, 2600, 2925, 3276, 3654, 4060, 4495, 4960, 5456, 5984, 6545, 7140, 7770, 8436, 9139, 9880, 10660, 11480, 12341, 13244, 14190, 15180, - }; + ]; /// /// This test uses the list values provided from http://oeis.org/A000292/list. 
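
Note on the pattern applied in the test fixtures above: every change so far replaces a classic array or list initializer with a C# 12 collection expression, which produces the same contents. A minimal standalone sketch of the equivalence (the CollectionExpressionDemo type and its field names are illustrative only, not part of this diff):

    using System.Collections.Generic;

    public static class CollectionExpressionDemo
    {
        // Classic initializer syntax, as removed throughout the hunks above.
        private static readonly int[] OldStyle = { 1, 2, 3 };

        // C# 12 collection expression; same array contents, shorter syntax.
        private static readonly int[] NewStyle = [1, 2, 3];

        // Collection expressions are target-typed, so they also initialize List<T>.
        private static readonly List<string> Names = ["Robert", "Rupert"];
    }
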
diff --git a/Algorithms.Tests/Stack/NextGreaterElementTests.cs b/Algorithms.Tests/Stack/NextGreaterElementTests.cs index 34aaa2b8..30dcda17 100644 --- a/Algorithms.Tests/Stack/NextGreaterElementTests.cs +++ b/Algorithms.Tests/Stack/NextGreaterElementTests.cs @@ -29,8 +29,8 @@ public void FindNextGreaterElement_InputIsEmpty_ReturnsEmptyArray() public void FindNextGreaterElement_BasicScenario_ReturnsCorrectResult() { // Arrange - int[] input = { 4, 5, 2, 25 }; - int[] expected = { 5, 25, 25, -1 }; + int[] input = [4, 5, 2, 25]; + int[] expected = [5, 25, 25, -1]; // Act var result = FindNextGreaterElement(input); @@ -43,8 +43,8 @@ public void FindNextGreaterElement_BasicScenario_ReturnsCorrectResult() public void FindNextGreaterElement_NoNextGreaterElement_ReturnsCorrectResult() { // Arrange - int[] input = { 13, 7, 6, 12 }; - int[] expected = { -1, 12, 12, -1 }; + int[] input = [13, 7, 6, 12]; + int[] expected = [-1, 12, 12, -1]; // Act var result = FindNextGreaterElement(input); @@ -57,8 +57,8 @@ public void FindNextGreaterElement_NoNextGreaterElement_ReturnsCorrectResult() public void FindNextGreaterElement_AllElementsHaveNoGreaterElement_ReturnsAllNegativeOnes() { // Arrange - int[] input = { 5, 4, 3, 2, 1 }; - int[] expected = { -1, -1, -1, -1, -1 }; + int[] input = [5, 4, 3, 2, 1]; + int[] expected = [-1, -1, -1, -1, -1]; // Act var result = FindNextGreaterElement(input); @@ -71,8 +71,8 @@ public void FindNextGreaterElement_AllElementsHaveNoGreaterElement_ReturnsAllNeg public void FindNextGreaterElement_InputWithDuplicates_ReturnsCorrectResult() { // Arrange - int[] input = { 4, 4, 3, 2, 4 }; - int[] expected = { -1, -1, 4, 4, -1 }; + int[] input = [4, 4, 3, 2, 4]; + int[] expected = [-1, -1, 4, 4, -1]; // Act var result = FindNextGreaterElement(input); @@ -85,8 +85,8 @@ public void FindNextGreaterElement_InputWithDuplicates_ReturnsCorrectResult() public void FindNextGreaterElement_SingleElementArray_ReturnsNegativeOne() { // Arrange - int[] input = { 10 }; - int[] expected = { -1 }; + int[] input = [10]; + int[] expected = [-1]; // Act var result = FindNextGreaterElement(input); diff --git a/Algorithms/Crypto/Digests/Md2Digest.cs b/Algorithms/Crypto/Digests/Md2Digest.cs index 294f74d8..8918ea50 100644 --- a/Algorithms/Crypto/Digests/Md2Digest.cs +++ b/Algorithms/Crypto/Digests/Md2Digest.cs @@ -1,4 +1,4 @@ -namespace Algorithms.Crypto.Digests; +namespace Algorithms.Crypto.Digests; /// /// MD2 is a cryptographic hash function that takes an input message and produces a 128-bit output, also called a message @@ -22,7 +22,7 @@ public class Md2Digest // The S-table is a set of constants generated by shuffling the integers 0 through 255 using a variant of // Durstenfeld's algorithm with a pseudorandom number generator based on decimal digits of pi. private static readonly byte[] STable = - { + [ 41, 46, 67, 201, 162, 216, 124, 1, 61, 54, 84, 161, 236, 240, 6, 19, 98, 167, 5, 243, 192, 199, 115, 140, 152, 147, 43, 217, 188, 76, 130, 202, 30, 155, 87, 60, 253, 212, 224, 22, 103, 66, 111, 24, 138, 23, 229, 18, @@ -39,7 +39,7 @@ public class Md2Digest 120, 136, 149, 139, 227, 99, 232, 109, 233, 203, 213, 254, 59, 0, 29, 57, 242, 239, 183, 14, 102, 88, 208, 228, 166, 119, 114, 248, 235, 117, 75, 10, 49, 68, 80, 180, 143, 237, 31, 26, 219, 153, 141, 51, 159, 17, 131, 20, - }; + ]; // The X buffer is a 48-byte auxiliary block used to compute the message digest. 
private readonly byte[] xBuffer = new byte[48]; diff --git a/Algorithms/Crypto/Paddings/X932Padding.cs b/Algorithms/Crypto/Paddings/X932Padding.cs index 2e552c38..31eecc33 100644 --- a/Algorithms/Crypto/Paddings/X932Padding.cs +++ b/Algorithms/Crypto/Paddings/X932Padding.cs @@ -1,4 +1,4 @@ -namespace Algorithms.Crypto.Paddings; +namespace Algorithms.Crypto.Paddings; /// /// @@ -15,16 +15,13 @@ /// bytes. /// /// -public class X932Padding : IBlockCipherPadding +/// +/// Initializes a new instance of the class with the specified padding mode. +/// +/// A boolean value that indicates whether to use random bytes as padding or not. +public class X932Padding(bool useRandomPadding) : IBlockCipherPadding { - private readonly bool useRandomPadding; - - /// - /// Initializes a new instance of the class with the specified padding mode. - /// - /// A boolean value that indicates whether to use random bytes as padding or not. - public X932Padding(bool useRandomPadding) => - this.useRandomPadding = useRandomPadding; + private readonly bool useRandomPadding = useRandomPadding; /// /// Adds padding to the input data according to the X9.23 padding scheme. diff --git a/Algorithms/DataCompression/HuffmanCompressor.cs b/Algorithms/DataCompression/HuffmanCompressor.cs index 41a49702..159caeda 100644 --- a/Algorithms/DataCompression/HuffmanCompressor.cs +++ b/Algorithms/DataCompression/HuffmanCompressor.cs @@ -5,17 +5,11 @@ namespace Algorithms.DataCompression; /// /// Greedy lossless compression algorithm. /// -public class HuffmanCompressor +public class HuffmanCompressor(IComparisonSorter sorter, Translator translator) { // TODO: Use partial sorter - private readonly IComparisonSorter sorter; - private readonly Translator translator; - - public HuffmanCompressor(IComparisonSorter sorter, Translator translator) - { - this.sorter = sorter; - this.translator = translator; - } + private readonly IComparisonSorter sorter = sorter; + private readonly Translator translator = translator; /// /// Given an input string, returns a new compressed string diff --git a/Algorithms/DataCompression/ShannonFanoCompressor.cs b/Algorithms/DataCompression/ShannonFanoCompressor.cs index c9b62a5f..7671d05b 100644 --- a/Algorithms/DataCompression/ShannonFanoCompressor.cs +++ b/Algorithms/DataCompression/ShannonFanoCompressor.cs @@ -5,18 +5,12 @@ namespace Algorithms.DataCompression; /// /// Greedy lossless compression algorithm. /// -public class ShannonFanoCompressor +public class ShannonFanoCompressor( + IHeuristicKnapsackSolver<(char Symbol, double Frequency)> splitter, + Translator translator) { - private readonly IHeuristicKnapsackSolver<(char Symbol, double Frequency)> splitter; - private readonly Translator translator; - - public ShannonFanoCompressor( - IHeuristicKnapsackSolver<(char Symbol, double Frequency)> splitter, - Translator translator) - { - this.splitter = splitter; - this.translator = translator; - } + private readonly IHeuristicKnapsackSolver<(char Symbol, double Frequency)> splitter = splitter; + private readonly Translator translator = translator; /// /// Given an input string, returns a new compressed string @@ -117,11 +111,9 @@ private ListNode GetListNodeFromText(string text) /// /// Represents tree structure for the algorithm. 
/// - public class ListNode + public class ListNode((char Symbol, double Frequency)[] data) { - public ListNode((char Symbol, double Frequency)[] data) => Data = data; - - public (char Symbol, double Frequency)[] Data { get; } + public (char Symbol, double Frequency)[] Data { get; } = data; public ListNode? RightChild { get; set; } diff --git a/Algorithms/Encoders/BlowfishEncoder.cs b/Algorithms/Encoders/BlowfishEncoder.cs index 33582aed..40248fc5 100644 --- a/Algorithms/Encoders/BlowfishEncoder.cs +++ b/Algorithms/Encoders/BlowfishEncoder.cs @@ -1,4 +1,4 @@ -namespace Algorithms.Encoders; +namespace Algorithms.Encoders; /// /// @@ -26,9 +26,8 @@ public class BlowfishEncoder // Initialize the substitution boxes private readonly string[][] s = - { - new[] - { + [ + [ "d1310ba6", "98dfb5ac", "2ffd72db", "d01adfb7", "b8e1afed", "6a267e96", "ba7c9045", "f12c7f99", "24a19947", "b3916cf7", "0801f2e2", "858efc16", "636920d8", "71574e69", "a458fea3", "f4933d7e", "0d95748f", "728eb658", "718bcd58", "82154aee", "7b54a41d", "c25a59b5", "9c30d539", "2af26013", @@ -61,9 +60,8 @@ public class BlowfishEncoder "83260376", "6295cfa9", "11c81968", "4e734a41", "b3472dca", "7b14a94a", "1b510052", "9a532915", "d60f573f", "bc9bc6e4", "2b60a476", "81e67400", "08ba6fb5", "571be91f", "f296ec6b", "2a0dd915", "b6636521", "e7b9f9b6", "ff34052e", "c5855664", "53b02d5d", "a99f8fa1", "08ba4799", "6e85076a", - }, - new[] - { + ], + [ "4b7a70e9", "b5b32944", "db75092e", "c4192623", "ad6ea6b0", "49a7df7d", "9cee60b8", "8fedb266", "ecaa8c71", "699a17ff", "5664526c", "c2b19ee1", "193602a5", "75094c29", "a0591340", "e4183a3e", "3f54989a", "5b429d65", "6b8fe4d6", "99f73fd6", "a1d29c07", "efe830f5", "4d2d38e6", "f0255dc1", @@ -96,9 +94,8 @@ public class BlowfishEncoder "a6078084", "19f8509e", "e8efd855", "61d99735", "a969a7aa", "c50c06c2", "5a04abfc", "800bcadc", "9e447a2e", "c3453484", "fdd56705", "0e1e9ec9", "db73dbd3", "105588cd", "675fda79", "e3674340", "c5c43465", "713e38d8", "3d28f89e", "f16dff20", "153e21e7", "8fb03d4a", "e6e39f2b", "db83adf7", - }, - new[] - { + ], + [ "e93d5a68", "948140f7", "f64c261c", "94692934", "411520f7", "7602d4f7", "bcf46b2e", "d4a20068", "d4082471", "3320f46a", "43b7d4b7", "500061af", "1e39f62e", "97244546", "14214f74", "bf8b8840", "4d95fc1d", "96b591af", "70f4ddd3", "66a02f45", "bfbc09ec", "03bd9785", "7fac6dd0", "31cb8504", @@ -131,9 +128,8 @@ public class BlowfishEncoder "6f05e409", "4b7c0188", "39720a3d", "7c927c24", "86e3725f", "724d9db9", "1ac15bb4", "d39eb8fc", "ed545578", "08fca5b5", "d83d7cd3", "4dad0fc4", "1e50ef5e", "b161e6f8", "a28514d9", "6c51133c", "6fd5c7e7", "56e14ec4", "362abfce", "ddc6c837", "d79a3234", "92638212", "670efa8e", "406000e0", - }, - new[] - { + ], + [ "3a39ce37", "d3faf5cf", "abc27737", "5ac52d1b", "5cb0679e", "4fa33742", "d3822740", "99bc9bbe", "d5118e9d", "bf0f7315", "d62d1c7e", "c700c47b", "b78c1b6b", "21a19045", "b26eb1be", "6a366eb4", "5748ab2f", "bc946e79", "c6a376d2", "6549c2c8", "530ff8ee", "468dde7d", "d5730a1d", "4cd04dc6", @@ -166,15 +162,15 @@ public class BlowfishEncoder "53113ec0", "1640e3d3", "38abbd60", "2547adf0", "ba38209c", "f746ce76", "77afa1c5", "20756060", "85cbfe4e", "8ae88dd8", "7aaaf9b0", "4cf9aa7e", "1948c25c", "02fb8a8c", "01c36ae4", "d6ebe1f9", "90d4f869", "a65cdea0", "3f09252d", "c208e69f", "b74e6132", "ce77e25b", "578fdfe3", "3ac372e6", - }, - }; + ], + ]; // Initialize the P-array sub-keys private readonly string[] p = - { + [ "243f6a88", "85a308d3", "13198a2e", "03707344", "a4093822", "299f31d0", "082efa98", "ec4e6c89", "452821e6", 
"38d01377", "be5466cf", "34e90c6c", "c0ac29b7", "c97c50dd", "3f84d5b5", "b5470917", "9216d5d9", "8979fb1b", - }; + ]; /// /// Generate a key for the encryption algorithm based on the given string parameter. diff --git a/Algorithms/Encoders/FeistelCipher.cs b/Algorithms/Encoders/FeistelCipher.cs index fb26b6e6..4733c5cb 100644 --- a/Algorithms/Encoders/FeistelCipher.cs +++ b/Algorithms/Encoders/FeistelCipher.cs @@ -118,7 +118,7 @@ public string Decode(string text, uint key) // the last block is extended up to 8 bytes if the tail of the text is smaller than 8 bytes private static List SplitTextToBlocks(string text) { - List blocksListPlain = new(); + List blocksListPlain = []; byte[] textArray = Encoding.ASCII.GetBytes(text); int offset = 8; for (int i = 0; i < text.Length; i += 8) @@ -139,7 +139,7 @@ private static List SplitTextToBlocks(string text) // convert the encoded text to the set of ulong values (blocks for decoding) private static List GetBlocksFromEncodedText(string text) { - List blocksListPlain = new(); + List blocksListPlain = []; for (int i = 0; i < text.Length; i += 16) { ulong block = Convert.ToUInt64(text.Substring(i, 16), 16); diff --git a/Algorithms/Encoders/HillEncoder.cs b/Algorithms/Encoders/HillEncoder.cs index 6fbb6ca0..3dcb9c75 100644 --- a/Algorithms/Encoders/HillEncoder.cs +++ b/Algorithms/Encoders/HillEncoder.cs @@ -184,6 +184,6 @@ private double[] MatrixDeCipher(double[] vector, double[,] key) _ = linearEquationSolver.Solve(augM); - return new[] { augM[0, 3], augM[1, 3], augM[2, 3] }; + return [augM[0, 3], augM[1, 3], augM[2, 3]]; } } diff --git a/Algorithms/Encoders/NysiisEncoder.cs b/Algorithms/Encoders/NysiisEncoder.cs index 503c8585..fa74631b 100644 --- a/Algorithms/Encoders/NysiisEncoder.cs +++ b/Algorithms/Encoders/NysiisEncoder.cs @@ -5,7 +5,7 @@ namespace Algorithms.Encoders; /// public class NysiisEncoder { - private static readonly char[] Vowels = { 'A', 'E', 'I', 'O', 'U' }; + private static readonly char[] Vowels = ['A', 'E', 'I', 'O', 'U']; /// /// Encodes a string using the NYSIIS Algorithm. @@ -66,7 +66,7 @@ private string TrimEnd(string text) private string ReplaceStep(string text, int i) { (string From, string To)[] replacements = - { + [ ("EV", "AF"), ("E", "A"), ("I", "A"), @@ -79,7 +79,7 @@ private string ReplaceStep(string text, int i) ("K", "C"), ("SCH", "SSS"), ("PH", "FF"), - }; + ]; var replaced = TryReplace(text, i, replacements, out text); if (replaced) { diff --git a/Algorithms/Graph/BellmanFord.cs b/Algorithms/Graph/BellmanFord.cs index 075a3ecb..2e11647b 100644 --- a/Algorithms/Graph/BellmanFord.cs +++ b/Algorithms/Graph/BellmanFord.cs @@ -6,18 +6,11 @@ namespace Algorithms.Graph; /// Bellman-Ford algorithm on directed weighted graph. /// /// Generic type of data in the graph. 
-public class BellmanFord +public class BellmanFord(DirectedWeightedGraph graph, Dictionary, double> distances, Dictionary, Vertex?> predecessors) { - private readonly DirectedWeightedGraph graph; - private readonly Dictionary, double> distances; - private readonly Dictionary, Vertex?> predecessors; - - public BellmanFord(DirectedWeightedGraph graph, Dictionary, double> distances, Dictionary, Vertex?> predecessors) - { - this.graph = graph; - this.distances = distances; - this.predecessors = predecessors; - } + private readonly DirectedWeightedGraph graph = graph; + private readonly Dictionary, double> distances = distances; + private readonly Dictionary, Vertex?> predecessors = predecessors; /// /// Runs the Bellman-Ford algorithm to find the shortest distances from the source vertex to all other vertices. diff --git a/Algorithms/Graph/BreadthFirstSearch.cs b/Algorithms/Graph/BreadthFirstSearch.cs index 1802d1e6..384bc4ef 100644 --- a/Algorithms/Graph/BreadthFirstSearch.cs +++ b/Algorithms/Graph/BreadthFirstSearch.cs @@ -18,7 +18,7 @@ public class BreadthFirstSearch : IGraphSearch where T : IComparable /// Action that needs to be executed on each graph vertex. public void VisitAll(IDirectedWeightedGraph graph, Vertex startVertex, Action>? action = default) { - Bfs(graph, startVertex, action, new HashSet>()); + Bfs(graph, startVertex, action, []); } /// diff --git a/Algorithms/Graph/DepthFirstSearch.cs b/Algorithms/Graph/DepthFirstSearch.cs index d24f07b9..4e4a0466 100644 --- a/Algorithms/Graph/DepthFirstSearch.cs +++ b/Algorithms/Graph/DepthFirstSearch.cs @@ -18,7 +18,7 @@ public class DepthFirstSearch : IGraphSearch where T : IComparable /// Action that needs to be executed on each graph vertex. public void VisitAll(IDirectedWeightedGraph graph, Vertex startVertex, Action>? action = default) { - Dfs(graph, startVertex, action, new HashSet>()); + Dfs(graph, startVertex, action, []); } /// diff --git a/Algorithms/Graph/Dijkstra/DistanceModel.cs b/Algorithms/Graph/Dijkstra/DistanceModel.cs index 133fa6d1..84db45bd 100644 --- a/Algorithms/Graph/Dijkstra/DistanceModel.cs +++ b/Algorithms/Graph/Dijkstra/DistanceModel.cs @@ -7,20 +7,13 @@ namespace Algorithms.Graph.Dijkstra; /// Contains: Vertex, Previous Vertex and minimal distance from start vertex. /// /// Generic parameter. -public class DistanceModel +public class DistanceModel(Vertex? vertex, Vertex? previousVertex, double distance) { - public Vertex? Vertex { get; } + public Vertex? Vertex { get; } = vertex; - public Vertex? PreviousVertex { get; set; } + public Vertex? PreviousVertex { get; set; } = previousVertex; - public double Distance { get; set; } - - public DistanceModel(Vertex? vertex, Vertex? previousVertex, double distance) - { - Vertex = vertex; - PreviousVertex = previousVertex; - Distance = distance; - } + public double Distance { get; set; } = distance; public override string ToString() => $"Vertex: {Vertex} - Distance: {Distance} - Previous: {PreviousVertex}"; diff --git a/Algorithms/Graph/Kosaraju.cs b/Algorithms/Graph/Kosaraju.cs index f61872ca..64b8b13b 100644 --- a/Algorithms/Graph/Kosaraju.cs +++ b/Algorithms/Graph/Kosaraju.cs @@ -74,9 +74,9 @@ public static void Assign(Vertex v, Vertex root, IDirectedWeightedGraph /// A dictionary that assigns to each vertex a root vertex of the SCC they belong. 
public static Dictionary, Vertex> GetRepresentatives(IDirectedWeightedGraph graph) { - HashSet> visited = new HashSet>(); + HashSet> visited = []; Stack> reversedL = new Stack>(); - Dictionary, Vertex> representatives = new Dictionary, Vertex>(); + Dictionary, Vertex> representatives = []; foreach (var v in graph.Vertices) { @@ -105,7 +105,7 @@ public static Dictionary, Vertex> GetRepresentatives(IDirectedWeigh public static IEnumerable>[] GetScc(IDirectedWeightedGraph graph) { var representatives = GetRepresentatives(graph); - Dictionary, List>> scc = new Dictionary, List>>(); + Dictionary, List>> scc = []; foreach (var kv in representatives) { // Assign all vertex (key) that have the seem root (value) to a single list. @@ -115,7 +115,7 @@ public static IEnumerable>[] GetScc(IDirectedWeightedGraph graph) } else { - scc.Add(kv.Value, new List> { kv.Key }); + scc.Add(kv.Value, [kv.Key]); } } diff --git a/Algorithms/Graph/MinimumSpanningTree/Kruskal.cs b/Algorithms/Graph/MinimumSpanningTree/Kruskal.cs index 80ec8648..201f7540 100644 --- a/Algorithms/Graph/MinimumSpanningTree/Kruskal.cs +++ b/Algorithms/Graph/MinimumSpanningTree/Kruskal.cs @@ -108,7 +108,7 @@ public static Dictionary[] Solve(Dictionary[] adjacencyL var mst = new Dictionary[numNodes]; for (var i = 0; i < numNodes; i++) { - mst[i] = new Dictionary(); + mst[i] = []; } foreach (var (node1, node2) in edges) diff --git a/Algorithms/Knapsack/BranchAndBoundKnapsackSolver.cs b/Algorithms/Knapsack/BranchAndBoundKnapsackSolver.cs index 9056c579..cf4ddd00 100644 --- a/Algorithms/Knapsack/BranchAndBoundKnapsackSolver.cs +++ b/Algorithms/Knapsack/BranchAndBoundKnapsackSolver.cs @@ -102,7 +102,7 @@ public T[] Solve(T[] items, int capacity, Func weightSelector, Func takenItems = new(); + List takenItems = []; // only bogus initial node has no parent for (var current = lastNodeOfPath; current.Parent is not null; current = current.Parent) diff --git a/Algorithms/Knapsack/BranchAndBoundNode.cs b/Algorithms/Knapsack/BranchAndBoundNode.cs index 0a65f16a..9686c1f1 100644 --- a/Algorithms/Knapsack/BranchAndBoundNode.cs +++ b/Algorithms/Knapsack/BranchAndBoundNode.cs @@ -1,9 +1,9 @@ namespace Algorithms.Knapsack; -public class BranchAndBoundNode +public class BranchAndBoundNode(int level, bool taken, BranchAndBoundNode? parent = null) { // isTaken --> true = the item where index = level is taken, vice versa - public bool IsTaken { get; } + public bool IsTaken { get; } = taken; // cumulativeWeight --> um of weight of item associated in each nodes starting from root to this node (only item that is taken) public int CumulativeWeight { get; set; } @@ -15,15 +15,8 @@ public class BranchAndBoundNode public double UpperBound { get; set; } // level --> level of the node in the tree structure - public int Level { get; } + public int Level { get; } = level; // parent node - public BranchAndBoundNode? Parent { get; } - - public BranchAndBoundNode(int level, bool taken, BranchAndBoundNode? parent = null) - { - Level = level; - IsTaken = taken; - Parent = parent; - } + public BranchAndBoundNode? 
Parent { get; } = parent; } diff --git a/Algorithms/Numeric/EulerMethod.cs b/Algorithms/Numeric/EulerMethod.cs index 770417d1..cb2136b1 100644 --- a/Algorithms/Numeric/EulerMethod.cs +++ b/Algorithms/Numeric/EulerMethod.cs @@ -44,8 +44,8 @@ public static List EulerFull( $"{nameof(stepSize)} should be greater than zero"); } - List points = new(); - double[] firstPoint = { xStart, yStart }; + List points = []; + double[] firstPoint = [xStart, yStart]; points.Add(firstPoint); var yCurrent = yStart; var xCurrent = xStart; @@ -54,7 +54,7 @@ public static List EulerFull( { yCurrent = EulerStep(xCurrent, stepSize, yCurrent, yDerivative); xCurrent += stepSize; - double[] point = { xCurrent, yCurrent }; + double[] point = [xCurrent, yCurrent]; points.Add(point); } diff --git a/Algorithms/Numeric/RungeKuttaMethod.cs b/Algorithms/Numeric/RungeKuttaMethod.cs index 494ef969..797ae81e 100644 --- a/Algorithms/Numeric/RungeKuttaMethod.cs +++ b/Algorithms/Numeric/RungeKuttaMethod.cs @@ -39,8 +39,8 @@ public static List ClassicRungeKuttaMethod( $"{nameof(stepSize)} should be greater than zero"); } - List points = new(); - double[] firstPoint = { xStart, yStart }; + List points = []; + double[] firstPoint = [xStart, yStart]; points.Add(firstPoint); var yCurrent = yStart; @@ -56,7 +56,7 @@ public static List ClassicRungeKuttaMethod( yCurrent += (1.0 / 6.0) * stepSize * (k1 + 2 * k2 + 2 * k3 + k4); xCurrent += stepSize; - double[] newPoint = { xCurrent, yCurrent }; + double[] newPoint = [xCurrent, yCurrent]; points.Add(newPoint); } diff --git a/Algorithms/Other/FloodFill.cs b/Algorithms/Other/FloodFill.cs index 28ddfd16..df91a26b 100644 --- a/Algorithms/Other/FloodFill.cs +++ b/Algorithms/Other/FloodFill.cs @@ -12,7 +12,7 @@ namespace Algorithms.Other; /// public static class FloodFill { - private static readonly List<(int XOffset, int YOffset)> Neighbors = new() { (-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1) }; + private static readonly List<(int XOffset, int YOffset)> Neighbors = [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)]; /// /// Implements the flood fill algorithm through a breadth-first approach using a queue. @@ -28,8 +28,10 @@ public static void BreadthFirstSearch(SKBitmap bitmap, (int X, int Y) location, throw new ArgumentOutOfRangeException(nameof(location), $"{nameof(location)} should point to a pixel within the bitmap"); } - var queue = new List<(int X, int Y)>(); - queue.Add(location); + var queue = new List<(int X, int Y)> + { + location, + }; while (queue.Count > 0) { diff --git a/Algorithms/Other/Geofence.cs b/Algorithms/Other/Geofence.cs index 59afab77..98980a55 100644 --- a/Algorithms/Other/Geofence.cs +++ b/Algorithms/Other/Geofence.cs @@ -1,19 +1,12 @@ namespace Algorithms.Other; -public class Geofence +public class Geofence(double latitude, double longitude, double radiusInMeters) { - public double Latitude { get; set; } + public double Latitude { get; set; } = latitude; - public double Longitude { get; set; } + public double Longitude { get; set; } = longitude; - public double RadiusInMeters { get; set; } - - public Geofence(double latitude, double longitude, double radiusInMeters) - { - Latitude = latitude; - Longitude = longitude; - RadiusInMeters = radiusInMeters; - } + public double RadiusInMeters { get; set; } = radiusInMeters; /// /// Checks whether the provided user location (latitude and longitude) is within the geofence boundary. 
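
Note on the empty-collection changes above (Kosaraju, Kruskal, BreadthFirstSearch, DepthFirstSearch, FeistelCipher, FloodFill): the target-typed empty collection expression [] stands in for an explicit parameterless constructor, and the compiler infers the concrete collection type from the declaration. A short illustrative sketch under that assumption (the EmptyCollectionDemo type is hypothetical, not part of this diff):

    using System.Collections.Generic;

    public static class EmptyCollectionDemo
    {
        public static int Run()
        {
            // [] picks the collection type from the target: HashSet, Dictionary, List, ...
            HashSet<int> visited = [];
            Dictionary<string, int> counts = [];
            List<(int X, int Y)> neighbors = [(0, 1), (1, 0)];

            counts["seen"] = visited.Count + neighbors.Count;
            return counts["seen"];
        }
    }
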
diff --git a/Algorithms/Other/Geohash.cs b/Algorithms/Other/Geohash.cs index f66f9919..0c07cdc7 100644 --- a/Algorithms/Other/Geohash.cs +++ b/Algorithms/Other/Geohash.cs @@ -20,8 +20,8 @@ public static class Geohash /// public static string Encode(double latitude, double longitude) { - double[] latitudeRange = new[] { -90.0, 90.0 }; - double[] longitudeRange = new[] { -180.0, 180.0 }; + double[] latitudeRange = [-90.0, 90.0]; + double[] longitudeRange = [-180.0, 180.0]; bool isEncodingLongitude = true; int currentBit = 0; int base32Index = 0; diff --git a/Algorithms/Other/KochSnowflake.cs b/Algorithms/Other/KochSnowflake.cs index 29803788..04a2d835 100644 --- a/Algorithms/Other/KochSnowflake.cs +++ b/Algorithms/Other/KochSnowflake.cs @@ -65,7 +65,7 @@ public static SKBitmap GetKochSnowflake( var vector1 = new Vector2(offsetX, offsetY); var vector2 = new Vector2(bitmapWidth / 2, (float)Math.Sin(Math.PI / 3) * bitmapWidth * 0.8f + offsetY); var vector3 = new Vector2(bitmapWidth - offsetX, offsetY); - List initialVectors = new() { vector1, vector2, vector3, vector1 }; + List initialVectors = [vector1, vector2, vector3, vector1]; List vectors = Iterate(initialVectors, steps); return GetBitmap(vectors, bitmapWidth, bitmapWidth); } @@ -83,7 +83,7 @@ public static SKBitmap GetKochSnowflake( /// The transformed vectors after the iteration-step. private static List IterationStep(List vectors) { - List newVectors = new(); + List newVectors = []; for (var i = 0; i < vectors.Count - 1; i++) { var startVector = vectors[i]; diff --git a/Algorithms/Problems/StableMarriage/Accepter.cs b/Algorithms/Problems/StableMarriage/Accepter.cs index 317824af..ec5a71de 100644 --- a/Algorithms/Problems/StableMarriage/Accepter.cs +++ b/Algorithms/Problems/StableMarriage/Accepter.cs @@ -4,7 +4,7 @@ public class Accepter { public Proposer? EngagedTo { get; set; } - public List PreferenceOrder { get; set; } = new(); + public List PreferenceOrder { get; set; } = []; public bool PrefersOverCurrent(Proposer newProposer) => EngagedTo is null || diff --git a/Algorithms/RecommenderSystem/CollaborativeFiltering.cs b/Algorithms/RecommenderSystem/CollaborativeFiltering.cs index 1a3c87cc..4e5d8529 100644 --- a/Algorithms/RecommenderSystem/CollaborativeFiltering.cs +++ b/Algorithms/RecommenderSystem/CollaborativeFiltering.cs @@ -1,13 +1,8 @@ namespace Algorithms.RecommenderSystem { - public class CollaborativeFiltering + public class CollaborativeFiltering(ISimilarityCalculator similarityCalculator) { - private readonly ISimilarityCalculator similarityCalculator; - - public CollaborativeFiltering(ISimilarityCalculator similarityCalculator) - { - this.similarityCalculator = similarityCalculator; - } + private readonly ISimilarityCalculator similarityCalculator = similarityCalculator; /// /// Method to calculate similarity between two users using Pearson correlation. 
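
Note on the constructor removals above (X932Padding, HuffmanCompressor, ShannonFanoCompressor, BellmanFord, DistanceModel, Geofence, CollaborativeFiltering): each class now declares a C# 12 primary constructor whose parameters feed the field or property initializers directly, so the explicit constructor body disappears. A minimal before/after sketch with a hypothetical type, not taken from this diff:

    // Before: backing field assigned in an explicit constructor.
    public class RatePlanner
    {
        private readonly double rate;

        public RatePlanner(double rate) => this.rate = rate;

        public double Apply(double amount) => amount * rate;
    }

    // After: primary constructor; the parameter initializes the readonly field in place.
    public class RatePlannerWithPrimaryConstructor(double rate)
    {
        private readonly double rate = rate;

        public double Apply(double amount) => amount * rate;
    }

Because the field shadows the parameter inside member bodies, Apply reads the readonly field rather than capturing the constructor parameter, which matches the pattern used throughout these hunks.
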
diff --git a/Algorithms/Search/AStar/AStar.cs b/Algorithms/Search/AStar/AStar.cs index 2c50fc88..0e7fc387 100644 --- a/Algorithms/Search/AStar/AStar.cs +++ b/Algorithms/Search/AStar/AStar.cs @@ -72,7 +72,7 @@ public static List Compute(Node from, Node to) { ResetNodes(done); ResetNodes(open.GetData()); - return new List(); + return []; } // Selecting next Element from queue diff --git a/Algorithms/Search/AStar/Node.cs b/Algorithms/Search/AStar/Node.cs index 69b98a1b..21a59f9a 100644 --- a/Algorithms/Search/AStar/Node.cs +++ b/Algorithms/Search/AStar/Node.cs @@ -3,15 +3,8 @@ namespace Algorithms.Search.AStar; /// /// Contains Positional and other information about a single node. /// -public class Node : IComparable, IEquatable +public class Node(VecN position, bool traversable, double traverseMultiplier) : IComparable, IEquatable { - public Node(VecN position, bool traversable, double traverseMultiplier) - { - Traversable = traversable; - Position = position; - TraversalCostMultiplier = traverseMultiplier; - } - /// /// Gets the Total cost of the Node. /// The Current Costs + the estimated costs. @@ -26,7 +19,7 @@ public Node(VecN position, bool traversable, double traverseMultiplier) /// /// Gets a value indicating whether how costly it is to traverse over this node. /// - public double TraversalCostMultiplier { get; } + public double TraversalCostMultiplier { get; } = traverseMultiplier; /// /// Gets or sets a value indicating whether to go from the start node to this node. @@ -42,12 +35,12 @@ public Node(VecN position, bool traversable, double traverseMultiplier) /// /// Gets a value indicating whether the node is traversable. /// - public bool Traversable { get; } + public bool Traversable { get; } = traversable; /// /// Gets or sets a list of all connected nodes. /// - public Node[] ConnectedNodes { get; set; } = new Node[0]; + public Node[] ConnectedNodes { get; set; } = []; /// /// Gets or sets he "previous" node that was processed before this node. @@ -57,7 +50,7 @@ public Node(VecN position, bool traversable, double traverseMultiplier) /// /// Gets the positional information of the node. /// - public VecN Position { get; } + public VecN Position { get; } = position; /// /// Compares the Nodes based on their total costs. diff --git a/Algorithms/Search/AStar/PathfindingException.cs b/Algorithms/Search/AStar/PathfindingException.cs index 2987c935..2e798d74 100644 --- a/Algorithms/Search/AStar/PathfindingException.cs +++ b/Algorithms/Search/AStar/PathfindingException.cs @@ -3,10 +3,6 @@ namespace Algorithms.Search.AStar; /// /// A pathfinding exception is thrown when the Pathfinder encounters a critical error and can not continue. /// -public class PathfindingException : Exception +public class PathfindingException(string message) : Exception(message) { - public PathfindingException(string message) - : base(message) - { - } } diff --git a/Algorithms/Search/AStar/PriorityQueue.cs b/Algorithms/Search/AStar/PriorityQueue.cs index 20554463..60a215d9 100644 --- a/Algorithms/Search/AStar/PriorityQueue.cs +++ b/Algorithms/Search/AStar/PriorityQueue.cs @@ -20,7 +20,7 @@ public class PriorityQueue public PriorityQueue(bool isDescending = false) { this.isDescending = isDescending; - list = new List(); + list = []; } /// diff --git a/Algorithms/Search/AStar/VecN.cs b/Algorithms/Search/AStar/VecN.cs index 63fab94b..3592a757 100644 --- a/Algorithms/Search/AStar/VecN.cs +++ b/Algorithms/Search/AStar/VecN.cs @@ -3,15 +3,13 @@ namespace Algorithms.Search.AStar; /// /// Vector Struct with N Dimensions. 
/// -public struct VecN : IEquatable +/// +/// Initializes a new instance of the struct. +/// +/// Vector components as array. +public struct VecN(params double[] vals) : IEquatable { - private readonly double[] data; - - /// - /// Initializes a new instance of the struct. - /// - /// Vector components as array. - public VecN(params double[] vals) => data = vals; + private readonly double[] data = vals; /// /// Gets the dimension count of this vector. diff --git a/Algorithms/Search/FastSearcher.cs b/Algorithms/Search/FastSearcher.cs index 08081d67..aadf429a 100644 --- a/Algorithms/Search/FastSearcher.cs +++ b/Algorithms/Search/FastSearcher.cs @@ -48,11 +48,11 @@ public int FindIndex(Span array, int item) var indexBinary = array.Length / 2; int[] section = - { + [ array.Length - 1, item - array[0], array[^1] - array[0], - }; + ]; var indexInterpolation = section[0] * section[1] / section[2]; // Left is min and right is max of the indices diff --git a/Algorithms/Sorters/Comparison/BasicTimSorter.cs b/Algorithms/Sorters/Comparison/BasicTimSorter.cs index 0e5135ee..f28a4945 100644 --- a/Algorithms/Sorters/Comparison/BasicTimSorter.cs +++ b/Algorithms/Sorters/Comparison/BasicTimSorter.cs @@ -4,19 +4,14 @@ namespace Algorithms.Sorters.Comparison; /// A basic implementation of the TimSort algorithm for sorting arrays. /// /// The type of elements in the array. -public class BasicTimSorter +/// +/// Initializes a new instance of the class. +/// +/// The comparer to use for comparing elements. +public class BasicTimSorter(IComparer comparer) { private readonly int minRuns = 32; - private readonly IComparer comparer; - - /// - /// Initializes a new instance of the class. - /// - /// The comparer to use for comparing elements. - public BasicTimSorter(IComparer comparer) - { - this.comparer = comparer ?? Comparer.Default; - } + private readonly IComparer comparer = comparer ?? Comparer.Default; /// /// Sorts the specified array using the TimSort algorithm. diff --git a/Algorithms/Sorters/Comparison/CombSorter.cs b/Algorithms/Sorters/Comparison/CombSorter.cs index 8ef67faa..3114ed4f 100644 --- a/Algorithms/Sorters/Comparison/CombSorter.cs +++ b/Algorithms/Sorters/Comparison/CombSorter.cs @@ -4,11 +4,9 @@ namespace Algorithms.Sorters.Comparison; /// Comb sort is a relatively simple sorting algorithm that improves on bubble sort. /// /// Type of array element. 
-public class CombSorter : IComparisonSorter +public class CombSorter(double shrinkFactor = 1.3) : IComparisonSorter { - public CombSorter(double shrinkFactor = 1.3) => ShrinkFactor = shrinkFactor; - - private double ShrinkFactor { get; } + private double ShrinkFactor { get; } = shrinkFactor; /// /// Sorts array using specified comparer, diff --git a/Algorithms/Sorters/External/Storages/IntFileStorage.cs b/Algorithms/Sorters/External/Storages/IntFileStorage.cs index 154245b6..1fbf03e3 100644 --- a/Algorithms/Sorters/External/Storages/IntFileStorage.cs +++ b/Algorithms/Sorters/External/Storages/IntFileStorage.cs @@ -2,38 +2,28 @@ namespace Algorithms.Sorters.External.Storages; -public class IntFileStorage : ISequentialStorage +public class IntFileStorage(string filename, int length) : ISequentialStorage { - private readonly string filename; + private readonly string filename = filename; - public IntFileStorage(string filename, int length) - { - Length = length; - this.filename = filename; - } - - public int Length { get; } + public int Length { get; } = length; public ISequentialStorageReader GetReader() => new FileReader(filename); public ISequentialStorageWriter GetWriter() => new FileWriter(filename); - private class FileReader : ISequentialStorageReader + private class FileReader(string filename) : ISequentialStorageReader { - private readonly BinaryReader reader; - - public FileReader(string filename) => reader = new BinaryReader(File.OpenRead(filename)); + private readonly BinaryReader reader = new BinaryReader(File.OpenRead(filename)); public void Dispose() => reader.Dispose(); public int Read() => reader.ReadInt32(); } - private class FileWriter : ISequentialStorageWriter + private class FileWriter(string filename) : ISequentialStorageWriter { - private readonly BinaryWriter writer; - - public FileWriter(string filename) => writer = new BinaryWriter(File.OpenWrite(filename)); + private readonly BinaryWriter writer = new BinaryWriter(File.OpenWrite(filename)); public void Write(int value) => writer.Write(value); diff --git a/Algorithms/Sorters/External/Storages/IntInMemoryStorage.cs b/Algorithms/Sorters/External/Storages/IntInMemoryStorage.cs index e0a84031..50e07575 100644 --- a/Algorithms/Sorters/External/Storages/IntInMemoryStorage.cs +++ b/Algorithms/Sorters/External/Storages/IntInMemoryStorage.cs @@ -1,10 +1,8 @@ namespace Algorithms.Sorters.External.Storages; -public class IntInMemoryStorage : ISequentialStorage +public class IntInMemoryStorage(int[] array) : ISequentialStorage { - private readonly int[] storage; - - public IntInMemoryStorage(int[] array) => storage = array; + private readonly int[] storage = array; public int Length => storage.Length; @@ -12,13 +10,11 @@ public class IntInMemoryStorage : ISequentialStorage public ISequentialStorageWriter GetWriter() => new InMemoryWriter(storage); - private class InMemoryReader : ISequentialStorageReader + private class InMemoryReader(int[] storage) : ISequentialStorageReader { - private readonly int[] storage; + private readonly int[] storage = storage; private int offset; - public InMemoryReader(int[] storage) => this.storage = storage; - public void Dispose() { // Nothing to dispose here @@ -27,13 +23,11 @@ public void Dispose() public int Read() => storage[offset++]; } - private class InMemoryWriter : ISequentialStorageWriter + private class InMemoryWriter(int[] storage) : ISequentialStorageWriter { - private readonly int[] storage; + private readonly int[] storage = storage; private int offset; - public 
InMemoryWriter(int[] storage) => this.storage = storage; - public void Write(int value) => storage[offset++] = value; public void Dispose() diff --git a/Algorithms/Strings/PatternMatching/NaiveStringSearch.cs b/Algorithms/Strings/PatternMatching/NaiveStringSearch.cs index ed29c45a..58d1279c 100644 --- a/Algorithms/Strings/PatternMatching/NaiveStringSearch.cs +++ b/Algorithms/Strings/PatternMatching/NaiveStringSearch.cs @@ -17,7 +17,7 @@ public static int[] NaiveSearch(string content, string pattern) { var m = pattern.Length; var n = content.Length; - List indices = new(); + List indices = []; for (var e = 0; e <= n - m; e++) { int j; diff --git a/Algorithms/Strings/PatternMatching/RabinKarp.cs b/Algorithms/Strings/PatternMatching/RabinKarp.cs index a36f3fce..90aeda6e 100644 --- a/Algorithms/Strings/PatternMatching/RabinKarp.cs +++ b/Algorithms/Strings/PatternMatching/RabinKarp.cs @@ -47,7 +47,7 @@ public static List FindAllOccurrences(string text, string pattern) } // In the next step you iterate over the text with the pattern. - List occurrences = new(); + List occurrences = []; for (var i = 0; i + pattern.Length - 1 < text.Length; i++) { // In each step you calculate the hash value of the substring to be tested. diff --git a/Algorithms/Strings/Permutation.cs b/Algorithms/Strings/Permutation.cs index 970e3ec0..77f473c5 100644 --- a/Algorithms/Strings/Permutation.cs +++ b/Algorithms/Strings/Permutation.cs @@ -10,10 +10,10 @@ public static List GetEveryUniquePermutation(string word) { if (word.Length < 2) { - return new List - { + return + [ word, - }; + ]; } var result = new HashSet(); diff --git a/DataStructures.Tests/AVLTreeTests.cs b/DataStructures.Tests/AVLTreeTests.cs index ac84cc54..0e5f754f 100644 --- a/DataStructures.Tests/AVLTreeTests.cs +++ b/DataStructures.Tests/AVLTreeTests.cs @@ -5,9 +5,9 @@ namespace DataStructures.Tests; internal class AvlTreeTests { - private static readonly int[] Data = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }; - private static readonly int[] PreOrder = { 4, 2, 1, 3, 8, 6, 5, 7, 9, 10 }; - private static readonly int[] PostOrder = { 1, 3, 2, 5, 7, 6, 10, 9, 8, 4 }; + private static readonly int[] Data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; + private static readonly int[] PreOrder = [4, 2, 1, 3, 8, 6, 5, 7, 9, 10]; + private static readonly int[] PostOrder = [1, 3, 2, 5, 7, 6, 10, 9, 8, 4]; [Test] public void Constructor_UseCustomComparer_FormsCorrectTree() diff --git a/DataStructures.Tests/Fenwick/BinaryIndexedTreeTests.cs b/DataStructures.Tests/Fenwick/BinaryIndexedTreeTests.cs index ecfb8dd1..6bd9eda1 100644 --- a/DataStructures.Tests/Fenwick/BinaryIndexedTreeTests.cs +++ b/DataStructures.Tests/Fenwick/BinaryIndexedTreeTests.cs @@ -8,7 +8,7 @@ internal class BinaryIndexedTreeTests [Test] public void GetSum_CreateBITAndRequestSum_ReturnCorrect() { - int[] array = { 2, 1, 1, 3, 2, 3, 4, 5, 6, 7, 8, 9 }; + int[] array = [2, 1, 1, 3, 2, 3, 4, 5, 6, 7, 8, 9]; var tree = new BinaryIndexedTree(array); var expectedSum = 12; @@ -20,7 +20,7 @@ public void GetSum_CreateBITAndRequestSum_ReturnCorrect() [Test] public void UpdateTree_UpdateTreeAndRequestSum_GetSum() { - int[] array = { 2, 1, 1, 3, 2, 3, 4, 5, 6, 7, 8, 9 }; + int[] array = [2, 1, 1, 3, 2, 3, 4, 5, 6, 7, 8, 9]; var tree = new BinaryIndexedTree(array); var expectedSum = 18; diff --git a/DataStructures.Tests/Hashing/NumberTheory/PrimeNumberTests.cs b/DataStructures.Tests/Hashing/NumberTheory/PrimeNumberTests.cs index 77d730be..147f5627 100644 --- a/DataStructures.Tests/Hashing/NumberTheory/PrimeNumberTests.cs +++ 
b/DataStructures.Tests/Hashing/NumberTheory/PrimeNumberTests.cs @@ -6,7 +6,7 @@ namespace DataStructures.Tests.Hashing.NumberTheory; public static class PrimeNumberTests { private static readonly object[] IsPrimeSource = - { + [ new object[] { 0, false }, new object[] { 1, false }, new object[] { 2, true }, @@ -48,10 +48,10 @@ public static class PrimeNumberTests new object[] { 38, false }, new object[] { 39, false }, new object[] { 40, false }, - }; + ]; private static readonly object[] NextPrimeSource = - { + [ new object[] { 0, 1, false, 2 }, new object[] { 1, 1, false, 2 }, new object[] { 3, 1, false, 5 }, @@ -88,7 +88,7 @@ public static class PrimeNumberTests new object[] { 8, 1, true, 7 }, new object[] { 9, 1, true, 7 }, new object[] { 10, 1, true, 7 } - }; + ]; [TestCaseSource(nameof(IsPrimeSource))] public static void IsPrimeTest(int number, bool expected) diff --git a/DataStructures.Tests/Heap/MinMaxHeapTests.cs b/DataStructures.Tests/Heap/MinMaxHeapTests.cs index 180936c0..1ac6637f 100644 --- a/DataStructures.Tests/Heap/MinMaxHeapTests.cs +++ b/DataStructures.Tests/Heap/MinMaxHeapTests.cs @@ -6,11 +6,11 @@ namespace DataStructures.Tests.Heap; public static class MinMaxHeapTests { private static readonly object[] CollectionsSource = - { + [ new[] { 5, 10, -2, 0, 3, 13, 5, -8, 41, -5, -7, -60, -12 }, new[] { 'e', '4', 'x', 'D', '!', '$', '-', '_', '2', ')', 'Z', 'q' }, new[] { "abc", "abc", "xyz", "bcd", "klm", "opq", "ijk" }, - }; + ]; [Test] public static void CustomComparerTest() diff --git a/DataStructures.Tests/Probabilistic/BloomFilterTests.cs b/DataStructures.Tests/Probabilistic/BloomFilterTests.cs index 584ec8d8..dc9a2842 100644 --- a/DataStructures.Tests/Probabilistic/BloomFilterTests.cs +++ b/DataStructures.Tests/Probabilistic/BloomFilterTests.cs @@ -4,32 +4,20 @@ namespace DataStructures.Tests.Probabilistic; public class BloomFilterTests { - static readonly string[] TestNames = { "kal;jsnfka", "alkjsdfn;lakm", "aljfopiawjf", "afowjeaofeij", "oajwsefoaiwje", "aoiwjfaoiejmf", "aoijfoawiejf" }; + static readonly string[] TestNames = ["kal;jsnfka", "alkjsdfn;lakm", "aljfopiawjf", "afowjeaofeij", "oajwsefoaiwje", "aoiwjfaoiejmf", "aoijfoawiejf"]; - private class SimpleObject + private class SimpleObject(string name, int number) { - public string Name { get; set; } - public int Number { get; set; } - - public SimpleObject(string name, int number) - { - Name = name; - Number = number; - } + public string Name { get; set; } = name; + public int Number { get; set; } = number; } - private class SimpleObjectOverridenHash + private class SimpleObjectOverridenHash(string name, int number) { private const uint FnvPrime = 16777619; private const uint FnvOffsetBasis = 2166136261; - public string Name { get; set; } - public int Number { get; set; } - - public SimpleObjectOverridenHash(string name, int number) - { - Name = name; - Number = number; - } + public string Name { get; set; } = name; + public int Number { get; set; } = number; public override int GetHashCode() { diff --git a/DataStructures.Tests/Probabilistic/CountMinSketchTests.cs b/DataStructures.Tests/Probabilistic/CountMinSketchTests.cs index 2dc1add7..79363e62 100644 --- a/DataStructures.Tests/Probabilistic/CountMinSketchTests.cs +++ b/DataStructures.Tests/Probabilistic/CountMinSketchTests.cs @@ -4,16 +4,10 @@ namespace DataStructures.Tests.Probabilistic; public class CountMinSketchTests { - public class SimpleObject + public class SimpleObject(string name, int number) { - public string Name { get; set; } - public int 
Number { get; set; } - - public SimpleObject(string name, int number) - { - Name = name; - Number = number; - } + public string Name { get; set; } = name; + public int Number { get; set; } = number; } [Test] diff --git a/DataStructures.Tests/Probabilistic/HyperLogLogTest.cs b/DataStructures.Tests/Probabilistic/HyperLogLogTest.cs index 4e0db8c7..ce7b0148 100644 --- a/DataStructures.Tests/Probabilistic/HyperLogLogTest.cs +++ b/DataStructures.Tests/Probabilistic/HyperLogLogTest.cs @@ -8,7 +8,7 @@ public class HyperLogLogTest public void TestHyperLogLog() { var hll = new HyperLogLog(); - HashSet actual = new(); + HashSet actual = []; var rand = new Random(); var tolerance = .05; @@ -32,7 +32,7 @@ public void TestHyperLogLogMerge() var hll2 = new HyperLogLog(); var rand = new Random(); var tolerance = .05; - HashSet actual = new(); + HashSet actual = []; for (var i = 0; i < 5000; i++) { var k = rand.Next(20000); diff --git a/DataStructures.Tests/SegmentTrees/SegmentTreeApplyTests.cs b/DataStructures.Tests/SegmentTrees/SegmentTreeApplyTests.cs index 7a5c6c67..9df9edc4 100644 --- a/DataStructures.Tests/SegmentTrees/SegmentTreeApplyTests.cs +++ b/DataStructures.Tests/SegmentTrees/SegmentTreeApplyTests.cs @@ -5,7 +5,7 @@ namespace DataStructures.Tests.SegmentTrees; [TestFixture] public class SegmentTreeApplyTests { - private readonly SegmentTreeApply testTree = new(new[] { 8, 9, 1, 4, 8, 7, 2 }); + private readonly SegmentTreeApply testTree = new([8, 9, 1, 4, 8, 7, 2]); [Test] public void Apply_Query_Update_Query_Test() diff --git a/DataStructures.Tests/SegmentTrees/SegmentTreeTests.cs b/DataStructures.Tests/SegmentTrees/SegmentTreeTests.cs index 7dae5135..8cb484da 100644 --- a/DataStructures.Tests/SegmentTrees/SegmentTreeTests.cs +++ b/DataStructures.Tests/SegmentTrees/SegmentTreeTests.cs @@ -5,12 +5,12 @@ namespace DataStructures.Tests.SegmentTrees; [TestFixture] public class SegmentTreeTests { - private readonly SegmentTree testTree = new(new[] { 8, 9, 1, 4, 8, 7, 2 }); + private readonly SegmentTree testTree = new([8, 9, 1, 4, 8, 7, 2]); [Test] public void TreeArray_Test() { - int[] expectedArray = { 0, 39, 22, 17, 17, 5, 15, 2, 8, 9, 1, 4, 8, 7, 2, 0 }; + int[] expectedArray = [0, 39, 22, 17, 17, 5, 15, 2, 8, 9, 1, 4, 8, 7, 2, 0]; Assert.That(testTree.Tree, Is.EqualTo(expectedArray)); } diff --git a/DataStructures.Tests/SegmentTrees/SegmentTreeUpdateTest.cs b/DataStructures.Tests/SegmentTrees/SegmentTreeUpdateTest.cs index 28ca6d7c..95b43818 100644 --- a/DataStructures.Tests/SegmentTrees/SegmentTreeUpdateTest.cs +++ b/DataStructures.Tests/SegmentTrees/SegmentTreeUpdateTest.cs @@ -8,10 +8,10 @@ public class SegmentTreeUpdateTests [SetUp] public void Init() { - testTree = new SegmentTreeUpdate(new[] { 8, 9, 1, 4, 8, 7, 2 }); + testTree = new SegmentTreeUpdate([8, 9, 1, 4, 8, 7, 2]); } - private SegmentTreeUpdate testTree = new(new[] { 8, 9, 1, 4, 8, 7, 2 }); + private SegmentTreeUpdate testTree = new([8, 9, 1, 4, 8, 7, 2]); [TestCase(2, 3, 1, 4, 24)] [TestCase(0, 3, 1, 4, 22)] diff --git a/DataStructures.Tests/Stack/ArrayBasedStackTests.cs b/DataStructures.Tests/Stack/ArrayBasedStackTests.cs index b088bb1b..307c0674 100644 --- a/DataStructures.Tests/Stack/ArrayBasedStackTests.cs +++ b/DataStructures.Tests/Stack/ArrayBasedStackTests.cs @@ -9,14 +9,14 @@ public static class ArrayBasedStackTests [Test] public static void CountTest() { - var stack = new ArrayBasedStack(new[] { 0, 1, 2, 3, 4 }); + var stack = new ArrayBasedStack([0, 1, 2, 3, 4]); stack.Top.Should().Be(4); } [Test] public static void 
ClearTest() { - var stack = new ArrayBasedStack(new[] { 0, 1, 2, 3, 4 }); + var stack = new ArrayBasedStack([0, 1, 2, 3, 4]); stack.Clear(); @@ -26,7 +26,7 @@ public static void ClearTest() [Test] public static void ContainsTest() { - var stack = new ArrayBasedStack(new[] { 0, 1, 2, 3, 4 }); + var stack = new ArrayBasedStack([0, 1, 2, 3, 4]); Assert.Multiple(() => { @@ -41,7 +41,7 @@ public static void ContainsTest() [Test] public static void PeekTest() { - var stack = new ArrayBasedStack(new[] { 0, 1, 2, 3, 4 }); + var stack = new ArrayBasedStack([0, 1, 2, 3, 4]); Assert.Multiple(() => { @@ -54,7 +54,7 @@ public static void PeekTest() [Test] public static void PopTest() { - var stack = new ArrayBasedStack(new[] { 0, 1, 2, 3, 4 }); + var stack = new ArrayBasedStack([0, 1, 2, 3, 4]); Assert.Multiple(() => { diff --git a/DataStructures.Tests/TimelineTests.cs b/DataStructures.Tests/TimelineTests.cs index d434010a..a17d895d 100644 --- a/DataStructures.Tests/TimelineTests.cs +++ b/DataStructures.Tests/TimelineTests.cs @@ -92,7 +92,7 @@ public static void IndexerSetTest() { new DateTime(2015, 1, 1), "TestTime5" }, }; - timeline[new DateTime(2000, 1, 1)] = new[] { eventName }; + timeline[new DateTime(2000, 1, 1)] = [eventName]; timeline[new DateTime(2000, 1, 1)][0] .Should() @@ -657,9 +657,10 @@ public static void AddDateTimeAndTValueTest() //void Add(DateTime time, TValue v var eventDate = new DateTime(2015, 1, 1); const string eventName = "TestTime"; - var timeline = new Timeline(); - - timeline.Add(eventDate, eventName); + var timeline = new Timeline + { + { eventDate, eventName } + }; timeline.Count .Should() @@ -679,11 +680,13 @@ public static void AddDateTimeAndTValueArrayTest() //void Add(params (DateTime, var eventDate2 = new DateTime(1750, 1, 1); const string eventName2 = "TestTime2"; - var timeline = new Timeline(); - - timeline.Add( - (eventDate1, eventName1), - (eventDate2, eventName2)); + var timeline = new Timeline + { + { + (eventDate1, eventName1), + (eventDate2, eventName2) + } + }; using (new AssertionScope()) { @@ -707,9 +710,10 @@ public static void AddTimelineTest() //void Add(Timeline timeline) var eventDate = new DateTime(2015, 1, 1); const string eventName = "TestTime"; - var timeline = new Timeline(); - - timeline.Add(new Timeline(eventDate, eventName)); + var timeline = new Timeline + { + new Timeline(eventDate, eventName) + }; using (new AssertionScope()) { diff --git a/DataStructures.Tests/Tries/TrieTests.cs b/DataStructures.Tests/Tries/TrieTests.cs index b253de9f..4ed2c714 100644 --- a/DataStructures.Tests/Tries/TrieTests.cs +++ b/DataStructures.Tests/Tries/TrieTests.cs @@ -8,12 +8,12 @@ public static class TrieTests public static void FindWordInTrie() { // Arrange - string[] words = { + string[] words = [ "trie", "node", "none", "treatment", - }; + ]; // Act Trie trie = new(words); @@ -33,12 +33,12 @@ public static void FindWordInTrie() public static void InsertInTrie() { // Arrange - string[] words = { + string[] words = [ "trie", "node", "none", "treatment", - }; + ]; Trie trie = new(); @@ -59,12 +59,12 @@ public static void InsertInTrie() public static void RemoveFromTrie() { // Arrange - string[] words = { + string[] words = [ "trie", "node", "none", "treatment", - }; + ]; Trie trie = new(); diff --git a/DataStructures/AATree/AATree.cs b/DataStructures/AATree/AATree.cs index 1ee09ef4..6f91afd1 100644 --- a/DataStructures/AATree/AATree.cs +++ b/DataStructures/AATree/AATree.cs @@ -11,12 +11,16 @@ namespace DataStructures.AATree; /// More information: 
https://en.wikipedia.org/wiki/AA_tree . /// /// The type of key for the AA tree. -public class AaTree +/// +/// Initializes a new instance of the class with a custom comparer. +/// +/// The custom comparer to use to compare keys. +public class AaTree(Comparer customComparer) { /// /// The comparer function to use to compare the keys. /// - private readonly Comparer comparer; + private readonly Comparer comparer = customComparer; /// /// Initializes a new instance of the class. @@ -26,12 +30,6 @@ public AaTree() { } - /// - /// Initializes a new instance of the class with a custom comparer. - /// - /// The custom comparer to use to compare keys. - public AaTree(Comparer customComparer) => comparer = customComparer; - /// /// Gets the root of the tree. /// diff --git a/DataStructures/AATree/AATreeNode.cs b/DataStructures/AATree/AATreeNode.cs index 03565e67..3ae0e98b 100644 --- a/DataStructures/AATree/AATreeNode.cs +++ b/DataStructures/AATree/AATreeNode.cs @@ -4,28 +4,22 @@ namespace DataStructures.AATree; /// Generic node class for AATree. /// /// Type of key for node. -public class AaTreeNode +/// +/// Initializes a new instance of the class. +/// +/// The initial key of this node. +/// The level of this node. See for more details. +public class AaTreeNode(TKey key, int level) { - /// - /// Initializes a new instance of the class. - /// - /// The initial key of this node. - /// The level of this node. See for more details. - public AaTreeNode(TKey key, int level) - { - Key = key; - Level = level; - } - /// /// Gets or Sets key for this node. /// - public TKey Key { get; set; } + public TKey Key { get; set; } = key; /// /// Gets or Sets level for this node. /// - public int Level { get; set; } + public int Level { get; set; } = level; /// /// Gets or sets the left subtree of this node. diff --git a/DataStructures/AVLTree/AVLTree.cs b/DataStructures/AVLTree/AVLTree.cs index 1dfc4064..3902f692 100644 --- a/DataStructures/AVLTree/AVLTree.cs +++ b/DataStructures/AVLTree/AVLTree.cs @@ -157,7 +157,7 @@ public TKey GetMax() /// Keys in tree in order from smallest to largest. public IEnumerable GetKeysInOrder() { - List result = new(); + List result = []; InOrderWalk(root); return result; diff --git a/DataStructures/AVLTree/AVLTreeNode.cs b/DataStructures/AVLTree/AVLTreeNode.cs index a0684147..0d342876 100644 --- a/DataStructures/AVLTree/AVLTreeNode.cs +++ b/DataStructures/AVLTree/AVLTreeNode.cs @@ -5,12 +5,17 @@ namespace DataStructures.AVLTree; /// instance. /// /// The type of key for the node. -internal class AvlTreeNode +/// +/// Initializes a new instance of the +/// class. +/// +/// Key value for node. +internal class AvlTreeNode(TKey key) { /// /// Gets or sets key value of node. /// - public TKey Key { get; set; } + public TKey Key { get; set; } = key; /// /// Gets the balance factor of the node. @@ -32,16 +37,6 @@ internal class AvlTreeNode /// private int Height { get; set; } - /// - /// Initializes a new instance of the - /// class. - /// - /// Key value for node. - public AvlTreeNode(TKey key) - { - Key = key; - } - /// /// Update the node's height and balance factor. /// diff --git a/DataStructures/BinarySearchTree/BinarySearchTree.cs b/DataStructures/BinarySearchTree/BinarySearchTree.cs index c832071c..8a6f18a3 100644 --- a/DataStructures/BinarySearchTree/BinarySearchTree.cs +++ b/DataStructures/BinarySearchTree/BinarySearchTree.cs @@ -323,7 +323,7 @@ private IList GetKeysInOrder(BinarySearchTreeNode? 
node) { if (node is null) { - return new List(); + return []; } var result = new List(); @@ -342,11 +342,13 @@ private IList GetKeysPreOrder(BinarySearchTreeNode? node) { if (node is null) { - return new List(); + return []; } - var result = new List(); - result.Add(node.Key); + var result = new List + { + node.Key, + }; result.AddRange(GetKeysPreOrder(node.Left)); result.AddRange(GetKeysPreOrder(node.Right)); return result; @@ -361,7 +363,7 @@ private IList GetKeysPostOrder(BinarySearchTreeNode? node) { if (node is null) { - return new List(); + return []; } var result = new List(); diff --git a/DataStructures/BinarySearchTree/BinarySearchTreeNode.cs b/DataStructures/BinarySearchTree/BinarySearchTreeNode.cs index 865dd3d0..19b005d2 100644 --- a/DataStructures/BinarySearchTree/BinarySearchTreeNode.cs +++ b/DataStructures/BinarySearchTree/BinarySearchTreeNode.cs @@ -5,18 +5,16 @@ namespace DataStructures.BinarySearchTree; /// Keys for each node are immutable. /// /// Type of key for the node. Keys must implement IComparable. -public class BinarySearchTreeNode +/// +/// Initializes a new instance of the class. +/// +/// The key of this node. +public class BinarySearchTreeNode(TKey key) { - /// - /// Initializes a new instance of the class. - /// - /// The key of this node. - public BinarySearchTreeNode(TKey key) => Key = key; - /// /// Gets the key for this node. /// - public TKey Key { get; } + public TKey Key { get; } = key; /// /// Gets or sets the reference to a child node that precedes/comes before this node. diff --git a/DataStructures/Cache/LfuCache.cs b/DataStructures/Cache/LfuCache.cs index 47ca44ba..83ab1c51 100644 --- a/DataStructures/Cache/LfuCache.cs +++ b/DataStructures/Cache/LfuCache.cs @@ -23,7 +23,11 @@ namespace DataStructures.Cache; /// https://www.educative.io/answers/what-is-least-frequently-used-cache-replace-policy /// https://leetcode.com/problems/lfu-cache/ . /// -public class LfuCache where TKey : notnull +/// +/// Initializes a new instance of the class. +/// +/// The max number of items the cache can store. +public class LfuCache(int capacity = LfuCache.DefaultCapacity) where TKey : notnull { private class CachedItem { @@ -36,15 +40,15 @@ private class CachedItem private const int DefaultCapacity = 100; - private readonly int capacity; + private readonly int capacity = capacity; // Note that Dictionary stores LinkedListNode as it allows // removing the node from the LinkedList in O(1) time. - private readonly Dictionary> cache = new(); + private readonly Dictionary> cache = []; // Map frequency (number of times the item was requested or updated) // to the LRU linked list. - private readonly Dictionary> frequencies = new(); + private readonly Dictionary> frequencies = []; // Track the minimum frequency with non-empty linked list in frequencies. // When the last item with the minFrequency is promoted (after being requested or updated), @@ -52,15 +56,6 @@ private class CachedItem // When a new item is added, the minFrequency is set to 1. private int minFrequency = -1; - /// - /// Initializes a new instance of the class. - /// - /// The max number of items the cache can store. 
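Aside, not part of the diff: the LfuCache hunk above combines a primary constructor, a default parameter value taken from a private const, and an empty collection expression for the backing dictionary (in C# 12, `[]` is valid for an empty Dictionary, while non-empty dictionary expressions are not). A hedged sketch with an invented TinyCache type:

using System.Collections.Generic;

// Sketch only; TinyCache and its members are illustrative, not the LfuCache/LruCache API from this change.
public class TinyCache(int capacity = TinyCache.DefaultCapacity)
{
    private const int DefaultCapacity = 4;

    private readonly int capacity = capacity;

    // Empty collection expression in place of `new Dictionary<string, string>()`.
    private readonly Dictionary<string, string> items = [];

    public void Put(string key, string value)
    {
        if (items.Count >= this.capacity && !items.ContainsKey(key))
        {
            return; // eviction policy elided; the real caches evict an entry here
        }

        items[key] = value;
    }

    public bool TryGet(string key, out string? value) => items.TryGetValue(key, out value);
}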
- public LfuCache(int capacity = DefaultCapacity) - { - this.capacity = capacity; - } - public bool Contains(TKey key) => cache.ContainsKey(key); /// diff --git a/DataStructures/Cache/LruCache.cs b/DataStructures/Cache/LruCache.cs index eb0e4631..89108500 100644 --- a/DataStructures/Cache/LruCache.cs +++ b/DataStructures/Cache/LruCache.cs @@ -23,7 +23,11 @@ namespace DataStructures.Cache; /// just remove the last node from the linked list in the method Put /// (replace RemoveFirst with RemoveLast). /// -public class LruCache where TKey : notnull +/// +/// Initializes a new instance of the class. +/// +/// The max number of items the cache can store. +public class LruCache(int capacity = LruCache.DefaultCapacity) where TKey : notnull { private class CachedItem { @@ -34,22 +38,13 @@ private class CachedItem private const int DefaultCapacity = 100; - private readonly int capacity; + private readonly int capacity = capacity; // Note that Dictionary stores LinkedListNode as it allows // removing the node from the LinkedList in O(1) time. - private readonly Dictionary> cache = new(); + private readonly Dictionary> cache = []; private readonly LinkedList lruList = new(); - /// - /// Initializes a new instance of the class. - /// - /// The max number of items the cache can store. - public LruCache(int capacity = DefaultCapacity) - { - this.capacity = capacity; - } - public bool Contains(TKey key) => cache.ContainsKey(key); /// diff --git a/DataStructures/Hashing/Entry.cs b/DataStructures/Hashing/Entry.cs index 745e4aa7..580f7fc9 100644 --- a/DataStructures/Hashing/Entry.cs +++ b/DataStructures/Hashing/Entry.cs @@ -8,15 +8,9 @@ namespace DataStructures.Hashing; /// /// This class is used to store the key-value pairs in the hash table. /// -public class Entry +public class Entry(TKey key, TValue value) { - public TKey? Key { get; set; } + public TKey? Key { get; set; } = key; - public TValue? Value { get; set; } - - public Entry(TKey key, TValue value) - { - Key = key; - Value = value; - } + public TValue? Value { get; set; } = value; } diff --git a/DataStructures/Heap/BinaryHeap.cs b/DataStructures/Heap/BinaryHeap.cs index e69df22b..24b80f9a 100644 --- a/DataStructures/Heap/BinaryHeap.cs +++ b/DataStructures/Heap/BinaryHeap.cs @@ -30,7 +30,7 @@ public class BinaryHeap /// public BinaryHeap() { - data = new List(); + data = []; comparer = Comparer.Default; } @@ -40,7 +40,7 @@ public BinaryHeap() /// The custom comparing function to use to compare elements. public BinaryHeap(Comparer customComparer) { - data = new List(); + data = []; comparer = customComparer; } diff --git a/DataStructures/Heap/PairingHeap/PairingHeap.cs b/DataStructures/Heap/PairingHeap/PairingHeap.cs index 66fcb81d..bc69cb03 100644 --- a/DataStructures/Heap/PairingHeap/PairingHeap.cs +++ b/DataStructures/Heap/PairingHeap/PairingHeap.cs @@ -6,22 +6,16 @@ namespace DataStructures.Heap.PairingHeap; /// A pairing minMax heap implementation. /// /// Base type. 
-public class PairingHeap : IEnumerable where T : IComparable +public class PairingHeap(Sorting sortDirection = Sorting.Ascending) : IEnumerable where T : IComparable { - private readonly Sorting sorting; - private readonly IComparer comparer; - private readonly Dictionary>> mapping = new(); + private readonly Sorting sorting = sortDirection; + private readonly IComparer comparer = new PairingNodeComparer(sortDirection, Comparer.Default); + private readonly Dictionary>> mapping = []; private PairingHeapNode root = null!; public int Count { get; private set; } - public PairingHeap(Sorting sortDirection = Sorting.Ascending) - { - sorting = sortDirection; - comparer = new PairingNodeComparer(sortDirection, Comparer.Default); - } - /// /// Insert a new Node [O(1)]. /// diff --git a/DataStructures/Heap/PairingHeap/PairingHeapNode.cs b/DataStructures/Heap/PairingHeap/PairingHeapNode.cs index f0ab4865..ebfd561c 100644 --- a/DataStructures/Heap/PairingHeap/PairingHeapNode.cs +++ b/DataStructures/Heap/PairingHeap/PairingHeapNode.cs @@ -4,14 +4,9 @@ namespace DataStructures.Heap.PairingHeap; /// Node represented the value and connections. /// /// Type, supported comparing. -public class PairingHeapNode +public class PairingHeapNode(T value) { - public PairingHeapNode(T value) - { - Value = value; - } - - public T Value { get; set; } + public T Value { get; set; } = value; public PairingHeapNode ChildrenHead { get; set; } = null!; diff --git a/DataStructures/Heap/PairingHeap/PairingNodeComparer.cs b/DataStructures/Heap/PairingHeap/PairingNodeComparer.cs index 2f5ed52c..9edab764 100644 --- a/DataStructures/Heap/PairingHeap/PairingNodeComparer.cs +++ b/DataStructures/Heap/PairingHeap/PairingNodeComparer.cs @@ -4,16 +4,10 @@ namespace DataStructures.Heap.PairingHeap; /// Node comparer. /// /// Node type. -public class PairingNodeComparer : IComparer where T : IComparable +public class PairingNodeComparer(Sorting sortDirection, IComparer comparer) : IComparer where T : IComparable { - private readonly bool isMax; - private readonly IComparer nodeComparer; - - public PairingNodeComparer(Sorting sortDirection, IComparer comparer) - { - isMax = sortDirection == Sorting.Descending; - nodeComparer = comparer; - } + private readonly bool isMax = sortDirection == Sorting.Descending; + private readonly IComparer nodeComparer = comparer; public int Compare(T? x, T? y) { diff --git a/DataStructures/InvertedIndex.cs b/DataStructures/InvertedIndex.cs index 4d25ce1e..bd9b0fe7 100644 --- a/DataStructures/InvertedIndex.cs +++ b/DataStructures/InvertedIndex.cs @@ -9,7 +9,7 @@ namespace DataStructures; /// public class InvertedIndex { - private readonly Dictionary> invertedIndex = new(); + private readonly Dictionary> invertedIndex = []; /// /// Build inverted index with source name and source content. @@ -23,7 +23,7 @@ public void AddToIndex(string sourceName, string sourceContent) { if (!invertedIndex.ContainsKey(word)) { - invertedIndex.Add(word, new List { sourceName }); + invertedIndex.Add(word, [sourceName]); } else { diff --git a/DataStructures/LinkedList/DoublyLinkedList/DoublyLinkedListNode.cs b/DataStructures/LinkedList/DoublyLinkedList/DoublyLinkedListNode.cs index 85951407..7351f66a 100644 --- a/DataStructures/LinkedList/DoublyLinkedList/DoublyLinkedListNode.cs +++ b/DataStructures/LinkedList/DoublyLinkedList/DoublyLinkedListNode.cs @@ -4,18 +4,16 @@ namespace DataStructures.LinkedList.DoublyLinkedList; /// Generic node class for Doubly Linked List. /// /// Generic type. 
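Aside, not part of the diff: the PairingHeap and InvertedIndex hunks above show a single primary-constructor parameter feeding several field initializers, and a one-element collection expression (`[sourceName]`). A standalone sketch follows; WordIndex and its members are invented for illustration.

using System;
using System.Collections.Generic;
using System.Linq;

// Sketch only; not the repository's InvertedIndex implementation.
public class WordIndex(StringComparer keyComparer)
{
    // One constructor parameter initializes two pieces of state, as the
    // PairingHeap hunk does with its sort direction.
    private readonly Dictionary<string, List<string>> index = new(keyComparer);
    private readonly StringComparer comparer = keyComparer;

    public void Add(string word, string source)
    {
        if (!index.TryGetValue(word, out var sources))
        {
            index[word] = [source]; // collection expression builds the single-element list
        }
        else if (!sources.Contains(source, comparer))
        {
            sources.Add(source);
        }
    }
}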
-public class DoublyLinkedListNode +/// +/// Initializes a new instance of the class. +/// +/// Data to be stored in this node. +public class DoublyLinkedListNode(T data) { - /// - /// Initializes a new instance of the class. - /// - /// Data to be stored in this node. - public DoublyLinkedListNode(T data) => Data = data; - /// /// Gets the data stored on this node. /// - public T Data { get; } + public T Data { get; } = data; /// /// Gets or sets the reference to the next node in the Doubly Linked List. diff --git a/DataStructures/LinkedList/SinglyLinkedList/SinglyLinkedListNode.cs b/DataStructures/LinkedList/SinglyLinkedList/SinglyLinkedListNode.cs index 4d0aa4cb..685d240e 100644 --- a/DataStructures/LinkedList/SinglyLinkedList/SinglyLinkedListNode.cs +++ b/DataStructures/LinkedList/SinglyLinkedList/SinglyLinkedListNode.cs @@ -1,14 +1,8 @@ namespace DataStructures.LinkedList.SinglyLinkedList; -public class SinglyLinkedListNode +public class SinglyLinkedListNode(T data) { - public SinglyLinkedListNode(T data) - { - Data = data; - Next = null; - } - - public T Data { get; } + public T Data { get; } = data; public SinglyLinkedListNode? Next { get; set; } } diff --git a/DataStructures/LinkedList/SkipList/SkipListNode.cs b/DataStructures/LinkedList/SkipList/SkipListNode.cs index 73fc3efd..ad5fe45d 100644 --- a/DataStructures/LinkedList/SkipList/SkipListNode.cs +++ b/DataStructures/LinkedList/SkipList/SkipListNode.cs @@ -3,21 +3,13 @@ namespace DataStructures.LinkedList.SkipList; [DebuggerDisplay("Key = {Key}, Height = {Height}, Value = {Value}")] -internal class SkipListNode +internal class SkipListNode(int key, TValue? value, int height) { - public SkipListNode(int key, TValue? value, int height) - { - Key = key; - Value = value; - Height = height; - Next = new SkipListNode[height]; - } + public int Key { get; } = key; - public int Key { get; } + public TValue? Value { get; set; } = value; - public TValue? Value { get; set; } + public SkipListNode[] Next { get; } = new SkipListNode[height]; - public SkipListNode[] Next { get; } - - public int Height { get; } + public int Height { get; } = height; } diff --git a/DataStructures/Probabilistic/HyperLogLog.cs b/DataStructures/Probabilistic/HyperLogLog.cs index aa314e48..16b883b0 100644 --- a/DataStructures/Probabilistic/HyperLogLog.cs +++ b/DataStructures/Probabilistic/HyperLogLog.cs @@ -14,7 +14,7 @@ public HyperLogLog() { var m = 1 << P; registers = new int[m]; - setRegisters = new HashSet(); + setRegisters = []; } /// diff --git a/DataStructures/RedBlackTree/RedBlackTreeNode.cs b/DataStructures/RedBlackTree/RedBlackTreeNode.cs index 91f42df0..e0ad3e7d 100644 --- a/DataStructures/RedBlackTree/RedBlackTreeNode.cs +++ b/DataStructures/RedBlackTree/RedBlackTreeNode.cs @@ -20,12 +20,17 @@ public enum NodeColor : byte /// Generic class to represent nodes in an instance. /// /// The type of key for the node. -public class RedBlackTreeNode +/// +/// Initializes a new instance of the class. +/// +/// Key value for node. +/// Parent of node. +public class RedBlackTreeNode(TKey key, RedBlackTreeNode? parent) { /// /// Gets or sets key value of node. /// - public TKey Key { get; set; } + public TKey Key { get; set; } = key; /// /// Gets or sets the color of the node. @@ -35,7 +40,7 @@ public class RedBlackTreeNode /// /// Gets or sets the parent of the node. /// - public RedBlackTreeNode? Parent { get; set; } + public RedBlackTreeNode? Parent { get; set; } = parent; /// /// Gets or sets left child of the node. 
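Aside, not part of the diff: the SkipListNode hunk above also uses a primary-constructor parameter inside another member's initializer (`new SkipListNode[height]`). A minimal sketch with an invented LevelNode type:

// Sketch only; LevelNode is illustrative and not part of this change.
internal class LevelNode<T>(T value, int height)
{
    public T Value { get; } = value;

    public int Height { get; } = height;

    // The parameter can appear in any initializer expression, which is what
    // removed the explicit constructor that allocated this array.
    public LevelNode<T>?[] Next { get; } = new LevelNode<T>?[height];
}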
@@ -46,15 +51,4 @@ public class RedBlackTreeNode /// Gets or sets the right child of the node. /// public RedBlackTreeNode? Right { get; set; } - - /// - /// Initializes a new instance of the class. - /// - /// Key value for node. - /// Parent of node. - public RedBlackTreeNode(TKey key, RedBlackTreeNode? parent) - { - Key = key; - Parent = parent; - } } diff --git a/DataStructures/ScapegoatTree/Node.cs b/DataStructures/ScapegoatTree/Node.cs index f652a1b7..167e3eb5 100644 --- a/DataStructures/ScapegoatTree/Node.cs +++ b/DataStructures/ScapegoatTree/Node.cs @@ -4,12 +4,12 @@ namespace DataStructures.ScapegoatTree; /// Scapegoat tree node class. /// /// Scapegoat tree node key type. -public class Node where TKey : IComparable +public class Node(TKey key) where TKey : IComparable { private Node? right; private Node? left; - public TKey Key { get; } + public TKey Key { get; } = key; public Node? Right { @@ -39,8 +39,6 @@ public Node? Left } } - public Node(TKey key) => Key = key; - public Node(TKey key, Node? right, Node? left) : this(key) { diff --git a/DataStructures/SegmentTrees/SegmentTreeUpdate.cs b/DataStructures/SegmentTrees/SegmentTreeUpdate.cs index fc3a7734..db7be61c 100644 --- a/DataStructures/SegmentTrees/SegmentTreeUpdate.cs +++ b/DataStructures/SegmentTrees/SegmentTreeUpdate.cs @@ -3,18 +3,13 @@ namespace DataStructures.SegmentTrees; /// /// This is an extension of a segment tree, which allows the update of a single element. /// -public class SegmentTreeUpdate : SegmentTree +/// +/// Initializes a new instance of the class. +/// Runtime complexity: O(n) where n equals the array-length. +/// +/// Array on which the queries should be made. +public class SegmentTreeUpdate(int[] arr) : SegmentTree(arr) { - /// - /// Initializes a new instance of the class. - /// Runtime complexity: O(n) where n equals the array-length. - /// - /// Array on which the queries should be made. - public SegmentTreeUpdate(int[] arr) - : base(arr) - { - } - /// /// Updates a single element of the input array. /// Changes the leaf first and updates its parents afterwards. diff --git a/DataStructures/SortedList.cs b/DataStructures/SortedList.cs index d9250025..fc107882 100644 --- a/DataStructures/SortedList.cs +++ b/DataStructures/SortedList.cs @@ -6,10 +6,14 @@ namespace DataStructures; /// Implementation of SortedList using binary search. /// /// Generic Type. -public class SortedList : IEnumerable +/// +/// Initializes a new instance of the class. +/// +/// Comparer user for binary search. +public class SortedList(IComparer comparer) : IEnumerable { - private readonly IComparer comparer; - private readonly List memory; + private readonly IComparer comparer = comparer; + private readonly List memory = []; /// /// Initializes a new instance of the class. Uses a Comparer.Default for type T. @@ -24,16 +28,6 @@ public SortedList() /// public int Count => memory.Count; - /// - /// Initializes a new instance of the class. - /// - /// Comparer user for binary search. - public SortedList(IComparer comparer) - { - memory = new List(); - this.comparer = comparer; - } - /// /// Adds new item to instance, maintaining the order. /// diff --git a/DataStructures/Timeline.cs b/DataStructures/Timeline.cs index d8628526..a0e110d2 100644 --- a/DataStructures/Timeline.cs +++ b/DataStructures/Timeline.cs @@ -26,7 +26,7 @@ public class Timeline : ICollection<(DateTime Time, TValue Value)>, IEqu /// /// Inner collection storing the timeline events as key-tuples. 
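Aside, not part of the diff: the SegmentTreeUpdate hunk above forwards its primary-constructor parameter straight to the base type (`: SegmentTree(arr)`). A standalone sketch of that base-call form, using invented Shape and Circle types:

// Sketch only; Shape and Circle are illustrative, not repository types.
public class Shape(string name)
{
    public string Name { get; } = name;
}

// The derived primary constructor passes `name` on to the base constructor,
// replacing a constructor whose only job was to call base(...).
public class Circle(string name, double radius) : Shape(name)
{
    public double Radius { get; } = radius;
}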
/// - private readonly List<(DateTime Time, TValue Value)> timeline = new(); + private readonly List<(DateTime Time, TValue Value)> timeline = []; /// /// Initializes a new instance of the class. @@ -41,10 +41,10 @@ public Timeline() /// The time at which the given event occurred. /// The event's content. public Timeline(DateTime time, TValue value) - => timeline = new List<(DateTime, TValue)> - { + => timeline = + [ (time, value), - }; + ]; /// /// Initializes a new instance of the class containing the provided events diff --git a/DataStructures/Tries/TrieNode.cs b/DataStructures/Tries/TrieNode.cs index fbbed64e..de42a8cd 100644 --- a/DataStructures/Tries/TrieNode.cs +++ b/DataStructures/Tries/TrieNode.cs @@ -21,7 +21,7 @@ internal TrieNode(char value) /// The parent or ancestor of the node in the trie structure. internal TrieNode(char value, TrieNode? parent) { - Children = new SortedList(); + Children = []; Parent = parent; Value = value; } diff --git a/DataStructures/UnrolledList/UnrolledLinkedList.cs b/DataStructures/UnrolledList/UnrolledLinkedList.cs index 310697a7..fc1f6417 100644 --- a/DataStructures/UnrolledList/UnrolledLinkedList.cs +++ b/DataStructures/UnrolledList/UnrolledLinkedList.cs @@ -5,23 +5,18 @@ namespace DataStructures.UnrolledList; /// all of the same size where each is so small that the insertion /// or deletion is fast and quick, but large enough to fill the cache line. /// -public class UnrolledLinkedList +/// +/// Initializes a new instance of the class. +/// Create a unrolled list with start chunk size. +/// +/// The size of signe chunk. +public class UnrolledLinkedList(int chunkSize) { - private readonly int sizeNode; + private readonly int sizeNode = chunkSize + 1; private UnrolledLinkedListNode start = null!; private UnrolledLinkedListNode end = null!; - /// - /// Initializes a new instance of the class. - /// Create a unrolled list with start chunk size. - /// - /// The size of signe chunk. - public UnrolledLinkedList(int chunkSize) - { - sizeNode = chunkSize + 1; - } - /// /// Add value to list [O(n)]. /// @@ -66,7 +61,7 @@ public void Insert(int value) public IEnumerable GetRolledItems() { UnrolledLinkedListNode pointer = start; - List result = new(); + List result = []; while (pointer != null) { diff --git a/DataStructures/UnrolledList/UnrolledLinkedListNode.cs b/DataStructures/UnrolledList/UnrolledLinkedListNode.cs index 7c6153c8..c13f9c9a 100644 --- a/DataStructures/UnrolledList/UnrolledLinkedListNode.cs +++ b/DataStructures/UnrolledList/UnrolledLinkedListNode.cs @@ -3,19 +3,11 @@ namespace DataStructures.UnrolledList; /// /// Single node with array buffer for unrolled list. 
/// -public class UnrolledLinkedListNode +public class UnrolledLinkedListNode(int nodeSize) { - private readonly int[] array; + private readonly int[] array = new int[nodeSize]; - public UnrolledLinkedListNode(int nodeSize) - { - Next = null!; - - Count = 0; - array = new int[nodeSize]; - } - - public UnrolledLinkedListNode Next { get; set; } + public UnrolledLinkedListNode Next { get; set; } = null!; public int Count { get; set; } diff --git a/Utilities.Tests/Extensions/MatrixExtensionsTests.cs b/Utilities.Tests/Extensions/MatrixExtensionsTests.cs index 83793a54..32514e97 100644 --- a/Utilities.Tests/Extensions/MatrixExtensionsTests.cs +++ b/Utilities.Tests/Extensions/MatrixExtensionsTests.cs @@ -3,7 +3,7 @@ namespace Utilities.Tests.Extensions; public class MatrixExtensionsTests { private static readonly object[] MatrixMultiplyTestCases = - { + [ new object[] { new double[,] { { 2, 2, -1 }, { 0, -2, -1 }, { 0, 0, 5 } }, @@ -16,10 +16,10 @@ public class MatrixExtensionsTests new double[,] { { 3, 2, 5 }, { 4, -1, 3 }, { 9, 6, 5 } }, new double[,] { { 11, -22, 29 }, { 9, -27, 32 }, { 13, -17, 26 } }, }, - }; + ]; private static readonly object[] MatrixTransposeTestCases = - { + [ new object[] { new double[,] { { 2, 2, 3 } }, @@ -30,10 +30,10 @@ public class MatrixExtensionsTests new double[,] { { 5, 8 }, { 6, 9 } }, new double[,] { { 5, 6 }, { 8, 9 } }, }, - }; + ]; private static readonly object[] MatrixSubtractTestCases = - { + [ new object[] { new double[,] { { 0, 0 }, { 0, 0 } }, @@ -52,7 +52,7 @@ public class MatrixExtensionsTests new double[,] { { 2, 5, 12 }, { 0, 5, 1 }, { 1, 1, 4 } }, new double[,] { { -3, -7, -12 }, { 2, -8, 1 }, { 2, 3, -3 } }, }, - }; + ]; [Test] public void Multiply_ShouldThrowInvalidOperationException_WhenOperandsAreNotCompatible()
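Aside, not part of the diff: the MatrixExtensionsTests hunks above close out the conversion of NUnit TestCaseSource arrays to collection expressions. A hedged, self-contained sketch of that test-data pattern follows; SumTests and its cases are invented.

using System.Linq;
using NUnit.Framework;

// Sketch only; SumTests is illustrative and not part of this repository.
public class SumTests
{
    // Collection expression replaces the old `= { ... };` array initializer.
    private static readonly object[] SumCases =
    [
        new object[] { new[] { 1, 2, 3 }, 6 },
        new object[] { new[] { -1, 1 }, 0 },
    ];

    [TestCaseSource(nameof(SumCases))]
    public void Sum_ReturnsExpected(int[] input, int expected)
    {
        Assert.That(input.Sum(), Is.EqualTo(expected));
    }
}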