# Test Enzyme integration with SciMLOperators
# Verifies that gradients can be computed through operators with parameter-dependent coefficients
# Related to issue #319

using Enzyme, LinearAlgebra, SciMLOperators, SparseArrays, Test
| 6 | + |
const T = Float64  # element type shared by every test below
| 8 | + |
# Test basic operator autodiff with Enzyme
# Reverse-mode gradients through ScalarOperator coefficients must be finite
# and non-trivial (issue #319 produced NaN gradients here).
@testset "Enzyme autodiff with ScalarOperator" begin
    # Parameter-dependent scalar coefficients; SciMLOperators update signature
    # is (current_value, u, p, t) -> new_value.
    coef1(a, u, p, t) = -p[1]
    coef2(a, u, p, t) = p[2]

    A1_data = sparse(T[0.0 1.0; 0.0 0.0])
    A2_data = sparse(T[0.0 0.0; 1.0 0.0])

    c1 = ScalarOperator(one(T), coef1)
    c2 = ScalarOperator(one(T), coef2)

    # U(p) = -p[1] * A1 + p[2] * A2
    U = c1 * MatrixOperator(A1_data) + c2 * MatrixOperator(A2_data)

    # Scalar-valued loss so reverse-mode AD has a single seed.
    function loss(p)
        x = T[3.0, 4.0]
        t = zero(T)

        # Rebuild the operator out-of-place with the current parameters,
        # then apply it to the test vector.
        U_updated = update_coefficients(U, x, p, t)
        y = U_updated * x

        return sum(abs2, y)
    end

    p = T[1.0, 2.0]
    dp = Enzyme.make_zero(p)

    # The gradient is accumulated into `dp`; autodiff's return value is unused.
    Enzyme.autodiff(Enzyme.Reverse, loss, Active, Duplicated(p, dp))

    # Gradient should be finite — NaN was the original bug.
    @test !any(isnan, dp)
    @test !any(isinf, dp)

    # Gradient should be non-zero, since U genuinely depends on p.
    @test any(!iszero, dp)
end
| 48 | + |
# Gradients through a MatrixOperator whose matrix is rescaled by a parameter.
@testset "Enzyme autodiff with MatrixOperator" begin
    # Out-of-place matrix update: (A, u, p, t) -> new matrix p[1] * A.
    update_func(A, u, p, t) = p[1] * A

    A_data = T[1.0 2.0; 3.0 4.0]
    L = MatrixOperator(A_data; update_func = update_func)

    # Scalar loss: ||L(p) * x||^2 with a fixed test vector.
    function loss2(p)
        x = T[1.0, 1.0]
        t = zero(T)

        L_updated = update_coefficients(L, x, p, t)
        y = L_updated * x

        return sum(abs2, y)
    end

    p = T[2.0]
    dp = Enzyme.make_zero(p)

    # Gradient lands in `dp`; autodiff's return value is unused.
    Enzyme.autodiff(Enzyme.Reverse, loss2, Active, Duplicated(p, dp))

    # Gradient should be finite and non-zero.
    @test !any(isnan, dp)
    @test !any(isinf, dp)
    @test any(!iszero, dp)
end
| 76 | + |
# Gradients through an operator composition α(p) * A + B.
@testset "Enzyme autodiff with composed operators" begin
    coef(a, u, p, t) = p[1]

    A = MatrixOperator(T[1.0 0.0; 0.0 1.0])
    B = MatrixOperator(T[2.0 1.0; 1.0 2.0])
    α = ScalarOperator(one(T), coef)

    # Composed operator: C(p) = p[1] * A + B
    C = α * A + B

    # loss3(p) = sum((p[1]*A + B) * x) is linear in p[1].
    function loss3(p)
        x = T[1.0, 2.0]
        t = zero(T)

        C_updated = update_coefficients(C, x, p, t)
        y = C_updated * x

        return sum(y)
    end

    p = T[3.0]
    dp = Enzyme.make_zero(p)

    # Gradient lands in `dp`; autodiff's return value is unused.
    Enzyme.autodiff(Enzyme.Reverse, loss3, Active, Duplicated(p, dp))

    # Gradient should be finite.
    @test !any(isnan, dp)
    @test !any(isinf, dp)

    # d(loss3)/dp[1] = sum(A * x) = 3, so the gradient must be non-zero
    # (matches the checks in the sibling testsets).
    @test any(!iszero, dp)
end