This repository was archived by the owner on Sep 28, 2024. It is now read-only.
File tree Expand file tree Collapse file tree 5 files changed +27
-7
lines changed
example/FlowOverCircle/src Expand file tree Collapse file tree 5 files changed +27
-7
lines changed Original file line number Diff line number Diff line change @@ -31,7 +31,7 @@ function train()
31
31
Dense (64 , 1 ),
32
32
) |> device
33
33
34
- loss (𝐱, 𝐲) = sum (abs2, 𝐲 .- m (𝐱)) / size (𝐱)[ end ]
34
+ loss (𝐱, 𝐲) = l₂loss ( m (𝐱), 𝐲)
35
35
36
36
opt = Flux. Optimiser (WeightDecay (1f-4 ), Flux. ADAM (1f-3 ))
37
37
Original file line number Diff line number Diff line change @@ -14,6 +14,7 @@ module NeuralOperators
14
14
15
15
include (" Transform/Transform.jl" )
16
16
include (" operator_kernel.jl" )
17
+ include (" loss.jl" )
17
18
include (" model.jl" )
18
19
include (" DeepONet.jl" )
19
20
include (" subnets.jl" )
Original file line number Diff line number Diff line change
1
export l₂loss

"""
    l₂loss(𝐲̂, 𝐲; agg=mean, grid_normalize=true)

Compute the l₂ loss between a prediction `𝐲̂` and a target `𝐲`.

The arrays are assumed to be laid out as `(channel, grid..., batch)`: the l₂
norm of the error is taken over the interior (grid) axes, and `agg` (default
`mean`) reduces the per-sample norms over the remaining axes. When
`grid_normalize` is `true` the aggregated value is divided by
`prod(2:(ndims(𝐲)-1))`.

# Arguments
- `𝐲̂`: predicted array.
- `𝐲`: target array with the same layout as `𝐲̂`.

# Keywords
- `agg`: aggregation applied to the per-sample norms (default `mean`).
- `grid_normalize`: whether to divide by the normalization factor (default `true`).
"""
function l₂loss(𝐲̂, 𝐲; agg=mean, grid_normalize=true)
    # Grid axes: everything strictly between the channel axis (1) and the
    # batch axis (end).
    spatial_dims = 2:(ndims(𝐲)-1)
    per_sample = .√(sum(abs2, 𝐲̂ - 𝐲, dims=spatial_dims))
    aggregated = agg(per_sample)

    # NOTE(review): `prod(spatial_dims)` multiplies the axis *indices*
    # (e.g. 2*3 = 6 for a 4-D array), not the grid sizes along those axes —
    # confirm this normalizer is intended; the accompanying test pins it.
    grid_normalize || return aggregated
    return aggregated / prod(spatial_dims)
end
Original file line number Diff line number Diff line change
1
@testset "loss" begin
    # (channel, grid_x, grid_y, batch) layout, matching l₂loss's convention.
    target = rand(1, 3, 3, 5)
    prediction = rand(1, 3, 3, 5)

    # Reference value computed directly: per-sample l₂ norm over the grid
    # axes, averaged over the batch, then divided by prod of the axis range.
    grid_dims = 2:3
    reference = mean(.√(sum(abs2, prediction - target, dims=grid_dims)))

    @test l₂loss(prediction, target) ≈ reference / prod(grid_dims)
end
Original file line number Diff line number Diff line change @@ -4,19 +4,21 @@ using Flux
4
4
using GeometricFlux
5
5
using Graphs
6
6
using Zygote
7
+ using Statistics
7
8
using Test
8
9
9
10
CUDA. allowscalar (false )
10
11
11
12
cuda_tests = [
12
- " cuda" ,
13
+ " cuda.jl " ,
13
14
]
14
15
15
16
tests = [
16
- " Transform/Transform" ,
17
- " operator_kernel" ,
18
- " model" ,
19
- " deeponet" ,
17
+ " Transform/Transform.jl" ,
18
+ " operator_kernel.jl" ,
19
+ " loss.jl" ,
20
+ " model.jl" ,
21
+ " deeponet.jl" ,
20
22
]
21
23
22
24
if CUDA. functional ()
27
29
28
30
@testset " NeuralOperators.jl" begin
29
31
for t in tests
30
- include (" $(t) .jl " )
32
+ include (t )
31
33
end
32
34
end
33
35
You can’t perform that action at this time.
0 commit comments