
Commit bab43ee

rename gradtest to test_layer

1 parent 9d48678

File tree: 2 files changed (+14, -14)

test/layers/conv.jl

Lines changed: 13 additions & 13 deletions
@@ -27,12 +27,12 @@
     @testset "GCNConv" begin
         l = GCNConv(in_channel => out_channel)
         for g in test_graphs
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end

         l = GCNConv(in_channel => out_channel, tanh, bias=false)
         for g in test_graphs
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end
     end

@@ -45,10 +45,10 @@
         @test l.k == k
         for g in test_graphs
             if g === g_single_vertex && GRAPH_T == :dense
-                @test_broken gradtest(l, g, rtol=1e-5, broken_grad_fields=[:weight], test_gpu=false)
+                @test_broken test_layer(l, g, rtol=1e-5, broken_grad_fields=[:weight], test_gpu=false)
             else
-                gradtest(l, g, rtol=1e-5, broken_grad_fields=[:weight], test_gpu=false)
-                @test_broken gradtest(l, g, rtol=1e-5, broken_grad_fields=[:weight], test_gpu=true)
+                test_layer(l, g, rtol=1e-5, broken_grad_fields=[:weight], test_gpu=false)
+                @test_broken test_layer(l, g, rtol=1e-5, broken_grad_fields=[:weight], test_gpu=true)
             end
         end

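@test_broken (from Julia's Test stdlib) records the enclosed test as Broken when it fails and errors if it unexpectedly starts passing, which is how the known-bad GPU path is tracked above. A self-contained illustration:

using Test

@testset "broken example" begin
    @test 1 + 1 == 2          # passes normally
    @test_broken 1 + 1 == 3   # known failure, recorded as Broken
end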
@@ -61,12 +61,12 @@
     @testset "GraphConv" begin
         l = GraphConv(in_channel => out_channel)
         for g in test_graphs
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end

         l = GraphConv(in_channel => out_channel, relu, bias=false)
         for g in test_graphs
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end

         @testset "bias=false" begin
@@ -80,7 +80,7 @@
         for heads in (1, 2), concat in (true, false)
             l = GATConv(in_channel => out_channel; heads, concat)
             for g in test_graphs
-                gradtest(l, g, rtol=1e-4)
+                test_layer(l, g, rtol=1e-4)
             end
         end

@@ -96,14 +96,14 @@
         @test size(l.weight) == (out_channel, out_channel, num_layers)

         for g in test_graphs
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end
     end

     @testset "EdgeConv" begin
         l = EdgeConv(Dense(2*in_channel, out_channel), aggr=+)
         for g in test_graphs
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end
     end

@@ -112,7 +112,7 @@
         eps = 0.001f0
         l = GINConv(nn, eps=eps)
        for g in test_graphs
-            gradtest(l, g, rtol=1e-5, exclude_grad_fields=[:eps])
+            test_layer(l, g, rtol=1e-5, exclude_grad_fields=[:eps])
         end

         @test !in(:eps, Flux.trainable(l))
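The exclude_grad_fields=[:eps] above goes hand in hand with the Flux.trainable check: eps is a fixed scalar field of GINConv, not a trainable parameter, so the gradient checker must skip it. A minimal sketch of that pattern, with a hypothetical MyGIN layer (not this repo's implementation) and assuming the tuple-returning Flux.trainable overload of the Flux version in use here:

using Flux

# Hypothetical layer: `eps` is a plain scalar field, not a parameter.
struct MyGIN{N}
    nn::N
    eps::Float32
end
Flux.@functor MyGIN

# Restrict training to the inner network; `eps` is left out, so it
# receives no gradient and is absent from the trainable fields.
Flux.trainable(l::MyGIN) = (l.nn,)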
@@ -125,13 +125,13 @@
         l = NNConv(in_channel => out_channel, nn)
         for g in test_graphs
             g = GNNGraph(g, edata=rand(T, edim, g.num_edges))
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end

         l = NNConv(in_channel => out_channel, nn, tanh, bias=false, aggr=mean)
         for g in test_graphs
             g = GNNGraph(g, edata=rand(T, edim, g.num_edges))
-            gradtest(l, g, rtol=1e-5)
+            test_layer(l, g, rtol=1e-5)
         end
     end
 end

test/test_utils.jl

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ function FiniteDifferences.to_vec(x::Integer)
     return Int[x], Integer_from_vec
 end

-function gradtest(l, g::GNNGraph; atol=1e-7, rtol=1e-5,
+function test_layer(l, g::GNNGraph; atol=1e-7, rtol=1e-5,
                     exclude_grad_fields=[],
                     broken_grad_fields=[],
                     verbose = false,

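The hunk truncates here, so the body of test_layer is not shown. For orientation, a minimal sketch of the kind of finite-difference gradient check such a helper is built around; check_grad, f, and x are illustrative names, not this repo's API:

using FiniteDifferences, Zygote

# Compare a reverse-mode AD gradient against a finite-difference
# reference; `f` maps an array `x` to a scalar loss.
function check_grad(f, x; rtol=1e-5, atol=1e-7)
    gad = Zygote.gradient(f, x)[1]                            # AD gradient
    gfd = FiniteDifferences.grad(central_fdm(5, 1), f, x)[1]  # numeric reference
    isapprox(gad, gfd; rtol, atol)
end

# Example: for f(x) = sum(x.^2), both should match d/dx = 2x.
@assert check_grad(x -> sum(abs2, x), randn(3))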