Commit 2585cb6

Merge branch 'nn-lux' of https://github.com/rbSparky/GraphNeuralNetworks.jl into nn-lux
2 parents: 32ee61d + fb0bb1d

File tree: 1 file changed (2 additions, 3 deletions)


GNNLux/src/layers/conv.jl

Lines changed: 2 additions & 3 deletions
@@ -656,7 +656,7 @@ function NNConv(ch::Pair{Int, Int}, nn, σ = identity;
     return NNConv(nn, aggr, in_dims, out_dims, use_bias, add_self_loops, use_edge_weight, init_weight, init_bias, σ)
 end
 
-function (l::GCNConv)(g, x, edge_weight, ps, st)
+function (l::NNConv)(g, x, edge_weight, ps, st)
     nn = StatefulLuxLayer{true}(l.nn, ps, st)
 
     # what would be the order of args here?
@@ -680,8 +680,7 @@ end
 LuxCore.parameterlength(l::NNConv) = l.use_bias ? l.in_dims * l.out_dims + l.out_dims : l.in_dims * l.out_dims # nn wont affect this right?
 LuxCore.outputsize(d::NNConv) = (d.out_dims,)
 
-
-function Base.show(io::IO, l::NNConv)
+function Base.show(io::IO, l::GINConv)
     print(io, "NNConv($(l.nn)")
     print(io, ", $(l.ϵ)")
     l.σ == identity || print(io, ", ", l.σ)
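For context, below is a minimal, hypothetical usage sketch of the forward method whose dispatch this merge corrects (it previously dispatched on GCNConv instead of NNConv). It assumes GNNLux's Lux-style convention that a layer call l(g, x, e, ps, st) returns (y, st), that rand_graph comes from GNNGraphs, and that the edge network maps each edge feature vector to a flattened out_dims × in_dims matrix, as NNConv does in GraphNeuralNetworks.jl. The feature sizes are illustrative, not taken from the commit.

    # Hypothetical sketch, not part of the commit: exercises the corrected
    # dispatch (l::NNConv)(g, x, edge_weight, ps, st) introduced by this merge.
    using GNNLux, GNNGraphs, Lux, Random

    rng = Random.default_rng()

    g = rand_graph(5, 10)        # toy graph: 5 nodes, 10 edges
    x = rand(Float32, 3, 5)      # node features, in_dims = 3
    e = rand(Float32, 4, 10)     # one 4-dimensional feature vector per edge

    # Edge network: maps each edge feature vector to a flattened
    # out_dims × in_dims (here 6 × 3) weight matrix, per NNConv's convention.
    nn = Dense(4 => 6 * 3)
    l  = NNConv(3 => 6, nn, relu; aggr = +)

    ps, st = LuxCore.setup(rng, l)   # explicit parameters and state, Lux style
    y, st  = l(g, x, e, ps, st)      # hits (l::NNConv), not (l::GCNConv)
    @assert size(y) == (6, 5)        # out_dims × num_nodes

Note that the merged code itself still asks "what would be the order of args here?", so whether the third positional argument ultimately carries edge features or scalar edge weights is an open question; treat the call above as one plausible reading rather than the settled API.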
