
Commit 7c8dfeb

fix documentation and test
1 parent 5600368 commit 7c8dfeb

3 files changed: +6 additions, -6 deletions


GNNLux/src/layers/temporalconv.jl

Lines changed: 1 addition & 1 deletion
@@ -93,7 +93,7 @@ rng = Random.default_rng()
 g = rand_graph(rng, 5, 10)
 x = rand(rng, Float32, 2, 5)

-# create TGCN layer with default activation (sigmoid)
+# create TGCN layer
 tgcn = TGCN(2 => 6)

 # create TGCN layer with custom activation

GNNLux/test/layers/temporalconv.jl

Lines changed: 3 additions & 3 deletions
@@ -10,16 +10,16 @@
 tx = [x for _ in 1:5]

 @testset "TGCN" begin
-    # Test with default activation (relu)
+    # Test with default activation (sigmoid)
     l = TGCN(3=>3)
     ps = LuxCore.initialparameters(rng, l)
     st = LuxCore.initialstates(rng, l)
     y1, _ = l(g, x, ps, st)
     loss = (x, ps) -> sum(first(l(g, x, ps, st)))
     test_gradients(loss, x, ps; atol=1.0f-2, rtol=1.0f-2, skip_backends=[AutoReverseDiff(), AutoTracker(), AutoForwardDiff(), AutoEnzyme()])

-    # Test with custom activation (sigmoid)
-    l_relu = TGCN(3=>3, act = sigmoid)
+    # Test with custom activation (relu)
+    l_relu = TGCN(3=>3, act = relu)
     ps_relu = LuxCore.initialparameters(rng, l_relu)
     st_relu = LuxCore.initialstates(rng, l_relu)
     y2, _ = l_relu(g, x, ps_relu, st_relu)
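
For context, a minimal usage sketch of the GNNLux `TGCN` layer matching the call pattern in the test above. The graph size, feature shape, imports, and variable names are assumptions for illustration, not part of this commit:

```julia
using GNNLux, GNNGraphs, LuxCore, Random
using NNlib: relu

rng = Random.default_rng()
g = rand_graph(rng, 5, 10)            # assumed graph: 5 nodes, 10 edges
x = rand(rng, Float32, 3, 5)          # assumed input: 3 features per node

l = TGCN(3 => 3, act = relu)          # custom activation, as in the updated test
ps = LuxCore.initialparameters(rng, l)
st = LuxCore.initialstates(rng, l)
y, _ = l(g, x, ps, st)                # returns the output node features and new state
```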

GraphNeuralNetworks/src/layers/temporalconv.jl

Lines changed: 2 additions & 2 deletions
@@ -869,12 +869,12 @@ See [`GNNRecurrence`](@ref) for more details.

 # Additional Parameters

-- `gate_activation`: Activation function for the gate mechanisms. Default `sigmoid`.
+- `act`: Activation function for the GCNConv layers. Default `relu`.

 # Examples

 ```jldoctest
-julia> using Flux # Ensure relu is available
+julia> using Flux # Ensure activation functions are available

 julia> num_nodes, num_edges = 5, 10;
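
A construction-only sketch of the Flux-based layer reflecting the keyword documented above; the layer sizes and the `tanh` alternative are illustrative assumptions, not taken from this diff:

```julia
using Flux, GraphNeuralNetworks

tgcn = TGCN(2 => 6)                   # act defaults to relu for the internal GCNConv layers
tgcn_tanh = TGCN(2 => 6, act = tanh)  # any other activation can be passed explicitly
```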
