@@ -48,7 +48,7 @@ By default, it computes ``\frac{1}{\sqrt{d}}`` i.e the inverse square root of th
s = [1,1,2,3]
t = [2,3,1,1]
g = GNNGraph(s, t)
- x = randn(3, g.num_nodes)
+ x = randn(Float32, 3, g.num_nodes)

# create layer
l = GCNConv(3 => 5)
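The switch to `Float32` throughout this diff matches Flux's defaults: layer weights are initialized in single precision, so `Float64` inputs would force promotion for no gain. A quick way to check, as a sketch (assuming the layer stores its weight matrix in a `weight` field, like the structs shown in this file):

```julia
l = GCNConv(3 => 5)
eltype(l.weight)   # Float32: Flux initializes parameters in single precision
y = l(g, x)        # a Float32 input keeps the whole forward pass in Float32
```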
@@ -187,6 +187,22 @@ with ``\hat{L}`` the [`scaled_laplacian`](@ref).
- `k`: The order of the Chebyshev polynomial.
- `bias`: Add learnable bias.
- `init`: Weights' initializer.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ g = GNNGraph(s, t)
+ x = randn(Float32, 3, g.num_nodes)
+
+ # create layer
+ l = ChebConv(3 => 5, 5)
+
+ # forward pass
+ y = l(g, x) # size: 5 × num_nodes
+ ```
"""
struct ChebConv{W <: AbstractArray{<:Number, 3}, B} <: GNNLayer
    weight::W
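For intuition, the layer evaluates the recurrence ``X^{(k)} = 2\hat{L} X^{(k-1)} - X^{(k-2)}`` from the docstring directly on the feature matrix. A minimal sketch, assuming column-per-node feature storage (so ``\hat{L}`` multiplies from the right, which is equivalent since it is symmetric):

```julia
L̂ = scaled_laplacian(g)     # the rescaled Laplacian referenced in the docstring
Z1 = x                       # X⁽¹⁾ = X
Z2 = x * L̂                   # X⁽²⁾ = L̂ X
Z3 = 2 .* (Z2 * L̂) .- Z1     # X⁽³⁾ = 2 L̂ X⁽²⁾ − X⁽¹⁾
```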
@@ -248,6 +264,24 @@ where the aggregation type is selected by `aggr`.
- `aggr`: Aggregation operator for the incoming messages (e.g. `+`, `*`, `max`, `min`, and `mean`).
- `bias`: Add learnable bias.
- `init`: Weights' initializer.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+
+ # create layer
+ l = GraphConv(in_channel => out_channel, relu, bias = false, aggr = mean)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct GraphConv{W <: AbstractMatrix, B, F, A} <: GNNLayer
    weight1::W
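As a sanity check, the two-weight rule can be reproduced by hand via the `weight1`/`weight2` fields of the struct above. A sketch, assuming the documented update ``\sigma(W_1 \mathbf{x}_i + W_2 \square_{j \in N(i)} \mathbf{x}_j)`` with `aggr = +` and no bias (this example graph is symmetric, so edge direction does not matter):

```julia
l_sum = GraphConv(in_channel => out_channel, relu, bias = false, aggr = +)
A = adjacency_matrix(g, Float32)                 # adjacency matrix of g
manual = relu.(l_sum.weight1 * x .+ l_sum.weight2 * (x * A))  # x * A sums neighbor columns
# `manual` should match l_sum(g, x) up to floating-point error
```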
@@ -318,6 +352,25 @@ and the attention coefficients will be calculated as
- `concat`: Concatenate layer output or not. If not, layer output is averaged over the heads. Default `true`.
- `negative_slope`: The parameter of LeakyReLU. Default `0.2`.
- `add_self_loops`: Add self loops to the graph before performing the convolution. Default `true`.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+
+ # create layer
+ l = GATConv(in_channel => out_channel; heads = 2, concat = true, add_self_loops = false, bias = false)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct GATConv{DX <: Dense, DE <: Union{Dense, Nothing}, T, A <: AbstractMatrix, F, B} <:
       GNNLayer
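With `heads = 2` and `concat = true`, the per-head outputs are stacked, so the example's `y` has `out_channel * heads` rows; with `concat = false` they are averaged instead and the row count stays at `out_channel`:

```julia
size(y)  # (out_channel * 2, g.num_nodes) == (10, 4)
```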
@@ -446,6 +499,27 @@ and the attention coefficients will be calculated as
- `concat`: Concatenate layer output or not. If not, layer output is averaged over the heads. Default `true`.
- `negative_slope`: The parameter of LeakyReLU. Default `0.2`.
- `add_self_loops`: Add self loops to the graph before performing the convolution. Default `true`.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ ein = 3
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+
+ # create layer
+ l = GATv2Conv((in_channel, ein) => out_channel, add_self_loops = false)
+
+ # edge features
+ e = randn(Float32, ein, g.num_edges)
+
+ # forward pass
+ y = l(g, x, e)
+ ```
"""
struct GATv2Conv{T, A1, A2, A3, B, C <: AbstractMatrix, F} <: GNNLayer
    dense_i::A1
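The `(in_channel, ein) => out_channel` form feeds edge features into the attention scores. Without edge features the layer is built and called like `GATConv`; a minimal sketch:

```julia
l = GATv2Conv(in_channel => out_channel)
y = l(g, x)
```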
@@ -568,6 +642,23 @@ where ``\mathbf{h}^{(l)}_i`` denotes the ``l``-th hidden variables passing throu
- `num_layers`: The number of gated recurrent units.
- `aggr`: Aggregation operator for the incoming messages (e.g. `+`, `*`, `max`, `min`, and `mean`).
- `init`: Weight initialization function.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ out_channel = 5
+ num_layers = 3
+ g = GNNGraph(s, t)
+ x = randn(Float32, out_channel, g.num_nodes)
+
+ # create layer
+ l = GatedGraphConv(out_channel, num_layers)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct GatedGraphConv{W <: AbstractArray{<:Number, 3}, R, A} <: GNNLayer
    weight::W
@@ -627,6 +718,23 @@ where `nn` generally denotes a learnable function, e.g. a linear layer or a mult

- `nn`: A (possibly learnable) function.
- `aggr`: Aggregation operator for the incoming messages (e.g. `+`, `*`, `max`, `min`, and `mean`).
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+
+ # create layer
+ l = EdgeConv(Dense(2 * in_channel, out_channel), aggr = +)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct EdgeConv{NN, A} <: GNNLayer
    nn::NN
@@ -668,6 +776,26 @@ where ``f_\Theta`` typically denotes a learnable function, e.g. a linear layer o

- `f`: A (possibly learnable) function acting on node features.
- `ϵ`: Weighting factor.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+
+ # create dense layer
+ nn = Dense(in_channel, out_channel)
+
+ # create layer
+ l = GINConv(nn, 0.01f0, aggr = mean)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct GINConv{R <: Real, NN, A} <: GNNLayer
    nn::NN
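The update ``\mathbf{x}_i' = f_\Theta\left((1 + \epsilon)\, \mathbf{x}_i + \square_{j \in N(i)} \mathbf{x}_j\right)`` can be verified by hand for a single node; a sketch for node 1, whose in-neighbors in this graph are nodes 2 and 3, with `aggr = mean` as in the example:

```julia
y1 = nn((1 + 0.01f0) .* x[:, 1] .+ (x[:, 2] .+ x[:, 3]) ./ 2)
y1 ≈ y[:, 1]   # expected to hold up to floating-point error
```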
@@ -720,6 +848,27 @@ For convenience, also functions returning a single `(out*in, num_edges)` matrix
- `σ`: Activation function.
- `bias`: Add learnable bias.
- `init`: Weights' initializer.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ edim = 10
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+ e = randn(Float32, edim, g.num_edges)
+
+ # create dense layer
+ nn = Dense(edim, out_channel * in_channel)
+
+ # create layer
+ l = NNConv(in_channel => out_channel, nn, tanh, bias = true, aggr = +)
+
+ # forward pass
+ y = l(g, x, e)
+ ```
"""
struct NNConv{W, B, NN, F, A} <: GNNLayer
    weight::W
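As the context line above notes, `nn` may return a single `(out*in, num_edges)` matrix: each column is then reinterpreted as one `(out_channel, in_channel)` weight matrix for its edge. A sketch of that reshaping:

```julia
W = reshape(nn(e), out_channel, in_channel, :)  # one weight matrix per edge
size(W)                                          # (5, 3, g.num_edges)
```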
@@ -783,6 +932,23 @@ where the aggregation type is selected by `aggr`.
- `aggr`: Aggregation operator for the incoming messages (e.g. `+`, `*`, `max`, `min`, and `mean`).
- `bias`: Add learnable bias.
- `init`: Weights' initializer.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+
+ # create layer
+ l = SAGEConv(in_channel => out_channel, tanh, bias = false, aggr = +)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct SAGEConv{W <: AbstractMatrix, B, F, A} <: GNNLayer
    weight::W
@@ -840,6 +1006,23 @@ where the edge gates ``\eta_{ij}`` are given by
- `act`: Activation function.
- `init`: Weight matrices' initializing function.
- `bias`: Learn an additive bias if true.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ in_channel = 3
+ out_channel = 5
+ g = GNNGraph(s, t)
+ x = randn(Float32, in_channel, g.num_nodes)
+
+ # create layer
+ l = ResGatedGraphConv(in_channel => out_channel, tanh, bias = true)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct ResGatedGraphConv{W, B, F} <: GNNLayer
    A::W
@@ -1017,6 +1200,21 @@ and ``\beta`` a trainable parameter if `trainable=true`.
- `init_beta`: The initial value of ``\beta``. Default `1.0f0`.
- `trainable`: If true, ``\beta`` is trainable. Default `true`.
- `add_self_loops`: Add self loops to the graph before performing the convolution. Default `true`.
+
+ # Examples
+
+ ```julia
+ # create data
+ s = [1,1,2,3]
+ t = [2,3,1,1]
+ g = GNNGraph(s, t)
+ x = randn(Float32, 3, g.num_nodes)
+
+ # create layer
+ l = AGNNConv(init_beta = 2.0f0)
+
+ # forward pass
+ y = l(g, x)
+ ```
"""
struct AGNNConv{A <: AbstractVector} <: GNNLayer
    β::A
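`AGNNConv` has no feature transform, only the attention scalar ``\beta``, so the output keeps the input's feature dimension; a quick check:

```julia
size(y) == size(x)  # attention reweights neighbor features without projecting them
```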
@@ -1074,8 +1272,8 @@ activations.

```julia
g = rand_graph(10, 30)
- x = randn(3, 10)
- e = randn(3, 30)
+ x = randn(Float32, 3, 10)
+ e = randn(Float32, 3, 30)
m = MEGNetConv(3 => 3)
x′, e′ = m(g, x, e)
```
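`MEGNetConv` updates edge features alongside node features, which is why the example unpacks two arrays; with `MEGNetConv(3 => 3)` both keep their shapes:

```julia
size(x′), size(e′)  # ((3, 10), (3, 30))
```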
@@ -1265,7 +1463,7 @@ where ``\tilde{A}`` is ``A + I``.
s = [1,1,2,3]
t = [2,3,1,1]
g = GNNGraph(s, t)
- x = randn(3, g.num_nodes)
+ x = randn(Float32, 3, g.num_nodes)

# create layer
l = SGConv(3 => 5; add_self_loops = true)