@@ -56,8 +56,96 @@ function Base.show(io::IO, tgcn::TGCNCell)
5656 print (io, " TGCNCell($(tgcn. in_dims) => $(tgcn. out_dims) )" )
5757end
5858
59+ """
60+ TGCN(in => out; use_bias = true, init_weight = glorot_uniform, init_state = zeros32, init_bias = zeros32, add_self_loops = false, use_edge_weight = true)
61+
62+ Temporal Graph Convolutional Network (T-GCN) recurrent layer from the paper [T-GCN: A Temporal Graph Convolutional Network for Traffic Prediction](https://arxiv.org/pdf/1811.05320.pdf).
63+
64+ Performs a layer of GCNConv to model spatial dependencies, followed by a Gated Recurrent Unit (GRU) cell to model temporal dependencies.
65+
66+ # Arguments
67+
68+ - `in`: Number of input features.
69+ - `out`: Number of output features.
70+ - `use_bias`: Add learnable bias. Default `true`.
71+ - `init_weight`: Weights' initializer. Default `glorot_uniform`.
72+ - `init_state`: Initial state of the hidden state of the GRU layer. Default `zeros32`.
73+ - `init_bias`: Bias initializer. Default `zeros32`.
74+ - `add_self_loops`: Add self loops to the graph before performing the convolution. Default `false`.
75+ - `use_edge_weight`: If `true`, consider the edge weights in the input graph (if available).
76+ If `add_self_loops=true` the new weights will be set to 1.
77+ This option is ignored if the `edge_weight` is explicitly provided in the forward pass.
78+ Default `true`.
79+
80+
81+
82+ # Examples
83+
84+ ```julia
85+ using GNNLux, Lux, Random
86+
87+ # initialize random number generator
88+ rng = Random.default_rng()
89+
90+ # create data
91+ g = rand_graph(rng, 5, 10)
92+ x = rand(rng, Float32, 2, 5)
93+
94+ # create TGCN layer
95+ tgcn = TGCN(2 => 6)
96+
97+ # setup layer
98+ ps, st = LuxCore.setup(rng, tgcn)
99+
100+ # forward pass
101+ y, st = tgcn(g, x, ps, st) # result size (6, 5)
102+ ```
103+ """
# Convenience constructor: wraps a TGCNCell in a StatefulRecurrentCell so the
# recurrent hidden state is carried across successive forward passes.
function TGCN(ch::Pair{Int, Int}; kwargs...)
    return GNNLux.StatefulRecurrentCell(TGCNCell(ch; kwargs...))
end
60105
106+ """
107+ A3TGCN(in => out; use_bias = true, init_weight = glorot_uniform, init_state = zeros32, init_bias = zeros32, add_self_loops = false, use_edge_weight = true)
108+
109+ Attention Temporal Graph Convolutional Network (A3T-GCN) model from the paper [A3T-GCN: Attention Temporal Graph
110+ Convolutional Network for Traffic Forecasting](https://arxiv.org/pdf/2006.11583.pdf).
111+
112+ Performs a TGCN layer, followed by a soft attention layer.
113+
114+ # Arguments
115+
116+ - `in`: Number of input features.
117+ - `out`: Number of output features.
118+ - `use_bias`: Add learnable bias. Default `true`.
119+ - `init_weight`: Weights' initializer. Default `glorot_uniform`.
120+ - `init_state`: Initial state of the hidden state of the GRU layer. Default `zeros32`.
121+ - `init_bias`: Bias initializer. Default `zeros32`.
122+ - `add_self_loops`: Add self loops to the graph before performing the convolution. Default `false`.
123+ - `use_edge_weight`: If `true`, consider the edge weights in the input graph (if available).
124+ If `add_self_loops=true` the new weights will be set to 1.
125+ This option is ignored if the `edge_weight` is explicitly provided in the forward pass.
126+ Default `true`.
127+ # Examples
128+
129+ ```julia
130+ using GNNLux, Lux, Random
131+
132+ # initialize random number generator
133+ rng = Random.default_rng()
134+
135+ # create data
136+ g = rand_graph(rng, 5, 10)
137+ x = rand(rng, Float32, 2, 5)
138+
139+ # create A3TGCN layer
140+ l = A3TGCN(2 => 6)
141+
142+ # setup layer
143+ ps, st = LuxCore.setup(rng, l)
144+
145+ # forward pass
146+ y, st = l(g, x, ps, st) # result size (6, 5)
147+ ```
148+ """
61149@concrete struct A3TGCN <: GNNContainerLayer{(:tgcn, :dense1, :dense2)}
62150 in_dims:: Int
63151 out_dims:: Int
@@ -94,7 +182,7 @@ function Base.show(io::IO, l::A3TGCN)
94182 print (io, " A3TGCN($(l. in_dims) => $(l. out_dims) )" )
95183end
96184
97- @concrete struct GConvGRUCell <: GNNContainerLayer{(:conv_x_r, :conv_h_r, :conv_x_z, :conv_h_z, :conv_x_h, :conv_h_h)}
185+ @concrete struct GConvGRUCell <: GNNContainerLayer{(:conv_x_r, :conv_h_r, :conv_x_z, :conv_h_z, :conv_x_h, :conv_h_h)}
98186 in_dims:: Int
99187 out_dims:: Int
100188 k:: Int
147235
# Output feature dimension of a GConvGRUCell, as a 1-tuple per the LuxCore API.
function LuxCore.outputsize(cell::GConvGRUCell)
    return (cell.out_dims,)
end
149237
238+ """
239+ GConvGRU(in => out, k; use_bias = true, init_weight = glorot_uniform, init_state = zeros32, init_bias = zeros32)
240+
241+ Graph Convolutional Gated Recurrent Unit (GConvGRU) recurrent layer from the paper [Structured Sequence Modeling with Graph Convolutional Recurrent Networks](https://arxiv.org/pdf/1612.07659).
242+
243+ Performs a layer of ChebConv to model spatial dependencies, followed by a Gated Recurrent Unit (GRU) cell to model temporal dependencies.
244+
245+ # Arguments
246+
247+ - `in`: Number of input features.
248+ - `out`: Number of output features.
249+ - `k`: Chebyshev polynomial order.
250+ - `use_bias`: Add learnable bias. Default `true`.
251+ - `init_weight`: Weights' initializer. Default `glorot_uniform`.
252+ - `init_state`: Initial state of the hidden state of the GRU layer. Default `zeros32`.
253+ - `init_bias`: Bias initializer. Default `zeros32`.
254+
255+ # Examples
256+
257+ ```julia
258+ using GNNLux, Lux, Random
259+
260+ # initialize random number generator
261+ rng = Random.default_rng()
262+
263+ # create data
264+ g = rand_graph(rng, 5, 10)
265+ x = rand(rng, Float32, 2, 5)
266+
267+ # create layer
268+ l = GConvGRU(2 => 5, 2)
269+
270+ # setup layer
271+ ps, st = LuxCore.setup(rng, l)
272+
273+ # forward pass
274+ y, st = l(g, x, ps, st) # result size (5, 5)
275+ ```
276+ """
# Convenience constructor: wraps a GConvGRUCell in a StatefulRecurrentCell so
# the recurrent hidden state is carried across successive forward passes.
function GConvGRU(ch::Pair{Int, Int}, k::Int; kwargs...)
    return GNNLux.StatefulRecurrentCell(GConvGRUCell(ch, k; kwargs...))
end
151278
152279@concrete struct GConvLSTMCell <: GNNContainerLayer{(:conv_x_i, :conv_h_i, :dense_i, :conv_x_f, :conv_h_f, :dense_f, :conv_x_c, :conv_h_c, :dense_c, :conv_x_o, :conv_h_o, :dense_o)}
230357
# Output feature dimension of a GConvLSTMCell, as a 1-tuple per the LuxCore API.
function LuxCore.outputsize(cell::GConvLSTMCell)
    return (cell.out_dims,)
end
232359
360+ """
361+ GConvLSTM(in => out, k; use_bias = true, init_weight = glorot_uniform, init_state = zeros32, init_bias = zeros32)
362+
363+ Graph Convolutional Long Short-Term Memory (GConvLSTM) recurrent layer from the paper [Structured Sequence Modeling with Graph Convolutional Recurrent Networks](https://arxiv.org/pdf/1612.07659).
364+
365+ Performs a layer of ChebConv to model spatial dependencies, followed by a Long Short-Term Memory (LSTM) cell to model temporal dependencies.
366+
367+ # Arguments
368+
369+ - `in`: Number of input features.
370+ - `out`: Number of output features.
371+ - `k`: Chebyshev polynomial order.
372+ - `use_bias`: Add learnable bias. Default `true`.
373+ - `init_weight`: Weights' initializer. Default `glorot_uniform`.
374+ - `init_state`: Initial state of the hidden state of the LSTM layer. Default `zeros32`.
375+ - `init_bias`: Bias initializer. Default `zeros32`.
376+
377+ # Examples
378+
379+ ```julia
380+ using GNNLux, Lux, Random
381+
382+ # initialize random number generator
383+ rng = Random.default_rng()
384+
385+ # create data
386+ g = rand_graph(rng, 5, 10)
387+ x = rand(rng, Float32, 2, 5)
388+
389+ # create GConvLSTM layer
390+ l = GConvLSTM(2 => 5, 2)
391+
392+ # setup layer
393+ ps, st = LuxCore.setup(rng, l)
394+
395+ # forward pass
396+ y, st = l(g, x, ps, st) # result size (5, 5)
397+ ```
398+ """
# Convenience constructor: wraps a GConvLSTMCell in a StatefulRecurrentCell so
# the recurrent hidden and cell states are carried across successive forward passes.
function GConvLSTM(ch::Pair{Int, Int}, k::Int; kwargs...)
    return GNNLux.StatefulRecurrentCell(GConvLSTMCell(ch, k; kwargs...))
end
234400
235401@concrete struct DCGRUCell <: GNNContainerLayer{(:dconv_u, :dconv_r, :dconv_c)}
272438
# Output feature dimension of a DCGRUCell, as a 1-tuple per the LuxCore API.
function LuxCore.outputsize(cell::DCGRUCell)
    return (cell.out_dims,)
end
274440
441+ """
442+ DCGRU(in => out, k; use_bias = true, init_weight = glorot_uniform, init_state = zeros32, init_bias = zeros32)
443+
444+ Diffusion Convolutional Recurrent Neural Network (DCGRU) layer from the paper [Diffusion Convolutional Recurrent Neural
445+ Network: Data-driven Traffic Forecasting](https://arxiv.org/pdf/1707.01926).
446+
447+ Performs a Diffusion Convolutional layer to model spatial dependencies, followed by a Gated Recurrent Unit (GRU) cell to model temporal dependencies.
448+
449+ # Arguments
450+
451+ - `in`: Number of input features.
452+ - `out`: Number of output features.
453+ - `k`: Diffusion step.
454+ - `use_bias`: Add learnable bias. Default `true`.
455+ - `init_weight`: Weights' initializer. Default `glorot_uniform`.
455+ - `init_state`: Initial state of the hidden state of the GRU layer. Default `zeros32`.
457+ - `init_bias`: Bias initializer. Default `zeros32`.
458+
459+ # Examples
460+
461+ ```julia
462+ using GNNLux, Lux, Random
463+
464+ # initialize random number generator
465+ rng = Random.default_rng()
466+
467+ # create data
468+ g = rand_graph(rng, 5, 10)
469+ x = rand(rng, Float32, 2, 5)
470+
471+ # create layer
472+ l = DCGRU(2 => 5, 2)
473+
474+ # setup layer
475+ ps, st = LuxCore.setup(rng, l)
476+
477+ # forward pass
478+ y, st = l(g, x, ps, st) # result size (5, 5)
479+ ```
480+ """
# Convenience constructor: wraps a DCGRUCell in a StatefulRecurrentCell so the
# recurrent hidden state is carried across successive forward passes.
function DCGRU(ch::Pair{Int, Int}, k::Int; kwargs...)
    return GNNLux.StatefulRecurrentCell(DCGRUCell(ch, k; kwargs...))
end
276482
483+ """
484+ EvolveGCNO(ch; use_bias = true, init_weight = glorot_uniform, init_state = zeros32, init_bias = zeros32)
485+
486+ Evolving Graph Convolutional Network (EvolveGCNO) layer from the paper [EvolveGCN: Evolving Graph Convolutional Networks for Dynamic Graphs](https://arxiv.org/pdf/1902.10191).
487+
488+ Performs a Graph Convolutional layer with parameters derived from a Long Short-Term Memory (LSTM) layer across the snapshots of the temporal graph.
489+
490+
491+ # Arguments
492+
493+ - `in`: Number of input features.
494+ - `out`: Number of output features.
495+ - `use_bias`: Add learnable bias. Default `true`.
496+ - `init_weight`: Weights' initializer. Default `glorot_uniform`.
497+ - `init_state`: Initial state of the hidden state of the LSTM layer. Default `zeros32`.
498+ - `init_bias`: Bias initializer. Default `zeros32`.
499+
500+ # Examples
501+
502+ ```julia
503+ using GNNLux, Lux, Random
504+
505+ # initialize random number generator
506+ rng = Random.default_rng()
507+
508+ # create data
509+ tg = TemporalSnapshotsGNNGraph([rand_graph(rng, 10, 20; ndata = rand(rng, 4, 10)), rand_graph(rng, 10, 14; ndata = rand(rng, 4, 10)), rand_graph(rng, 10, 22; ndata = rand(rng, 4, 10))])
510+
511+ # create layer
512+ l = EvolveGCNO(4 => 5)
513+
514+ # setup layer
515+ ps, st = LuxCore.setup(rng, l)
516+
517+ # forward pass
518+ y, st = l(tg, tg.ndata.x , ps, st) # result size 3, size y[1] (5, 10)
519+ ```
520+ """
277521@concrete struct EvolveGCNO <: GNNLayer
278522 in_dims:: Int
279523 out_dims:: Int
0 commit comments