|
65 | 65 | end |
66 | 66 | end |
67 | 67 |
|
| 68 | + |
| 69 | +@testitem "GCNConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 70 | + using .TestModule |
| 71 | + l = GCNConv(D_IN => D_OUT) |
| 72 | + for g in TEST_GRAPHS |
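 |    | +        # GPU runs skip sparse-backed graphs and disable the finite-difference check; |
 |    | +        # gradients are presumably verified against the CPU result instead. |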
| 73 | + g.graph isa AbstractSparseMatrix && continue |
| 74 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 75 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 76 | + end |
| 77 | +end |
| 78 | + |
68 | 79 | @testitem "ChebConv" setup=[TolSnippet, TestModule] begin |
69 | 80 | using .TestModule |
70 | 81 | k = 2 |
|
84 | 95 | end |
85 | 96 | end |
86 | 97 |
|
| 98 | + |
| 99 | +@testitem "ChebConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 100 | + using .TestModule |
| 101 | + k = 2 |
| 102 | + l = ChebConv(D_IN => D_OUT, k) |
| 103 | + for g in TEST_GRAPHS |
| 104 | + g.graph isa AbstractSparseMatrix && continue |
| 105 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 106 | + test_gradients(l, g, g.x, rtol = RTOL_LOW, test_gpu = true, compare_finite_diff = false) |
| 107 | + end |
| 108 | +end |
| 109 | + |
87 | 110 | @testitem "GraphConv" setup=[TolSnippet, TestModule] begin |
88 | 111 | using .TestModule |
89 | 112 | l = GraphConv(D_IN => D_OUT) |
|
104 | 127 | end |
105 | 128 | end |
106 | 129 |
|
| 130 | + |
| 131 | +@testitem "GraphConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 132 | + using .TestModule |
| 133 | + l = GraphConv(D_IN => D_OUT) |
| 134 | + for g in TEST_GRAPHS |
| 135 | + g.graph isa AbstractSparseMatrix && continue |
| 136 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 137 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 138 | + end |
| 139 | +end |
| 140 | + |
| 141 | + |
107 | 142 | @testitem "GATConv" setup=[TolSnippet, TestModule] begin |
108 | 143 | using .TestModule |
109 | 144 | for heads in (1, 2), concat in (true, false) |
|
132 | 167 | end |
133 | 168 | end |
134 | 169 |
|
| 170 | +@testitem "GATConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 171 | + using .TestModule |
| 172 | + for heads in (1, 2), concat in (true, false) |
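 |    | +        # dropout = 0 keeps the layer deterministic, so the gradient check is reproducible on GPU |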
| 173 | + l = GATConv(D_IN => D_OUT; heads, concat, dropout=0) |
| 174 | + for g in TEST_GRAPHS |
| 175 | + g.graph isa AbstractSparseMatrix && continue |
| 176 | + @test size(l(g, g.x)) == (concat ? heads * D_OUT : D_OUT, g.num_nodes) |
| 177 | + test_gradients(l, g, g.x, rtol = RTOL_LOW, test_gpu = true, compare_finite_diff = false) |
| 178 | + end |
| 179 | + end |
| 180 | +end |
| 181 | + |
135 | 182 | @testitem "GATv2Conv" setup=[TolSnippet, TestModule] begin |
136 | 183 | using .TestModule |
137 | 184 | for heads in (1, 2), concat in (true, false) |
|
160 | 207 | end |
161 | 208 | end |
162 | 209 |
|
| 210 | +@testitem "GATv2Conv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 211 | + using .TestModule |
| 212 | + for heads in (1, 2), concat in (true, false) |
| 213 | + l = GATv2Conv(D_IN => D_OUT, tanh; heads, concat, dropout=0) |
| 214 | + for g in TEST_GRAPHS |
| 215 | + g.graph isa AbstractSparseMatrix && continue |
| 216 | + @test size(l(g, g.x)) == (concat ? heads * D_OUT : D_OUT, g.num_nodes) |
| 217 | + test_gradients(l, g, g.x, rtol = RTOL_LOW, atol=ATOL_LOW, test_gpu = true, compare_finite_diff = false) |
| 218 | + end |
| 219 | + end |
| 220 | +end |
| 221 | + |
163 | 222 | @testitem "GatedGraphConv" setup=[TolSnippet, TestModule] begin |
164 | 223 | using .TestModule |
165 | 224 | num_layers = 3 |
|
172 | 231 | end |
173 | 232 | end |
174 | 233 |
|
| 234 | + |
| 235 | +@testitem "GatedGraphConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 236 | + using .TestModule |
| 237 | + num_layers = 3 |
| 238 | + l = GatedGraphConv(D_OUT, num_layers) |
| 239 | + for g in TEST_GRAPHS |
| 240 | + g.graph isa AbstractSparseMatrix && continue |
| 241 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 242 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 243 | + end |
| 244 | +end |
| 245 | + |
175 | 246 | @testitem "EdgeConv" setup=[TolSnippet, TestModule] begin |
176 | 247 | using .TestModule |
177 | 248 | l = EdgeConv(Dense(2 * D_IN, D_OUT), aggr = +) |
|
194 | 265 | @test !in(:eps, Flux.trainable(l)) |
195 | 266 | end |
196 | 267 |
|
| 268 | +@testitem "GINConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 269 | + using .TestModule |
| 270 | + nn = Dense(D_IN, D_OUT) |
| 271 | + l = GINConv(nn, 0.01, aggr = mean) |
| 272 | + for g in TEST_GRAPHS |
| 273 | + g.graph isa AbstractSparseMatrix && continue |
| 274 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 275 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 276 | + end |
| 277 | +end |
| 278 | + |
 | 279 | +@testitem "NNConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
 | 280 | +    using .TestModule |
 | 281 | +    edim = 10 |
 | 282 | +    nn = Dense(edim, D_OUT * D_IN) |
 | 283 | +    l = NNConv(D_IN => D_OUT, nn, tanh, bias = true, aggr = +) |
 | 284 | +    for g in TEST_GRAPHS |
 | 285 | +        g.graph isa AbstractSparseMatrix && continue |
 | 286 | +        g = GNNGraph(g, edata = rand(Float32, edim, g.num_edges)) |
 | 287 | +        @test size(l(g, g.x, g.e)) == (D_OUT, g.num_nodes) |
 | 288 | +        test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
 | 289 | +    end |
 | 290 | +end |
| 291 | + |
197 | 292 | @testitem "NNConv" setup=[TolSnippet, TestModule] begin |
198 | 293 | using .TestModule |
199 | 294 | edim = 10 |
|
219 | 314 | end |
220 | 315 | end |
221 | 316 |
|
| 317 | +@testitem "SAGEConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 318 | + using .TestModule |
| 319 | + l = SAGEConv(D_IN => D_OUT) |
| 320 | + for g in TEST_GRAPHS |
| 321 | + g.graph isa AbstractSparseMatrix && continue |
| 322 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 323 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 324 | + end |
| 325 | +end |
| 326 | + |
222 | 327 | @testitem "ResGatedGraphConv" setup=[TolSnippet, TestModule] begin |
223 | 328 | using .TestModule |
224 | 329 | l = ResGatedGraphConv(D_IN => D_OUT, tanh, bias = true) |
|
228 | 333 | end |
229 | 334 | end |
230 | 335 |
|
| 336 | +@testitem "ResGatedGraphConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 337 | + using .TestModule |
| 338 | + l = ResGatedGraphConv(D_IN => D_OUT, tanh, bias = true) |
| 339 | + for g in TEST_GRAPHS |
| 340 | + g.graph isa AbstractSparseMatrix && continue |
| 341 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 342 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 343 | + end |
| 344 | +end |
| 345 | + |
231 | 346 | @testitem "CGConv" setup=[TolSnippet, TestModule] begin |
232 | 347 | using .TestModule |
233 | 348 |
|
|
246 | 361 | @test l1(g1, g1.ndata.x, nothing) == l1(g1).ndata.x |
247 | 362 | end |
248 | 363 |
|
| 364 | +@testitem "CGConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 365 | + using .TestModule |
| 366 | + edim = 10 |
| 367 | + l = CGConv((D_IN, edim) => D_OUT, tanh, residual = false, bias = true) |
| 368 | + for g in TEST_GRAPHS |
| 369 | + g.graph isa AbstractSparseMatrix && continue |
| 370 | + @test size(l(g, g.x, g.e)) == (D_OUT, g.num_nodes) |
| 371 | + test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 372 | + end |
| 373 | +end |
| 374 | + |
249 | 375 | @testitem "AGNNConv" setup=[TolSnippet, TestModule] begin |
250 | 376 | using .TestModule |
251 | 377 | l = AGNNConv(trainable=false, add_self_loops=false) |
|
265 | 391 | end |
266 | 392 | end |
267 | 393 |
|
| 394 | +@testitem "AGNNConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 395 | + using .TestModule |
| 396 | + l = AGNNConv(trainable=false, add_self_loops=false) |
| 397 | + for g in TEST_GRAPHS |
| 398 | + g.graph isa AbstractSparseMatrix && continue |
| 399 | + @test size(l(g, g.x)) == (D_IN, g.num_nodes) |
| 400 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 401 | + end |
| 402 | +end |
| 403 | + |
268 | 404 | @testitem "MEGNetConv" setup=[TolSnippet, TestModule] begin |
269 | 405 | using .TestModule |
270 | 406 | l = MEGNetConv(D_IN => D_OUT, aggr = +) |
|
281 | 417 | end |
282 | 418 | end |
283 | 419 |
|
| 420 | +@testitem "MEGNetConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 421 | + using .TestModule |
| 422 | + l = MEGNetConv(D_IN => D_OUT, aggr = +) |
| 423 | + for g in TEST_GRAPHS |
| 424 | + g.graph isa AbstractSparseMatrix && continue |
| 425 | + y = l(g, g.x, g.e) |
| 426 | + @test size(y[1]) == (D_OUT, g.num_nodes) |
| 427 | + @test size(y[2]) == (D_OUT, g.num_edges) |
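 |    | +        # MEGNetConv returns a (node features, edge features) tuple, so build a scalar loss from both outputs |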
| 428 | + function loss(l, g, x, e) |
| 429 | + y = l(g, x, e) |
| 430 | + return mean(y[1]) + sum(y[2]) |
| 431 | + end |
| 432 | + test_gradients(l, g, g.x, g.e, rtol = RTOL_LOW; loss, test_gpu = true, compare_finite_diff = false) |
| 433 | + end |
| 434 | +end |
| 435 | + |
284 | 436 | @testitem "GMMConv" setup=[TolSnippet, TestModule] begin |
285 | 437 | using .TestModule |
286 | 438 | ein_channel = 10 |
|
293 | 445 | end |
294 | 446 | end |
295 | 447 |
|
| 448 | +@testitem "GMMConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 449 | + using .TestModule |
| 450 | + ein_channel = 10 |
| 451 | + K = 5 |
| 452 | + l = GMMConv((D_IN, ein_channel) => D_OUT, K = K) |
| 453 | + for g in TEST_GRAPHS |
| 454 | + g.graph isa AbstractSparseMatrix && continue |
 | 455 | +        @test size(l(g, g.x, g.e)) == (D_OUT, g.num_nodes) |
| 456 | + test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 457 | + end |
| 458 | +end |
| 459 | + |
296 | 460 | @testitem "SGConv" setup=[TolSnippet, TestModule] begin |
297 | 461 | using .TestModule |
298 | 462 | K = [1, 2, 3] # for different number of hops |
|
311 | 475 | end |
312 | 476 | end |
313 | 477 |
|
| 478 | +@testitem "SGConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 479 | + using .TestModule |
| 480 | + k = 2 |
| 481 | + l = SGConv(D_IN => D_OUT, k, add_self_loops = true) |
| 482 | + for g in TEST_GRAPHS |
| 483 | + g.graph isa AbstractSparseMatrix && continue |
| 484 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 485 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 486 | + end |
| 487 | +end |
| 488 | + |
314 | 489 | @testitem "TAGConv" setup=[TolSnippet, TestModule] begin |
315 | 490 | using .TestModule |
316 | 491 | K = [1, 2, 3] |
|
329 | 504 | end |
330 | 505 | end |
331 | 506 |
|
| 507 | +@testitem "TAGConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 508 | + using .TestModule |
| 509 | + k = 2 |
| 510 | + l = TAGConv(D_IN => D_OUT, k, add_self_loops = true) |
| 511 | + for g in TEST_GRAPHS |
| 512 | + g.graph isa AbstractSparseMatrix && continue |
| 513 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 514 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 515 | + end |
| 516 | +end |
| 517 | + |
332 | 518 | @testitem "EGNNConv" setup=[TolSnippet, TestModule] begin |
333 | 519 | using .TestModule |
334 | 520 | #TODO test gradient |
| 521 | + #TODO test gpu |
335 | 522 | @testset "EGNNConv $GRAPH_T" for GRAPH_T in GRAPH_TYPES |
336 | 523 | hin = 5 |
337 | 524 | hout = 5 |
|
378 | 565 | end |
379 | 566 | end |
380 | 567 |
|
| 568 | +@testitem "TransformerConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 569 | + using .TestModule |
| 570 | + ein = 2 |
| 571 | + heads = 3 |
| 572 | + |
| 573 | + # used like in Shi et al., 2021 |
| 574 | + l = TransformerConv((D_IN, ein) => D_IN; heads, gating = true, |
| 575 | + bias_qkv = true) |
| 576 | + for g in TEST_GRAPHS |
| 577 | + g.graph isa AbstractSparseMatrix && continue |
| 578 | + @test size(l(g, g.x, g.e)) == (D_IN * heads, g.num_nodes) |
| 579 | + test_gradients(l, g, g.x, g.e, rtol = RTOL_LOW, test_gpu = true, compare_finite_diff = false) |
| 580 | + end |
| 581 | +end |
| 582 | + |
| 583 | + |
381 | 584 | @testitem "DConv" setup=[TolSnippet, TestModule] begin |
382 | 585 | using .TestModule |
383 | 586 | K = [1, 2, 3] # for different number of hops |
|
389 | 592 | end |
390 | 593 | end |
391 | 594 | end |
| 595 | + |
| 596 | +@testitem "DConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin |
| 597 | + using .TestModule |
| 598 | + l = DConv(D_IN => D_OUT, 2) |
| 599 | + for g in TEST_GRAPHS |
| 600 | + g.graph isa AbstractSparseMatrix && continue |
| 601 | + @test size(l(g, g.x)) == (D_OUT, g.num_nodes) |
| 602 | + test_gradients(l, g, g.x, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false) |
| 603 | + end |
| 604 | +end |