
Commit 49b6398

buildkite for GNNlib + get_graph_type (#530)
1 parent: 3ed702b

19 files changed (+658 / -200 lines)


.buildkite/pipeline.yml

Lines changed: 51 additions & 0 deletions
@@ -49,3 +49,54 @@ steps:
       GNN_TEST_AMDGPU: "true"
       GNN_TEST_CPU: "false"
     timeout_in_minutes: 60
+
+  - label: "GNNlib CUDA"
+    plugins:
+      - JuliaCI/julia#v1:
+          version: "1"
+      - JuliaCI/julia-coverage#v1:
+          dirs:
+            - GNNlib/src
+    command: |
+      julia --color=yes --depwarn=yes --project=GNNlib/test -e '
+        import Pkg
+        dev_pkgs = Pkg.PackageSpec[]
+        for pkg in ("GNNGraphs", "GNNlib")
+          push!(dev_pkgs, Pkg.PackageSpec(path=pkg));
+        end
+        Pkg.develop(dev_pkgs)
+        Pkg.add(["CUDA", "cuDNN"])
+        Pkg.test("GNNlib")'
+    agents:
+      queue: "juliagpu"
+      cuda: "*"
+    env:
+      GNN_TEST_CUDA: "true"
+      GNN_TEST_CPU: "false"
+    timeout_in_minutes: 60
+
+  - label: "GNNlib AMDGPU"
+    plugins:
+      - JuliaCI/julia#v1:
+          version: "1"
+      - JuliaCI/julia-coverage#v1:
+          dirs:
+            - GNNlib/src
+    command: |
+      julia --color=yes --depwarn=yes --project=GNNlib/test -e '
+        import Pkg
+        dev_pkgs = Pkg.PackageSpec[]
+        for pkg in ("GNNGraphs", "GNNlib")
+          push!(dev_pkgs, Pkg.PackageSpec(path=pkg));
+        end
+        Pkg.develop(dev_pkgs)
+        Pkg.add(["AMDGPU"])
+        Pkg.test("GNNlib")'
+    agents:
+      queue: "juliagpu"
+      rocm: "*"
+      rocmgpu: "*"
+    env:
+      GNN_TEST_AMDGPU: "true"
+      GNN_TEST_CPU: "false"
+    timeout_in_minutes: 60
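
The two new steps above boil down to a single Julia session. Here is a sketch for reproducing the "GNNlib CUDA" step locally, assuming Julia is started from the repository root with `--project=GNNlib/test` on a machine with a CUDA GPU; the `GNN_TEST_*` variables mirror the step's `env` block:

```julia
# Local reproduction of the "GNNlib CUDA" CI step (sketch).
import Pkg

ENV["GNN_TEST_CUDA"] = "true"   # enable the CUDA test group
ENV["GNN_TEST_CPU"]  = "false"  # skip CPU-only tests, as the CI step does

dev_pkgs = Pkg.PackageSpec[]
for pkg in ("GNNGraphs", "GNNlib")
    push!(dev_pkgs, Pkg.PackageSpec(path = pkg))   # dev the local checkouts
end
Pkg.develop(dev_pkgs)
Pkg.add(["CUDA", "cuDNN"])
Pkg.test("GNNlib")
```

For the "GNNlib AMDGPU" step, swap in `Pkg.add(["AMDGPU"])` and set `GNN_TEST_AMDGPU` instead of `GNN_TEST_CUDA`.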

GNNGraphs/src/GNNGraphs.jl

Lines changed: 1 addition & 0 deletions
@@ -47,6 +47,7 @@ include("query.jl")
 export adjacency_list,
        edge_index,
        get_edge_weight,
+       get_graph_type,
        graph_indicator,
        has_multi_edges,
        is_directed,

GNNGraphs/src/operators.jl

Lines changed: 2 additions & 2 deletions
@@ -6,8 +6,8 @@ Intersect two graphs by keeping only the common edges.
 """
 function Base.intersect(g1::GNNGraph, g2::GNNGraph)
     @assert g1.num_nodes == g2.num_nodes
-    @assert graph_type_symbol(g1) == graph_type_symbol(g2)
-    graph_type = graph_type_symbol(g1)
+    @assert get_graph_type(g1) == get_graph_type(g2)
+    graph_type = get_graph_type(g1)
     num_nodes = g1.num_nodes

     idx1, _ = edge_encoding(edge_index(g1)..., num_nodes)
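
A small usage sketch of the renamed accessor in this context (hypothetical, not part of the diff): both graphs below use the default `:coo` representation, so the assertion passes and the intersection is built with the same `graph_type`.

```julia
using GNNGraphs

g1 = rand_graph(10, 20)
g2 = rand_graph(10, 20)   # same number of nodes, independent random edges

g12 = intersect(g1, g2)   # keeps only edges present in both graphs
get_graph_type(g12)       # :coo, matching get_graph_type(g1)
```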

GNNGraphs/src/query.jl

Lines changed: 55 additions & 3 deletions
@@ -80,9 +80,61 @@ function Graphs.has_edge(g::GNNHeteroGraph, edge_t::EType, i::Integer, j::Integer)
     return any((s .== i) .& (t .== j))
 end

-graph_type_symbol(::GNNGraph{<:COO_T}) = :coo
-graph_type_symbol(::GNNGraph{<:SPARSE_T}) = :sparse
-graph_type_symbol(::GNNGraph{<:ADJMAT_T}) = :dense
+"""
+    get_graph_type(g::GNNGraph)
+
+Return the underlying representation for the graph `g` as a symbol.
+
+Possible values are:
+- `:coo`: Coordinate list representation. The graph is stored as a tuple of vectors `(s, t, w)`,
+  where `s` and `t` are the source and target nodes of the edges and `w` holds the edge weights.
+- `:sparse`: Sparse matrix representation. The graph is stored as a sparse matrix representing the weighted adjacency matrix.
+- `:dense`: Dense matrix representation. The graph is stored as a dense matrix representing the weighted adjacency matrix.
+
+The default representation for graph constructors in GNNGraphs.jl is `:coo`.
+The underlying representation can be accessed through the `g.graph` field.
+
+See also [`GNNGraph`](@ref).
+
+# Examples
+
+The default representation for graph constructors in GNNGraphs.jl is `:coo`.
+```jldoctest
+julia> g = rand_graph(5, 10)
+GNNGraph:
+  num_nodes: 5
+  num_edges: 10
+
+julia> get_graph_type(g)
+:coo
+```
+The `GNNGraph` constructor can also be used to create graphs with different representations.
+```jldoctest
+julia> g = GNNGraph([2,3,5], [1,2,4], graph_type=:sparse)
+GNNGraph:
+  num_nodes: 5
+  num_edges: 3
+
+julia> g.graph
+5×5 SparseArrays.SparseMatrixCSC{Int64, Int64} with 3 stored entries:
+ ⋅  ⋅  ⋅  ⋅  ⋅
+ 1  ⋅  ⋅  ⋅  ⋅
+ ⋅  1  ⋅  ⋅  ⋅
+ ⋅  ⋅  ⋅  ⋅  ⋅
+ ⋅  ⋅  ⋅  1  ⋅
+
+julia> get_graph_type(g)
+:sparse
+
+julia> gcoo = GNNGraph(g, graph_type=:coo);
+
+julia> gcoo.graph
+([2, 3, 5], [1, 2, 4], [1, 1, 1])
+```
+"""
+get_graph_type(::GNNGraph{<:COO_T}) = :coo
+get_graph_type(::GNNGraph{<:SPARSE_T}) = :sparse
+get_graph_type(::GNNGraph{<:ADJMAT_T}) = :dense

 Graphs.nv(g::GNNGraph) = g.num_nodes
 Graphs.ne(g::GNNGraph) = g.num_edges
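
Because the accessor returns a plain `Symbol`, it composes naturally with the converting `GNNGraph(g, graph_type=...)` constructor used in the docstring. A minimal sketch (the `ensure_coo` helper is hypothetical, not part of the package):

```julia
using GNNGraphs

# Hypothetical helper: leave g untouched if it is already stored as COO,
# otherwise rebuild it in COO form via the converting constructor.
ensure_coo(g::GNNGraph) = get_graph_type(g) == :coo ? g : GNNGraph(g, graph_type = :coo)

g = GNNGraph([2, 3, 5], [1, 2, 4], graph_type = :sparse)
get_graph_type(ensure_coo(g))   # :coo
```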

GNNGraphs/src/transform.jl

Lines changed: 1 addition & 0 deletions
@@ -731,6 +731,7 @@ end
 Set `w` as edge weights in the returned graph.
 """
 function set_edge_weight(g::GNNGraph, w::AbstractVector)
+    # TODO preserve the representation instead of converting to COO
     s, t = edge_index(g)
     @assert length(w) == length(s)
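
Until that TODO is addressed, the returned graph is always in COO form. A hedged workaround sketch using only the public API and the new accessor (the helper name is hypothetical):

```julia
using GNNGraphs

# Hypothetical helper: set the weights, then convert the result back to the
# representation the input graph was using.
function set_edge_weight_keeping_type(g::GNNGraph, w::AbstractVector)
    gw = set_edge_weight(g, w)   # currently returns a :coo graph
    return GNNGraph(gw, graph_type = get_graph_type(g))
end
```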

GNNGraphs/test/gnngraph.jl

Lines changed: 3 additions & 0 deletions
@@ -1,3 +1,6 @@
+# TODO test that the graph type is preserved
+# when constructing a GNNGraph from another
+
 @testset "Constructor: adjacency matrix" begin
     A = sprand(10, 10, 0.5)
     sA, tA, vA = findnz(A)

GNNGraphs/test/query.jl

Lines changed: 17 additions & 0 deletions
@@ -257,3 +257,20 @@ if GRAPH_T == :coo
     end
 end

+@testset "get_graph_type" begin
+    g = rand_graph(10, 20, graph_type = GRAPH_T)
+    @test get_graph_type(g) == GRAPH_T
+
+    gsparse = GNNGraph(g, graph_type=:sparse)
+    @test get_graph_type(gsparse) == :sparse
+    @test gsparse.graph isa SparseMatrixCSC
+
+    gcoo = GNNGraph(g, graph_type=:coo)
+    @test get_graph_type(gcoo) == :coo
+    @test gcoo.graph[1:2] isa Tuple{Vector{Int}, Vector{Int}}
+
+    gdense = GNNGraph(g, graph_type=:dense)
+    @test get_graph_type(gdense) == :dense
+    @test gdense.graph isa Matrix{Int}
+end

GNNlib/Project.toml

Lines changed: 9 additions & 9 deletions
@@ -7,35 +7,35 @@ version = "0.2.2"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
 GNNGraphs = "aed8fd31-079b-4b5a-b342-a13352159b8c"
+GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"
 NNlib = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"

 [weakdeps]
+AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"

 [extensions]
+GNNlibAMDGPUExt = "AMDGPU"
 GNNlibCUDAExt = "CUDA"

+# GPUArraysCore is not needed as a direct dependency
+# but pinning it to 0.1 avoids problems when we do Pkg.add("CUDA") in testing
+# See https://github.com/JuliaGPU/CUDA.jl/issues/2564
+
 [compat]
+AMDGPU = "1"
 CUDA = "4, 5"
 ChainRulesCore = "1.24"
 DataStructures = "0.18"
 GNNGraphs = "1.0"
+GPUArraysCore = "0.1"
 LinearAlgebra = "1"
 MLUtils = "0.4"
 NNlib = "0.9"
 Random = "1"
 Statistics = "1"
 julia = "1.10"
-
-[extras]
-ReTestItems = "817f1d60-ba6b-4fd5-9520-3cf149f6a823"
-Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
-SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
-Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
-
-[targets]
-test = ["Test", "ReTestItems", "Reexport", "SparseArrays"]

GNNlib/ext/GNNlibAMDGPUExt.jl

Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
+module GNNlibAMDGPUExt
+
+using AMDGPU: AnyROCMatrix
+using Random, Statistics, LinearAlgebra
+using GNNlib: GNNlib, propagate, copy_xj, e_mul_xj, w_mul_xj
+using GNNGraphs: GNNGraph, COO_T, SPARSE_T
+
+###### PROPAGATE SPECIALIZATIONS ####################
+
+## COPY_XJ
+
+## avoid the fast path on gpu until we have better AMDGPU support
+function GNNlib.propagate(::typeof(copy_xj), g::GNNGraph{<:Union{COO_T, SPARSE_T}}, ::typeof(+),
+                          xi, xj::AnyROCMatrix, e)
+    propagate((xi, xj, e) -> copy_xj(xi, xj, e), g, +, xi, xj, e)
+end
+
+## E_MUL_XJ
+
+## avoid the fast path on gpu until we have better AMDGPU support
+function GNNlib.propagate(::typeof(e_mul_xj), g::GNNGraph{<:Union{COO_T, SPARSE_T}}, ::typeof(+),
+                          xi, xj::AnyROCMatrix, e::AbstractVector)
+    propagate((xi, xj, e) -> e_mul_xj(xi, xj, e), g, +, xi, xj, e)
+end
+
+## W_MUL_XJ
+
+## avoid the fast path on gpu until we have better support
+function GNNlib.propagate(::typeof(w_mul_xj), g::GNNGraph{<:Union{COO_T, SPARSE_T}}, ::typeof(+),
+                          xi, xj::AnyROCMatrix, e::Nothing)
+    propagate((xi, xj, e) -> w_mul_xj(xi, xj, e), g, +, xi, xj, e)
+end
+
+end #module
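
These overrides take effect when the node-feature matrix lives on an AMD GPU. A rough, hypothetical usage sketch (not taken from the test suite; it assumes a working AMDGPU/ROCm setup, and in a real pipeline the graph itself would be moved to the device as well):

```julia
using AMDGPU, GNNGraphs, GNNlib
using GNNlib: propagate, copy_xj

g = rand_graph(10, 30)                # graph in the default :coo representation
x = ROCArray(rand(Float32, 4, 10))    # node features on the AMD GPU

# With xj isa AnyROCMatrix, dispatch selects the override above, which takes
# the generic gather/scatter path instead of the fused fast path.
y = propagate(copy_xj, g, +; xj = x)
```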

GNNlib/src/msgpass.jl

Lines changed: 1 addition & 1 deletion
@@ -184,7 +184,7 @@ xj_sub_xi(xi, xj, e) = xj .- xi
 """
     e_mul_xj(xi, xj, e) = reshape(e, (...)) .* xj

-Reshape `e` into broadcast compatible shape with `xj`
+Reshape `e` into a broadcast compatible shape with `xj`
 (by prepending singleton dimensions) then perform
 broadcasted multiplication.
 """
