diff --git a/GNNGraphs/src/convert.jl b/GNNGraphs/src/convert.jl index ea7ae500f..6735a9d27 100644 --- a/GNNGraphs/src/convert.jl +++ b/GNNGraphs/src/convert.jl @@ -207,7 +207,7 @@ function to_sparse(A::ADJMAT_T, T = nothing; dir = :out, num_nodes = nothing, A = sparse(A) end if !weighted - A = map(x -> ifelse(x > 0, T(1), T(0)), A) + A = binarize(A, T) end return A, num_nodes, num_edges end diff --git a/GNNGraphs/src/query.jl b/GNNGraphs/src/query.jl index 482502b4a..2c2f4332a 100644 --- a/GNNGraphs/src/query.jl +++ b/GNNGraphs/src/query.jl @@ -218,7 +218,7 @@ end adjacency_list(g::GNNGraph; dir = :out) = adjacency_list(g, 1:(g.num_nodes); dir) """ - adjacency_matrix(g::GNNGraph, T=eltype(g); dir=:out, weighted=true) + adjacency_matrix(g::GNNGraph, T=eltype(g); dir=:out, weighted=true, fmt=nothing) Return the adjacency matrix `A` for the graph `g`. @@ -227,29 +227,39 @@ If `dir=:in` instead, `A[i,j] > 0` denotes the presence of an edge from node `j` User may specify the eltype `T` of the returned matrix. -If `weighted=true`, the `A` will contain the edge weights if any, otherwise the elements of `A` will be either 0 or 1. -""" -function Graphs.adjacency_matrix(g::GNNGraph{<:COO_T}, T::DataType = eltype(g); dir = :out, - weighted = true) - A, n, m = to_sparse(g.graph, T; num_nodes = g.num_nodes, weighted) +If `weighted=true`, the matrix `A` will contain the edge weights if any. +If the graph does not contain edge weights, or if `weighted=false`, the adjacency matrix will contain only 0s and 1s. + +The argument `fmt` can be used to specify the desired format of the returned matrix. Possible values are: +- `nothing`: return the matrix in the same format as the underlying graph representation. +- `:sparse`: return a sparse matrix (default for COO graphs). +- `:dense`: return a dense matrix (default for adjacency matrix graphs). 
+""" +function Graphs.adjacency_matrix(g::GNNGraph, T::DataType = eltype(g); dir = :out, + weighted = true, fmt = nothing) + if fmt === nothing + if g.graph isa COO_T + fmt = :sparse + elseif g.graph isa SPARSE_T + fmt = :sparse + else + fmt = :dense + end + end + @assert fmt ∈ [:sparse, :dense] + if fmt == :sparse + A, n, m = to_sparse(g.graph, T; num_nodes = g.num_nodes, weighted) + else + A, n, m = to_dense(g.graph, T; num_nodes = g.num_nodes, weighted) + end @assert size(A) == (n, n) return dir == :out ? A : A' end -function Graphs.adjacency_matrix(g::GNNGraph{<:ADJMAT_T}, T::DataType = eltype(g); - dir = :out, weighted = true) - @assert dir ∈ [:in, :out] - A = g.graph - if !weighted - A = binarize(A, T) - end - A = T != eltype(A) ? T.(A) : A - return dir == :out ? A : A' -end function CRC.rrule(::typeof(adjacency_matrix), g::G, T::DataType; - dir = :out, weighted = true) where {G <: GNNGraph{<:ADJMAT_T}} - A = adjacency_matrix(g, T; dir, weighted) + dir=:out, weighted=true, fmt=nothing) where {G <: GNNGraph{<:ADJMAT_T}} + A = adjacency_matrix(g, T; dir, weighted, fmt) if !weighted function adjacency_matrix_pullback_noweight(Δ) return (CRC.NoTangent(), CRC.ZeroTangent(), CRC.NoTangent()) @@ -266,8 +276,8 @@ function CRC.rrule(::typeof(adjacency_matrix), g::G, T::DataType; end function CRC.rrule(::typeof(adjacency_matrix), g::G, T::DataType; - dir = :out, weighted = true) where {G <: GNNGraph{<:COO_T}} - A = adjacency_matrix(g, T; dir, weighted) + dir=:out, weighted=true, fmt=nothing) where {G <: GNNGraph{<:COO_T}} + A = adjacency_matrix(g, T; dir, weighted, fmt) w = get_edge_weight(g) if !weighted || w === nothing function adjacency_matrix_pullback_noweight(Δ) diff --git a/GNNGraphs/test/Project.toml b/GNNGraphs/test/Project.toml index f18c35628..d4af78e8f 100644 --- a/GNNGraphs/test/Project.toml +++ b/GNNGraphs/test/Project.toml @@ -2,7 +2,6 @@ FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196" 
GNNGraphs = "aed8fd31-079b-4b5a-b342-a13352159b8c" -GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" @@ -20,6 +19,3 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a" TestItems = "1c621080-faea-4a02-84b6-bbd5e436b8fe" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" - -[compat] -GPUArraysCore = "0.1" diff --git a/GNNlib/Project.toml b/GNNlib/Project.toml index b39e4d325..fd64ac6a8 100644 --- a/GNNlib/Project.toml +++ b/GNNlib/Project.toml @@ -16,10 +16,12 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" [weakdeps] AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" +Metal = "dde4c033-4e86-420c-a63e-0dd931031962" [extensions] GNNlibAMDGPUExt = "AMDGPU" GNNlibCUDAExt = "CUDA" +GNNlibMetalExt = "Metal" [compat] AMDGPU = "1" @@ -28,6 +30,7 @@ ChainRulesCore = "1.24" DataStructures = "0.18" GNNGraphs = "1.4" LinearAlgebra = "1" +Metal = "1.0" MLUtils = "0.4" NNlib = "0.9" Random = "1" diff --git a/GNNlib/ext/GNNlibCUDAExt.jl b/GNNlib/ext/GNNlibCUDAExt.jl index f745d51da..9cfe45089 100644 --- a/GNNlib/ext/GNNlibCUDAExt.jl +++ b/GNNlib/ext/GNNlibCUDAExt.jl @@ -15,7 +15,14 @@ const CUDA_COO_T = Tuple{T, T, V} where {T <: AnyCuArray{<:Integer}, V <: Union{ ## avoid the fast path on gpu until we have better cuda support function GNNlib.propagate(::typeof(copy_xj), g::GNNGraph{<:COO_T}, ::typeof(+), xi, xj::AnyCuMatrix, e) - A = _adjacency_matrix(g, eltype(xj); weighted = false) + + if !g.is_coalesced + # Revisit after + # https://github.com/JuliaGPU/CUDA.jl/issues/1113 + A = adjacency_matrix(g, eltype(xj); weighted=false, fmt=:dense) + else + A = adjacency_matrix(g, eltype(xj); weighted=false, fmt=:sparse) + end return xj * A end @@ -47,21 +54,4 @@ end # Flux.Zygote.@nograd compute_degree -## CUSTOM 
ADJACENCY_MATRIX IMPLEMENTATION FOR CUDA COO GRAPHS, returning dense matrix when not coalesced, more efficient - -function _adjacency_matrix(g::GNNGraph{<:CUDA_COO_T}, T::DataType = eltype(g); dir = :out, - weighted = true) - if !g.is_coalesced - # Revisit after - # https://github.com/JuliaGPU/CUDA.jl/issues/1113 - A, n, m = to_dense(g.graph, T; num_nodes = g.num_nodes, weighted) # if not coalesced, construction of sparse matrix is slow - else - A, n, m = to_sparse(g.graph, T; num_nodes = g.num_nodes, weighted, is_coalesced = true) - end - @assert size(A) == (n, n) - return dir == :out ? A : A' -end - -@non_differentiable _adjacency_matrix(x...) - end #module diff --git a/GNNlib/ext/GNNlibMetalExt.jl b/GNNlib/ext/GNNlibMetalExt.jl new file mode 100644 index 000000000..0fc4ec61f --- /dev/null +++ b/GNNlib/ext/GNNlibMetalExt.jl @@ -0,0 +1,23 @@ +module GNNlibMetalExt + +using Metal +using Random, Statistics, LinearAlgebra +using GNNlib: GNNlib, propagate, copy_xj, e_mul_xj, w_mul_xj +using GNNGraphs: GNNGraph, COO_T, SPARSE_T, adjacency_matrix +using ChainRulesCore: @non_differentiable + +const METAL_COO_T = Tuple{T, T, V} where {T <: MtlVector{<:Integer}, V <: Union{Nothing, MtlVector}} + +###### PROPAGATE SPECIALIZATIONS #################### + +## COPY_XJ + +## Metal does not support sparse arrays or scatter operations yet. +## Have to use dense adjacency matrix multiplication for now. 
+function GNNlib.propagate(::typeof(copy_xj), g::GNNGraph{<:METAL_COO_T}, ::typeof(+), + xi, xj::AbstractMatrix, e) + A = adjacency_matrix(g, eltype(xj), weighted=false, fmt=:dense) + return xj * A +end + +end #module diff --git a/GNNlib/test/Project.toml b/GNNlib/test/Project.toml index 36fcae23b..c944e6684 100644 --- a/GNNlib/test/Project.toml +++ b/GNNlib/test/Project.toml @@ -4,7 +4,6 @@ Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196" GNNGraphs = "aed8fd31-079b-4b5a-b342-a13352159b8c" GNNlib = "a6a84749-d869-43f8-aacc-be26a1996e48" -GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" MLDataDevices = "7e8f7934-dd98-4c1a-8fe8-92b47a384d40" MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54" @@ -17,6 +16,3 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" - -[compat] -GPUArraysCore = "0.1" diff --git a/GraphNeuralNetworks/docs/src/index.md b/GraphNeuralNetworks/docs/src/index.md index 5eb38897d..c5b52e713 100644 --- a/GraphNeuralNetworks/docs/src/index.md +++ b/GraphNeuralNetworks/docs/src/index.md @@ -30,7 +30,7 @@ pkg> add GraphNeuralNetworks Let's give a brief overview of the package by solving a graph regression problem with synthetic data. -Other usage examples can be found in the [examples](https://github.com/JuliaGraphs/GraphNeuralNetworks.jl/tree/master/GraphNeuralNetworks/examples) folder, in the [notebooks](https://github.com/JuliaGraphs/GraphNeuralNetworks.jl/tree/master/GraphNeuralNetworks/notebooks) folder, and in the [tutorials](https://juliagraphs.org/GraphNeuralNetworks.jl/tutorials/) section of the documentation. 
+Other usage examples can be found in the [examples](https://github.com/JuliaGraphs/GraphNeuralNetworks.jl/tree/master/GraphNeuralNetworks/examples) folder, in the [notebooks](https://github.com/JuliaGraphs/GraphNeuralNetworks.jl/tree/master/GraphNeuralNetworks/notebooks) folder, and in the [tutorials](https://juliagraphs.org/GraphNeuralNetworks.jl/docs/GraphNeuralNetworks.jl/dev/tutorials/gnn_intro/) section of the documentation. ### Data preparation diff --git a/GraphNeuralNetworks/test/Project.toml b/GraphNeuralNetworks/test/Project.toml index ebdb52172..bf8d43b11 100644 --- a/GraphNeuralNetworks/test/Project.toml +++ b/GraphNeuralNetworks/test/Project.toml @@ -5,10 +5,10 @@ Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196" GNNGraphs = "aed8fd31-079b-4b5a-b342-a13352159b8c" GNNlib = "a6a84749-d869-43f8-aacc-be26a1996e48" -GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" GraphNeuralNetworks = "cffab07f-9bc2-4db1-8861-388f63bf7694" Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458" +Metal = "dde4c033-4e86-420c-a63e-0dd931031962" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" @@ -17,5 +17,3 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" -[compat] -GPUArraysCore = "0.1"