2 files changed: +2 −16

@@ -99,17 +99,3 @@ function broadcast_edges(g::GNNGraph, x)
     gi = graph_indicator(g, edges=true)
     return gather(x, gi)
 end
-
-# More generic version of
-# https://github.com/JuliaDiff/ChainRules.jl/pull/586
-# This applies to all arrays
-# Without this, gradient of T.(A) for A dense gpu matrix errors.
-function ChainRulesCore.rrule(::typeof(Broadcast.broadcasted), T::Type{<:Number}, x::AbstractArray)
-    proj = ProjectTo(x)
-
-    function broadcasted_cast(Δ)
-        return NoTangent(), NoTangent(), proj(Δ)
-    end
-
-    return T.(x), broadcasted_cast
-end
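
The deleted rrule existed only to make reverse-mode differentiation of an element-type cast T.(A) work for dense GPU matrices before the upstream fix (JuliaDiff/ChainRules.jl#586) was available. Below is a minimal sketch, not part of the PR, of how that gradient can be exercised, assuming Zygote and CUDA.jl are loaded and a ChainRules.jl version that includes the upstream fix; the array size and the Float32 target type are illustrative only.

using Zygote, CUDA

# Use a dense GPU matrix when a GPU is available; the original failure
# only occurred for dense GPU arrays. Falls back to a CPU array otherwise.
A = CUDA.functional() ? CUDA.rand(Float64, 4, 4) : rand(Float64, 4, 4)

# Gradient through a broadcasted type cast. With the upstream ChainRules.jl
# fix this works without the package-local rrule removed above.
grad, = Zygote.gradient(a -> sum(Float32.(a)), A)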

@@ -41,10 +41,10 @@ tests = [
 
 !CUDA.functional() && @warn("CUDA unavailable, not testing GPU support")
 
-@testset "GraphNeuralNetworks: graph format $graph_type" for graph_type in (:dense, :coo, :sparse)
+@testset "GraphNeuralNetworks: graph format $graph_type" for graph_type in (:coo, :dense, :sparse)
     global GRAPH_T = graph_type
     global TEST_GPU = CUDA.functional() && (GRAPH_T != :sparse)
-
+
     for t in tests
         startswith(t, "examples") && GRAPH_T == :dense && continue # not testing :dense since causes OutOfMemory on github's CI
         include("$t.jl")