Skip to content

Commit 0c2dfbb

Browse files
better negative sampling
1 parent e5e919c commit 0c2dfbb

File tree

2 files changed

+58
-13
lines changed

2 files changed

+58
-13
lines changed

docs/src/index.md

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
11
# GraphNeuralNetworks
22

3-
This is the documentation page for the [GraphNeuralNetworks.jl](https://github.com/CarloLucibello/GraphNeuralNetworks.jl) library.
4-
5-
A graph neural network library for Julia based on the deep learning framework [Flux.jl](https://github.com/FluxML/Flux.jl). GNN.jl is largely inspired by python's libraries [PyTorch Geometric](https://pytorch-geometric.readthedocs.io/en/latest/) and [Deep Graph Library](https://docs.dgl.ai/),
6-
and by julia's [GeometricFlux](https://fluxml.ai/GeometricFlux.jl/stable/).
3+
This is the documentation page for [GraphNeuralNetworks.jl](https://github.com/CarloLucibello/GraphNeuralNetworks.jl), a graph neural network library written in Julia and based on the deep learning framework [Flux.jl](https://github.com/FluxML/Flux.jl).
4+
GNN.jl is largely inspired by [PyTorch Geometric](https://pytorch-geometric.readthedocs.io/en/latest/), [Deep Graph Library](https://docs.dgl.ai/),
5+
and [GeometricFlux.jl](https://fluxml.ai/GeometricFlux.jl/stable/).
76

87
Among its features:
98

10-
* Integratation with the JuliaGraphs ecosystem.
9+
* Integration with the JuliaGraphs ecosystem.
1110
* Implementation of common graph convolutional layers.
1211
* Fast operations on batched graphs.
1312
* Easy to define custom layers.
1413
* CUDA support.
14+
* Examples of node-wise, edge-wise, and graph-wise machine learning tasks.
1515

1616

1717
## Package overview

src/GNNGraphs/transform.jl

Lines changed: 53 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -328,15 +328,60 @@ end
328328
"""
329329
negative_sample(g::GNNGraph; num_neg_edges=g.num_edges)
330330
331-
Return a graph containing random negative edges (i.e. non-edges) from graph `g`.
331+
Return a graph containing random negative edges (i.e. non-edges) from graph `g` as edges.
332332
"""
333-
function negative_sample(g::GNNGraph; num_neg_edges=g.num_edges)
334-
adj = adjacency_matrix(g)
335-
adj_neg = 1 .- adj - I
336-
neg_s, neg_t = ci2t(findall(adj_neg .> 0), 2)
337-
neg_eids = randperm(length(neg_s))[1:num_neg_edges]
338-
neg_s, neg_t = neg_s[neg_eids], neg_t[neg_eids]
339-
return GNNGraph(neg_s, neg_t, num_nodes=g.num_nodes)
333+
function negative_sample(g::GNNGraph;
334+
max_trials=3,
335+
num_neg_edges=g.num_edges)
336+
337+
@assert g.num_graphs == 1
338+
# Consider self-loops as positive edges
339+
# Construct new graph dropping features
340+
g = add_self_loops(GNNGraph(edge_index(g)))
341+
342+
s, t = edge_index(g)
343+
n = g.num_nodes
344+
if s isa CuArray
345+
# Convert to gpu since set operations and sampling are not supported by CUDA.jl
346+
device = Flux.gpu
347+
s, t = Flux.cpu(s), Flux.cpu(t)
348+
else
349+
device = Flux.cpu
350+
end
351+
idx_pos, maxid = edge_encoding(s, t, n)
352+
353+
pneg = 1 - g.num_edges / maxid # prob of selecting negative edge
354+
# pneg * sample_prob * maxid == num_neg_edges
355+
sample_prob = min(1, num_neg_edges / (pneg * maxid) * 1.1)
356+
idx_neg = Int[]
357+
for _ in 1:max_trials
358+
rnd = randsubseq(1:maxid, sample_prob)
359+
setdiff!(rnd, idx_pos)
360+
union!(idx_neg, rnd)
361+
if length(idx_neg) >= num_neg_edges
362+
idx_neg = idx_neg[1:num_neg_edges]
363+
break
364+
end
365+
end
366+
s_neg, t_neg = edge_decoding(idx_neg, n)
367+
return GNNGraph(s_neg, t_neg, num_nodes=n) |> device
368+
end
369+
370+
# Encode every edge (s, t) of an n-node graph as a unique integer id
# in 1:n^2, via the row-major mapping id = (s - 1) * n + t.
# Returns the vector of ids together with the maximum possible id.
function edge_encoding(s, t, n)
    ids = (s .- 1) .* n .+ t
    maxid = n^2
    return ids, maxid
end
377+
378+
# Inverse of `edge_encoding`: recover the (source, target) node pairs
# from integer edge ids in 1:n^2.
# s = (id - 1) ÷ n + 1 undoes the row index, t = (id - 1) % n + 1 the column.
function edge_decoding(idx, n)
    s = (idx .- 1) .÷ n .+ 1
    t = (idx .- 1) .% n .+ 1
    return s, t
end
341386

342387
# """

0 commit comments

Comments
 (0)