|
| 1 | +using GraphNeuralNetworks |
| 2 | +using Flux |
| 3 | +using Random, Statistics, LinearAlgebra |
| 4 | +using SparseArrays |
| 5 | + |
"""
A boolean formula in conjunctive normal form (CNF).

Each clause is a vector of non-zero integers: a literal `v > 0` means variable
`v` appears positively, `v < 0` means variable `abs(v)` appears negated.
"""
struct CNF
    N::Int # number of variables
    M::Int # number of clauses (factors)
    clauses::Vector{Vector{Int}} # one vector of signed literals per clause
end
| 14 | + |
"""
    CNF(clauses::Vector{Vector{Int}})

Build a `CNF` from a list of clauses, inferring the number of variables `N`
as the largest variable index appearing in any clause.

An empty clause list yields a formula with `N == 0` and `M == 0` (the
original implementation threw on `maximum` over an empty collection).
"""
function CNF(clauses::Vector{Vector{Int}})
    M = length(clauses)
    # maximum(abs, c) avoids allocating abs.(c); guard the empty-formula case.
    N = isempty(clauses) ? 0 : maximum(maximum(abs, c) for c in clauses)
    return CNF(N, M, clauses)
end
| 20 | + |
"""
    randomcnf(; N=100, k=3, α=0.1, seed=-1, planted = Vector{Vector{Int}}())

Generate a random instance of the k-SAT problem, with `N` variables and
`round(Int, α*N)` clauses of `k` distinct variables each.
Any configuration in `planted` (a length-`N` vector of ±1 assignments) is
guaranteed to be a solution of the generated formula.
If `seed > 0` the global RNG is seeded for reproducibility.

Throws `ArgumentError` if a planted configuration does not have length `N`.
"""
function randomcnf(; N::Int = 100, k::Int = 3, α::Float64 = 0.1, seed::Int = -1,
                     planted = Vector{Vector{Int}}())
    seed > 0 && Random.seed!(seed)
    M = round(Int, N * α)
    # Validate inputs up front with a real exception: the original used
    # @assert (skippable at high optimization levels) and its message
    # contained a `lenght` typo that raised UndefVarError when triggered.
    for p in planted
        length(p) == N ||
            throw(ArgumentError("Wrong size for planted configuration ($N != $(length(p)))"))
    end
    clauses = Vector{Vector{Int}}()
    for _ in 1:M
        # Rejection-sample each clause until it is valid.
        while true
            c = rand(1:N, k)
            # A clause must mention k distinct variables.
            allunique(c) || continue
            c = c .* rand([-1, 1], k)

            # Reject the clause if any planted solution fails to satisfy it
            # (a clause is satisfied when at least one literal agrees in sign).
            all(sol -> any(>(0), sol[abs.(c)] .* c), planted) || continue

            push!(clauses, c)
            break
        end
    end
    return CNF(N, M, clauses)
end
| 51 | + |
| 52 | + |
"""
    to_edge_index(cnf::CNF)

Return the bipartite literal–clause edge lists `(srcV, dstF, srcF, dstV)`.

Literal nodes are numbered `1:2N`: a positive literal `v` maps to node `v`,
a negated literal maps to node `abs(v) + N`.  `(srcV[i], dstF[i])` is an edge
from a literal node to a clause node; `(srcF[i], dstV[i])` is the reverse edge.
"""
function to_edge_index(cnf::CNF)
    N = cnf.N
    srcV, dstF = Vector{Int}(), Vector{Int}()
    srcF, dstV = Vector{Int}(), Vector{Int}()
    for (a, c) in enumerate(cnf.clauses)
        for v in c
            negated = v < 0
            lit = abs(v) + N * negated  # literal node id in 1:2N
            push!(srcV, lit)
            push!(dstF, a)
            push!(srcF, a)
            push!(dstV, lit)
        end
    end
    # BUG FIX: the original returned `srcV, dstF, srcV, dstF`, silently
    # discarding the reverse-edge vectors it had just built; callers that
    # destructure four values got literal indices where clause indices
    # (and vice versa) were expected.
    return srcV, dstF, srcF, dstV
end
| 68 | + |
"""
    to_adjacency_matrix(cnf::CNF)

Build the sparse `M × 2N` clause–literal incidence matrix of `cnf`:
entry `(a, j)` is 1 when clause `a` contains literal node `j`, where a
positive literal `v` occupies column `v` and a negated one column `abs(v) + N`.
"""
function to_adjacency_matrix(cnf::CNF)
    A = spzeros(Int, cnf.M, 2 * cnf.N)
    for (clause_idx, clause) in enumerate(cnf.clauses)
        for lit in clause
            # Negated literals live in the second block of N columns.
            col = abs(lit) + (lit < 0) * cnf.N
            A[clause_idx, col] = 1
        end
    end
    return A
end
| 80 | + |
"""
    flip_literals(X::AbstractMatrix)

Swap the two column halves of `X`: with `2n` columns, columns `n+1:2n`
(the negated-literal states) come first, followed by columns `1:n`
(the positive-literal states).
"""
function flip_literals(X::AbstractMatrix)
    half = size(X, 2) ÷ 2
    negated = X[:, half+1:2half]
    positive = X[:, 1:half]
    return hcat(negated, positive)
end
| 85 | + |
## Layer
"""
Message-passing SAT model (appears to follow the NeuroSAT architecture —
TODO confirm against the reference paper).

Fields are intentionally left untyped so any Flux-compatible layer can be
plugged in; see `NeuroSAT(D::Int)` for the default construction.
"""
struct NeuroSAT
    Xv0    # initial literal embedding (length-D vector, tiled across all 2N literals)
    Xf0    # initial clause embedding — currently unused in the forward pass
    MLPv   # literal feature MLP applied before the literal→clause aggregation
    MLPf   # clause update MLP
    MLPout # readout MLP producing one scalar per literal
    LSTMv  # recurrent update of literal states
    LSTMf  # recurrent update of clause states
end
| 96 | + |
# Register all NeuroSAT fields with Flux so they are collected as trainable
# parameters and moved across devices together.
Flux.@functor NeuroSAT

# NOTE(review): an explicit trainable-field selection was considered and left
# disabled — with it, Xv0/Xf0 would be frozen; confirm which is intended.
# Optimisers.trainable(m::NeuroSAT) = (; m.MLPv, m.MLPf, m.MLPout, m.LSTMv, m.LSTMf)
| 101 | + |
"""
    NeuroSAT(D::Int)

Construct a `NeuroSAT` model with embedding dimension `D`: random initial
literal/clause embeddings, two-layer MLPs with a `4D` hidden width, an
`LSTM(2D => D)` literal update and an `LSTM(D => D)` clause update.
"""
function NeuroSAT(D::Int)
    # Two-layer perceptron with a 4D-wide relu hidden layer.
    mlp(din, dout) = Chain(Dense(din => 4D, relu), Dense(4D => dout))
    return NeuroSAT(
        randn(Float32, D),  # Xv0
        randn(Float32, D),  # Xf0
        mlp(D, D),          # MLPv
        mlp(D, D),          # MLPf
        mlp(D, 1),          # MLPout
        LSTM(2D => D),      # LSTMv
        LSTM(D => D),       # LSTMf
    )
end
| 112 | + |
"""
    (m::NeuroSAT)(A::AbstractArray, Tmax)

Run `Tmax` rounds of literal/clause message passing over the `M × 2N`
clause–literal adjacency matrix `A` and return the mean of the per-literal
scalar readouts.
"""
function (m::NeuroSAT)(A::AbstractArray, Tmax)
    # Tile the learned initial literal embedding across all 2N literal columns.
    Xv = repeat(m.Xv0, 1, size(A, 2))
    # Xf = repeat(m.Xf0, 1, size(A, 1))

    for t = 1:Tmax
        Xv = m.MLPv(Xv)
        # Literal → clause aggregation: (D×2N) * (2N×M) → D×M.
        Mf = Xv * A'
        Xf = m.MLPf(m.LSTMf(Mf))
        # Clause → literal aggregation: (D×M) * (M×2N) → D×2N.
        Mv = Xf * A
        # Each literal's update also sees its negation's state (flip_literals),
        # so the LSTMv input is 2D×2N.
        # NOTE(review): Flux's stateful LSTMs carry hidden state across loop
        # iterations AND across forward calls — confirm the absence of a
        # Flux.reset! here is intentional.
        Xv = m.LSTMv(vcat(Mv, flip_literals(Xv)))
    end
    # MLPout maps D → 1 per literal column; mean reduces the 1×2N row to a scalar.
    return mean(m.MLPout(Xv))
end
| 126 | + |
| 127 | +# function Base.show(io, m::NeuroSAT) |
| 128 | +# D = size(m.Xv0, 1) |
| 129 | +# print(io, "NeuroSAT($(D))") |
| 130 | +# end |
| 131 | + |
## Demo / smoke-test script
N = 100                        # number of boolean variables
cnf = randomcnf(; N, k=3, α=1.5, seed=-1)
M = cnf.M                      # number of clauses
D = 32                         # embedding dimension (128 in the paper)
Xv = randn(Float32, D, 2*N)    # random literal embeddings, one column per literal node
Xf = randn(Float32, D, M)      # random clause embeddings, one column per clause

srcV, dstF, srcF, dstV = to_edge_index(cnf)
A = to_adjacency_matrix(cnf)


model = NeuroSAT(D)

# NOTE(review): GNNGraphs._gather is an internal, underscore-prefixed API —
# confirm it is stable across GraphNeuralNetworks versions before relying on it.
m_vtof = GNNGraphs._gather(Xv, srcV)  # per-edge literal→clause messages
m_ftov = GNNGraphs._gather(Xf, srcF)  # per-edge clause→literal messages
| 147 | + |
| 148 | + |
0 commit comments