
Commit 73ac9db

fix cuda tests
1 parent: e5965de

22 files changed: +101 additions, -207 deletions

NEWS.md

Lines changed: 0 additions & 8 deletions
This file was deleted.

docs/src/tensornetwork.md

Lines changed: 0 additions & 102 deletions
This file was deleted.

src/GenericTensorNetworks.jl

Lines changed: 0 additions & 1 deletion
```diff
@@ -83,7 +83,6 @@ project_relative_path(xs...) = normpath(joinpath(dirname(dirname(pathof(@__MODULE__))), xs...))
 include("Mods.jl/src/Mods.jl")
 using .Mods
 
-include("utils.jl")
 include("arithematics.jl")
 include("networks.jl")
 include("graph_polynomials.jl")
```

src/bounding.jl

Lines changed: 1 addition & 1 deletion
```diff
@@ -158,7 +158,7 @@ function solution_ad(code::Union{NestedEinsum,SlicedEinsum}, @nospecialize(xsa),
     @debug "generating masked tree..."
     mt = generate_masktree(SingleConfig(), code, c, ymask, size_dict)
     config = read_config!(code, mt, Dict())
-    if length(config) !== length(labels(code)) # equal to the # of degree of freedoms
+    if length(config) !== length(uniquelabels(code)) # equal to the # of degree of freedoms
        error("configuration `$(config)` is not fully determined!")
    end
    n, config
```
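
The fix swaps `labels` for `uniquelabels`, which deduplicates the einsum indices before counting degrees of freedom. A minimal sketch of the distinction, using a plain OMEinsum ein-string as a stand-in for the network code (the contraction here is illustrative, not from the package):

```julia
using OMEinsum

code = ein"ij,jk->ik"               # two tensors sharing the index j
@show OMEinsum.uniquelabels(code)   # ['i', 'j', 'k']: one entry per degree of freedom
@show OMEinsum.getixsv(code)        # [['i', 'j'], ['j', 'k']]: raw labels repeat across tensors
```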

src/configurations.jl

Lines changed: 29 additions & 44 deletions
```diff
@@ -11,85 +11,70 @@ function config_type(::Type{T}, n, num_flavors; all::Bool, tree_storage::Bool) where T
 end
 
 """
-    largest_solutions(problem; all=false, usecuda=false, invert=false, tree_storage::Bool=false)
+    largest_solutions(net::GenericTensorNetwork; all=false, usecuda=false, invert=false, tree_storage::Bool=false, T=Float64)
 
-Find optimal solutions with bounding.
-
-* When `all` is true, the program will use set for enumerate all possible solutions, otherwise, it will return one solution for each size.
-* `usecuda` can not be true if you want to use set to enumerate all possible solutions.
-* If `invert` is true, find the minimum.
-* If `tree_storage` is true, use [`SumProductTree`](@ref) as the storage of solutions.
+Find optimal solutions, with bounding. Please check [`solutions`](@ref) for argument descriptions.
 """
-function largest_solutions(gp::GenericTensorNetwork; all=false, usecuda=false, invert=false, tree_storage::Bool=false, T=Float64)
+function largest_solutions(net::GenericTensorNetwork; all=false, usecuda=false, invert=false, tree_storage::Bool=false, T=Float64)
     if all && usecuda
         throw(ArgumentError("ConfigEnumerator can not be computed on GPU!"))
     end
-    xst = generate_tensors(_x(Tropical{T}; invert), gp)
-    ymask = trues(fill(num_flavors(gp), length(getiyv(gp.code)))...)
+    xst = generate_tensors(_x(Tropical{T}; invert), net)
+    ymask = trues(fill(num_flavors(net), length(getiyv(net.code)))...)
     if usecuda
         xst = togpu.(xst)
         ymask = togpu(ymask)
     end
     if all
         # we use `Float64` as default because we want to support weighted graphs.
-        T = config_type(CountingTropical{T,T}, length(labels(gp)), num_flavors(gp); all, tree_storage)
-        xs = generate_tensors(_x(T; invert), gp)
-        ret = bounding_contract(AllConfigs{1}(), gp.code, xst, ymask, xs)
+        T = config_type(CountingTropical{T,T}, length(variables(net)), num_flavors(net); all, tree_storage)
+        xs = generate_tensors(_x(T; invert), net)
+        ret = bounding_contract(AllConfigs{1}(), net.code, xst, ymask, xs)
         return invert ? asarray(post_invert_exponent.(ret), ret) : ret
     else
         @assert ndims(ymask) == 0
-        t, res = solution_ad(gp.code, xst, ymask)
+        t, res = solution_ad(net.code, xst, ymask)
         ret = fill(CountingTropical(asscalar(t).n, ConfigSampler(StaticBitVector(map(l->res[l], 1:length(res))))))
         return invert ? asarray(post_invert_exponent.(ret), ret) : ret
     end
 end
 
 """
-    solutions(problem, basetype; all, usecuda=false, invert=false, tree_storage::Bool=false)
+    solutions(net::GenericTensorNetwork, ::Type{BT}; all::Bool, usecuda::Bool=false, invert::Bool=false, tree_storage::Bool=false) where BT
 
-General routine to find solutions without bounding,
+Find all solutions, solutions with largest sizes or solutions with smallest sizes. Bounding is not supported.
 
-* `basetype` can be a type with counting field,
+### Arguments
+- `net` is a [`GenericTensorNetwork`](@ref) instance.
+- `BT` is the data types used for computing, which can be
     * `CountingTropical{Float64,Float64}` for finding optimal solutions,
     * `Polynomial{Float64, :x}` for enumerating all solutions,
     * `Max2Poly{Float64,Float64}` for optimal and suboptimal solutions.
-* When `all` is true, the program will use set for enumerate all possible solutions, otherwise, it will return one solution for each size.
-* `usecuda` can not be true if you want to use set to enumerate all possible solutions.
-* If `tree_storage` is true, use [`SumProductTree`](@ref) as the storage of solutions.
+
+### Keyword arguments
+- `all` is an indicator whether to find all solutions or just one of them.
+- `usecuda` is an indicator of using CUDA or not, which must be false if `all` is true.
+- `invert` is an indicator of whether flip the size or not. If true, instead of finding the maximum, it find the minimum.
+- `tree_storage` is an indicator of whether using more compact [`SumProductTree`](@ref) as the storage or not.
 """
-function solutions(gp::GenericTensorNetwork, ::Type{BT}; all::Bool, usecuda::Bool=false, invert::Bool=false, tree_storage::Bool=false) where BT
+function solutions(net::GenericTensorNetwork, ::Type{BT}; all::Bool, usecuda::Bool=false, invert::Bool=false, tree_storage::Bool=false) where BT
     if all && usecuda
         throw(ArgumentError("ConfigEnumerator can not be computed on GPU!"))
     end
-    T = config_type(BT, length(labels(gp)), num_flavors(gp); all, tree_storage)
-    ret = contractx(gp, _x(T; invert); usecuda=usecuda)
+    T = config_type(BT, length(variables(net)), num_flavors(net); all, tree_storage)
+    ret = contractx(net, _x(T; invert); usecuda=usecuda)
     return invert ? asarray(post_invert_exponent.(ret), ret) : ret
 end
 
-"""
-    largest2_solutions(problem; all=true, usecuda=false, invert=false, tree_storage::Bool=false)
-
-Finding optimal and suboptimal solutions.
-"""
-largest2_solutions(gp::GenericTensorNetwork; all=true, usecuda=false, invert::Bool=false, T=Float64) = solutions(gp, Max2Poly{T,T}; all, usecuda, invert)
-
-function largestk_solutions(gp::GenericTensorNetwork, k::Int; invert::Bool=false, tree_storage::Bool=false, T=Float64)
-    xst = generate_tensors(_x(Tropical{T}; invert), gp)
-    ymask = trues(fill(2, length(getiyv(gp.code)))...)
-    T = config_type(TruncatedPoly{k,T,T}, length(labels(gp)), num_flavors(gp); all=true, tree_storage)
-    xs = generate_tensors(_x(T; invert), gp)
-    ret = bounding_contract(AllConfigs{k}(), gp.code, xst, ymask, xs)
+function largestk_solutions(net::GenericTensorNetwork, k::Int; invert::Bool=false, tree_storage::Bool=false, T=Float64)
+    xst = generate_tensors(_x(Tropical{T}; invert), net)
+    ymask = trues(fill(2, length(getiyv(net.code)))...)
+    T = config_type(TruncatedPoly{k,T,T}, length(variables(net)), num_flavors(net); all=true, tree_storage)
+    xs = generate_tensors(_x(T; invert), net)
+    ret = bounding_contract(AllConfigs{k}(), net.code, xst, ymask, xs)
     return invert ? asarray(post_invert_exponent.(ret), ret) : ret
 end
 
-"""
-    all_solutions(problem)
-
-Finding all solutions grouped by size.
-e.g. when the problem is [`MaximalIS`](@ref), it computes all maximal independent sets, or the maximal cliques of it complement.
-"""
-all_solutions(gp::GenericTensorNetwork; T=Float64) = solutions(gp, Polynomial{T,:x}, all=true, usecuda=false, tree_storage=false)
-
 # NOTE: do we have more efficient way to compute it?
 # NOTE: doing pair-wise Hamming distance might be biased?
 """
```

src/deprecate.jl

Lines changed: 10 additions & 1 deletion
```diff
@@ -9,4 +9,13 @@
 @deprecate get_weights(args...; kwargs...) weights(args...; kwargs...)
 @deprecate chweights(args...; kwargs...) set_weights(args...; kwargs...)
 
-@deprecate GraphProblem() ConstraintSatisfactionProblem()
+@deprecate spinglass_energy(args...; kwargs...) energy(args...; kwargs...)
+@deprecate unit_disk_graph(args...; kwargs...) UnitDiskGraph(args...; kwargs...)
+@deprecate solve(problem::ConstraintSatisfactionProblem, args...; kwargs...) solve(GenericTensorNetwork(problem), args...; kwargs...)
+
+"""
+    const GraphProblem = ConstraintSatisfactionProblem
+
+Deprecated. Use `ConstraintSatisfactionProblem` instead.
+"""
+const GraphProblem = ConstraintSatisfactionProblem
```
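
The new `solve` deprecation wraps a bare problem on the fly, so existing call sites keep working with a warning. A before/after sketch (the problem construction is illustrative):

```julia
using GenericTensorNetworks, Graphs

problem = IndependentSet(smallgraph(:petersen))

solve(problem, SizeMax())                         # old style: deprecated, forwards to the line below
solve(GenericTensorNetwork(problem), SizeMax())   # new style: wrap the problem explicitly
```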

src/interfaces.jl

Lines changed: 6 additions & 6 deletions
```diff
@@ -390,37 +390,37 @@ Memory estimation in number of bytes to compute certain `property` of a `problem`.
 `T` is the base type.
 """
 function estimate_memory(problem::GenericTensorNetwork, property::AbstractProperty; T=Float64)::Real
-    _estimate_memory(tensor_element_type(T, length(labels(problem)), num_flavors(problem), property), problem)
+    _estimate_memory(tensor_element_type(T, length(variables(problem)), num_flavors(problem), property), problem)
 end
 function estimate_memory(problem::GenericTensorNetwork, property::Union{SingleConfigMax{K,BOUNDED},SingleConfigMin{K,BOUNDED}}; T=Float64) where {K, BOUNDED}
     tc, sc, rw = contraction_complexity(problem.code, _size_dict(problem))
     # caching all tensors is equivalent to counting the total number of writes
     if K === Single && BOUNDED
         return ceil(Int, exp2(rw - 1)) * sizeof(Tropical{T})
     elseif K === Single && !BOUNDED
-        n, nf = length(labels(problem)), num_flavors(problem)
+        n, nf = length(variables(problem)), num_flavors(problem)
         return peak_memory(problem.code, _size_dict(problem)) * (sizeof(tensor_element_type(T, n, nf, property)))
     else
         # NOTE: the integer `K` case does not respect bounding
-        n, nf = length(labels(problem)), num_flavors(problem)
+        n, nf = length(variables(problem)), num_flavors(problem)
         TT = tensor_element_type(T, n, nf, property)
         return peak_memory(problem.code, _size_dict(problem)) * (sizeof(tensor_element_type(T, n, nf, SingleConfigMax{Single,BOUNDED}())) * K + sizeof(TT))
     end
 end
 function estimate_memory(problem::GenericTensorNetwork, ::GraphPolynomial{:polynomial}; T=Float64)
     # this is the upper bound
-    return peak_memory(problem.code, _size_dict(problem)) * (sizeof(T) * length(labels(problem)))
+    return peak_memory(problem.code, _size_dict(problem)) * (sizeof(T) * length(variables(problem)))
 end
 function estimate_memory(problem::GenericTensorNetwork, ::GraphPolynomial{:laurent}; T=Float64)
     # this is the upper bound
-    return peak_memory(problem.code, _size_dict(problem)) * (sizeof(T) * length(labels(problem)))
+    return peak_memory(problem.code, _size_dict(problem)) * (sizeof(T) * length(variables(problem)))
 end
 function estimate_memory(problem::GenericTensorNetwork, ::Union{SizeMax{K},SizeMin{K}}; T=Float64) where K
     return peak_memory(problem.code, _size_dict(problem)) * (sizeof(T) * _asint(K))
 end
 
 function _size_dict(problem)
-    lbs = labels(problem)
+    lbs = variables(problem)
     nf = num_flavors(problem)
     return Dict([lb=>nf for lb in lbs])
 end
```
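
Only the label accessor changes here; the public entry point is untouched. A usage sketch of `estimate_memory`, assuming the exported `SingleConfigMax` property and an illustrative problem:

```julia
using GenericTensorNetworks, Graphs

net = GenericTensorNetwork(IndependentSet(smallgraph(:petersen)))

# upper-bound estimate, in bytes, before committing to a contraction
nbytes = estimate_memory(net, SingleConfigMax(); T=Float64)
println("estimated peak memory: $nbytes bytes")
```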

src/networks.jl

Lines changed: 7 additions & 0 deletions
```diff
@@ -32,6 +32,10 @@ function GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices
     code = _optimize_code(rcode, uniformsize_fix(rcode, num_flavors(problem), fixedvertices), optimizer, MergeVectors())
     return GenericTensorNetwork(problem, code, Dict{labeltype(code),Int}(fixedvertices))
 end
+# a unified interface to optimize the contraction code
+_optimize_code(code, size_dict, optimizer::Nothing, simplifier) = code
+_optimize_code(code, size_dict, optimizer, simplifier) = optimize_code(code, size_dict, optimizer, simplifier)
+
 function Base.show(io::IO, tn::GenericTensorNetwork)
     println(io, "$(typeof(tn))")
     println(io, "- open vertices: $(getiyv(tn.code))")
@@ -184,6 +188,9 @@ function _pow(x::LaurentPolynomial{BS,X}, i) where {BS,X}
 end
 
 ####### Extra utilities #######
+"""Upload a tensor network to GPU, need `using CUDA` to activate this extension."""
+function togpu end
+
 """
     mis_compactify!(tropicaltensor; potential=nothing)
```
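
The `_optimize_code` pair dispatches on the optimizer: `nothing` returns the raw code untouched, anything else delegates to `optimize_code`. Assuming the constructor's `optimizer` keyword reaches this helper, as the first hunk suggests, the user-facing effect is:

```julia
using GenericTensorNetworks, Graphs

problem = IndependentSet(smallgraph(:petersen))

net_raw = GenericTensorNetwork(problem; optimizer=nothing)   # keep the raw einsum code
net_opt = GenericTensorNetwork(problem; optimizer=TreeSA())  # optimize the contraction order
```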

src/utils.jl

Lines changed: 0 additions & 19 deletions
This file was deleted.

test/configurations.jl

Lines changed: 3 additions & 6 deletions
```diff
@@ -1,7 +1,7 @@
 using GenericTensorNetworks, Test, Graphs
 using OMEinsum
 using TropicalNumbers: CountingTropicalF64
-using GenericTensorNetworks: _onehotv, _x, sampler_type, set_type, largest_solutions, largest2_solutions, solutions, all_solutions, largestk_solutions, AllConfigs, SingleConfig, max_size, max_size_count
+using GenericTensorNetworks: _onehotv, _x, sampler_type, set_type, largest_solutions, solutions, largestk_solutions, AllConfigs, SingleConfig, max_size, max_size_count
 using GenericTensorNetworks: graph_polynomial
 
 @testset "Config types" begin
@@ -48,14 +48,11 @@ end
     res5 = largest_solutions(code; all=false)[]
     @test res5.n == res0
     @test res5.c.data ∈ res2.c.data
-    res6 = largest2_solutions(code; all=true)[]
-    res6_ = largestk_solutions(code, 2)[]
-    res7 = all_solutions(code)[]
+    res6 = largestk_solutions(code, 2)[]
+    res7 = GenericTensorNetworks.solutions(code, Polynomial{Float64,:x}, all=true, usecuda=false, tree_storage=false)[]
     idp = graph_polynomial(code, Val(:finitefield))[]
     @test all(x->x ∈ res7.coeffs[end-1].data, res6.coeffs[1].data)
     @test all(x->x ∈ res7.coeffs[end].data, res6.coeffs[2].data)
-    @test all(x->x ∈ res7.coeffs[end-1].data, res6_.coeffs[1].data)
-    @test all(x->x ∈ res7.coeffs[end].data, res6_.coeffs[2].data)
     for (i, (s, c)) in enumerate(zip(res7.coeffs, idp.coeffs))
         @test length(s) == c
         @test all(x->count_ones(x)==(i-1), s.data)
```
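
For reference, the GPU path these tests exercise looks roughly like the sketch below. It needs a CUDA-capable device, and `togpu` only becomes available once `using CUDA` loads the package extension (see src/networks.jl above); the graph is illustrative:

```julia
using GenericTensorNetworks, Graphs
using CUDA  # activates the extension that defines `togpu`

net = GenericTensorNetwork(IndependentSet(smallgraph(:petersen)))

# one optimal configuration on the GPU; `all=true` with `usecuda=true`
# throws an ArgumentError, since ConfigEnumerator is CPU-only
res = GenericTensorNetworks.largest_solutions(net; all=false, usecuda=true)[]
@show res.n   # the optimal size
```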
