|
1 | 1 | using Graphs: vertices |
2 | | -using ITensors: ITensor, contract, inds, dim |
3 | | -using ITensors.ContractionSequenceOptimization: deepmap |
| 2 | +using ITensors: ITensor |
4 | 3 | using ITensors.NDTensors: Algorithm, @Algorithm_str |
5 | 4 | using NamedGraphs.Keys: Key |
6 | 5 | using NamedGraphs.OrdinalIndexing: th |
7 | | -using TensorOperations: optimaltree |
8 | 6 |
|
"""
    contraction_sequence(tn::Vector{ITensor}; alg="optimal", kwargs...)

Compute a contraction sequence for the tensor list `tn`, selecting the
backend by the algorithm name `alg` (wrapped in an `Algorithm` object for
dispatch). Remaining keyword arguments are forwarded to the chosen method.
"""
function contraction_sequence(tn::Vector{ITensor}; alg="optimal", kwargs...)
    algorithm = Algorithm(alg)
    return contraction_sequence(algorithm, tn; kwargs...)
end
12 | 10 |
|
"""
    deepmap(f, tree; filter=(x -> x isa AbstractArray))

Apply `f` to every leaf of the nested structure `tree`, preserving the
structure. A node is recursed into (rather than handed to `f`) exactly when
`filter` returns `true` for it; by default any `AbstractArray` is treated as
an interior node and everything else as a leaf.
"""
function deepmap(f, tree; filter=(x -> x isa AbstractArray))
    # Guard-clause form: leaves are transformed directly, interior nodes
    # are mapped element-wise with the same `filter` predicate.
    if !filter(tree)
        return f(tree)
    end
    return map(subtree -> deepmap(f, subtree; filter=filter), tree)
end
| 14 | + |
"""
    contraction_sequence(tn::AbstractITensorNetwork; kwargs...)

Compute a contraction sequence for the tensor network `tn`. The network's
tensors are gathered in ordinal vertex order, a sequence over linear indices
is computed, and each linear index is then translated back to a vertex `Key`.
"""
function contraction_sequence(tn::AbstractITensorNetwork; kwargs...)
    # TODO: Use `token_vertex` and/or `token_vertices` here.
    tensors = [tn[v] for v in (1:nv(tn))th]
    linear_sequence = contraction_sequence(tensors; kwargs...)
    # Translate linear positions back to vertex keys.
    # TODO: Use `Functors.fmap` or `StructWalk`?
    return deepmap(i -> Key(vertices(tn)[i * th]), linear_sequence)
end
20 | | - |
21 | | -function contraction_sequence(::Algorithm"optimal", tn::Vector{ITensor}) |
22 | | - return optimal_contraction_sequence(tn) |
23 | | -end |
24 | | - |
25 | | -function ITensors.optimal_contraction_sequence(tensors::Vector{<:ITensor}) |
26 | | - network = collect.(inds.(tensors)) |
27 | | - inds_to_dims = Dict(i => dim(i) for i in unique(reduce(vcat, network))) |
28 | | - seq, _ = optimaltree(network, inds_to_dims) |
29 | | - return seq |
30 | | -end |
0 commit comments