From c8459471e67c4163539808b8d432032bdde93838 Mon Sep 17 00:00:00 2001 From: Joey Date: Fri, 13 Sep 2024 10:19:49 -0400 Subject: [PATCH 01/13] Blah --- examples/test.jl | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 examples/test.jl diff --git a/examples/test.jl b/examples/test.jl new file mode 100644 index 00000000..6bf1bfcf --- /dev/null +++ b/examples/test.jl @@ -0,0 +1,21 @@ +using ITensorNetworks: IndsNetwork, siteinds, ttn +using ITensorNetworks.ModelHamiltonians: ising +using ITensors: Index, OpSum, terms, sites +using NamedGraphs.NamedGraphGenerators: named_grid +using NamedGraphs.GraphsExtensions: rem_vertex + +function filter_terms(H, verts) + H_new = OpSum() + for term in terms(H) + if isempty(filter(v -> v ∈ verts, sites(term))) + H_new += term + end + end + return H_new +end + +g = named_grid((8,1)) +s = siteinds("S=1/2", g) +H = ising(s) +H_mod = filter_terms(H, [(4,1)]) +ttno = ttn(H_mod, s) \ No newline at end of file From 6ff0cd572c947e9b1ed3642e690b43233277beb0 Mon Sep 17 00:00:00 2001 From: Joey Date: Thu, 17 Oct 2024 14:56:22 +0100 Subject: [PATCH 02/13] Bug fix in current ortho. Change test --- .../alternating_update/region_update.jl | 45 ++++++++----------- .../test_solvers/test_dmrg.jl | 12 ++--- 2 files changed, 25 insertions(+), 32 deletions(-) diff --git a/src/solvers/alternating_update/region_update.jl b/src/solvers/alternating_update/region_update.jl index b92adc8c..97241c20 100644 --- a/src/solvers/alternating_update/region_update.jl +++ b/src/solvers/alternating_update/region_update.jl @@ -7,36 +7,27 @@ function current_ortho(sweep_plan, which_region_update) if !isa(region, AbstractEdge) && length(region) == 1 return only(current_verts) end - if which_region_update == length(regions) - # look back by one should be sufficient, but may be brittle? 
- overlapping_vertex = only( - intersect(current_verts, support(regions[which_region_update - 1])) - ) - return overlapping_vertex - else - # look forward - other_regions = filter( - x -> !(issetequal(x, current_verts)), support.(regions[(which_region_update + 1):end]) + # look forward + other_regions = filter( + x -> !(issetequal(x, current_verts)), support.(regions[(which_region_update + 1):end]) + ) + # find the first region that has overlapping support with current region + ind = findfirst(x -> !isempty(intersect(support(x), support(region))), other_regions) + if isnothing(ind) + # look backward + other_regions = reverse( + filter( + x -> !(issetequal(x, current_verts)), support.(regions[1:(which_region_update - 1)]) + ), ) - # find the first region that has overlapping support with current region ind = findfirst(x -> !isempty(intersect(support(x), support(region))), other_regions) - if isnothing(ind) - # look backward - other_regions = reverse( - filter( - x -> !(issetequal(x, current_verts)), - support.(regions[1:(which_region_update - 1)]), - ), - ) - ind = findfirst(x -> !isempty(intersect(support(x), support(region))), other_regions) - end - @assert !isnothing(ind) - future_verts = union(support(other_regions[ind])) - # return ortho_ceter as the vertex in current region that does not overlap with following one - overlapping_vertex = intersect(current_verts, future_verts) - nonoverlapping_vertex = only(setdiff(current_verts, overlapping_vertex)) - return nonoverlapping_vertex end + @assert !isnothing(ind) + future_verts = union(support(other_regions[ind])) + # return ortho_ceter as the vertex in current region that does not overlap with following one + overlapping_vertex = intersect(current_verts, future_verts) + nonoverlapping_vertex = only(setdiff(current_verts, overlapping_vertex)) + return nonoverlapping_vertex end function region_update( diff --git a/test/test_treetensornetworks/test_solvers/test_dmrg.jl 
b/test/test_treetensornetworks/test_solvers/test_dmrg.jl index cf8a1caf..004ec561 100644 --- a/test/test_treetensornetworks/test_solvers/test_dmrg.jl +++ b/test/test_treetensornetworks/test_solvers/test_dmrg.jl @@ -1,7 +1,7 @@ @eval module $(gensym()) using DataGraphs: edge_data, vertex_data using Dictionaries: Dictionary -using Graphs: nv, vertices +using Graphs: nv, vertices, uniform_tree using ITensorMPS: ITensorMPS using ITensorNetworks: ITensorNetworks, @@ -19,6 +19,7 @@ using ITensorNetworks.ITensorsExtensions: replace_vertices using ITensorNetworks.ModelHamiltonians: ModelHamiltonians using ITensors: ITensors using KrylovKit: eigsolve +using NamedGraphs: NamedGraph, rename_vertices using NamedGraphs.NamedGraphGenerators: named_comb_tree using Observers: observer using StableRNGs: StableRNG @@ -313,11 +314,12 @@ end nsites = 2 nsweeps = 10 - c = named_comb_tree((3, 2)) - s = siteinds("S=1/2", c) - os = ModelHamiltonians.heisenberg(c) - H = ttn(os, s) rng = StableRNG(1234) + g = NamedGraph(uniform_tree(10)) + g = rename_vertices(v -> (v, 1), g) + s = siteinds("S=1/2", g) + os = ModelHamiltonians.heisenberg(g) + H = ttn(os, s) psi = random_ttn(rng, s; link_space=5) e, psi = dmrg(H, psi; nsweeps, maxdim, nsites) From d0967229e2c9c0d645ad110bb3944566b52d3385 Mon Sep 17 00:00:00 2001 From: Joey Date: Tue, 26 Nov 2024 13:50:41 -0500 Subject: [PATCH 03/13] Fix bug --- src/abstractitensornetwork.jl | 40 ++++++++++++------- .../abstracttreetensornetwork.jl | 12 ++++-- 2 files changed, 34 insertions(+), 18 deletions(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index fc0edce4..afdbbb41 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -19,6 +19,7 @@ using Graphs: using ITensors: ITensors, ITensor, + @Algorithm_str, addtags, combiner, commoninds, @@ -44,7 +45,7 @@ using MacroTools: @capture using NamedGraphs: NamedGraphs, NamedGraph, not_implemented, steiner_tree using NamedGraphs.GraphsExtensions: ⊔, 
directed_graph, incident_edges, rename_vertices, vertextype -using NDTensors: NDTensors, dim +using NDTensors: NDTensors, dim, Algorithm using SplitApplyCombine: flatten abstract type AbstractITensorNetwork{V} <: AbstractDataGraph{V,ITensor,ITensor} end @@ -585,17 +586,22 @@ function LinearAlgebra.factorize(tn::AbstractITensorNetwork, edge::Pair; kwargs. end # For ambiguity error; TODO: decide whether to use graph mutating methods when resulting graph is unchanged? -function orthogonalize_walk(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) - return orthogonalize_walk(tn, [edge]; kwargs...) +function gauge_walk( + alg::Algorithm, tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs... +) + return gauge_walk(tn, [edge]; kwargs...) end -function orthogonalize_walk(tn::AbstractITensorNetwork, edge::Pair; kwargs...) - return orthogonalize_walk(tn, edgetype(tn)(edge); kwargs...) +function gauge_walk(alg::Algorithm, tn::AbstractITensorNetwork, edge::Pair; kwargs...) + return gauge_walk(alg::Algorithm, tn, edgetype(tn)(edge); kwargs...) end # For ambiguity error; TODO: decide whether to use graph mutating methods when resulting graph is unchanged? -function orthogonalize_walk( - tn::AbstractITensorNetwork, edges::Vector{<:AbstractEdge}; kwargs... +function gauge_walk( + alg::Algorithm"orthogonalize", + tn::AbstractITensorNetwork, + edges::Vector{<:AbstractEdge}; + kwargs..., ) # tn = factorize(tn, edge; kwargs...) # # TODO: Implement as `only(common_neighbors(tn, src(edge), dst(edge)))` @@ -612,22 +618,28 @@ function orthogonalize_walk( return tn end -function orthogonalize_walk(tn::AbstractITensorNetwork, edges::Vector{<:Pair}; kwargs...) - return orthogonalize_walk(tn, edgetype(tn).(edges); kwargs...) +function gauge_walk( + alg::Algorithm, tn::AbstractITensorNetwork, edges::Vector{<:Pair}; kwargs... +) + return gauge_walk(alg, tn, edgetype(tn).(edges); kwargs...) 
end -# Orthogonalize an ITensorNetwork towards a region, treating +# Gauge a ITensorNetwork towards a region, treating # the network as a tree spanned by a spanning tree. -function tree_orthogonalize(ψ::AbstractITensorNetwork, region::Vector) +function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region::Vector) region_center = length(region) != 1 ? first(center(steiner_tree(ψ, region))) : only(region) path = post_order_dfs_edges(bfs_tree(ψ, region_center), region_center) path = filter(e -> !((src(e) ∈ region) && (dst(e) ∈ region)), path) - return orthogonalize_walk(ψ, path) + return gauge_walk(alg, ψ, path) +end + +function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region) + return tree_gauge(alg, ψ, [region]) end -function tree_orthogonalize(ψ::AbstractITensorNetwork, region) - return tree_orthogonalize(ψ, [region]) +function tree_orthogonalize(ψ::AbstractITensorNetwork, region; kwargs...) + return tree_gauge(Algorithm("orthogonalize"), ψ, region; kwargs...) end # TODO: decide whether to use graph mutating methods when resulting graph is unchanged? diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl index 8815b33f..f6c8f49f 100644 --- a/src/treetensornetworks/abstracttreetensornetwork.jl +++ b/src/treetensornetworks/abstracttreetensornetwork.jl @@ -8,7 +8,7 @@ using NamedGraphs.GraphsExtensions: a_star using NamedGraphs: namedgraph_a_star, steiner_tree using IsApprox: IsApprox, Approx -using ITensors: ITensors, @Algorithm_str, directsum, hasinds, permute, plev +using ITensors: ITensors, Algorithm, @Algorithm_str, directsum, hasinds, permute, plev using ITensorMPS: ITensorMPS, linkind, loginner, lognorm, orthogonalize using TupleTools: TupleTools @@ -35,19 +35,23 @@ function set_ortho_region(tn::AbstractTTN, new_region) return error("Not implemented") end -function ITensorMPS.orthogonalize(ttn::AbstractTTN, region::Vector; kwargs...) 
+function gauge(alg::Algorithm, ttn::AbstractTTN, region::Vector; kwargs...) issetequal(region, ortho_region(ttn)) && return ttn st = steiner_tree(ttn, union(region, ortho_region(ttn))) path = post_order_dfs_edges(st, first(region)) path = filter(e -> !((src(e) ∈ region) && (dst(e) ∈ region)), path) if !isempty(path) - ttn = typeof(ttn)(orthogonalize_walk(ITensorNetwork(ttn), path; kwargs...)) + ttn = typeof(ttn)(gauge_walk(alg, ITensorNetwork(ttn), path; kwargs...)) end return set_ortho_region(ttn, region) end +function gauge(alg::Algorithm, ttn::AbstractTTN, region; kwargs...) + return gauge(alg, ttn, [region]; kwargs...) +end + function ITensorMPS.orthogonalize(ttn::AbstractTTN, region; kwargs...) - return orthogonalize(ttn, [region]; kwargs...) + return gauge(Algorithm("orthogonalize"), ttn, region; kwargs...) end function tree_orthogonalize(ttn::AbstractTTN, args...; kwargs...) From 9d6c1bcb7aa8b36163df43e95b063dd9cd03761f Mon Sep 17 00:00:00 2001 From: Joey Date: Wed, 19 Mar 2025 12:07:10 -0700 Subject: [PATCH 04/13] File removed --- examples/test.jl | 21 --------------------- 1 file changed, 21 deletions(-) delete mode 100644 examples/test.jl diff --git a/examples/test.jl b/examples/test.jl deleted file mode 100644 index 6bf1bfcf..00000000 --- a/examples/test.jl +++ /dev/null @@ -1,21 +0,0 @@ -using ITensorNetworks: IndsNetwork, siteinds, ttn -using ITensorNetworks.ModelHamiltonians: ising -using ITensors: Index, OpSum, terms, sites -using NamedGraphs.NamedGraphGenerators: named_grid -using NamedGraphs.GraphsExtensions: rem_vertex - -function filter_terms(H, verts) - H_new = OpSum() - for term in terms(H) - if isempty(filter(v -> v ∈ verts, sites(term))) - H_new += term - end - end - return H_new -end - -g = named_grid((8,1)) -s = siteinds("S=1/2", g) -H = ising(s) -H_mod = filter_terms(H, [(4,1)]) -ttno = ttn(H_mod, s) \ No newline at end of file From 4aa0ca22ae9070ff7c2dae2616d6b4d8c5ad3b32 Mon Sep 17 00:00:00 2001 From: Joey Date: Sun, 23 Mar 2025 
18:03:26 -0700 Subject: [PATCH 05/13] Preliminary fitting based on forms --- examples/test_fitting.jl | 113 ++++++++++++++++++ src/abstractitensornetwork.jl | 34 ++++-- .../abstracttreetensornetwork.jl | 8 +- 3 files changed, 141 insertions(+), 14 deletions(-) create mode 100644 examples/test_fitting.jl diff --git a/examples/test_fitting.jl b/examples/test_fitting.jl new file mode 100644 index 00000000..5f13b232 --- /dev/null +++ b/examples/test_fitting.jl @@ -0,0 +1,113 @@ +using ITensorNetworks: ITensorNetworks, ITensorNetwork, random_tensornetwork, siteinds, QuadraticFormNetwork, prime, + BilinearFormNetwork, environment, subgraph, BeliefPropagationCache, partitioned_graph, + ket_network, tree_gauge, tree_orthogonalize, gauge_path, gauge_walk, update_factors, partitioned_tensornetwork, + update, update_factor, region_scalar +using NamedGraphs.NamedGraphGenerators: named_grid +using NamedGraphs: NamedEdge +using TensorOperations: TensorOperations +using ITensors: ITensors, ITensor, contract, dag +using Graphs: is_tree +using NamedGraphs.PartitionedGraphs: partitioned_graph, partitionedges, partitionvertex, partitionvertices +using NamedGraphs.GraphsExtensions: bfs_tree, leaf_vertices, post_order_dfs_edges, src, dst, vertices +using NDTensors: Algorithm +using Dictionaries +using LinearAlgebra: norm_sqr + +function update_sequence(tn::ITensorNetwork; nsites::Int64=1) + @assert is_tree(tn) + if nsites == 1 || nsites == 2 + es = post_order_dfs_edges(tn, first(leaf_vertices(tn))) + vs = [[src(e), dst(e)] for e in es] + nsites == 2 && return vs + nsites == 1 && return [[v] for v in unique(reduce(vcat, vs))] + else + error("Nsites > 2 sequences not currently supported") + end +end + +function default_fit_updater(xAy_bpc::BeliefPropagationCache, y::ITensorNetwork, prev_region::Vector, region::Vector) + path = gauge_path(y, prev_region, region) + y = gauge_walk(Algorithm("orthogonalize"), y, path) + verts = unique(vcat(src.(path), dst.(path))) + factors = [dag(y[v]) for 
v in verts] + xAy_bpc = update_factors(xAy_bpc, Dictionary([(v, "ket") for v in verts], factors)) + pe_path = partitionedges(partitioned_tensornetwork(xAy_bpc), [NamedEdge((src(e), "ket") => (dst(e), "ket")) for e in path]) + xAy_bpc = update(Algorithm("bp"), xAy_bpc, pe_path; message_update_function_kwargs = (; normalize = false)) + return xAy_bpc, y +end + +function default_fit_extracter(xAy_bpc::BeliefPropagationCache, region::Vector) + return environment(xAy_bpc, [(v, "ket") for v in region]) +end + +function default_fit_inserter(∂xAy_bpc_∂r::Vector{ITensor}, xAy_bpc::BeliefPropagationCache, y::ITensorNetwork, region::Vector; + normalize = true) + if length(region) == 1 + v = only(region) + yv = contract(∂xAy_bpc_∂r; sequence = "automatic") + if normalize + yv /= sqrt(norm_sqr(yv)) + end + y[v] = yv + xAy_bpc = update_factor(xAy_bpc, (v, "ket"), dag(yv)) + else + error("Updates with regions bigger than 1 not supported") + end + return y, xAy_bpc +end + +function default_costfunction(xAy::BeliefPropagationCache, region) + if length(region) == 1 + pv = only(partitionvertices(xAy, [(only(region), "ket")])) + c = region_scalar(xAy, pv) + return sqrt(c * conj(c)) + else + error("Cost Functions with regions bigger than 1 not supported") + end +end + + +function fit_ket(xAy::BilinearFormNetwork, partition_verts::Vector; + updater = default_fit_updater, + extracter = default_fit_extracter, + inserter = default_fit_inserter, + costfunction = default_costfunction, + normalize = true, + niters::Int64 = 10, nsites::Int64=1) + xAy_bpc = BeliefPropagationCache(xAy, partition_verts) + y = dag(ket_network(xAy)) + @assert is_tree(partitioned_graph(xAy_bpc)) + seq = update_sequence(y; nsites) + + for i in 1:niters + prev_region = collect(vertices(y)) + for region in seq + xAy_bpc, y = updater(xAy_bpc, y, prev_region, region) + ∂xAy_bpc_∂r = extracter(xAy_bpc, region) + y, xAy_bpc = inserter(∂xAy_bpc_∂r, xAy_bpc, y, region; normalize) + c = default_costfunction(xAy_bpc, region) + 
@show c + prev_region = region + end + end + +end + +ITensors.disable_warn_order() + +g = named_grid((3,4)) +s = siteinds("S=1/2", g) + +a = random_tensornetwork(g; link_space = 2) + +a1 = subgraph(a, [(1,1), (1,2), (1,3), (1,4)]) +a2 = subgraph(a, [(2,1), (2,2), (2,3), (2,4)]) +a3 = subgraph(a, [(3,1), (3,2), (3,3), (3,4)]) + +c = BilinearFormNetwork(a2, a1, a3; dual_site_index_map = dag, dual_link_index_map = dag) + +partition_verts = [[((1,1), "bra"), ((3,1), "ket"), ((2,1), "operator")], + [((1,2), "bra"), ((3,2), "ket"), ((2,2), "operator")], [((1,3), "bra"), ((3,3), "ket"), ((2,3), "operator")], [((1,4), "bra"), ((3,4), "ket"), ((2,4), "operator")]] + +fit_ket(c, partition_verts) + diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 73c915a1..754a481b 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -623,18 +623,38 @@ function gauge_walk( return gauge_walk(alg, tn, edgetype(tn).(edges); kwargs...) end +function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region) + return tree_gauge(alg, ψ, [region]) +end + +#Get the path that moves the gauge from a to b (minimum path between a and b) +#TODO: Moved to NamedGraphs +function gauge_path(g::AbstractGraph, region_a::Vector, region_b::Vector) + st = steiner_tree(g, union(region_a, region_b)) + path = post_order_dfs_edges(st, first(region_b)) + path = filter(e -> !((src(e) ∈ region_b) && (dst(e) ∈ region_b)), path) + return path +end + +# Gauge a ITensorNetwork from cur_region towards new_region, treating +# the network as a tree spanned by a spanning tree. +function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, cur_region::Vector, new_region::Vector; kwargs...) 
+ issetequal(new_region, cur_region) && return ψ + path = gauge_path(cur_region, new_region) + if !isempty(path) + ψ = typeof(ψ)(gauge_walk(alg, ψ, path; kwargs...)) + end + return ψ +end + # Gauge a ITensorNetwork towards a region, treating # the network as a tree spanned by a spanning tree. function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region::Vector) - region_center = - length(region) != 1 ? first(center(steiner_tree(ψ, region))) : only(region) - path = post_order_dfs_edges(bfs_tree(ψ, region_center), region_center) - path = filter(e -> !((src(e) ∈ region) && (dst(e) ∈ region)), path) - return gauge_walk(alg, ψ, path) + return tree_gauge(alg, ψ, collect(vertices(ψ)), region) end -function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, region) - return tree_gauge(alg, ψ, [region]) +function tree_orthogonalize(ψ::AbstractITensorNetwork, cur_region, new_region; kwargs...) + return tree_gauge(Algorithm("orthogonalize"), ψ, cur_region, new_region; kwargs...) end function tree_orthogonalize(ψ::AbstractITensorNetwork, region; kwargs...) diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl index f6c8f49f..c4d0b7d1 100644 --- a/src/treetensornetworks/abstracttreetensornetwork.jl +++ b/src/treetensornetworks/abstracttreetensornetwork.jl @@ -36,13 +36,7 @@ function set_ortho_region(tn::AbstractTTN, new_region) end function gauge(alg::Algorithm, ttn::AbstractTTN, region::Vector; kwargs...) - issetequal(region, ortho_region(ttn)) && return ttn - st = steiner_tree(ttn, union(region, ortho_region(ttn))) - path = post_order_dfs_edges(st, first(region)) - path = filter(e -> !((src(e) ∈ region) && (dst(e) ∈ region)), path) - if !isempty(path) - ttn = typeof(ttn)(gauge_walk(alg, ITensorNetwork(ttn), path; kwargs...)) - end + ttn = gauge(alg, ttn, ortho_region(ttn), region; kwargs...) 
return set_ortho_region(ttn, region) end From da2480dfe12a2b16cdf784cd07a445716936b467 Mon Sep 17 00:00:00 2001 From: Joey Date: Mon, 24 Mar 2025 16:51:18 -0400 Subject: [PATCH 06/13] Improved fitting routines --- src/ITensorNetworks.jl | 1 + src/abstractitensornetwork.jl | 4 +- .../solvers/fit_tensornetworks.jl | 83 +++++++++++-------- test/test_tensornetwork_fitting.jl | 39 +++++++++ 4 files changed, 89 insertions(+), 38 deletions(-) rename examples/test_fitting.jl => src/solvers/fit_tensornetworks.jl (51%) create mode 100644 test/test_tensornetwork_fitting.jl diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 57d13e88..44594dc0 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -58,6 +58,7 @@ include("solvers/tdvp.jl") include("solvers/dmrg.jl") include("solvers/dmrg_x.jl") include("solvers/contract.jl") +include("solvers/fit_tensornetworks.jl") include("solvers/linsolve.jl") include("solvers/sweep_plans/sweep_plans.jl") include("apply.jl") diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 754a481b..eb790e26 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -42,7 +42,7 @@ using ITensorMPS: ITensorMPS, add, linkdim, linkinds, siteinds using .ITensorsExtensions: ITensorsExtensions, indtype, promote_indtype using LinearAlgebra: LinearAlgebra, factorize using MacroTools: @capture -using NamedGraphs: NamedGraphs, NamedGraph, not_implemented, steiner_tree +using NamedGraphs: NamedGraphs, NamedGraph, vertextype, not_implemented, steiner_tree using NamedGraphs.GraphsExtensions: ⊔, directed_graph, incident_edges, rename_vertices, vertextype using NDTensors: NDTensors, dim, Algorithm @@ -630,6 +630,7 @@ end #Get the path that moves the gauge from a to b (minimum path between a and b) #TODO: Moved to NamedGraphs function gauge_path(g::AbstractGraph, region_a::Vector, region_b::Vector) + issetequal(region_a, region_b) && return edgetype(g)[] st = steiner_tree(g, union(region_a, 
region_b)) path = post_order_dfs_edges(st, first(region_b)) path = filter(e -> !((src(e) ∈ region_b) && (dst(e) ∈ region_b)), path) @@ -639,7 +640,6 @@ end # Gauge a ITensorNetwork from cur_region towards new_region, treating # the network as a tree spanned by a spanning tree. function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, cur_region::Vector, new_region::Vector; kwargs...) - issetequal(new_region, cur_region) && return ψ path = gauge_path(cur_region, new_region) if !isempty(path) ψ = typeof(ψ)(gauge_walk(alg, ψ, path; kwargs...)) diff --git a/examples/test_fitting.jl b/src/solvers/fit_tensornetworks.jl similarity index 51% rename from examples/test_fitting.jl rename to src/solvers/fit_tensornetworks.jl index 5f13b232..89f35f7b 100644 --- a/examples/test_fitting.jl +++ b/src/solvers/fit_tensornetworks.jl @@ -1,4 +1,4 @@ -using ITensorNetworks: ITensorNetworks, ITensorNetwork, random_tensornetwork, siteinds, QuadraticFormNetwork, prime, +using ITensorNetworks: AbstractITensorNetwork, AbstractBeliefPropagationCache, ITensorNetworks, ITensorNetwork, random_tensornetwork, siteinds, QuadraticFormNetwork, prime, BilinearFormNetwork, environment, subgraph, BeliefPropagationCache, partitioned_graph, ket_network, tree_gauge, tree_orthogonalize, gauge_path, gauge_walk, update_factors, partitioned_tensornetwork, update, update_factor, region_scalar @@ -13,21 +13,24 @@ using NDTensors: Algorithm using Dictionaries using LinearAlgebra: norm_sqr -function update_sequence(tn::ITensorNetwork; nsites::Int64=1) +default_fit_algorithm() = "orthogonalize" +default_fit_kwargs() = (; niters = 20, nsites = 1, tolerance = 1e-10, normalize = true) + +function default_update_sequence(tn::AbstractITensorNetwork; nsites::Int64=1) @assert is_tree(tn) if nsites == 1 || nsites == 2 es = post_order_dfs_edges(tn, first(leaf_vertices(tn))) vs = [[src(e), dst(e)] for e in es] - nsites == 2 && return vs - nsites == 1 && return [[v] for v in unique(reduce(vcat, vs))] + regions = nsites 
== 2 ? vs : [[v] for v in unique(reduce(vcat, vs))] + return vcat(regions, reverse(reverse.(regions))) else error("Nsites > 2 sequences not currently supported") end end -function default_fit_updater(xAy_bpc::BeliefPropagationCache, y::ITensorNetwork, prev_region::Vector, region::Vector) +function default_fit_updater(alg::Algorithm"orthogonalize", xAy_bpc::AbstractBeliefPropagationCache, y::AbstractITensorNetwork, prev_region::Vector, region::Vector) path = gauge_path(y, prev_region, region) - y = gauge_walk(Algorithm("orthogonalize"), y, path) + y = gauge_walk(alg, y, path) verts = unique(vcat(src.(path), dst.(path))) factors = [dag(y[v]) for v in verts] xAy_bpc = update_factors(xAy_bpc, Dictionary([(v, "ket") for v in verts], factors)) @@ -36,12 +39,11 @@ function default_fit_updater(xAy_bpc::BeliefPropagationCache, y::ITensorNetwork, return xAy_bpc, y end -function default_fit_extracter(xAy_bpc::BeliefPropagationCache, region::Vector) +function default_fit_extracter(xAy_bpc::AbstractBeliefPropagationCache, region::Vector) return environment(xAy_bpc, [(v, "ket") for v in region]) end -function default_fit_inserter(∂xAy_bpc_∂r::Vector{ITensor}, xAy_bpc::BeliefPropagationCache, y::ITensorNetwork, region::Vector; - normalize = true) +function default_fit_inserter(∂xAy_bpc_∂r::Vector{ITensor}, xAy_bpc::AbstractBeliefPropagationCache, y::AbstractITensorNetwork, region::Vector; normalize = true) if length(region) == 1 v = only(region) yv = contract(∂xAy_bpc_∂r; sequence = "automatic") @@ -56,7 +58,7 @@ function default_fit_inserter(∂xAy_bpc_∂r::Vector{ITensor}, xAy_bpc::BeliefP return y, xAy_bpc end -function default_costfunction(xAy::BeliefPropagationCache, region) +function default_costfunction(xAy::AbstractBeliefPropagationCache, region) if length(region) == 1 pv = only(partitionvertices(xAy, [(only(region), "ket")])) c = region_scalar(xAy, pv) @@ -66,48 +68,57 @@ function default_costfunction(xAy::BeliefPropagationCache, region) end end - -function 
fit_ket(xAy::BilinearFormNetwork, partition_verts::Vector; +#Optimize over y to maximize * / based on a designated partitioning of the bilinearform +function maximize_bilinearform( + alg::Algorithm"orthogonalize", + xAy::BilinearFormNetwork, + y::ITensorNetwork = dag(ket_network(xAy)), + partition_verts = group(v -> first(v), vertices(xAy)); updater = default_fit_updater, extracter = default_fit_extracter, inserter = default_fit_inserter, costfunction = default_costfunction, - normalize = true, - niters::Int64 = 10, nsites::Int64=1) + sequence = default_update_sequence, + normalize::Bool, + niters::Int64, + nsites::Int64, + tolerance::Float64) + xAy_bpc = BeliefPropagationCache(xAy, partition_verts) - y = dag(ket_network(xAy)) @assert is_tree(partitioned_graph(xAy_bpc)) - seq = update_sequence(y; nsites) + seq = sequence(y; nsites) + prev_region = collect(vertices(y)) + cs = zeros(ComplexF64, (niters, length(seq))) for i in 1:niters - prev_region = collect(vertices(y)) - for region in seq - xAy_bpc, y = updater(xAy_bpc, y, prev_region, region) + for (j, region) in enumerate(seq) + xAy_bpc, y = updater(alg, xAy_bpc, y, prev_region, region) ∂xAy_bpc_∂r = extracter(xAy_bpc, region) y, xAy_bpc = inserter(∂xAy_bpc_∂r, xAy_bpc, y, region; normalize) - c = default_costfunction(xAy_bpc, region) - @show c + cs[i, j] = costfunction(xAy_bpc, region) prev_region = region end + if i >= 2 && abs(sum(cs[i, :]) - sum(cs[i-1, :])) / length(seq) <= tolerance + return y + end end + return y end -ITensors.disable_warn_order() - -g = named_grid((3,4)) -s = siteinds("S=1/2", g) - -a = random_tensornetwork(g; link_space = 2) - -a1 = subgraph(a, [(1,1), (1,2), (1,3), (1,4)]) -a2 = subgraph(a, [(2,1), (2,2), (2,3), (2,4)]) -a3 = subgraph(a, [(3,1), (3,2), (3,3), (3,4)]) - -c = BilinearFormNetwork(a2, a1, a3; dual_site_index_map = dag, dual_link_index_map = dag) +function Base.truncate(x::AbstractITensorNetwork; maxdim::Int64, kwargs...) 
+ y = random_tensornetwork(scalartype(x), siteinds(x); link_space = maxdim) + xIy = BilinearFormNetwork(x, y) + return dag(maximize_bilinearform(xIy, y; kwargs...)) +end -partition_verts = [[((1,1), "bra"), ((3,1), "ket"), ((2,1), "operator")], - [((1,2), "bra"), ((3,2), "ket"), ((2,2), "operator")], [((1,3), "bra"), ((3,3), "ket"), ((2,3), "operator")], [((1,4), "bra"), ((3,4), "ket"), ((2,4), "operator")]] +function ITensors.apply(A::AbstractITensorNetwork, x::AbstractITensorNetwork; maxdim::Int64, kwargs...) + y = random_tensornetwork(scalartype(x), siteinds(x); link_space = maxdim) + xAy = BilinearFormNetwork(A, x, y) + return dag(maximize_bilinearform(xAy, y; kwargs...)) +end -fit_ket(c, partition_verts) +function maximize_bilinearform(xAy::BilinearFormNetwork, args...; fit_alg = default_fit_algorithm(), fit_kwargs = default_fit_kwargs()) + return maximize_bilinearform(Algorithm(fit_alg), xAy, args...; fit_kwargs...) +end diff --git a/test/test_tensornetwork_fitting.jl b/test/test_tensornetwork_fitting.jl new file mode 100644 index 00000000..f1433cae --- /dev/null +++ b/test/test_tensornetwork_fitting.jl @@ -0,0 +1,39 @@ +@eval module $(gensym()) +using ITensorNetworks: random_tensornetwork, siteinds, BilinearFormNetwork, subgraph, inner, truncate, maximize_bilinearform, union_all_inds +using Graphs: vertices +using NamedGraphs.NamedGraphGenerators: named_grid, named_comb_tree +using SplitApplyCombine: group +using StableRNGs: StableRNG +using TensorOperations: TensorOperations +using Test: @test, @test_broken, @testset +using ITensors: apply, dag, prime + + +@testset "TNS Fitting" for elt in ( + Float32, Float64, Complex{Float32}, Complex{Float64} + ) + begin + + rng = StableRNG(1234) + + g = named_comb_tree((3,2)) + s = siteinds("S=1/2", g) + + a = random_tensornetwork(rng, elt, s; link_space = 3) + b = truncate(a; maxdim = 3) + f = inner(a, b; alg = "exact") / sqrt(inner(a, a; alg = "exact") * inner(b, b; alg = "exact")) + @show f * conj(f) + #@test f * 
conj(f) ≈ 1.0 atol = 10*eps(real(elt)) + + H = random_tensornetwork(elt, union_all_inds(s, prime(s)); link_space = 3) + + Ha1 = apply(H, a; maxdim = 6) + Ha2 = apply(H, a; maxdim = 9) + + f = inner(Ha1, Ha2; alg = "exact") / sqrt(inner(Ha1, Ha1; alg = "exact") * inner(Ha2, Ha2; alg = "exact")) + @show f * conj(f) + + end +end + +end From 159966998230301d523f3ade013039a6f2db587f Mon Sep 17 00:00:00 2001 From: Joey Date: Tue, 25 Mar 2025 12:42:09 -0400 Subject: [PATCH 07/13] Improved solver interface --- src/ITensorNetworks.jl | 2 +- .../maximise_bilinearformnetwork.jl} | 91 ++++++++++--------- test/test_maximisebilinearform.jl | 50 ++++++++++ test/test_tensornetwork_fitting.jl | 39 -------- 4 files changed, 99 insertions(+), 83 deletions(-) rename src/{solvers/fit_tensornetworks.jl => formnetworks/maximise_bilinearformnetwork.jl} (53%) create mode 100644 test/test_maximisebilinearform.jl delete mode 100644 test/test_tensornetwork_fitting.jl diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 44594dc0..f9cdc7ed 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -27,6 +27,7 @@ include("partitioneditensornetwork.jl") include("edge_sequences.jl") include("formnetworks/abstractformnetwork.jl") include("formnetworks/bilinearformnetwork.jl") +include("formnetworks/maximise_bilinearformnetwork.jl") include("formnetworks/quadraticformnetwork.jl") include("caches/abstractbeliefpropagationcache.jl") include("caches/beliefpropagationcache.jl") @@ -58,7 +59,6 @@ include("solvers/tdvp.jl") include("solvers/dmrg.jl") include("solvers/dmrg_x.jl") include("solvers/contract.jl") -include("solvers/fit_tensornetworks.jl") include("solvers/linsolve.jl") include("solvers/sweep_plans/sweep_plans.jl") include("apply.jl") diff --git a/src/solvers/fit_tensornetworks.jl b/src/formnetworks/maximise_bilinearformnetwork.jl similarity index 53% rename from src/solvers/fit_tensornetworks.jl rename to src/formnetworks/maximise_bilinearformnetwork.jl index 
89f35f7b..a5eec607 100644 --- a/src/solvers/fit_tensornetworks.jl +++ b/src/formnetworks/maximise_bilinearformnetwork.jl @@ -13,13 +13,12 @@ using NDTensors: Algorithm using Dictionaries using LinearAlgebra: norm_sqr -default_fit_algorithm() = "orthogonalize" -default_fit_kwargs() = (; niters = 20, nsites = 1, tolerance = 1e-10, normalize = true) +default_solver_algorithm() = "orthogonalize" +default_solver_kwargs() = (; niters = 25, nsites = 1, tolerance = 1e-10, normalize = true, maxdim = nothing, cutoff = nothing) -function default_update_sequence(tn::AbstractITensorNetwork; nsites::Int64=1) - @assert is_tree(tn) +function blf_update_sequence(g::AbstractGraph; nsites::Int64=1) if nsites == 1 || nsites == 2 - es = post_order_dfs_edges(tn, first(leaf_vertices(tn))) + es = post_order_dfs_edges(g, first(leaf_vertices(g))) vs = [[src(e), dst(e)] for e in es] regions = nsites == 2 ? vs : [[v] for v in unique(reduce(vcat, vs))] return vcat(regions, reverse(reverse.(regions))) @@ -28,7 +27,7 @@ function default_update_sequence(tn::AbstractITensorNetwork; nsites::Int64=1) end end -function default_fit_updater(alg::Algorithm"orthogonalize", xAy_bpc::AbstractBeliefPropagationCache, y::AbstractITensorNetwork, prev_region::Vector, region::Vector) +function blf_updater(alg::Algorithm"orthogonalize", xAy_bpc::AbstractBeliefPropagationCache, y::AbstractITensorNetwork, prev_region::Vector, region::Vector) path = gauge_path(y, prev_region, region) y = gauge_walk(alg, y, path) verts = unique(vcat(src.(path), dst.(path))) @@ -39,33 +38,38 @@ function default_fit_updater(alg::Algorithm"orthogonalize", xAy_bpc::AbstractBel return xAy_bpc, y end -function default_fit_extracter(xAy_bpc::AbstractBeliefPropagationCache, region::Vector) +function blf_extracter(xAy_bpc::AbstractBeliefPropagationCache, region::Vector) return environment(xAy_bpc, [(v, "ket") for v in region]) end -function default_fit_inserter(∂xAy_bpc_∂r::Vector{ITensor}, xAy_bpc::AbstractBeliefPropagationCache, 
y::AbstractITensorNetwork, region::Vector; normalize = true) +function blf_inserter(∂xAy_bpc_∂r::Vector{ITensor}, xAy_bpc::AbstractBeliefPropagationCache, y::AbstractITensorNetwork, region::Vector; normalize, maxdim, cutoff) + yr = contract(∂xAy_bpc_∂r; sequence = "automatic") if length(region) == 1 v = only(region) - yv = contract(∂xAy_bpc_∂r; sequence = "automatic") if normalize - yv /= sqrt(norm_sqr(yv)) + yr /= sqrt(norm_sqr(yr)) end - y[v] = yv - xAy_bpc = update_factor(xAy_bpc, (v, "ket"), dag(yv)) + y[v] = yr + elseif length(region) == 2 + v1, v2 = first(region), last(region) + linds, cind = uniqueinds(y[v1], y[v2]), commonind(y[v1], y[v2]) + yv1, yv2 = factorize(yr, linds; ortho = "left", tags=tags(cind), cutoff, maxdim) + if normalize + yv2 /= sqrt(norm_sqr(yv2)) + end + y[v1], y[v2] = yv1, yv2 else - error("Updates with regions bigger than 1 not supported") + error("Updates with regions bigger than 2 not currently supported") end + vertices = [(v, "ket") for v in region] + factors = [y[v] for v in region] + xAy_bpc = update_factors(xAy_bpc, Dictionary(vertices, factors)) return y, xAy_bpc end -function default_costfunction(xAy::AbstractBeliefPropagationCache, region) - if length(region) == 1 - pv = only(partitionvertices(xAy, [(only(region), "ket")])) - c = region_scalar(xAy, pv) - return sqrt(c * conj(c)) - else - error("Cost Functions with regions bigger than 1 not supported") - end +function blf_costfunction(xAy::AbstractBeliefPropagationCache, region) + verts = [(v, "ket") for v in region] + return contract([environment(xAy, verts); factors(xAy, verts)]; sequence = "automatic")[] end #Optimize over y to maximize * / based on a designated partitioning of the bilinearform @@ -74,18 +78,19 @@ function maximize_bilinearform( xAy::BilinearFormNetwork, y::ITensorNetwork = dag(ket_network(xAy)), partition_verts = group(v -> first(v), vertices(xAy)); - updater = default_fit_updater, - extracter = default_fit_extracter, - inserter = default_fit_inserter, - 
costfunction = default_costfunction, - sequence = default_update_sequence, - normalize::Bool, - niters::Int64, - nsites::Int64, - tolerance::Float64) + updater = blf_updater, + extracter = blf_extracter, + inserter = blf_inserter, + costfunction = blf_costfunction, + sequence = blf_update_sequence, + normalize::Bool = true, + niters::Int64 = 25, + nsites::Int64 = 1, + tolerance = nothing, + maxdim = nothing, + cutoff = nothing) xAy_bpc = BeliefPropagationCache(xAy, partition_verts) - @assert is_tree(partitioned_graph(xAy_bpc)) seq = sequence(y; nsites) prev_region = collect(vertices(y)) @@ -94,31 +99,31 @@ function maximize_bilinearform( for (j, region) in enumerate(seq) xAy_bpc, y = updater(alg, xAy_bpc, y, prev_region, region) ∂xAy_bpc_∂r = extracter(xAy_bpc, region) - y, xAy_bpc = inserter(∂xAy_bpc_∂r, xAy_bpc, y, region; normalize) + y, xAy_bpc = inserter(∂xAy_bpc_∂r, xAy_bpc, y, region; normalize, maxdim, cutoff) cs[i, j] = costfunction(xAy_bpc, region) prev_region = region end - if i >= 2 && abs(sum(cs[i, :]) - sum(cs[i-1, :])) / length(seq) <= tolerance - return y + if i >= 2 && (abs(sum(cs[i, :]) - sum(cs[i-1, :]))) / length(seq) <= tolerance + return dag(y) end end - return y + return dag(y) end -function Base.truncate(x::AbstractITensorNetwork; maxdim::Int64, kwargs...) - y = random_tensornetwork(scalartype(x), siteinds(x); link_space = maxdim) +function Base.truncate(x::AbstractITensorNetwork; maxdim_init::Int64, kwargs...) + y = ITensorNetwork(v -> inds -> delta(inds), siteinds(x); link_space = maxdim_init) xIy = BilinearFormNetwork(x, y) - return dag(maximize_bilinearform(xIy, y; kwargs...)) + return maximize_bilinearform(xIy, y; kwargs...) end -function ITensors.apply(A::AbstractITensorNetwork, x::AbstractITensorNetwork; maxdim::Int64, kwargs...) - y = random_tensornetwork(scalartype(x), siteinds(x); link_space = maxdim) +function ITensors.apply(A::AbstractITensorNetwork, x::AbstractITensorNetwork; maxdim_init::Int64, kwargs...) 
+ y = ITensorNetwork(v -> inds -> delta(inds), siteinds(x); link_space = maxdim_init) xAy = BilinearFormNetwork(A, x, y) - return dag(maximize_bilinearform(xAy, y; kwargs...)) + return maximize_bilinearform(xAy, y; kwargs...) end -function maximize_bilinearform(xAy::BilinearFormNetwork, args...; fit_alg = default_fit_algorithm(), fit_kwargs = default_fit_kwargs()) - return maximize_bilinearform(Algorithm(fit_alg), xAy, args...; fit_kwargs...) +function maximize_bilinearform(xAy::BilinearFormNetwork, args...; alg = default_solver_algorithm(), solver_kwargs = default_solver_kwargs()) + return maximize_bilinearform(Algorithm(alg), xAy, args...; solver_kwargs...) end diff --git a/test/test_maximisebilinearform.jl b/test/test_maximisebilinearform.jl new file mode 100644 index 00000000..4730dd57 --- /dev/null +++ b/test/test_maximisebilinearform.jl @@ -0,0 +1,50 @@ +@eval module $(gensym()) +using ITensorNetworks: BilinearFormNetwork, ITensorNetwork, random_tensornetwork, siteinds, subgraph, ttn, inner, truncate, maximize_bilinearform, union_all_inds +using ITensorNetworks.ModelHamiltonians: heisenberg +using Graphs: vertices +using NamedGraphs.NamedGraphGenerators: named_grid, named_comb_tree +using SplitApplyCombine: group +using StableRNGs: StableRNG +using TensorOperations: TensorOperations +using Test: @test, @test_broken, @testset +using ITensors: apply, dag, delta, prime + + +@testset "Maximise BilinearForm" for elt in ( + Float32, Float64, Complex{Float32}, Complex{Float64} + ) + begin + + rng = StableRNG(1234) + + g = named_comb_tree((3,2)) + s = siteinds("S=1/2", g) + + #One-site truncation + a = random_tensornetwork(rng, elt, s; link_space = 3) + b = truncate(a; maxdim_init = 3) + f = inner(a, b; alg = "exact") / sqrt(inner(a, a; alg = "exact") * inner(b, b; alg = "exact")) + @test f * conj(f) ≈ 1.0 atol = 10*eps(real(elt)) + + #Two-site truncation + a = random_tensornetwork(rng, elt, s; link_space = 3) + b = truncate(a; maxdim_init = 1, solver_kwargs= (; 
maxdim = 3, cutoff = 1e-16, nsites = 2, tolerance = 1e-8)) + f = inner(a, b; alg = "exact") / sqrt(inner(a, a; alg = "exact") * inner(b, b; alg = "exact")) + @test f * conj(f) ≈ 1.0 atol = 10*eps(real(elt)) + + #One-site apply (no normalization) + a = random_tensornetwork(rng, elt, s; link_space = 2) + H = ITensorNetwork(ttn(heisenberg(g), s)) + Ha = apply(H, a; maxdim_init = 4, solver_kwargs = (; niters = 20, nsites = 1, tolerance = 1e-8, normalize = false)) + @test inner(Ha, a; alg = "exact") / inner(a, H, a; alg = "exact") ≈ 1.0 atol = 10*eps(real(elt)) + + #Two-site apply (no normalization) + a = random_tensornetwork(rng, elt, s; link_space = 2) + H = ITensorNetwork(ttn(heisenberg(g), s)) + Ha = apply(H, a; maxdim_init = 1, solver_kwargs= (; maxdim = 4, cutoff = 1e-16, nsites = 2, tolerance = 1e-8, normalize = false)) + @test inner(Ha, a; alg = "exact") / inner(a, H, a; alg = "exact") ≈ 1.0 atol = 10*eps(real(elt)) + + end +end + +end diff --git a/test/test_tensornetwork_fitting.jl b/test/test_tensornetwork_fitting.jl deleted file mode 100644 index f1433cae..00000000 --- a/test/test_tensornetwork_fitting.jl +++ /dev/null @@ -1,39 +0,0 @@ -@eval module $(gensym()) -using ITensorNetworks: random_tensornetwork, siteinds, BilinearFormNetwork, subgraph, inner, truncate, maximize_bilinearform, union_all_inds -using Graphs: vertices -using NamedGraphs.NamedGraphGenerators: named_grid, named_comb_tree -using SplitApplyCombine: group -using StableRNGs: StableRNG -using TensorOperations: TensorOperations -using Test: @test, @test_broken, @testset -using ITensors: apply, dag, prime - - -@testset "TNS Fitting" for elt in ( - Float32, Float64, Complex{Float32}, Complex{Float64} - ) - begin - - rng = StableRNG(1234) - - g = named_comb_tree((3,2)) - s = siteinds("S=1/2", g) - - a = random_tensornetwork(rng, elt, s; link_space = 3) - b = truncate(a; maxdim = 3) - f = inner(a, b; alg = "exact") / sqrt(inner(a, a; alg = "exact") * inner(b, b; alg = "exact")) - @show f * conj(f) 
- #@test f * conj(f) ≈ 1.0 atol = 10*eps(real(elt)) - - H = random_tensornetwork(elt, union_all_inds(s, prime(s)); link_space = 3) - - Ha1 = apply(H, a; maxdim = 6) - Ha2 = apply(H, a; maxdim = 9) - - f = inner(Ha1, Ha2; alg = "exact") / sqrt(inner(Ha1, Ha1; alg = "exact") * inner(Ha2, Ha2; alg = "exact")) - @show f * conj(f) - - end -end - -end From 3bef2866b57c583a2d5a976491377509a8fb1e33 Mon Sep 17 00:00:00 2001 From: Joey Date: Tue, 25 Mar 2025 12:58:19 -0400 Subject: [PATCH 08/13] Improved function returns --- src/ITensorNetworks.jl | 4 ++-- src/caches/abstractbeliefpropagationcache.jl | 3 --- src/formnetworks/abstractformnetwork.jl | 4 ++++ .../maximise_bilinearformnetwork.jl | 22 +++++++++++-------- 4 files changed, 19 insertions(+), 14 deletions(-) diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index f9cdc7ed..be6056ce 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -25,12 +25,12 @@ include("specialitensornetworks.jl") include("boundarymps.jl") include("partitioneditensornetwork.jl") include("edge_sequences.jl") +include("caches/abstractbeliefpropagationcache.jl") +include("caches/beliefpropagationcache.jl") include("formnetworks/abstractformnetwork.jl") include("formnetworks/bilinearformnetwork.jl") include("formnetworks/maximise_bilinearformnetwork.jl") include("formnetworks/quadraticformnetwork.jl") -include("caches/abstractbeliefpropagationcache.jl") -include("caches/beliefpropagationcache.jl") include("contraction_tree_to_graph.jl") include("gauging.jl") include("utils.jl") diff --git a/src/caches/abstractbeliefpropagationcache.jl b/src/caches/abstractbeliefpropagationcache.jl index 01c90c04..b5163d3b 100644 --- a/src/caches/abstractbeliefpropagationcache.jl +++ b/src/caches/abstractbeliefpropagationcache.jl @@ -40,9 +40,6 @@ default_messages(ptn::PartitionedGraph) = Dictionary() return default_bp_maxiter(undirected_graph(underlying_graph(g))) end default_partitioned_vertices(ψ::AbstractITensorNetwork) = group(v -> v, 
vertices(ψ)) -function default_partitioned_vertices(f::AbstractFormNetwork) - return group(v -> original_state_vertex(f, v), vertices(f)) -end partitioned_tensornetwork(bpc::AbstractBeliefPropagationCache) = not_implemented() messages(bpc::AbstractBeliefPropagationCache) = not_implemented() diff --git a/src/formnetworks/abstractformnetwork.jl b/src/formnetworks/abstractformnetwork.jl index 66776ec8..07ddffdf 100644 --- a/src/formnetworks/abstractformnetwork.jl +++ b/src/formnetworks/abstractformnetwork.jl @@ -80,3 +80,7 @@ operator_vertex(f::AbstractFormNetwork, v) = operator_vertex_map(f)(v) bra_vertex(f::AbstractFormNetwork, v) = bra_vertex_map(f)(v) ket_vertex(f::AbstractFormNetwork, v) = ket_vertex_map(f)(v) original_state_vertex(f::AbstractFormNetwork, v) = inv_vertex_map(f)(v) + +function default_partitioned_vertices(f::AbstractFormNetwork) + return group(v -> original_state_vertex(f, v), vertices(f)) +end diff --git a/src/formnetworks/maximise_bilinearformnetwork.jl b/src/formnetworks/maximise_bilinearformnetwork.jl index a5eec607..d72b4471 100644 --- a/src/formnetworks/maximise_bilinearformnetwork.jl +++ b/src/formnetworks/maximise_bilinearformnetwork.jl @@ -1,10 +1,5 @@ -using ITensorNetworks: AbstractITensorNetwork, AbstractBeliefPropagationCache, ITensorNetworks, ITensorNetwork, random_tensornetwork, siteinds, QuadraticFormNetwork, prime, - BilinearFormNetwork, environment, subgraph, BeliefPropagationCache, partitioned_graph, - ket_network, tree_gauge, tree_orthogonalize, gauge_path, gauge_walk, update_factors, partitioned_tensornetwork, - update, update_factor, region_scalar using NamedGraphs.NamedGraphGenerators: named_grid using NamedGraphs: NamedEdge -using TensorOperations: TensorOperations using ITensors: ITensors, ITensor, contract, dag using Graphs: is_tree using NamedGraphs.PartitionedGraphs: partitioned_graph, partitionedges, partitionvertex, partitionvertices @@ -16,7 +11,9 @@ using LinearAlgebra: norm_sqr default_solver_algorithm() = 
"orthogonalize" default_solver_kwargs() = (; niters = 25, nsites = 1, tolerance = 1e-10, normalize = true, maxdim = nothing, cutoff = nothing) +#TODO: Come up with reasonable sequence for non-trees function blf_update_sequence(g::AbstractGraph; nsites::Int64=1) + @assert is_tree(g) if nsites == 1 || nsites == 2 es = post_order_dfs_edges(g, first(leaf_vertices(g))) vs = [[src(e), dst(e)] for e in es] @@ -27,6 +24,7 @@ function blf_update_sequence(g::AbstractGraph; nsites::Int64=1) end end +#TODO: biorthogonal updater and gauging function blf_updater(alg::Algorithm"orthogonalize", xAy_bpc::AbstractBeliefPropagationCache, y::AbstractITensorNetwork, prev_region::Vector, region::Vector) path = gauge_path(y, prev_region, region) y = gauge_walk(alg, y, path) @@ -73,6 +71,7 @@ function blf_costfunction(xAy::AbstractBeliefPropagationCache, region) end #Optimize over y to maximize * / based on a designated partitioning of the bilinearform +#For now, y should be a tree tensor network and should be a tree under the partitioning function maximize_bilinearform( alg::Algorithm"orthogonalize", xAy::BilinearFormNetwork, @@ -90,7 +89,10 @@ function maximize_bilinearform( maxdim = nothing, cutoff = nothing) + #These assertions can easily be lessened in the future + @assert is_tree(y) xAy_bpc = BeliefPropagationCache(xAy, partition_verts) + @assert is_tree(partitioned_graph(xAy_bpc)) seq = sequence(y; nsites) prev_region = collect(vertices(y)) @@ -104,23 +106,25 @@ function maximize_bilinearform( prev_region = region end if i >= 2 && (abs(sum(cs[i, :]) - sum(cs[i-1, :]))) / length(seq) <= tolerance - return dag(y) + return xAy_bpc, dag(y) end end - return dag(y) + return xAy_bpc, dag(y) end function Base.truncate(x::AbstractITensorNetwork; maxdim_init::Int64, kwargs...) y = ITensorNetwork(v -> inds -> delta(inds), siteinds(x); link_space = maxdim_init) xIy = BilinearFormNetwork(x, y) - return maximize_bilinearform(xIy, y; kwargs...) 
+ xIy_bpc, y_out = maximize_bilinearform(xIy, y; kwargs...) + return y_out end function ITensors.apply(A::AbstractITensorNetwork, x::AbstractITensorNetwork; maxdim_init::Int64, kwargs...) y = ITensorNetwork(v -> inds -> delta(inds), siteinds(x); link_space = maxdim_init) xAy = BilinearFormNetwork(A, x, y) - return maximize_bilinearform(xAy, y; kwargs...) + xAy_bpc, y_out = maximize_bilinearform(xAy, y; kwargs...) + return y_out end function maximize_bilinearform(xAy::BilinearFormNetwork, args...; alg = default_solver_algorithm(), solver_kwargs = default_solver_kwargs()) From 6c62d6e7617e91ac3a32df3f1d40061069550270 Mon Sep 17 00:00:00 2001 From: Joey Date: Tue, 25 Mar 2025 15:33:25 -0400 Subject: [PATCH 09/13] Bug fix --- src/abstractitensornetwork.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index eb790e26..bf176127 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -640,7 +640,7 @@ end # Gauge a ITensorNetwork from cur_region towards new_region, treating # the network as a tree spanned by a spanning tree. function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, cur_region::Vector, new_region::Vector; kwargs...) 
- path = gauge_path(cur_region, new_region) + path = gauge_path(ψ, cur_region, new_region) if !isempty(path) ψ = typeof(ψ)(gauge_walk(alg, ψ, path; kwargs...)) end From 2ee33751f1a783b6e5069d90e8f404ade8d47b7d Mon Sep 17 00:00:00 2001 From: Joey Date: Tue, 25 Mar 2025 17:56:14 -0400 Subject: [PATCH 10/13] Bug Fix --- src/abstractitensornetwork.jl | 2 +- src/treetensornetworks/abstracttreetensornetwork.jl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index bf176127..882a7347 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -642,7 +642,7 @@ end function tree_gauge(alg::Algorithm, ψ::AbstractITensorNetwork, cur_region::Vector, new_region::Vector; kwargs...) path = gauge_path(ψ, cur_region, new_region) if !isempty(path) - ψ = typeof(ψ)(gauge_walk(alg, ψ, path; kwargs...)) + ψ = gauge_walk(alg, ψ, path; kwargs...) end return ψ end diff --git a/src/treetensornetworks/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl index c4d0b7d1..f2f82ef1 100644 --- a/src/treetensornetworks/abstracttreetensornetwork.jl +++ b/src/treetensornetworks/abstracttreetensornetwork.jl @@ -36,7 +36,7 @@ function set_ortho_region(tn::AbstractTTN, new_region) end function gauge(alg::Algorithm, ttn::AbstractTTN, region::Vector; kwargs...) - ttn = gauge(alg, ttn, ortho_region(ttn), region; kwargs...) + ttn = tree_gauge(alg, ttn, collect(ortho_region(ttn)), region; kwargs...) 
return set_ortho_region(ttn, region) end From 134aa0cc0f978eb154a2354eb6e65f66a45c95cc Mon Sep 17 00:00:00 2001 From: Joey Date: Tue, 1 Apr 2025 10:01:43 -0400 Subject: [PATCH 11/13] Remove duplicate lines --- src/ITensorNetworks.jl | 2 -- src/abstractitensornetwork.jl | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index a85b2704..ea86ed13 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -28,8 +28,6 @@ include("partitioneditensornetwork.jl") include("edge_sequences.jl") include("caches/abstractbeliefpropagationcache.jl") include("caches/beliefpropagationcache.jl") -include("caches/abstractbeliefpropagationcache.jl") -include("caches/beliefpropagationcache.jl") include("formnetworks/abstractformnetwork.jl") include("formnetworks/bilinearformnetwork.jl") include("formnetworks/maximise_bilinearformnetwork.jl") diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index b288568a..6c80e36e 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -42,7 +42,7 @@ using ITensorMPS: ITensorMPS, add, linkdim, linkinds, siteinds using .ITensorsExtensions: ITensorsExtensions, indtype, promote_indtype using LinearAlgebra: LinearAlgebra, factorize using MacroTools: @capture -using NamedGraphs: NamedGraphs, NamedGraph, vertextype, not_implemented, steiner_tree +using NamedGraphs: NamedGraphs, NamedGraph, not_implemented, steiner_tree using NamedGraphs.GraphsExtensions: ⊔, directed_graph, incident_edges, rename_vertices, vertextype using NDTensors: NDTensors, dim, Algorithm From e29b922427a4689e6164b81e76ea4f8a4dfa0bf0 Mon Sep 17 00:00:00 2001 From: Joey Date: Thu, 3 Apr 2025 10:04:16 -0400 Subject: [PATCH 12/13] Move towards network solver --- .../maximise_bilinearformnetwork_V2.jl | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 src/formnetworks/maximise_bilinearformnetwork_V2.jl diff --git 
a/src/formnetworks/maximise_bilinearformnetwork_V2.jl b/src/formnetworks/maximise_bilinearformnetwork_V2.jl new file mode 100644 index 00000000..8075ea49 --- /dev/null +++ b/src/formnetworks/maximise_bilinearformnetwork_V2.jl @@ -0,0 +1,25 @@ +@kwdef mutable struct OrthogonalLinearFormProblem{State, LinearFormNetwork} + state::State + linearformnetwork::LinearFormNetwork + squared_scalar::Number = 0 +end + +squared_scalar(O::OrthogonalLinearFormProblem) = O.squared_scalar +state(O::OrthogonalLinearFormProblem) = O.state +linearformnetwork(O::OrthogonalLinearFormProblem) = O.linearformnetwork + +function set(O::OrthogonalLinearFormProblem; state = state(O), linearformnetwork = linearformnetwork(O), squared_scalar = squared_scalar(O)) + return OrthogonalLinearFormProblem(; state, linearformnetwork, squared_scalar) +end + +function updater!(O::OrthogonalLinearFormProblem, local_tensor, region; outputlevel, kws...) + O.squared_scalar, local_tensor = linearform_updater + +function maximize_linearformnetwork_sq(linearformnetwork, init_state; nsweeps, nsites=1, outputlevel = 0, update_kwargs = (;), inserter_kwargs = (;), kws...) + init_prob = OrthogonalLinearFormProblem(; state = copy(init_state), linearformnetwork = linearformnetwork) + common_sweep_kwargs = (; nsites, outputlevel, updater_kwargs, inserter_kwargs) + kwargs_array = [(; common_sweep_kwargs..., sweep = s) for s in 1:nsweeps] + sweep_iter = sweep_iterator(init_prob, kwargs_array) + converged_prob = alternating_update(sweep_iter; outputlevel, kws...) 
+ return squared_scalar(converged_prob), state(converged_prob) +end \ No newline at end of file From db42eb8d51374684daecd50679c2094f633163d8 Mon Sep 17 00:00:00 2001 From: Joey Date: Thu, 3 Apr 2025 17:22:57 -0400 Subject: [PATCH 13/13] Better naming --- .../maximise_bilinearformnetwork_V2.jl | 22 +++++++++---------- 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/formnetworks/maximise_bilinearformnetwork_V2.jl b/src/formnetworks/maximise_bilinearformnetwork_V2.jl index 8075ea49..5bef1e34 100644 --- a/src/formnetworks/maximise_bilinearformnetwork_V2.jl +++ b/src/formnetworks/maximise_bilinearformnetwork_V2.jl @@ -1,22 +1,20 @@ -@kwdef mutable struct OrthogonalLinearFormProblem{State, OverlapNetwork} +@kwdef mutable struct FittingProblem{State, OverlapNetwork} state::State - linearformnetwork::LinearFormNetwork + overlapnetwork::OverlapNetwork squared_scalar::Number = 0 end -squared_scalar(O::OrthogonalLinearFormProblem) = O.squared_scalar -state(O::OrthogonalLinearFormProblem) = O.state -linearformnetwork(O::OrthogonalLinearFormProblem) = O.linearformnetwork +squared_scalar(F::FittingProblem) = F.squared_scalar +state(F::FittingProblem) = F.state +overlapnetwork(F::FittingProblem) = F.overlapnetwork -function set(O::OrthogonalLinearFormProblem; state = state(O), linearformnetwork = linearformnetwork(O), squared_scalar = squared_scalar(O)) - return OrthogonalLinearFormProblem(; state, linearformnetwork, squared_scalar) +function set(F::FittingProblem; state = state(F), overlapnetwork = overlapnetwork(F), squared_scalar = squared_scalar(F)) + return FittingProblem(; state, overlapnetwork, squared_scalar) end -function updater!(O::OrthogonalLinearFormProblem, local_tensor, region; outputlevel, kws...) - O.squared_scalar, local_tensor = linearform_updater - -function maximize_linearformnetwork_sq(linearformnetwork, init_state; nsweeps, nsites=1, outputlevel = 0, update_kwargs = (;), inserter_kwargs = (;), kws...)
- init_prob = OrthogonalLinearFormProblem(; state = copy(init_state), linearformnetwork = linearformnetwork) +function fit_tensornetwork(tn::AbstractITensorNetwork, init_state::AbstractITensorNetwork, vertex_partitioning) + overlap_bpc = BeliefPropagationCache(inner_network(tn, init_state), vertex_partitioning) + init_prob = FittingProblem(; state = copy(init_state), overlapnetwork = overlap_bpc) common_sweep_kwargs = (; nsites, outputlevel, updater_kwargs, inserter_kwargs) kwargs_array = [(; common_sweep_kwargs..., sweep = s) for s in 1:nsweeps] sweep_iter = sweep_iterator(init_prob, kwargs_array)