Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "ITensorNetworks"
uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7"
authors = ["Matthew Fishman <[email protected]>, Joseph Tindall <[email protected]> and contributors"]
version = "0.11.27"
version = "0.12.0"

[deps]
AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
Expand Down Expand Up @@ -41,13 +41,15 @@ EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
GraphsFlows = "06909019-6f44-4949-96fc-b9d9aaa02889"
OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715"
Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"

[extensions]
ITensorNetworksAdaptExt = "Adapt"
ITensorNetworksEinExprsExt = "EinExprs"
ITensorNetworksGraphsFlowsExt = "GraphsFlows"
ITensorNetworksOMEinsumContractionOrdersExt = "OMEinsumContractionOrders"
ITensorNetworksObserversExt = "Observers"
ITensorNetworksTensorOperationsExt = "TensorOperations"

[compat]
AbstractTrees = "0.4.4"
Expand Down Expand Up @@ -80,6 +82,7 @@ SplitApplyCombine = "1.2"
StaticArrays = "1.5.12"
StructWalk = "0.2"
Suppressor = "0.2"
TensorOperations = "5.1.4"
TimerOutputs = "0.5.22"
TupleTools = "1.4"
julia = "1.10"
Expand All @@ -90,6 +93,7 @@ EinExprs = "b1794770-133b-4de1-afb4-526377e9f4c5"
GraphsFlows = "06909019-6f44-4949-96fc-b9d9aaa02889"
OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715"
Observers = "338f10d5-c7f1-4033-a7d1-f9dec39bcaa0"
TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
Expand Down
13 changes: 9 additions & 4 deletions ext/ITensorNetworksEinExprsExt/ITensorNetworksEinExprsExt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,15 @@ module ITensorNetworksEinExprsExt

using ITensors: Index, ITensor, @Algorithm_str, inds, noncommoninds
using ITensorNetworks:
ITensorNetworks, ITensorNetwork, vertextype, vertex_data, contraction_sequence
ITensorNetworks,
ITensorList,
ITensorNetwork,
vertextype,
vertex_data,
contraction_sequence
using EinExprs: EinExprs, EinExpr, einexpr, SizedEinExpr

function to_einexpr(ts::Vector{ITensor})
function to_einexpr(ts::ITensorList)
IndexType = Any

tensor_exprs = EinExpr{IndexType}[]
Expand All @@ -21,7 +26,7 @@ function to_einexpr(ts::Vector{ITensor})
return SizedEinExpr(sum(tensor_exprs; skip=externalinds_tn), inds_dims)
end

function tensor_inds_to_vertex(ts::Vector{ITensor})
function tensor_inds_to_vertex(ts::ITensorList)
IndexType = Any
VertexType = Int

Expand All @@ -36,7 +41,7 @@ function tensor_inds_to_vertex(ts::Vector{ITensor})
end

function ITensorNetworks.contraction_sequence(
::Algorithm"einexpr", tn::Vector{ITensor}; optimizer=EinExprs.Exhaustive()
::Algorithm"einexpr", tn::ITensorList; optimizer=EinExprs.Exhaustive()
)
expr = to_einexpr(tn)
path = einexpr(optimizer, expr)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
module ITensorNetworksOMEinsumContractionOrdersExt
using DocStringExtensions: TYPEDSIGNATURES
using ITensorNetworks: ITensorNetworks
using ITensorNetworks: ITensorNetworks, ITensorList
using ITensors: ITensors, Index, ITensor, inds
using NDTensors: dim
using NDTensors.AlgorithmSelection: @Algorithm_str
Expand All @@ -9,8 +9,6 @@ using OMEinsumContractionOrders: OMEinsumContractionOrders
# OMEinsumContractionOrders wrapper for ITensors
# Slicing is not supported, because it might require extra work to slice an `ITensor` correctly.

const ITensorList = Union{Vector{ITensor},Tuple{Vararg{ITensor}}}

# infer the output tensor labels
# TODO: Use `symdiff` instead.
function infer_output(inputs::AbstractVector{<:AbstractVector{<:Index}})
Expand Down Expand Up @@ -126,7 +124,9 @@ Optimize the einsum contraction pattern using the simulated annealing on tensor
### References
* [Recursive Multi-Tensor Contraction for XEB Verification of Quantum Circuits](https://arxiv.org/abs/2108.05665)
"""
function ITensorNetworks.contraction_sequence(::Algorithm"tree_sa", tn; kwargs...)
function ITensorNetworks.contraction_sequence(
::Algorithm"tree_sa", tn::ITensorList; kwargs...
)
return optimize_contraction_sequence(
tn; optimizer=OMEinsumContractionOrders.TreeSA(; kwargs...)
)
Expand All @@ -153,7 +153,9 @@ Then finds the contraction order inside each group with the greedy search algori
### References
* [Hyper-optimized tensor network contraction](https://arxiv.org/abs/2002.01935)
"""
function ITensorNetworks.contraction_sequence(::Algorithm"sa_bipartite", tn; kwargs...)
function ITensorNetworks.contraction_sequence(
::Algorithm"sa_bipartite", tn::ITensorList; kwargs...
)
return optimize_contraction_sequence(
tn; optimizer=OMEinsumContractionOrders.SABipartite(; kwargs...)
)
Expand All @@ -177,7 +179,9 @@ Then finds the contraction order inside each group with the greedy search algori
* [Hyper-optimized tensor network contraction](https://arxiv.org/abs/2002.01935)
* [Simulating the Sycamore quantum supremacy circuits](https://arxiv.org/abs/2103.03074)
"""
function ITensorNetworks.contraction_sequence(::Algorithm"kahypar_bipartite", tn; kwargs...)
function ITensorNetworks.contraction_sequence(
::Algorithm"kahypar_bipartite", tn::ITensorList; kwargs...
)
return optimize_contraction_sequence(
tn; optimizer=OMEinsumContractionOrders.KaHyParBipartite(; kwargs...)
)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
module ITensorNetworksTensorOperationsExt

using ITensors: ITensors, ITensor, dim, inds
using ITensorNetworks: ITensorNetworks, ITensorList
using NDTensors.AlgorithmSelection: @Algorithm_str
using TensorOperations: TensorOperations, optimaltree

# Backend for `contraction_sequence(...; alg="optimal")`: delegate the search
# for a contraction tree to `TensorOperations.optimaltree`, feeding it each
# tensor's index list together with the dimension of every distinct index.
function ITensorNetworks.contraction_sequence(::Algorithm"optimal", tn::ITensorList)
  # One index list per tensor; `map` preserves the container kind of `tn`.
  index_lists = map(t -> collect(inds(t)), tn)
  # Dimension lookup for every distinct index appearing in the network.
  dimension_of = Dict(index => dim(index) for index in unique(Iterators.flatten(index_lists)))
  sequence, _ = optimaltree(index_lists, dimension_of)
  return sequence
end

end
1 change: 0 additions & 1 deletion src/contract.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using ITensors: ITensor, scalar
using ITensors.ContractionSequenceOptimization: deepmap
using ITensors.NDTensors: NDTensors, Algorithm, @Algorithm_str, contract
using LinearAlgebra: normalize!
using NamedGraphs: NamedGraphs
Expand Down
24 changes: 17 additions & 7 deletions src/contraction_sequences.jl
Original file line number Diff line number Diff line change
@@ -1,22 +1,32 @@
using Graphs: vertices
using ITensors: ITensor, contract
using ITensors.ContractionSequenceOptimization: deepmap, optimal_contraction_sequence
using ITensors: ITensor
using ITensors.NDTensors: Algorithm, @Algorithm_str
using NamedGraphs.Keys: Key
using NamedGraphs.OrdinalIndexing: th

function contraction_sequence(tn::Vector{ITensor}; alg="optimal", kwargs...)
const ITensorList = Union{Vector{ITensor},Tuple{Vararg{ITensor}}}

# Find a contraction sequence for a list (or tuple) of ITensors.
# `alg` names the backend (default "optimal"); it is wrapped in an
# `Algorithm` trait object so that loaded extension packages
# (TensorOperations.jl, OMEinsumContractionOrders.jl, EinExprs.jl)
# can provide the actual implementation via dispatch.
function contraction_sequence(tn::ITensorList; alg="optimal", kwargs...)
return contraction_sequence(Algorithm(alg), tn; kwargs...)
end

# Fallback hit when no backend package implementing `alg` has been loaded:
# raise an informative `ArgumentError` directing the user to load one of the
# extension backends instead of failing with an opaque `MethodError`.
function contraction_sequence(alg::Algorithm, tn::ITensorList)
return throw(
ArgumentError(
"Algorithm $alg isn't defined for contraction sequence finding. Try loading a backend package like
TensorOperations.jl or OMEinsumContractionOrders.jl.",
),
)
end

# Recursively apply `f` to the leaves of the nested structure `tree`.
# `filter(tree)` decides whether `tree` is a branch to descend into
# (by default, any `AbstractArray`); anything else is treated as a leaf
# and passed straight to `f`. The nesting shape is preserved.
function deepmap(f, tree; filter=(x -> x isa AbstractArray))
  if filter(tree)
    return map(subtree -> deepmap(f, subtree; filter=filter), tree)
  end
  return f(tree)
end

# Find a contraction sequence for an `AbstractITensorNetwork` by flattening
# it to a tensor list, sequencing that list, then translating the linear
# positions in the result back to the network's vertex keys.
function contraction_sequence(tn::AbstractITensorNetwork; kwargs...)
# TODO: Use `token_vertex` and/or `token_vertices` here.
# Collect the tensors in a fixed vertex order; `th` is NamedGraphs'
# ordinal-indexing suffix — presumably `(1:nv(tn))th` enumerates vertices
# in their ordinal order (NOTE(review): confirm against NamedGraphs docs).
ts = map(v -> tn[v], (1:nv(tn))th)
seq_linear_index = contraction_sequence(ts; kwargs...)
# TODO: Use `Functors.fmap` or `StructWalk`?
# Map each linear index `n` in the sequence tree back to the vertex at the
# n-th ordinal position, wrapped in `Key` to keep vertices opaque.
return deepmap(n -> Key(vertices(tn)[n * th]), seq_linear_index)
end

# Legacy "optimal" backend (the deleted side of this diff): delegated to
# ITensors' built-in `optimal_contraction_sequence`; superseded by the
# TensorOperations.jl extension added in this change set.
function contraction_sequence(::Algorithm"optimal", tn::Vector{ITensor})
return optimal_contraction_sequence(tn)
end
1 change: 1 addition & 0 deletions test/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
SplitApplyCombine = "03a91e81-4c3e-53e1-a0a4-9c0c8f19dd66"
StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
Suppressor = "fd094767-a336-5f1f-9728-57cf17d0bbfb"
TensorOperations = "6aa20fa7-93e2-5fca-9bc0-fbd0db3c71a2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
UnicodePlots = "b8865327-cd53-5732-bb35-84acbb429228"
Weave = "44d3d7a6-8a23-5bf8-98c5-b353f8df5ec9"
1 change: 1 addition & 0 deletions test/test_additensornetworks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ using ITensorNetworks: ITensorNetwork, inner_network, random_tensornetwork, site
using ITensors: ITensors, apply, op, scalar, inner
using LinearAlgebra: norm_sqr
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset
@testset "add_itensornetworks" begin
g = named_grid((2, 2))
Expand Down
1 change: 1 addition & 0 deletions test/test_apply.jl
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ using NamedGraphs.NamedGraphGenerators: named_grid
using NamedGraphs.PartitionedGraphs: PartitionVertex
using SplitApplyCombine: group
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset
@testset "apply" begin
g_dims = (2, 2)
Expand Down
1 change: 1 addition & 0 deletions test/test_belief_propagation.jl
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ using NamedGraphs.NamedGraphGenerators: named_comb_tree, named_grid
using NamedGraphs.PartitionedGraphs: PartitionVertex, partitionedges
using SplitApplyCombine: group
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset

@testset "belief_propagation (eltype=$elt)" for elt in (
Expand Down
1 change: 1 addition & 0 deletions test/test_binary_tree_partition.jl
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ using NamedGraphs.GraphsExtensions:
is_binary_arborescence, post_order_dfs_vertices, root_vertex
using OMEinsumContractionOrders: OMEinsumContractionOrders
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset

@testset "test mincut functions on top of MPS" begin
Expand Down
1 change: 1 addition & 0 deletions test/test_contraction_sequence.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ using ITensors: ITensors, contract
using NamedGraphs.NamedGraphGenerators: named_grid
using OMEinsumContractionOrders: OMEinsumContractionOrders
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset
@testset "contraction_sequence" begin
ITensors.@disable_warn_order begin
Expand Down
1 change: 1 addition & 0 deletions test/test_contraction_sequence_to_graph.jl
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ using NamedGraphs.GraphsExtensions:
is_leaf_vertex, leaf_vertices, non_leaf_edges, root_vertex
using NamedGraphs.NamedGraphGenerators: named_grid
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset
@testset "contraction_sequence_to_graph" begin
n = 3
Expand Down
1 change: 1 addition & 0 deletions test/test_expect.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ using ITensorNetworks:
original_state_vertex
using SplitApplyCombine: group
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset
@testset "Test Expect" begin
#Test on a tree
Expand Down
1 change: 1 addition & 0 deletions test/test_forms.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ using ITensorNetworks:
using ITensors: contract, dag, inds, prime, random_itensor
using LinearAlgebra: norm
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset
@testset "FormNetworks" begin
g = named_grid((1, 4))
Expand Down
1 change: 1 addition & 0 deletions test/test_inner.jl
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ using SplitApplyCombine: group
using Graphs: SimpleGraph, uniform_tree
using NamedGraphs: NamedGraph
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @testset
@testset "Inner products, BP vs exact comparison" begin
L = 4
Expand Down
1 change: 1 addition & 0 deletions test/test_itensornetwork.jl
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ using NamedGraphs.NamedGraphGenerators: named_comb_tree, named_grid
using NDTensors: NDTensors, dim
using Random: randn!
using StableRNGs: StableRNG
using TensorOperations: TensorOperations
using Test: @test, @test_broken, @testset
const elts = (Float32, Float64, Complex{Float32}, Complex{Float64})
@testset "ITensorNetwork tests" begin
Expand Down
Loading