
Commit e4b647c

[ITensorsNamedDimsArraysExt] Convert symmetric tensors

1 parent 4996dca

4 files changed: 36 additions & 7 deletions

NDTensors/Project.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 name = "NDTensors"
 uuid = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
 authors = ["Matthew Fishman <[email protected]>"]
-version = "0.3.62"
+version = "0.3.63"
 
 [deps]
 Accessors = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697"

NDTensors/src/lib/GradedAxes/src/gradedunitrangedual.jl

Lines changed: 15 additions & 0 deletions
@@ -94,6 +94,21 @@ function blockedunitrange_getindices(
   return flip_blockvector(v)
 end
 
+# Fixes ambiguity error.
+# TODO: Write this in terms of `blockedunitrange_getindices(dual(a), indices)`.
+function blockedunitrange_getindices(
+  a::GradedUnitRangeDual, indices::AbstractBlockVector{<:Block{1}}
+)
+  blks = map(bs -> mortar(map(b -> a[b], bs)), blocks(indices))
+  # We pass `labelled_length.(blks)` to `mortar` in order
+  # to pass block labels to the axes of the output,
+  # if they exist. This makes it so that
+  # `only(axes(a[indices])) isa GradedUnitRange`
+  # if `a isa GradedUnitRange`, for example.
+  v = mortar(blks, labelled_length.(blks))
+  return flip_blockvector(v)
+end
+
 function flip_blockvector(v::BlockVector)
   block_axes = flip.(axes(v))
   flipped = mortar(vec.(blocks(v)), block_axes)
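
For context, a minimal sketch of the indexing pattern the new method handles (assuming NDTensors ≥ 0.3.63; `blockedunitrange_getindices` is an internal GradedAxes function, and the string block labels below are purely illustrative):

using NDTensors.GradedAxes: GradedAxes, dual, gradedrange
using NDTensors.BlockSparseArrays.BlockArrays: Block, mortar

# A graded range with two labelled blocks, and its dual.
r = gradedrange(["x" => 2, "y" => 3])
a = dual(r)

# A block vector of `Block{1}`s, the index type that previously hit the
# method ambiguity.
indices = mortar([[Block(1)], [Block(2)]])

# Dispatches to the new method; the axis of the result keeps the
# (dual) block labels.
v = GradedAxes.blockedunitrange_getindices(a, indices)
only(axes(v))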

Project.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 name = "ITensors"
 uuid = "9136182c-28ba-11e9-034c-db9fb085ebd5"
 authors = ["Matthew Fishman <[email protected]>", "Miles Stoudenmire <[email protected]>"]
-version = "0.7.4"
+version = "0.7.5"
 
 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"

src/lib/ITensorsNamedDimsArraysExt/src/to_nameddimsarray.jl

Lines changed: 19 additions & 5 deletions
@@ -1,5 +1,5 @@
-using ..NDTensors: data, inds
 using ITensors: ITensor
+using ..NDTensors: data, inds
 
 # TODO: Delete this, it is a hack to decide
 # if an Index is blocked.
@@ -34,21 +34,35 @@ function to_nameddimsarray(x::DiagTensor)
   return named(DiagonalArray(data(x), size(x)), name.(inds(x)))
 end
 
-using ..NDTensors: BlockSparseTensor
+using ITensors: ITensors, dir, qn
+using ..NDTensors: BlockSparseTensor, array, blockdim, datatype, nblocks, nzblocks
 using ..NDTensors.BlockSparseArrays: BlockSparseArray
+using ..NDTensors.BlockSparseArrays.BlockArrays: BlockArrays, blockedrange
+using ..NDTensors.GradedAxes: dual, gradedrange
+using ..NDTensors.TypeParameterAccessors: set_ndims
 # TODO: Delete once `BlockSparse` is removed.
 function to_nameddimsarray(x::BlockSparseTensor)
-  blockinds = map(i -> [blockdim(i, b) for b in 1:nblocks(i)], inds(x))
+  blockinds = map(inds(x)) do i
+    r = gradedrange([qn(i, b) => blockdim(i, b) for b in 1:nblocks(i)])
+    if dir(i) == ITensors.In
+      return dual(r)
+    end
+    return r
+  end
   blocktype = set_ndims(datatype(x), ndims(x))
   # TODO: Make a simpler constructor:
   # BlockSparseArray(blocktype, blockinds)
-  arraystorage = BlockSparseArray{eltype(x),ndims(x),blocktype}(blockinds)
+  arraystorage = BlockSparseArray{eltype(x),ndims(x),blocktype}(undef, blockinds)
   for b in nzblocks(x)
-    arraystorage[BlockArrays.Block(Tuple(b)...)] = x[b]
+    arraystorage[BlockArrays.Block(Int.(Tuple(b))...)] = array(x[b])
   end
   return named(arraystorage, name.(inds(x)))
 end
 
+using ITensors: QN
+using ..NDTensors.GradedAxes: GradedAxes
+GradedAxes.fuse_labels(l1::QN, l2::QN) = l1 + l2
+
 ## TODO: Add this back, define `CombinerArrays` library in NDTensors!
 ## using ..NDTensors: CombinerTensor, CombinerArray, storage
 ## # TODO: Delete when we directly use `CombinerArray` as storage.
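
A usage sketch of the conversion path this commit enables (assuming the extension module is reachable as `ITensors.ITensorsNamedDimsArraysExt` and that the ITensor-level `to_nameddimsarray` entry point in this file forwards to the tensor methods above; the indices and tensor here are illustrative):

using ITensors: Index, QN, dag, randomITensor
using ITensors.ITensorsNamedDimsArraysExt: to_nameddimsarray

# A QN-conserving ("symmetric") ITensor with one Out and one In index.
i = Index([QN(0) => 2, QN(1) => 3], "i")
j = Index([QN(0) => 2, QN(1) => 3], "j")
T = randomITensor(QN(0), i, dag(j))

# Converts the block sparse QN storage to a NamedDimsArray wrapping a
# BlockSparseArray whose axes are graded ranges; the In index becomes a
# dual range, mirroring the `dir(i) == ITensors.In` branch above.
na = to_nameddimsarray(T)

The `GradedAxes.fuse_labels(l1::QN, l2::QN) = l1 + l2` overload makes label fusion follow QN arithmetic, e.g. fusing `QN(1)` with `QN(1)` gives `QN(2)`.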
