Skip to content

Commit a11107c

Browse files
committed
remove UnitRangeDual
1 parent 14c95ca commit a11107c

File tree

7 files changed

+73
-245
lines changed

7 files changed

+73
-245
lines changed

NDTensors/src/lib/BlockSparseArrays/ext/BlockSparseArraysGradedAxesExt/test/runtests.jl

Lines changed: 3 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,7 @@ using Test: @test, @testset, @test_broken
44
using BlockArrays: Block, BlockedOneTo, blockedrange, blocklengths, blocksize
55
using NDTensors.BlockSparseArrays: BlockSparseArray, block_nstored
66
using NDTensors.GradedAxes:
7-
GradedAxes,
8-
GradedOneTo,
9-
GradedUnitRangeDual,
10-
UnitRangeDual,
11-
blocklabels,
12-
dual,
13-
gradedrange
7+
GradedAxes, GradedOneTo, GradedUnitRangeDual, blocklabels, dual, gradedrange
148
using NDTensors.LabelledNumbers: label
159
using NDTensors.SparseArrayInterface: nstored
1610
using NDTensors.TensorAlgebra: fusedims, splitdims
@@ -208,14 +202,14 @@ const elts = (Float32, Float64, Complex{Float32}, Complex{Float64})
208202
@test Array(b) == 2 * Array(a)
209203
@test_broken a[:, :] isa BlockSparseArray
210204
for ax in axes(b)
211-
@test ax isa UnitRangeDual
205+
@test ax isa BlockedUnitRange
212206
end
213207

214208
I = [Block(1)[1:1]]
215209
@test_broken a[I, :]
216210
@test_broken a[:, I]
217211
@test size(a[I, I]) == (1, 1)
218-
@test_broken GradedAxes.isdual(axes(a[I, I], 1))
212+
@test !GradedAxes.isdual(axes(a[I, I], 1))
219213
end
220214

221215
# Test case when all axes are dual

NDTensors/src/lib/GradedAxes/src/GradedAxes.jl

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@ module GradedAxes
22
include("blockedunitrange.jl")
33
include("gradedunitrange.jl")
44
include("dual.jl")
5-
include("unitrangedual.jl")
65
include("gradedunitrangedual.jl")
76
include("onetoone.jl")
87
include("fusion.jl")

NDTensors/src/lib/GradedAxes/src/dual.jl

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
1-
function dual end
2-
isdual(::AbstractUnitRange) = false # default behavior
1+
# default behavior: self-dual
2+
dual(r::AbstractUnitRange) = r
3+
nondual(r::AbstractUnitRange) = r
4+
isdual(::AbstractUnitRange) = false
35

46
using NDTensors.LabelledNumbers:
57
LabelledStyle, IsLabelled, NotLabelled, label, labelled, unlabel

NDTensors/src/lib/GradedAxes/src/onetoone.jl

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -30,13 +30,4 @@ function gradedisequal(::OneToOne, g::AbstractUnitRange)
3030
return !islabelled(eltype(g)) && (first(g) == last(g) == 1)
3131
end
3232
gradedisequal(g::AbstractUnitRange, a0::OneToOne) = gradedisequal(a0, g)
33-
34-
gradedisequal(::UnitRangeDual, ::AbstractUnitRange) = false
35-
gradedisequal(::AbstractUnitRange, ::UnitRangeDual) = false
36-
gradedisequal(::OneToOne, ::UnitRangeDual) = false
37-
gradedisequal(::UnitRangeDual, ::OneToOne) = false
38-
function gradedisequal(a1::UnitRangeDual, a2::UnitRangeDual)
39-
return gradedisequal(nondual(a1), nondual(a2))
40-
end
41-
4233
gradedisequal(a1::AbstractUnitRange, a2::AbstractUnitRange) = a1 == a2

NDTensors/src/lib/GradedAxes/src/unitrangedual.jl

Lines changed: 0 additions & 112 deletions
This file was deleted.

NDTensors/src/lib/GradedAxes/test/test_basics.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -106,6 +106,7 @@ end
106106
@test length(ax) == length(a)
107107
@test blocklengths(ax) == blocklengths(a)
108108
@test blocklabels(ax) == blocklabels(a)
109+
@test_broken(blockfirsts(a)) == [2, 3]
109110

110111
# Regression test for ambiguity error.
111112
x = gradedrange(["x" => 2, "y" => 3])

0 commit comments

Comments (0)