Skip to content

Commit 09b468a

Browse files
committed
Add package extensions
1 parent d0b64d1 commit 09b468a

35 files changed

+131
-73
lines changed

NDTensors/ext/NDTensorsAMDGPUExt/adapt.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
 using NDTensors: NDTensors, EmptyStorage, adapt_storagetype, emptytype
 using NDTensors.AMDGPUExtensions: AMDGPUExtensions, ROCArrayAdaptor
 using NDTensors.GPUArraysCoreExtensions: storagemode
-using NDTensors.TypeParameterAccessors:
+using NDTensors.Vendored.TypeParameterAccessors:
   default_type_parameters, set_type_parameters, type_parameters
 using Adapt: Adapt, adapt
 using AMDGPU: AMDGPU, ROCArray, ROCVector

NDTensors/ext/NDTensorsAMDGPUExt/linearalgebra.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
 using NDTensors.AMDGPUExtensions: roc
 using NDTensors.Expose: Expose, Exposed, expose, ql, ql_positive
 using NDTensors.GPUArraysCoreExtensions: cpu
-using NDTensors.TypeParameterAccessors: unwrap_array_type
+using NDTensors.Vendored.TypeParameterAccessors: unwrap_array_type
 using LinearAlgebra: svd
 using Adapt: adapt
 using AMDGPU: ROCMatrix
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
 using AMDGPU: ROCArray
 using NDTensors.GPUArraysCoreExtensions: storagemode
-using NDTensors.TypeParameterAccessors: TypeParameterAccessors, Position
+using NDTensors.Vendored.TypeParameterAccessors: TypeParameterAccessors, Position

 TypeParameterAccessors.position(::Type{<:ROCArray}, ::typeof(storagemode)) = Position(3)

NDTensors/ext/NDTensorsCUDAExt/NDTensorsCUDAExt.jl

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
 module NDTensorsCUDAExt
+
 include("append.jl")
 include("default_kwargs.jl")
 include("copyto.jl")
@@ -9,4 +10,5 @@ include("indexing.jl")
 include("linearalgebra.jl")
 include("mul.jl")
 include("permutedims.jl")
+
 end

NDTensors/ext/NDTensorsCUDAExt/adapt.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ using Functors: fmap
 using NDTensors: NDTensors, EmptyStorage, adapt_storagetype, emptytype
 using NDTensors.CUDAExtensions: CUDAExtensions, CuArrayAdaptor
 using NDTensors.GPUArraysCoreExtensions: storagemode
-using NDTensors.TypeParameterAccessors:
+using NDTensors.Vendored.TypeParameterAccessors:
   default_type_parameters, set_type_parameters, type_parameters

 function CUDAExtensions.cu(xs; storagemode = default_type_parameters(CuArray, storagemode))

NDTensors/ext/NDTensorsCUDAExt/linearalgebra.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ using LinearAlgebra: Adjoint, svd
 using NDTensors: NDTensors
 using NDTensors.Expose: Expose, expose, ql, ql_positive
 using NDTensors.GPUArraysCoreExtensions: cpu
-using NDTensors.TypeParameterAccessors: unwrap_array_type
+using NDTensors.Vendored.TypeParameterAccessors: unwrap_array_type
 function NDTensors.svd_catch_error(A::CuMatrix; alg::String = "jacobi_algorithm")
     if alg == "jacobi_algorithm"
         alg = CUDA.CUSOLVER.JacobiAlgorithm()

NDTensors/ext/NDTensorsCUDAExt/set_types.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
 using CUDA: CuArray
 using NDTensors.GPUArraysCoreExtensions: storagemode
-using NDTensors.TypeParameterAccessors: TypeParameterAccessors, Position
+using NDTensors.Vendored.TypeParameterAccessors: TypeParameterAccessors, Position

 function TypeParameterAccessors.position(::Type{<:CuArray}, ::typeof(storagemode))
     return Position(3)

NDTensors/ext/NDTensorsGPUArraysCoreExt/contract.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ using Adapt: adapt
 using GPUArraysCore: AbstractGPUArray
 using NDTensors: NDTensors, DenseTensor, DiagTensor, contract!, dense, inds, Tensor
 using NDTensors.Expose: Exposed, expose, unexpose
-using NDTensors.TypeParameterAccessors: parenttype, set_ndims
+using NDTensors.Vendored.TypeParameterAccessors: parenttype, set_ndims

 function NDTensors.contract!(
     output_tensor::Exposed{<:AbstractGPUArray, <:DenseTensor},
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
 module NDTensorsJLArraysExt
+
 include("copyto.jl")
 include("indexing.jl")
 include("linearalgebra.jl")
 include("mul.jl")
 include("permutedims.jl")
+
 end

NDTensors/ext/NDTensorsJLArraysExt/linearalgebra.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ using LinearAlgebra: LinearAlgebra, Hermitian, Symmetric, qr, eigen
 using NDTensors: NDTensors
 using NDTensors.Expose: Expose, expose, qr, qr_positive, ql, ql_positive
 using NDTensors.GPUArraysCoreExtensions: cpu
-using NDTensors.TypeParameterAccessors: unwrap_array_type
+using NDTensors.Vendored.TypeParameterAccessors: unwrap_array_type

 ## TODO this function exists because of the same issue below. when
 ## that issue is resolved we can rely on the abstractarray version of

0 commit comments

Comments
 (0)