# Normalization of the degree-0 polynomial: assumed to be identically 1.
function _p0(A)
    return one(eltype(A))
end
44
# Run the three-term forward recurrence up to degree `N`, starting from the
# normalization `μ`. Returns `(p_{N-1}(x), p_N(x))`, the two values needed to
# continue the recurrence from a shifted starting index.
function initiateforwardrecurrence(N, A, B, C, x, μ)
    # Common element type of the recurrence coefficients and the point x.
    T = promote_type(eltype(A), eltype(B), eltype(C), typeof(x))
    prev = convert(T, μ)                 # p₀ scaled by the normalization μ
    N == 0 && return zero(T), prev
    # First recurrence step: p₁ = (A₁ x + B₁) p₀.
    cur = convert(T, muladd(A[1], x, B[1]) * prev)
    @inbounds for n in 2:N
        cur, prev = _forwardrecurrence_next(n, A, B, C, x, prev, cur), cur
    end
    return prev, cur
end
155
166for (get, vie) in ((:getindex , :view ), (:(Base. unsafe_getindex), :(Base. unsafe_view)))
177 @eval begin
@@ -39,7 +29,7 @@ function forwardrecurrence_copyto!(dest::AbstractMatrix, V)
3929 Ã,B̃,C̃ = A[shift: ∞],B[shift: ∞],C[shift: ∞]
4030 for (k,x) = enumerate (xr)
4131 p0, p1 = initiateforwardrecurrence (shift, A, B, C, x, _p0 (P))
42- _forwardrecurrence ! (view (dest,k,:), Ã, B̃, C̃, x, p0, p1)
32+ forwardrecurrence ! (view (dest,k,:), Ã, B̃, C̃, x, p0, p1)
4333 end
4434 dest
4535end
@@ -54,7 +44,7 @@ function copyto!(dest::AbstractVector, V::SubArray{<:Any,1,<:OrthogonalPolynomia
5444 shift = first (jr)
5545 Ã,B̃,C̃ = A[shift: ∞],B[shift: ∞],C[shift: ∞]
5646 p0, p1 = initiateforwardrecurrence (shift, A, B, C, x, _p0 (P))
57- _forwardrecurrence ! (dest, Ã, B̃, C̃, x, p0, p1)
47+ forwardrecurrence ! (dest, Ã, B̃, C̃, x, p0, p1)
5848 dest
5949end
6050
# Entry of a weighted-OP expansion: the weight evaluated at `x` times the
# corresponding entry of the unweighted product.
Base.@propagate_inbounds function getindex(f::Mul{<:WeightedOPLayout,<:AbstractPaddedLayout}, x::Number, j...)
    return weight(f.A)[x] * (unweighted(f.A) * f.B)[x, j...]
end
111101
###
# Operator Clenshaw
###
115-
116-
# Clenshaw step for constant-A, zero-B, unit-C recurrences (operator argument):
# overwrite bn2 with A*x*bn1 - bn2, then shift the diagonal by the coefficient c[n].
Base.@propagate_inbounds function _clenshaw_next!(n, A::AbstractFill, ::Zeros, C::Ones, x::AbstractMatrix, c, bn1::AbstractMatrix{T}, bn2::AbstractMatrix{T}) where T
    muladd!(getindex_value(A), x, bn1, -one(T), bn2)
    diagonal = view(bn2, band(0))
    diagonal .+= c[n]
    return bn2
end
122-
# Clenshaw step for zero-B recurrences (operator argument):
# bn2 ← A[n]*x*bn1 - C[n+1]*bn2, then add c[n] on the diagonal.
Base.@propagate_inbounds function _clenshaw_next!(n, A::AbstractVector, ::Zeros, C::AbstractVector, x::AbstractMatrix, c, bn1::AbstractMatrix{T}, bn2::AbstractMatrix{T}) where T
    muladd!(A[n], x, bn1, -C[n+1], bn2)
    diagonal = view(bn2, band(0))
    diagonal .+= c[n]
    return bn2
end
128-
# General Clenshaw step (operator argument). Computed in-place in three stages
# (the statement order matters — each call mutates bn2):
#   bn2 ← B[n]*bn1 - C[n+1]*bn2, then bn2 += A[n]*x*bn1, then diagonal += c[n].
Base.@propagate_inbounds function _clenshaw_next!(n, A::AbstractVector, B::AbstractVector, C::AbstractVector, x::AbstractMatrix, c, bn1::AbstractMatrix{T}, bn2::AbstractMatrix{T}) where T
    lmul!(-C[n+1], bn2)
    LinearAlgebra.axpy!(B[n], bn1, bn2)
    muladd!(A[n], x, bn1, one(T), bn2)
    diagonal = view(bn2, band(0))
    diagonal .+= c[n]
    return bn2
end
137-
# Operator * f Clenshaw: same recurrence steps, but applied to a vector `f`
# instead of building the full operator. Constant-A, zero-B, unit-C case.
Base.@propagate_inbounds function _clenshaw_next!(n, A::AbstractFill, ::Zeros, C::Ones, X::AbstractMatrix, c, f::AbstractVector, bn1::AbstractVector{T}, bn2::AbstractVector{T}) where T
    muladd!(getindex_value(A), X, bn1, -one(T), bn2)
    bn2 .+= c[n] .* f
    return bn2
end
144-
# Operator * f Clenshaw step, zero-B case: bn2 ← A[n]*X*bn1 - C[n+1]*bn2 + c[n]*f.
Base.@propagate_inbounds function _clenshaw_next!(n, A, ::Zeros, C, X::AbstractMatrix, c, f::AbstractVector, bn1::AbstractVector{T}, bn2::AbstractVector{T}) where T
    muladd!(A[n], X, bn1, -C[n+1], bn2)
    bn2 .+= c[n] .* f
    return bn2
end
150-
# Operator * f Clenshaw step, general case. The broadcast consumes the old bn2
# before muladd! adds the A[n]*X*bn1 term in-place.
Base.@propagate_inbounds function _clenshaw_next!(n, A, B, C, X::AbstractMatrix, c, f::AbstractVector, bn1::AbstractVector{T}, bn2::AbstractVector{T}) where T
    bn2 .= B[n] .* bn1 .- C[n+1] .* bn2 .+ c[n] .* f
    muladd!(A[n], X, bn1, one(T), bn2)
    return bn2
end
156-
# Final (n = 1) Clenshaw step; a separate function so the first step can be
# special-cased, e.g. for ChebyshevT in ClassicalOrthogonalPolynomials.
# Zero-B, operator case.
Base.@propagate_inbounds function _clenshaw_first!(A, ::Zeros, C, X, c, bn1, bn2)
    muladd!(A[1], X, bn1, -C[2], bn2)
    diagonal = view(bn2, band(0))
    diagonal .+= c[1]
    return bn2
end
163-
# Final Clenshaw step, general operator case (statement order matters:
# each call mutates bn2 in place).
Base.@propagate_inbounds function _clenshaw_first!(A, B, C, X, c, bn1, bn2)
    lmul!(-C[2], bn2)
    LinearAlgebra.axpy!(B[1], bn1, bn2)
    muladd!(A[1], X, bn1, one(eltype(bn2)), bn2)
    diagonal = view(bn2, band(0))
    diagonal .+= c[1]
    return bn2
end
171-
# Final Clenshaw step applied to a vector `f`, zero-B case.
Base.@propagate_inbounds function _clenshaw_first!(A, ::Zeros, C, X, c, f::AbstractVector, bn1, bn2)
    muladd!(A[1], X, bn1, -C[2], bn2)
    bn2 .+= c[1] .* f
    return bn2
end
177-
# Final Clenshaw step applied to a vector `f`, general case.
Base.@propagate_inbounds function _clenshaw_first!(A, B, C, X, c, f::AbstractVector, bn1, bn2)
    bn2 .= B[1] .* bn1 .- C[2] .* bn2 .+ c[1] .* f
    muladd!(A[1], X, bn1, one(eltype(bn2)), bn2)
    return bn2
end
183-
184- _clenshaw_op (:: AbstractBandedLayout , Z, N) = BandedMatrix (Z, (N- 1 ,N- 1 ))
185-
# Evaluate the polynomial with coefficients `c` (in the basis defined by the
# recurrence coefficients A, B, C) at the square matrix `X`, via Clenshaw's
# algorithm. Returns a matrix of the promoted element type.
function clenshaw(c::AbstractVector, A::AbstractVector, B::AbstractVector, C::AbstractVector, X::AbstractMatrix)
    N = length(c)
    T = promote_type(eltype(c), eltype(A), eltype(B), eltype(C), eltype(X))
    @boundscheck check_clenshaw_recurrences(N, A, B, C)
    m = size(X, 1)
    m == size(X, 2) || throw(DimensionMismatch("X must be square"))
    N == 0 && return zero(T)
    # Workspaces for the backward recurrence, banded to match X's layout.
    bn2 = _clenshaw_op(MemoryLayout(X), Zeros{T}(m, m), N)
    bn1 = _clenshaw_op(MemoryLayout(X), c[N] * Eye{T}(m), N)
    return _clenshaw_op!(c, A, B, C, X, bn1, bn2)
end
197-
# Evaluate the polynomial of `X` applied to the vector `f` (i.e. a(X)*f)
# without materializing the operator, via Clenshaw's algorithm.
function clenshaw(c::AbstractVector, A::AbstractVector, B::AbstractVector, C::AbstractVector, X::AbstractMatrix, f::AbstractVector)
    N = length(c)
    T = promote_type(eltype(c), eltype(A), eltype(B), eltype(C), eltype(X))
    @boundscheck check_clenshaw_recurrences(N, A, B, C)
    m = size(X, 1)
    m == size(X, 2) || throw(DimensionMismatch("X must be square"))
    m == length(f) || throw(DimensionMismatch("Dimensions must match"))
    N == 0 && return [zero(T)]
    # Vector workspaces; bn1 starts at the highest-degree contribution c[N]*f.
    bn2 = zeros(T, m)
    bn1 = Vector{T}(undef, m)
    bn1 .= c[N] .* f
    return _clenshaw_op!(c, A, B, C, X, f, bn1, bn2)
end
211-
# Backward Clenshaw recurrence kernel for the operator case: iterate the
# two workspaces down from degree N-1 and finish with the special-cased
# first step. Mutates bn1/bn2 and returns the result.
function _clenshaw_op!(c, A, B, C, X, bn1, bn2)
    N = length(c)
    N == 1 && return bn1
    @inbounds begin
        for n in N-1:-1:2
            bn1, bn2 = _clenshaw_next!(n, A, B, C, X, c, bn1, bn2), bn1
        end
        bn1 = _clenshaw_first!(A, B, C, X, c, bn1, bn2)
    end
    return bn1
end
223-
# Backward Clenshaw recurrence kernel for the operator-times-vector case.
# Mutates bn1/bn2 and returns the result vector.
function _clenshaw_op!(c, A, B, C, X, f::AbstractVector, bn1, bn2)
    N = length(c)
    N == 1 && return bn1
    @inbounds begin
        for n in N-1:-1:2
            bn1, bn2 = _clenshaw_next!(n, A, B, C, X, c, f, bn1, bn2), bn1
        end
        bn1 = _clenshaw_first!(A, B, C, X, c, f, bn1, bn2)
    end
    return bn1
end
235-
236-
237-
238- """
239- Clenshaw(a, X)
240-
241- represents the operator `a(X)` where a is a polynomial.
242- Here `a` is to stored as a quasi-vector.
243- """
244- struct Clenshaw{T, Coefs<: AbstractVector , AA<: AbstractVector , BB<: AbstractVector , CC<: AbstractVector , Jac<: AbstractMatrix } <: AbstractBandedMatrix{T}
245- c:: Coefs
246- A:: AA
247- B:: BB
248- C:: CC
249- X:: Jac
250- p0:: T
251- end
252-
# Infer the type parameters from the argument types.
function Clenshaw(c::AbstractVector{T}, A::AbstractVector, B::AbstractVector, C::AbstractVector, X::AbstractMatrix{T}, p0::T) where T
    return Clenshaw{T,typeof(c),typeof(A),typeof(B),typeof(C),typeof(X)}(c, A, B, C, X, p0)
end
255-
256- Clenshaw (c:: Number , A, B, C, X, p) = Clenshaw ([c], A, B, C, X, p)
257-
# Build the operator a(X) from an expansion `a = P*c` and a quasi-matrix X.
function Clenshaw(a::AbstractQuasiVector, X::AbstractQuasiMatrix)
    # Split the expansion into its basis and (padded) coefficient vector.
    P, c = arguments(a)
    rec = recurrencecoefficients(P)
    return Clenshaw(paddeddata(c), rec..., jacobimatrix(X), _p0(P))
end
262-
# Clenshaw is immutable in practice, so copy is the identity.
copy(M::Clenshaw) = M
# Size/axes are inherited from the underlying Jacobi matrix.
size(M::Clenshaw) = size(M.X)
axes(M::Clenshaw) = axes(M.X)
# A degree-(n-1) polynomial of a tridiagonal X has bandwidths (n-1, n-1).
bandwidths(M::Clenshaw) = (length(M.c)-1, length(M.c)-1)
267-
# Compact summary for infinite-dimensional display, e.g. "ℵ₀×ℵ₀ Clenshaw{Float64} …".
function Base.array_summary(io::IO, C::Clenshaw{T}, inds::Tuple{Vararg{OneToInf{Int}}}) where T
    print(io, Base.dims2string(length.(inds)), " Clenshaw{$T} with $(length(C.c)) degree polynomial")
end
270-
# Memory-layout trait so lazy/banded machinery can dispatch on Clenshaw.
struct ClenshawLayout <: AbstractLazyBandedLayout end
MemoryLayout(::Type{<:Clenshaw}) = ClenshawLayout()
# Finite×finite views stay Clenshaw; any view with an infinite axis falls back
# to the generic lazy banded layout.
sublayout(::ClenshawLayout, ::Type{<:NTuple{2,AbstractUnitRange{Int}}}) = ClenshawLayout()
sublayout(::ClenshawLayout, ::Type{<:Tuple{AbstractUnitRange{Int},Union{Slice,AbstractInfUnitRange{Int}}}}) = LazyBandedLayout()
sublayout(::ClenshawLayout, ::Type{<:Tuple{Union{Slice,AbstractInfUnitRange{Int}},AbstractUnitRange{Int}}}) = LazyBandedLayout()
sublayout(::ClenshawLayout, ::Type{<:Tuple{Union{Slice,AbstractInfUnitRange{Int}},Union{Slice,AbstractInfUnitRange{Int}}}}) = LazyBandedLayout()
# Materializing a finite view produces a concrete banded matrix.
sub_materialize(::ClenshawLayout, V) = BandedMatrix(V)
278-
# Materialize a finite view of a Clenshaw operator by running Clenshaw on a
# window of the Jacobi matrix just large enough to cover the requested rows
# and columns (padded by half the bandwidth on each side).
function _BandedMatrix(::ClenshawLayout, V::SubArray{<:Any,2})
    M = parent(V)
    kr, jr = parentindices(V)
    b = bandwidth(M, 1)
    jkr = max(1, min(first(jr), first(kr)) - b÷2):max(last(jr), last(kr)) + b÷2
    # Translate kr, jr into indices relative to the window jkr.
    kr2, jr2 = kr .- first(jkr) .+ 1, jr .- first(jkr) .+ 1
    return lmul!(M.p0, clenshaw(M.c, M.A, M.B, M.C, M.X[jkr, jkr])[kr2, jr2])
end
288-
# Column slice M[kr, j]: apply the operator to a unit vector supported on the
# windowed indices, which is cheaper than materializing the full operator.
function getindex(M::Clenshaw{T}, kr::AbstractUnitRange, j::Integer) where T
    b = bandwidth(M, 1)
    jkr = max(1, min(j, first(kr)) - b÷2):max(j, last(kr)) + b÷2
    # Translate kr and j into indices relative to the window jkr.
    kr2, j2 = kr .- first(jkr) .+ 1, j - first(jkr) + 1
    # Unit vector e_{j2} within the window.
    e = [Zeros{T}(j2-1); one(T); Zeros{T}(length(jkr)-j2)]
    return lmul!(M.p0, clenshaw(M.c, M.A, M.B, M.C, M.X[jkr, jkr], e)[kr2])
end
297-
298- getindex (M:: Clenshaw , k:: Int , j:: Int ) = M[k: k,j][1 ]
299-
# Indexing a Symmetric-wrapped Clenshaw: materialize the smallest leading
# principal block containing all requested indices, re-wrap, then index.
function getindex(S::Symmetric{T,<:Clenshaw}, k::Integer, jr::AbstractUnitRange) where T
    m = max(jr.start, jr.stop, k)
    return Symmetric(getindex(S.data, 1:m, 1:m), Symbol(S.uplo))[k, jr]
end
function getindex(S::Symmetric{T,<:Clenshaw}, kr::AbstractUnitRange, j::Integer) where T
    m = max(kr.start, kr.stop, j)
    return Symmetric(getindex(S.data, 1:m, 1:m), Symbol(S.uplo))[kr, j]
end
function getindex(S::Symmetric{T,<:Clenshaw}, kr::AbstractUnitRange, jr::AbstractUnitRange) where T
    m = max(kr.start, jr.start, kr.stop, jr.stop)
    return Symmetric(getindex(S.data, 1:m, 1:m), Symbol(S.uplo))[kr, jr]
end
312-
# A transposed Clenshaw loses the special layout and becomes generically lazy-banded.
transposelayout(M::ClenshawLayout) = LazyBandedMatrices.LazyBandedLayout()
# TODO: generalise for layout, use Base.PermutedDimsArray
Base.permutedims(M::Clenshaw{<:Number}) = transpose(M)
316-
317-
# In-place y ← α*A*x + β*y for a Clenshaw operator A and padded vectors x, y.
# Exploits the padding: only the leading nonzero data of x (plus a bandwidth
# margin) participates, so the computation stays finite.
function materialize!(M::MatMulVecAdd{<:ClenshawLayout,<:AbstractPaddedLayout,<:AbstractPaddedLayout})
    α, A, x, β, y = M.α, M.A, M.B, M.β, M.C
    length(y) == size(A, 1) || throw(DimensionMismatch("Dimensions must match"))
    length(x) == size(A, 2) || throw(DimensionMismatch("Dimensions must match"))
    xdata = paddeddata(x)
    m = length(xdata)
    b = bandwidth(A, 1)
    # Window: nonzero data plus one bandwidth of fill-in.
    jkr = 1:m+b
    padded = [xdata; zeros(eltype(xdata), length(jkr) - m)]
    Ax = lmul!(A.p0, clenshaw(A.c, A.A, A.B, A.C, A.X[jkr, jkr], padded))
    _fill_lmul!(β, y)                 # y ← β*y
    resizedata!(y, last(jkr))         # make room for the new nonzeros
    dest = view(paddeddata(y), jkr)
    LinearAlgebra.axpy!(α, Ax, dest)  # y[jkr] += α*Ax
    return y
end
334102
# TODO: generalise this to be trait based
336104function layout_broadcasted (:: Tuple{ExpansionLayout{<:AbstractOPLayout},AbstractOPLayout} , :: typeof (* ), a, P)
@@ -357,9 +125,8 @@ function _broadcasted_layout_broadcasted_mul(::Tuple{AbstractWeightLayout,Polyno
357125 a .* P
358126end
359127
360-
###
# Banded dot is slow
###
364-
365- LinearAlgebra. dot (x:: AbstractVector , A:: Clenshaw , y:: AbstractVector ) = dot (x, mul (A, y))
# constructor for Clenshaw
# NOTE(review): duplicates the `Clenshaw(::AbstractQuasiVector, ::AbstractQuasiMatrix)`
# method defined earlier in this file — confirm only one definition should remain.
function Clenshaw(a::AbstractQuasiVector, X::AbstractQuasiMatrix)
    P, c = arguments(a)
    return Clenshaw(paddeddata(c), recurrencecoefficients(P)..., jacobimatrix(X), _p0(P))
end
0 commit comments