@@ -34,7 +34,16 @@ function DiskArrays.readblock!(a::LazyAggDiskArray, aout, i::UnitRange{Int}...)
     for (j, it) in enumerate(itime)
         arrays_now = a.arrays[a.inds[it]]
         for ia in eachindex(arrays_now)
-            DiskArrays.readblock!(arrays_now[ia], view(buf, :, :, ia), i1, i2)
+            try
+                DiskArrays.readblock!(arrays_now[ia], view(buf, :, :, ia), i1, i2)
+            catch e
+                if hasproperty(e, :captured) && e.captured.ex isa ArchGDAL.GDAL.GDALError
+                    @warn e.captured.ex.msg
+                    buf[:, :, ia] .= missing
+                else
+                    rethrow(e)
+                end
+            end
         end
         vbuf = view(buf, :, :, 1:length(arrays_now))
         map!(a.f, view(aout, :, :, j), eachslice(vbuf, dims=(1, 2)))
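The catch branch above only recovers when the failure surfaces as an exception exposing a captured field, e.g. something like a Distributed.RemoteException wrapping the original ArchGDAL.GDAL.GDALError in a CapturedException; everything else is rethrown. A minimal, self-contained sketch of that unwrapping pattern, where FakeGDALError is a hypothetical stand-in for ArchGDAL.GDAL.GDALError and the message is made up:

using Distributed

# Hypothetical stand-in for ArchGDAL.GDAL.GDALError so the sketch runs without GDAL.
struct FakeGDALError <: Exception
    msg::String
end

# Wrap the error the way a failed remote read would surface it.
inner = CapturedException(FakeGDALError("read failed"), backtrace())
e = RemoteException(1, inner)

# Same check as in readblock!: unwrap, warn, and keep going instead of aborting the whole read.
if hasproperty(e, :captured) && e.captured.ex isa FakeGDALError
    @warn e.captured.ex.msg   # readblock! then fills that buffer slice with missing
end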
@@ -135,27 +144,7 @@ function stackindices(times, timediff=200000)
     return groups
 end
 
-#=
-function DiskArrays.readblock!(b::GDALBand, aout, r::AbstractUnitRange...)
-    if !isa(aout, Matrix)
-        aout2 = similar(aout)
-        AG.read(b.filename) do ds
-            AG.getband(ds, b.band) do bh
-                DiskArrays.readblock!(bh, aout2, r...)
-            end
-        end
-        aout .= aout2
-    else
-        AG.read(b.filename) do ds
-            AG.getband(ds, b.band) do bh
-                DiskArrays.readblock!(bh, aout, r...)
-            end
-        end
-    end
-end
-=#
-
-function gdalcube(filenames::AbstractVector{<:AbstractString}, stackgroups=:dae)
+function gdalcube(filenames::AbstractVector{<:AbstractString}, stackgroups=:lazyagg)
     dates = getdate.(filenames)
     @show length(dates)
     # Sort the dates and files by DateTime
@@ -165,7 +154,6 @@ function gdalcube(filenames::AbstractVector{<:AbstractString}, stackgroups=:dae)
 
     # @show sdates
     # Put the dates which are 200 seconds apart into groups
-    if stackgroups in [:dae, :lazyagg]
     groupinds = grouptimes(sdates, 200000)
     onefile = first(sfiles)
     gd = backendlist[:gdal]
@@ -184,60 +172,32 @@ function gdalcube(filenames::AbstractVector{<:AbstractString}, stackgroups=:dae)
 
     cubelist = CFDiskArray.(group_gdbs, (gdbattrs,))
     stackinds = stackindices(sdates)
-    aggdata = if stackgroups == :dae
-        gcube = diskstack(cubelist)
-        aggdata = DAE.aggregate_diskarray(gcube, mean ∘ skipmissing, (3 => stackinds,); strategy=:direct)
-    else
-        println("Construct lazy diskarray")
-        LazyAggDiskArray(mean ∘ skipmissing, cubelist, stackinds)
-    end
+    println("Construct lazy diskarray")
+    aggdata = LazyAggDiskArray(skipmissingmean, cubelist, stackinds)
     # data = DiskArrays.ConcatDiskArray(reshape(groupcubes, (1,1,length(groupcubes))))
     dates_grouped = [sdates[group[begin]] for group in groupinds]
 
     taxis = DD.Ti(dates_grouped)
     gcube = Cube(sfiles[1])
     return YAXArray((DD.dims(gcube)[1:2]..., taxis), aggdata, gcube.properties,)
-    else
-        # datasets = AG.readraster.(sfiles)
-        taxis = DD.Ti(sdates)
-
-        onefile = first(sfiles)
-        gd = backendlist[:gdal]
-        yax1 = gd(onefile)
-        onecube = Cube(onefile)
-        # @show onecube.axes
-        gdb = get_var_handle(yax1, "Gray")
+end
 
-        # @assert gdb isa GDALBand
-        all_gdbs = map(sfiles) do f
-            BufferGDALBand{eltype(gdb)}(f, gdb.band, gdb.size, gdb.attrs, gdb.cs, Dict{Int,AG.IRasterBand}())
-        end
-        stacked_gdbs = diskstack(all_gdbs)
-        attrs = copy(gdb.attrs)
-        # attrs["add_offset"] = Float16(attrs["add_offset"])
-        if haskey(attrs, "scale_factor")
-            attrs["scale_factor"] = Float16(attrs["scale_factor"])
-        end
-        all_cfs = CFDiskArray(stacked_gdbs, attrs)
-        return YAXArray((onecube.axes..., taxis), all_cfs, onecube.properties)
+function skipmissingmean(x)
+    isempty(x) && return missing
+    s, n = reduce(x, init=(zero(eltype(x)), 0)) do (s, n), ix
+        ismissing(ix) ? (s, n) : (s + ix, n + 1)
     end
-    # datasetgroups = [datasets[group] for group in groupinds]
-    # We have to save the vrts because the usage of nested vrts is not working as a rasterdataset
-    # temp = tempdir()
-    # outpaths = [joinpath(temp, splitext(basename(sfiles[group][1]))[1] * ".vrt") for group in groupinds]
-    # vrt_grouped = AG.unsafe_gdalbuildvrt.(datasetgroups)
-    # AG.write.(vrt_grouped, outpaths)
-    # vrt_grouped = AG.read.(outpaths)
-    # vrt_vv = AG.unsafe_gdalbuildvrt(vrt_grouped, ["-separate"])
-    # rvrt_vv = AG.RasterDataset(vrt_vv)
-    # yaxras = YAXArray.(sfiles)
-    # cube = concatenatecubes(yaxras, taxis)
-    # bandnames = AG.GDAL.gdalgetfilelist(vrt_vv.ptr)
-
-
-
-    # Set the timesteps from the bandnames as time axis
-    # dates_grouped = [sdates[group[begin]] for group in groupinds]
+    n == 0 ? missing : s / n
+end
+
+# I don't dare to make this type piracy.
+# Base.∘(::typeof(mean), ::typeof(skipmissing)) = skipmissingmean
+
+@testitem "skipmissingmean" begin
+    @test ismissing(RQADeforestation.skipmissingmean(Float32[]))
+    @test ismissing(RQADeforestation.skipmissingmean(Union{Float32,Missing}[missing, missing]))
+    @test RQADeforestation.skipmissingmean([1, 0, missing]) == 0.5
+    @test RQADeforestation.skipmissingmean([1, 2, 3]) == 2.0
 end
 
 
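For context (not part of the commit): the practical difference between skipmissingmean and the mean ∘ skipmissing it replaces is the empty or all-missing case, which the tests above pin down. A rough comparison, assuming the package is loaded as RQADeforestation:

using Statistics

slice = Union{Float32, Missing}[missing, missing]

# mean over a skipmissing iterator with no remaining elements typically gives
# NaN (or throws, depending on the element type) rather than missing.
(mean ∘ skipmissing)(slice)

# skipmissingmean propagates missing, so an all-missing pixel stays missing
# in the aggregated cube.
RQADeforestation.skipmissingmean(slice)   # -> missing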