
Commit 03fc893

Remove old unused loading path
1 parent a4bb4de commit 03fc893

File tree

1 file changed (+1, -62 lines)


src/auxil.jl

Lines changed: 1 addition & 62 deletions
@@ -144,27 +144,7 @@ function stackindices(times, timediff=200000)
     return groups
 end

-#=
-function DiskArrays.readblock!(b::GDALBand, aout, r::AbstractUnitRange...)
-    if !isa(aout,Matrix)
-        aout2 = similar(aout)
-        AG.read(b.filename) do ds
-            AG.getband(ds, b.band) do bh
-                DiskArrays.readblock!(bh, aout2, r...)
-            end
-        end
-        aout .= aout2
-    else
-        AG.read(b.filename) do ds
-            AG.getband(ds, b.band) do bh
-                DiskArrays.readblock!(bh, aout, r...)
-            end
-        end
-    end
-end
-=#
-
-function gdalcube(filenames::AbstractVector{<:AbstractString}, stackgroups=:dae)
+function gdalcube(filenames::AbstractVector{<:AbstractString}, stackgroups=:lazyagg)
     dates = getdate.(filenames)
     @show length(dates)
     # Sort the dates and files by DateTime
@@ -206,47 +186,6 @@ function gdalcube(filenames::AbstractVector{<:AbstractString}, stackgroups=:dae)
         taxis = DD.Ti(dates_grouped)
         gcube = Cube(sfiles[1])
         return YAXArray((DD.dims(gcube)[1:2]..., taxis), aggdata, gcube.properties,)
-    else
-        #datasets = AG.readraster.(sfiles)
-        taxis = DD.Ti(sdates)
-
-        onefile = first(sfiles)
-        gd = backendlist[:gdal]
-        yax1 = gd(onefile)
-        onecube = Cube(onefile)
-        #@show onecube.axes
-        gdb = get_var_handle(yax1, "Gray")
-
-        #@assert gdb isa GDALBand
-        all_gdbs = map(sfiles) do f
-            BufferGDALBand{eltype(gdb)}(f, gdb.band, gdb.size, gdb.attrs, gdb.cs, Dict{Int,AG.IRasterBand}())
-        end
-        stacked_gdbs = diskstack(all_gdbs)
-        attrs = copy(gdb.attrs)
-        #attrs["add_offset"] = Float16(attrs["add_offset"])
-        if haskey(attrs, "scale_factor")
-            attrs["scale_factor"] = Float16(attrs["scale_factor"])
-        end
-        all_cfs = CFDiskArray(stacked_gdbs, attrs)
-        return YAXArray((onecube.axes..., taxis), all_cfs, onecube.properties)
-    end
-    #datasetgroups = [datasets[group] for group in groupinds]
-    #We have to save the vrts because the usage of nested vrts is not working as a rasterdataset
-    #temp = tempdir()
-    #outpaths = [joinpath(temp, splitext(basename(sfiles[group][1]))[1] * ".vrt") for group in groupinds]
-    #vrt_grouped = AG.unsafe_gdalbuildvrt.(datasetgroups)
-    #AG.write.(vrt_grouped, outpaths)
-    #vrt_grouped = AG.read.(outpaths)
-    #vrt_vv = AG.unsafe_gdalbuildvrt(vrt_grouped, ["-separate"])
-    #rvrt_vv = AG.RasterDataset(vrt_vv)
-    #yaxras = YAXArray.(sfiles)
-    #cube = concatenatecubes(yaxras, taxis)
-    #bandnames = AG.GDAL.gdalgetfilelist(vrt_vv.ptr)
-
-
-
-    # Set the timesteps from the bandnames as time axis
-    #dates_grouped = [sdates[group[begin]] for group in groupinds]
 end

 function skipmissingmean(x)
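
For context, a minimal usage sketch of the loading path that remains after this commit. It assumes the module defining gdalcube is loaded and that filenames is a vector of GeoTIFF paths whose names encode an acquisition date; the Glob pattern and directory below are hypothetical, not taken from this repository:

    using Glob                                   # hypothetical file discovery; any String vector works
    filenames = glob("*.tif", "/data/scenes")    # hypothetical input directory

    # The grouped/aggregated branch is now the only code path, and the default
    # `stackgroups` value has changed from :dae to :lazyagg.
    cube = gdalcube(filenames)                   # equivalent to gdalcube(filenames, :lazyagg)

As in the retained branch above, the result is a YAXArray carrying the spatial dimensions of the first file plus a DD.Ti time axis built from the grouped dates.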

Comments (0)