diff --git a/Project.toml b/Project.toml index e77b0c10..b0097870 100644 --- a/Project.toml +++ b/Project.toml @@ -64,7 +64,7 @@ GeoInterface = "1" HDF5 = "0.14, 0.15, 0.16" Makie = "0.19, 0.20" Missings = "0.4, 1" -NCDatasets = "0.13" +NCDatasets = "0.13, 0.14" OffsetArrays = "1" ProgressMeter = "1" RasterDataSources = "0.5.7" diff --git a/ext/RastersArchGDALExt/gdal_source.jl b/ext/RastersArchGDALExt/gdal_source.jl index 8f7d5b83..03a8589a 100644 --- a/ext/RastersArchGDALExt/gdal_source.jl +++ b/ext/RastersArchGDALExt/gdal_source.jl @@ -42,10 +42,11 @@ function RA.FileArray{GDALsource}(raster::AG.RasterDataset{T}, filename; kw...) end RA.cleanreturn(A::AG.RasterDataset) = Array(A) -RA.haslayers(::Type{GDALsource}) = false +RA.haslayers(::GDALsource) = false +RA._sourcetrait(A::AG.RasterDataset) = GDALsource() """ - Base.write(filename::AbstractString, ::Type{GDALsource}, A::AbstractRaster; force=false, kw...) + Base.write(filename::AbstractString, ::GDALsource, A::AbstractRaster; force=false, kw...) Write a `Raster` to file using GDAL. @@ -58,7 +59,7 @@ Write a `Raster` to file using GDAL. Returns `filename`. """ function Base.write( - filename::AbstractString, ::Type{GDALsource}, A::AbstractRaster{T}; + filename::AbstractString, ::GDALsource, A::AbstractRaster{T}; force=false, verbose=true, kw... ) where T RA.check_can_write(filename, force) @@ -72,7 +73,7 @@ function Base.write( return filename end -function RA.create(filename, ::Type{GDALsource}, T::Type, dims::DD.DimTuple; +function RA.create(filename, ::GDALsource, T::Type, dims::DD.DimTuple; missingval=nothing, metadata=nothing, name=nothing, lazy=true, verbose=true, kw... ) T = Missings.nonmissingtype(T) @@ -82,14 +83,14 @@ function RA.create(filename, ::Type{GDALsource}, T::Type, dims::DD.DimTuple; nothing end - return Raster(filename; source=GDALsource, name, lazy, dropband=!hasdim(dims, Band)) + return Raster(filename; source=GDALsource(), name, lazy, dropband=!hasdim(dims, Band)) end function _maybe_warn_south_up(A, verbose, msg) verbose && lookup(A, Y) isa AbstractSampled && order(A, Y) isa ForwardOrdered && @warn msg end -function RA._open(f, ::Type{GDALsource}, filename::AbstractString; write=false, kw...) +function RA._open(f, ::GDALsource, filename::AbstractString; write=false, kw...) # Check the file actually exists because the GDAL error is unhelpful if !isfile(filename) # Allow gdal virtual file systems @@ -113,7 +114,7 @@ function RA._open(f, ::Type{GDALsource}, filename::AbstractString; write=false, AG.readraster(RA.cleanreturn ∘ f, filename) end end -RA._open(f, ::Type{GDALsource}, ds::AG.RasterDataset; kw...) = RA.cleanreturn(f(ds)) +RA._open(f, ::GDALsource, ds::AG.RasterDataset; kw...) = RA.cleanreturn(f(ds)) # DimensionalData methods for ArchGDAL types ############################### @@ -121,7 +122,7 @@ RA._open(f, ::Type{GDALsource}, ds::AG.RasterDataset; kw...) = RA.cleanreturn(f( # These methods are type piracy on DimensionalData/ArchGDAL and may have to move some day # We allow passing in crs and mappedcrs manually -function DD.dims(raster::AG.RasterDataset, crs=nothing, mappedcrs=nothing) +function RA._dims(raster::AG.RasterDataset, crs=nothing, mappedcrs=nothing) gt_dims = try AG.getgeotransform(raster) catch @@ -205,7 +206,8 @@ function DD.dims(raster::AG.RasterDataset, crs=nothing, mappedcrs=nothing) end end -function DD.metadata(raster::AG.RasterDataset, args...) +# TODO make metadata optional, its slow to get +function RA._metadata(raster::AG.RasterDataset, args...) 
band = AG.getband(raster.ds, 1) # color = AG.getname(AG.getcolorinterp(band)) scale = AG.getscale(band) @@ -213,7 +215,7 @@ function DD.metadata(raster::AG.RasterDataset, args...) # norvw = AG.noverview(band) units = AG.getunittype(band) filelist = AG.filelist(raster) - metadata = RA._metadatadict(GDALsource, "scale"=>scale, "offset"=>offset) + metadata = RA._metadatadict(GDALsource(), "scale"=>scale, "offset"=>offset) if units == "" metadata["units"] = units end @@ -230,11 +232,11 @@ RA.Raster(ds::AG.Dataset; kw...) = Raster(AG.RasterDataset(ds); kw...) function RA.Raster(ds::AG.RasterDataset; crs=crs(ds), mappedcrs=nothing, - dims=dims(ds, crs, mappedcrs), + dims=RA._dims(ds, crs, mappedcrs), refdims=(), name=Symbol(""), - metadata=metadata(ds), - missingval=missingval(ds), + metadata=RA._metadata(ds), + missingval=RA.missingval(ds), lazy=false, dropband=false ) @@ -322,7 +324,7 @@ _missingval_from_gdal(T, x) = x _maybe_correct_to_write(A) = _maybe_correct_to_write(lookup(A, X()), A) _maybe_correct_to_write(lookup, A) = A function _maybe_correct_to_write(lookup::Union{AbstractSampled,NoLookup}, A) - RA._maybe_use_type_missingval(A, GDALsource) |> _maybe_permute_to_gdal + RA._maybe_use_type_missingval(A, GDALsource()) |> _maybe_permute_to_gdal end _check_driver(filename::Nothing, driver) = "MEM" diff --git a/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl b/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl index b0dac999..25df0317 100644 --- a/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl +++ b/ext/RastersGRIBDatasetsExt/gribdatasets_source.jl @@ -7,12 +7,15 @@ end # In GRIBDatasets, the file is open for reading the values and closed afterwards. Base.close(os::RA.OpenStack{GRIBsource}) = nothing -function RA._open(f, ::Type{GRIBsource}, filename::AbstractString; write=false, kw...) +function RA._open(f, ::GRIBsource, filename::AbstractString; write=false, kw...) isfile(filename) || _filenotfound_error(filename) ds = GRIBDatasets.GRIBDataset(filename) - RA._open(f, GRIBsource, ds; kw...) + RA._open(f, GRIBsource(), ds; kw...) end # Hack to get the inner DiskArrays chunks as they are not exposed at the top level RA._get_eachchunk(var::GDS.Variable) = DiskArrays.eachchunk(var.values) RA._get_haschunks(var::GDS.Variable) = DiskArrays.haschunks(var.values) + +RA._sourcetrait(::GDS.Variable) = GRIBsource() +RA._sourcetrait(::GDS.GRIBDataset) = GRIBsource() diff --git a/ext/RastersHDF5Ext/smap_source.jl b/ext/RastersHDF5Ext/smap_source.jl index ba8b1697..e025d6b0 100644 --- a/ext/RastersHDF5Ext/smap_source.jl +++ b/ext/RastersHDF5Ext/smap_source.jl @@ -2,7 +2,7 @@ const SMAPMISSING = -9999.0f0 const SMAPGEODATA = "Geophysical_Data" const SMAPCRS = ProjString("+proj=cea +lon_0=0 +lat_ts=30 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs +ellps=WGS84 +towgs84=0,0,0") const SMAPSIZE = (3856, 1624) -const SMAPDIMTYPES = (X, Y) +const SMAPDIMTYPES = (X(), Y()) # Dataset wrapper ############################################################### # Becauase SMAP is just one of many HDF5 formats, @@ -16,7 +16,7 @@ RA.missingval(ds::SMAPhdf5) = SMAPMISSING RA.layerkeys(ds::SMAPhdf5) = keys(ds) RA.filekey(ds::SMAPhdf5, key::Nothing) = first(keys(ds)) -function DD.dims(wrapper::SMAPhdf5) +function _dims(wrapper::SMAPhdf5, args...) 
dataset = parent(wrapper) proj = read(HDF5.attributes(HDF5.root(dataset)["EASE2_global_projection"]), "grid_mapping_name") if proj == "lambert_cylindrical_equal_area" @@ -50,17 +50,18 @@ function DD.dims(wrapper::SMAPhdf5) end # TODO actually add metadata to the dict -DD.metadata(wrapper::SMAPhdf5) = RA._metadatadict(SMAPsource) +_metadata(wrapper::SMAPhdf5) = RA._metadatadict(SMAPsource()) -function DD.layerdims(ds::SMAPhdf5) - keys = RA.cleankeys(RA.layerkeys(ds)) +function _layerdims(ds::SMAPhdf5; layers=nothing) + keys = map(Symbol, isnothing(layers) ? RA.cleankeys(RA.layerkeys(ds)) : layers.keys) # All dims are the same - NamedTuple{keys}(map(_ -> SMAPDIMTYPES, keys)) + NamedTuple{Tuple(keys)}(map(_ -> SMAPDIMTYPES, keys)) end -function DD.layermetadata(ds::SMAPhdf5) - keys = RA.cleankeys(RA.layerkeys(ds)) - NamedTuple{keys}(map(_ -> DD.metadata(ds), keys)) +function _layermetadata(ds::SMAPhdf5; layers=nothing) + keys = map(Symbol, isnothing(layers) ? RA.cleankeys(RA.layerkeys(ds)) : layers.keys) + md = _metadata(ds) + NamedTuple{keys}(map(_ -> md, keys)) end Base.keys(ds::SMAPhdf5) = RA.cleankeys(keys(parent(ds)[SMAPGEODATA])) @@ -205,5 +206,5 @@ end _smap_timedim(t::DateTime) = _smap_timedim(t:Hour(3):t) function _smap_timedim(times::AbstractVector) - Ti(Sampled(times, ForwardOrdered(), Regular(Hour(3)), Intervals(Start()), RA._metadatadict(SMAPsource))) + Ti(Sampled(times, ForwardOrdered(), Regular(Hour(3)), Intervals(Start()), RA._metadatadict(SMAPsource()))) end diff --git a/ext/RastersNCDatasetsExt/ncdatasets_source.jl b/ext/RastersNCDatasetsExt/ncdatasets_source.jl index 73034af1..6a8dccce 100644 --- a/ext/RastersNCDatasetsExt/ncdatasets_source.jl +++ b/ext/RastersNCDatasetsExt/ncdatasets_source.jl @@ -5,13 +5,13 @@ const UNNAMED_NCD_FILE_KEY = "unnamed" const NCDAllowedType = Union{Int8,UInt8,Int16,UInt16,Int32,UInt32,Int64,UInt64,Float32,Float64,Char,String} """ - Base.write(filename::AbstractString, ::Type{<:CDMsource}, A::AbstractRaster) + Base.write(filename::AbstractString, ::NCDsource, A::AbstractRaster) Write an NCDarray to a NetCDF file using NCDatasets.jl Returns `filename`. """ -function Base.write(filename::AbstractString, ::Type{<:CDMsource}, A::AbstractRaster; +function Base.write(filename::AbstractString, ::NCDsource, A::AbstractRaster; append=false, force=false, verbose=true, kw... ) mode = if append @@ -33,7 +33,7 @@ end # Stack ######################################################################## """ - Base.write(filename::AbstractString, ::Type{NCDsource}, s::AbstractRasterStack; kw...) + Base.write(filename::AbstractString, ::NCDsource, s::AbstractRasterStack; kw...) Write an NCDstack to a single netcdf file, using NCDatasets.jl. @@ -61,7 +61,7 @@ Keywords are passed to `NCDatasets.defVar`. - `typename` (string): The name of the NetCDF type required for vlen arrays (https://web.archive.org/save/https://www.unidata.ucar.edu/software/netcdf/netcdf-4/newdocs/netcdf-c/nc_005fdef_005fvlen.html) """ -function Base.write(filename::AbstractString, ::Type{<:CDMsource}, s::AbstractRasterStack; append = false, kw...) +function Base.write(filename::AbstractString, ::NCDsource, s::AbstractRasterStack; append = false, kw...) mode = !isfile(filename) || !append ? 
"c" : "a"; ds = NCD.Dataset(filename, mode; attrib=RA._attribdict(metadata(s))) try @@ -77,11 +77,11 @@ function RA.OpenStack(fs::RA.FileStack{NCDsource,K}) where K end Base.close(os::RA.OpenStack{NCDsource}) = NCD.close(RA.dataset(os)) -function RA._open(f, ::Type{NCDsource}, filename::AbstractString; write=false, kw...) +function RA._open(f, ::NCDsource, filename::AbstractString; write=false, kw...) isfile(filename) || RA._isurl(filename) || RA._filenotfound_error(filename) mode = write ? "a" : "r" NCD.Dataset(filename, mode) do ds - RA._open(f, NCDsource, ds; kw...) + RA._open(f, NCDsource(), ds; kw...) end end @@ -151,6 +151,9 @@ end RA._get_eachchunk(var::NCD.Variable) = DiskArrays.eachchunk(var) RA._get_haschunks(var::NCD.Variable) = DiskArrays.haschunks(var) +RA._sourcetrait(::NCD.Dataset) = NCDsource() +RA._sourcetrait(::NCD.Variable) = NCDsource() + # precompilation # const _NCDVar = NCDatasets.CFVariable{Union{Missing, Float32}, 3, NCDatasets.Variable{Float32, 3, NCDatasets.NCDataset}, NCDatasets.Attributes{NCDatasets.NCDataset{Nothing}}, NamedTuple{(:fillvalue, :scale_factor, :add_offset, :calendar, :time_origin, :time_factor), Tuple{Float32, Nothing, Nothing, Nothing, Nothing, Nothing}}} diff --git a/src/Rasters.jl b/src/Rasters.jl index 8dc9f885..daa7548a 100644 --- a/src/Rasters.jl +++ b/src/Rasters.jl @@ -91,6 +91,7 @@ const DEFAULT_TABLE_DIM_KEYS = (:X, :Y) include("lookup.jl") include("dimensions.jl") +include("sources/sources.jl") include("filearray.jl") include("filestack.jl") include("openstack.jl") @@ -102,7 +103,6 @@ include("crs.jl") const RasterStackOrArray = Union{AbstractRasterStack,AbstractRaster} const RasterSeriesOrStack = Union{AbstractRasterSeries,AbstractRasterStack} -include("sources/sources.jl") include("utils.jl") include("skipmissing.jl") diff --git a/src/array.jl b/src/array.jl index 2c6c5081..30ce9337 100644 --- a/src/array.jl +++ b/src/array.jl @@ -4,7 +4,7 @@ const FLATTEN_IGNORE = Union{Dict,Set,Base.MultiplicativeInverses.SignedMultipli """ AbstractRaster <: DimensionalData.AbstractDimArray -Abstract supertype for objects that wrap an array (or location of an array) +Abstract supertype for objects that wrap an array (or location of an array) and metadata about its contents. It may be memory or hold a `FileArray`, which holds the filename, and is only opened when required. @@ -46,7 +46,7 @@ isdisk(A::AbstractRaster) = parent(A) isa DiskArrays.AbstractDiskArray isdisk(x) = false ismem(x) = !isdisk(x) -function Base.:(==)(A::AbstractRaster{T,N}, B::AbstractRaster{T,N}) where {T,N} +function Base.:(==)(A::AbstractRaster{T,N}, B::AbstractRaster{T,N}) where {T,N} size(A) == size(B) && all(A .== B) end for f in (:mappedbounds, :projectedbounds, :mappedindex, :projectedindex) @@ -95,7 +95,7 @@ DiskArrays.haschunks(A::AbstractRaster) = DiskArrays.haschunks(parent(A)) function DA.readblock!(A::AbstractRaster, dst, r::AbstractUnitRange...) DA.readblock!(parent(A), dst, r...) end -function DA.writeblock!(A::AbstractRaster, src, r::AbstractUnitRange...) +function DA.writeblock!(A::AbstractRaster, src, r::AbstractUnitRange...) DA.writeblock!(parent(A), src, r...) end @@ -161,7 +161,7 @@ function _open_many(f, A::AbstractRaster, fas::Tuple, oas::Tuple; kw...) end end function _open_many(f, A::AbstractRaster, fas::Tuple{}, oas::Tuple; kw...) 
- data = Flatten.reconstruct(parent(A), oas, FLATTEN_SELECT, FLATTEN_IGNORE) + data = Flatten.reconstruct(parent(A), oas, FLATTEN_SELECT, FLATTEN_IGNORE) openraster = Raster(data, dims(A), refdims(A), name(A), metadata(A), missingval(A)) f(openraster) end @@ -178,7 +178,7 @@ end A generic [`AbstractRaster`](@ref) for spatial/raster array data. It may hold memory-backed arrays or [`FileArray`](@ref), that simply holds the `String` path to an unopened file. This will only be opened lazily when it is indexed with `getindex` -or when `read(A)` is called. Broadcasting, taking a view, reversing and most other +or when `read(A)` is called. Broadcasting, taking a view, reversing and most other methods _do not_ load data from disk: they are applied later, lazily. # Keywords @@ -192,14 +192,14 @@ methods _do not_ load data from disk: they are applied later, lazily. values in the raster, it simply assigns which value is treated as missing. To replace all of the missing values in the raster, use [`replace_missing`](@ref). - `metadata`: `ArrayMetadata` object for the array, or `NoMetadata()`. -- `crs`: the coordinate reference system of the objects `XDim`/`YDim` dimensions. +- `crs`: the coordinate reference system of the objects `XDim`/`YDim` dimensions. Only set this if you know the detected crs is incrorrect, or it is not present in - the file. The `crs` is expected to be a GeoFormatTypes.jl `CRS` or `Mixed` `GeoFormat` type. + the file. The `crs` is expected to be a GeoFormatTypes.jl `CRS` or `Mixed` `GeoFormat` type. - `mappedcrs`: the mapped coordinate reference system of the objects `XDim`/`YDim` dimensions. for `Mapped` lookups these are the actual values of the index. For `Projected` lookups this can be used to index in eg. `EPSG(4326)` lat/lon values, having it converted automatically. Only set this if the detected `mappedcrs` in incorrect, or the file does not have a `mappedcrs`, - e.g. a tiff. The `mappedcrs` is expected to be a GeoFormatTypes.jl `CRS` or `Mixed` `GeoFormat` type. + e.g. a tiff. The `mappedcrs` is expected to be a GeoFormatTypes.jl `CRS` or `Mixed` `GeoFormat` type. - `dropband`: drop single band dimensions. `true` by default. # Internal Keywords @@ -217,70 +217,71 @@ struct Raster{T,N,D<:Tuple,R<:Tuple,A<:AbstractArray{T,N},Na,Me,Mi} <: AbstractR metadata::Me missingval::Mi end -function Raster(A::AbstractArray, dims::Tuple; +function Raster(A::AbstractArray{T,N}, dims::Tuple; refdims=(), name=Symbol(""), metadata=NoMetadata(), missingval=missing, crs=nothing, mappedcrs=nothing -) +)::Raster{T,N} where {T,N} A = Raster(A, Dimensions.format(dims, A), refdims, name, metadata, missingval) A = isnothing(crs) ? A : setcrs(A, crs) A = isnothing(mappedcrs) ? A : setmappedcrs(A, mappedcrs) return A end -function Raster(A::AbstractArray{<:Any,1}, dims::Tuple{<:Dimension,<:Dimension,Vararg}; kw...) +function Raster(A::AbstractArray{T,1}, dims::Tuple{<:Dimension,<:Dimension,Vararg}; + kw... +)::Raster{T,length(dims)} where T Raster(reshape(A, map(length, dims)), dims; kw...) end -function Raster(table, dims::Tuple; name=first(_not_a_dimcol(table, dims)), kw...) +function Raster(table, dims::Tuple; name=first(_not_a_dimcol(table, dims)), kw...)::Raster Tables.istable(table) || throw(ArgumentError("First argument to `Raster` is not a table or other known object: $table")) isnothing(name) && throw(UndefKeywordError(:name)) cols = Tables.columns(table) A = reshape(cols[name], map(length, dims)) return Raster(A, dims; name, kw...) end -Raster(A::AbstractArray; dims, kw...) 
= Raster(A, dims; kw...) +Raster(A::AbstractArray; dims, kw...) = Raster(A, dims; kw...)::Raster function Raster(A::AbstractDimArray; data=parent(A), dims=dims(A), refdims=refdims(A), name=name(A), metadata=metadata(A), missingval=missingval(A), kw... -) +)::Raster return Raster(data, dims; refdims, name, metadata, missingval, kw...) end -function Raster(filename::AbstractString, dims::Tuple{<:Dimension,<:Dimension,Vararg}; kw...) +function Raster(filename::AbstractString, dims::Tuple{<:Dimension,<:Dimension,Vararg}; kw...)::Raster Raster(filename; dims, kw...) end function Raster(filename::AbstractString; name=nothing, key=name, source=nothing, kw... -) - source = isnothing(source) ? _sourcetype(filename) : _sourcetype(source) - _open(filename; source) do ds - key = filekey(ds, key) - Raster(ds, filename, key; source, kw...) - end +)::Raster + source = _sourcetrait(filename, source) + Base.invokelatest() do + _open(filename; source) do ds + key = filekey(ds, key) + Raster(ds, filename, key; source, kw...) + end::Raster + end::Raster end function Raster(ds, filename::AbstractString, key=nothing; crs=nothing, mappedcrs=nothing, dims=nothing, refdims=(), name=Symbol(key isa Nothing ? "" : string(key)), - metadata=metadata(ds), missingval=missingval(ds), source=nothing, write=false, lazy=false, dropband=true, -) - source = isnothing(source) ? _sourcetype(filename) : _sourcetype(source) + metadata=_metadata(ds), missingval=missingval(ds) +)::Raster + source = _sourcetrait(filename, source) crs = defaultcrs(source, crs) mappedcrs = defaultmappedcrs(source, mappedcrs) - dims = dims isa Nothing ? DD.dims(ds, crs, mappedcrs) : dims - data = if lazy - FileArray{source}(ds, filename; key, write) - else - _open(source, ds; key) do A - _checkmem(A) - Array(A) + data, dims = _open(source, ds; key) do var + dims1 = dims isa Nothing ? _dims(var, crs, mappedcrs) : dims + data = if lazy + FileArray{typeof(source)}(var, filename; key, write) + else + _checkmem(var) + Array(var) end + data, dims1 end raster = Raster(data, dims, refdims, name, metadata, missingval) return dropband ? _drop_single_band(raster, lazy) : raster end -function Raster{T, N, D, R, A, Na, Me, Mi}(ras::Raster{T, N, D, R, A, Na, Me, Mi}) where {T, N, D, R, A, Na, Me, Mi} - return Raster(ras.data, ras.dims, ras.refdims, ras.name, ras.metadata, ras.missingval) -end - filekey(ds, key) = key filekey(filename::String) = Symbol(splitext(basename(filename))[1]) diff --git a/src/create.jl b/src/create.jl index 9d290bb3..75e69e52 100644 --- a/src/create.jl +++ b/src/create.jl @@ -7,7 +7,7 @@ function create(filename, T, A::AbstractRaster; create(filename, T, dims(A); parent=parent(A), name, metadata, missingval, kw...) end function create(filename::AbstractString, T::Type, dims::Tuple; - lazy=true, parent=nothing, suffix=nothing, source=_sourcetype(filename), + lazy=true, parent=nothing, suffix=nothing, source::Source=_sourcetrait(filename), missingval=nothing, kw... ) filename = _maybe_add_suffix(filename, suffix) diff --git a/src/filearray.jl b/src/filearray.jl index df65f14d..9960b543 100644 --- a/src/filearray.jl +++ b/src/filearray.jl @@ -1,12 +1,12 @@ """ - FileArray{X} <: DiskArrays.AbstractDiskArray + FileArray{S} <: DiskArrays.AbstractDiskArray Filearray is a DiskArrays.jl `AbstractDiskArray`. Instead of holding an open object, it just holds a filename string that is opened lazily when it needs to be read. 
""" -struct FileArray{X,T,N,K,EC,HC} <: DiskArrays.AbstractDiskArray{T,N} +struct FileArray{S,T,N,K,EC,HC} <: DiskArrays.AbstractDiskArray{T,N} filename::String size::NTuple{N,Int} key::K @@ -14,20 +14,20 @@ struct FileArray{X,T,N,K,EC,HC} <: DiskArrays.AbstractDiskArray{T,N} haschunks::HC write::Bool end -function FileArray{X,T,N}( +function FileArray{S,T,N}( filename, size, key::K, eachchunk::EC=size, haschunks::HC=DA.Unchunked(), write=false -) where {X,T,N,K,EC,HC} - FileArray{X,T,N,K,EC,HC}(filename, size, key, eachchunk, haschunks, write) +) where {S,T,N,K,EC,HC} + FileArray{S,T,N,K,EC,HC}(filename, size, key, eachchunk, haschunks, write) end -function FileArray{X,T,N}(filename::String, size::Tuple; +function FileArray{S,T,N}(filename::String, size::Tuple; key=nothing, eachchunk=size, haschunks=DA.Unchunked(), write=false -) where {X,T,N} - FileArray{X,T,N}(filename, size, key, eachchunk, haschunks, write) +) where {S,T,N} + FileArray{S,T,N}(filename, size, key, eachchunk, haschunks, write) end -# FileArray has X, T and N parameters not recoverable from fields -ConstructionBase.constructorof(::Type{<:FileArray{X,T,N}}) where {X,T,N} = FileArray{X,T,N} +# FileArray has S, T and N parameters not recoverable from fields +ConstructionBase.constructorof(::Type{<:FileArray{S,T,N}}) where {S,T,N} = FileArray{S,T,N} filename(A::FileArray) = A.filename key(A::FileArray) = A.key @@ -36,8 +36,8 @@ DA.eachchunk(A::FileArray) = A.eachchunk DA.haschunks(A::FileArray) = A.haschunks # Run function `f` on the result of _open for the file type -function Base.open(f::Function, A::FileArray{X}; write=A.write, kw...) where X - _open(f, X, filename(A); key=key(A), write, kw...) +function Base.open(f::Function, A::FileArray{S}; write=A.write, kw...) where S + _open(f, S(), filename(A); key=key(A), write, kw...) end function DA.readblock!(A::FileArray, dst, r::AbstractUnitRange...) @@ -58,17 +58,17 @@ end A basic DiskArrays.jl wrapper for objects that don't have one defined yet. When we `open` a `FileArray` it is replaced with a `RasterDiskArray`. """ -struct RasterDiskArray{X,T,N,V,EC,HC} <: DiskArrays.AbstractDiskArray{T,N} +struct RasterDiskArray{S,T,N,V,EC,HC} <: DiskArrays.AbstractDiskArray{T,N} var::V eachchunk::EC haschunks::HC end -function RasterDiskArray{X}( +function RasterDiskArray{S}( var::V, eachchunk=DA.eachchunk(var), haschunks=DA.haschunks(var) -) where {X,V} +) where {S,V} T = eltype(var) N = ndims(var) - RasterDiskArray{X,T,N,V,typeof(eachchunk),typeof(haschunks)}(var, eachchunk, haschunks) + RasterDiskArray{S,T,N,V,typeof(eachchunk),typeof(haschunks)}(var, eachchunk, haschunks) end Base.parent(A::RasterDiskArray) = A.var diff --git a/src/filestack.jl b/src/filestack.jl index f827dc79..7262f694 100644 --- a/src/filestack.jl +++ b/src/filestack.jl @@ -1,37 +1,37 @@ """ - FileStack{X,K} + FileStack{S,K} - FileStack{X,K}(filename, types, sizes, eachchunk, haschunks, write) + FileStack{S,K}(filename, types, sizes, eachchunk, haschunks, write) A wrapper object that holds file pointer and size/chunking metadata for a multi-layered stack stored in a single file, typically netcdf or hdf5. -`X` is a backend type like `NCDsource`, and `K` is a tuple of `Symbol` keys. +`S` is a backend type like `NCDsource`, and `K` is a tuple of `Symbol` keys. 
""" -struct FileStack{X,K,F<:AbstractString,T,S,EC,HC} +struct FileStack{S,K,F<:AbstractString,T,SZ,EC,HC} filename::F types::T - sizes::S + sizes::SZ eachchunk::EC haschunks::HC write::Bool end -function FileStack{X,K}( - filename::F, types::T, sizes::S, eachchunk::EC, haschunks::HC, write::Bool - ) where {X,K,F,T,S,EC,HC} - FileStack{X,K,F,T,S,EC,HC}(filename, types, sizes, eachchunk, haschunks, write) +function FileStack{S,K}( + filename::F, types::T, sizes::SZ, eachchunk::EC, haschunks::HC, write::Bool + ) where {S,K,F,T,SZ,EC,HC} + FileStack{S,K,F,T,SZ,EC,HC}(filename, types, sizes, eachchunk, haschunks, write) end -# FileStack has `X` parameter that is not recoverable from fields. -ConstructionBase.constructorof(::Type{<:FileStack{X,K}}) where {X,K} = FileStack{X,K} +# FileStack has `S` parameter that is not recoverable from fields. +ConstructionBase.constructorof(::Type{<:FileStack{S,K}}) where {S,K} = FileStack{S,K} filename(fs::FileStack) = fs.filename Base.keys(fs::FileStack{<:Any,K}) where K = K Base.values(fs::FileStack{<:Any}) = (fs[k] for k in keys(fs)) # Indexing FileStack returns a FileArray, # referencing a specific key in the same file. -function Base.getindex(fs::FileStack{X,K}, key::Symbol) where {X,K} +function Base.getindex(fs::FileStack{S,K}, key::Symbol) where {S,K} is = NamedTuple{K}(ntuple(identity, length(K))) i = is[key] size = fs.sizes[i] @@ -39,5 +39,5 @@ function Base.getindex(fs::FileStack{X,K}, key::Symbol) where {X,K} haschunks = fs.haschunks[i] T = fs.types[i] N = length(size) - A = FileArray{X,T,N}(filename(fs), size, key, eachchunk, haschunks, fs.write) + A = FileArray{S,T,N}(filename(fs), size, key, eachchunk, haschunks, fs.write) end diff --git a/src/read.jl b/src/read.jl index 7c4020f7..27c38038 100644 --- a/src/read.jl +++ b/src/read.jl @@ -9,6 +9,22 @@ function Base.read(x::Union{AbstractRaster,AbstractRasterStack,AbstractRasterSer _checkmem(x) modify(Array, x) end +function Base.read(st::AbstractRasterStack{<:FileStack{<:Any,K}}) where K + layers = open(st) do ost + map(K) do k + Array(parent(ost)[k]) + end + end |> NamedTuple{K} + return rebuild(st; data=layers) +end +function Base.read(st::AbstractRasterStack{<:NamedTuple{K}}) where K + layers = open(st) do ost + map(K) do k + Array(parent(ost)[k]) + end + end |> NamedTuple{K} + return rebuild(st; data=layers) +end """ read!(src::Union{AbstractString,AbstractRaster}, dst::AbstractRaster) diff --git a/src/sources/commondatamodel.jl b/src/sources/commondatamodel.jl index 84f168e6..5b57ff8a 100644 --- a/src/sources/commondatamodel.jl +++ b/src/sources/commondatamodel.jl @@ -46,9 +46,8 @@ cleanreturn(A::CFDiskArray) = Array(A) missingval(A::CFDiskArray) = missingval(parent(A)) # DimensionalData methods -DD.dims(var::CFDiskArray, args...) = DD.dims(parent(var), args...) -DD.layerdims(var::CFDiskArray, args...) = DD.layerdims(parent(var), args...) -DD.metadata(var::CFDiskArray, args...) = DD.metadata(parent(var), args...) +_dims(var::CFDiskArray, args...) = _dims(parent(var), args...) +_metadata(var::CFDiskArray, args...) = _metadata(parent(var), args...) 
# Base methods Base.parent(A::CFDiskArray) = A.var @@ -69,11 +68,14 @@ end DiskArrays.eachchunk(var::CFDiskArray) = _get_eachchunk(var) DiskArrays.haschunks(var::CFDiskArray) = _get_haschunks(var) -_get_eachchunk(var::CFDiskArray) = _get_eachchunk(var.var) +_get_eachchunk(var::CFDiskArray) = _get_eachchunk(parent(var)) _get_eachchunk(var::CDM.CFVariable) = _get_eachchunk(var.var) -_get_haschunks(var::CFDiskArray) = _get_haschunks(var.var) +_get_haschunks(var::CFDiskArray) = _get_haschunks(parent(var)) _get_haschunks(var::CDM.CFVariable) = _get_haschunks(var.var) +_sourcetrait(var::CFDiskArray) = _sourcetrait(parent(var)) +_sourcetrait(var::CDM.CFVariable) = _sourcetrait(var.var) + # CommonDataModel.jl methods for method in (:size, :name, :dimnames, :dataset, :attribnames) @eval begin @@ -90,23 +92,24 @@ end # Rasters methods for CDM types ############################### # This is usually called inside a closure and cleaned up in `cleanreturn` -function Raster(ds::AbstractDataset, filename::AbstractString, key=nothing; +function Raster(ds::AbstractDataset, filename::AbstractString, key::Nothing=nothing; source=nothing, kw... ) - @show kw - source = isnothing(source) ? _sourcetype(filename) : _sourcetype(source) - if isnothing(key) - # Find the first valid variable - for key in layerkeys(ds) - if ndims(ds[key]) > 0 - @info "No `name` or `key` keyword provided, using first valid layer with name `:$key`" - return Raster(CFDiskArray(ds[key]), filename, key; source, kw...) - end + source = isnothing(source) ? _sourcetrait(filename) : _sourcetrait(source) + # Find the first valid variable + layers = _layers(ds) + for (key, var) in zip(layers.keys, layers.vars) + if ndims(var) > 0 + @info "No `name` or `key` keyword provided, using first valid layer with name `:$key`" + return Raster(CFDiskArray(var), filename, key; source, kw...) end - throw(ArgumentError("dataset at $filename has no array variables")) - else - return Raster(CFDiskArray(ds[key]), filename, key; source, kw...) end + throw(ArgumentError("dataset at $filename has no array variables")) +end +function Raster(ds::AbstractDataset, filename::AbstractString, key::Union{AbstractString,Symbol}; + source=nothing, kw... +) + return Raster(CFDiskArray(ds[key]), filename, key; source) end function FileArray{source}(var::AbstractVariable, filename::AbstractString; kw...) where source<:CDMsource @@ -118,36 +121,37 @@ function FileArray{source}(var::AbstractVariable, filename::AbstractString; kw.. end function FileStack{source}( - ds::AbstractDataset, filename::AbstractString; write=false, keys -) where source<:CDMsource - keys = map(Symbol, keys isa Nothing ? layerkeys(ds) : keys) |> Tuple - type_size_ec_hc = map(keys) do key - var = ds[string(key)] - Union{Missing,eltype(var)}, size(var), _get_eachchunk(var), _get_haschunks(var) - end - layertypes = map(x->x[1], type_size_ec_hc) - layersizes = map(x->x[2], type_size_ec_hc) - eachchunk = map(x->x[3], type_size_ec_hc) - haschunks = map(x->x[4], type_size_ec_hc) + ds::AbstractDataset, filename::AbstractString; + write::Bool=false, keys::NTuple{N,Symbol}, vars +) where {source<:CDMsource,N} + layertypes = map(var -> Union{Missing,eltype(var)}, vars) + layersizes = map(size, vars) + eachchunk = map(_get_eachchunk, vars) + haschunks = map(_get_haschunks, vars) return FileStack{source,keys}(filename, layertypes, layersizes, eachchunk, haschunks, write) end function Base.open(f::Function, A::FileArray{source}; write=A.write, kw...) 
where source<:CDMsource - _open(source, filename(A); key=key(A), write, kw...) do var + _open(source(), filename(A); key=key(A), write, kw...) do var f(var) end end -function _open(f, ::Type{<:CDMsource}, ds::AbstractDataset; key=nothing, kw...) +function _open(f, ::CDMsource, ds::AbstractDataset; key=nothing, kw...) x = key isa Nothing ? ds : CFDiskArray(ds[_firstkey(ds, key)]) cleanreturn(f(x)) end -_open(f, ::Type{<:CDMsource}, var::CFDiskArray; kw...) = cleanreturn(f(var)) -# _open(f, ::Type{<:CDMsource}, var::CDM.CFVariable; kw...) = cleanreturn(f(CFDiskArray(var))) - -function create(filename, source::Type{<:CDMsource}, T::Union{Type,Tuple}, dims::DimTuple; - name=:layer1, keys=(name,), layerdims=map(_->dims, keys), missingval=nothing, - metadata=NoMetadata(), lazy=true, +_open(f, ::CDMsource, var::CFDiskArray; kw...) = cleanreturn(f(var)) +# _open(f, ::CDMsource, var::CDM.CFVariable; kw...) = cleanreturn(f(CFDiskArray(var))) + +# TODO fix/test this for RasterStack +function create(filename, source::CDMsource, T::Union{Type,Tuple}, dims::DimTuple; + name=:layer1, + keys=(name,), + layerdims=map(_ -> dims, keys), + missingval=nothing, + metadata=NoMetadata(), + lazy=true, ) types = T isa Tuple ? T : Ref(T) missingval = T isa Tuple ? missingval : Ref(missingval) @@ -163,19 +167,20 @@ end missingval(var::AbstractDataset) = missing missingval(var::AbstractVariable{T}) where T = missing isa T ? missing : nothing cleanreturn(A::AbstractVariable) = Array(A) -haslayers(::Type{<:CDMsource}) = true -defaultcrs(::Type{<:CDMsource}) = EPSG(4326) -defaultmappedcrs(::Type{<:CDMsource}) = EPSG(4326) +haslayers(::CDMsource) = true +defaultcrs(::CDMsource) = EPSG(4326) +defaultmappedcrs(::CDMsource) = EPSG(4326) -function layerkeys(ds::AbstractDataset) +function _layers(ds::AbstractDataset, ::Nothing=nothing) dimkeys = CDM.dimnames(ds) toremove = if "bnds" in dimkeys dimkeys = setdiff(dimkeys, ("bnds",)) boundskeys = String[] for k in dimkeys var = ds[k] - if haskey(CDM.attribs(var), "bounds") - push!(boundskeys, CDM.attribs(var)["bounds"]) + attr = CDM.attribs(var) + if haskey(attr, "bounds") + push!(boundskeys, attr["bounds"]) end end union(dimkeys, boundskeys)::Vector{String} @@ -184,60 +189,71 @@ function layerkeys(ds::AbstractDataset) end nondim = setdiff(keys(ds), toremove) grid_mapping = String[] - for k in nondim - var = ds[k] - if haskey(CDM.attribs(var), "grid_mapping") - push!(grid_mapping, CDM.attribs(var)["grid_mapping"]) + vars = map(k -> ds[k], nondim) + attrs = map(CDM.attribs, vars) + for attr in attrs + if haskey(attr, "grid_mapping") + push!(grid_mapping, attr["grid_mapping"]) end end - nondim = setdiff(nondim, grid_mapping) + bitinds = map(!in(grid_mapping), nondim) + (; + keys=nondim[bitinds], + vars=vars[bitinds], + attrs=attrs[bitinds], + ) end - -# DimensionalData methods for CDM types ############################### - -function DD.dims(var::AbstractVariable, crs=nothing, mappedcrs=nothing) - map(CDM.dimnames(var)) do key - _cdmdim(CDM.dataset(var), key, crs, mappedcrs) - end |> Tuple +function _layers(ds::AbstractDataset, keys) + vars = map(k -> ds[k], keys) + attrs = map(CDM.attribs, vars) + (; keys, vars, attrs) end -function DD.layerdims(var::AbstractVariable) - map(CDM.dimnames(var)) do dimname - _cdmdim(CDM.dataset(var), dimname) - end |> Tuple + +function _dims(var::AbstractVariable{<:Any,N}, crs=nothing, mappedcrs=nothing) where N + dimnames = CDM.dimnames(var) + ntuple(Val(N)) do i + _cdmdim(CDM.dataset(var), dimnames[i], crs, mappedcrs) + end end 
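# An illustrative sketch (not part of the patch) of the `ntuple(Val(N))` idiom used
# in `_dims` just above: building the dimension tuple from the variable's static
# rank `N` yields a fixed-length `NTuple`, where mapping over a runtime vector of
# dimension names would not. `demo_lookup` is a hypothetical stand-in for
# `_cdmdim(CDM.dataset(var), dimnames[i], crs, mappedcrs)`.
demo_lookup(name::String) = Symbol(name)

function demo_dims(var::AbstractArray{<:Any,N}, dimnames::Vector{String}) where N
    ntuple(Val(N)) do i
        demo_lookup(dimnames[i])
    end
end

demo_dims(zeros(2, 3), ["lon", "lat"])  # (:lon, :lat), an NTuple{2,Symbol}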
-DD.metadata(var::AbstractVariable) = _metadatadict(CDMsource, CDM.attribs(var)) +_metadata(var::AbstractVariable; attr=CDM.attribs(var)) = + _metadatadict(_sourcetrait(var), attr) -function DD.dims(ds::AbstractDataset, crs=nothing, mappedcrs=nothing) - @show CDM.dimnames(ds) - map(CDM.dimnames(ds)) do key - _cdmdim(ds, key, crs, mappedcrs) +function _dimdict(ds::AbstractDataset, crs=nothing, mappedcrs=nothing) + dimdict = Dict{String,Dimension}() + for dimname in CDM.dimnames(ds) + dimdict[dimname] = _cdmdim(ds, dimname, crs, mappedcrs) + end + return dimdict +end +function _dims(ds::AbstractDataset, dimdict::Dict) + map(CDM.dimnames(ds)) do dimname + dimdict[dimname] end |> Tuple end -DD.metadata(ds::AbstractDataset) = _metadatadict(CDMsource, CDM.attribs(ds)) -function DD.layerdims(ds::AbstractDataset) - keys = Tuple(layerkeys(ds)) - dimtypes = map(keys) do key - DD.layerdims(ds[string(key)]) +_metadata(ds::AbstractDataset; attr=CDM.attribs(ds)) = + _metadatadict(_sourcetrait(ds), attr) +function _layerdims(ds::AbstractDataset; layers, dimdict) + map(layers.vars) do var + map(CDM.dimnames(var)) do dimname + basedims(dimdict[dimname]) + end |> Tuple end - NamedTuple{map(Symbol, keys)}(dimtypes) -end -function DD.layermetadata(ds::AbstractDataset) - keys = Tuple(layerkeys(ds)) - dimtypes = map(keys) do k - var = ds[k] - md = DD.metadata(var) - if haskey(CDM.attribs(var), "grid_mapping") - md["grid_mapping"] = Dict(CDM.attribs(ds[CDM.attribs(var)["grid_mapping"]])) +end +function _layermetadata(ds::AbstractDataset; layers) + map(layers.attrs) do attr + md = _metadatadict(_sourcetrait(ds), attr) + if haskey(attr, "grid_mapping") + md["grid_mapping"] = Dict(CDM.attribs(ds[attr["grid_mapping"]])) end md end - NamedTuple{map(Symbol, keys)}(dimtypes) end # Utils ######################################################################## -_firstkey(ds::AbstractDataset, key::Nothing=nothing) = Symbol(first(layerkeys(ds))) +# TODO dont load all keys here with _layers +_firstkey(ds::AbstractDataset, key::Nothing=nothing) = Symbol(first(_layers(ds).keys)) _firstkey(ds::AbstractDataset, key) = Symbol(key) function _cdmdim(ds, dimname::Key, crs=nothing, mappedcrs=nothing) @@ -271,20 +287,20 @@ end # Find the matching dimension constructor. If its an unknown name # use the generic Dim with the dim name as type parameter function _cdmdimtype(attrib, dimname) - if haskey(attrib, "axis") - k = attrib["axis"] - if haskey(CDM_AXIS_MAP, k) - return CDM_AXIS_MAP[k] + if haskey(attrib, "axis") + k = attrib["axis"] + if haskey(CDM_AXIS_MAP, k) + return CDM_AXIS_MAP[k] end end if haskey(attrib, "standard_name") k = attrib["standard_name"] - if haskey(CDM_STANDARD_NAME_MAP, k) + if haskey(CDM_STANDARD_NAME_MAP, k) return CDM_STANDARD_NAME_MAP[k] end end - if haskey(CDM_DIM_MAP, dimname) - return CDM_DIM_MAP[dimname] + if haskey(CDM_DIM_MAP, dimname) + return CDM_DIM_MAP[dimname] end return DD.basetypeof(DD.key2dim(Symbol(dimname))) end @@ -292,20 +308,22 @@ end # _cdmlookup # Generate a `Lookup` from a nCDM dim. 
function _cdmlookup(ds::AbstractDataset, dimname, D::Type, crs, mappedcrs)
-    dvar = ds[dimname]
-    index = dvar[:]
-    metadata = _metadatadict(CDMsource, CDM.attribs(dvar))
-    return _cdmlookup(ds, dimname, D, index, metadata, crs, mappedcrs)
+    var = ds[dimname]
+    index = var[:]
+    attr = CDM.attribs(var)
+    metadata = _metadatadict(_sourcetrait(ds), attr)
+    return _cdmlookup(ds, var, attr, dimname, D, index, metadata, crs, mappedcrs)
 end
 # For unknown types we just make a Categorical lookup
-function _cdmlookup(ds::AbstractDataset, dimname, D::Type, index::AbstractArray, metadata, crs, mappedcrs)
+function _cdmlookup(ds::AbstractDataset, var, attr, dimname, D::Type, index::AbstractArray, metadata, crs, mappedcrs)
     Categorical(index; order=Unordered(), metadata=metadata)
 end
 # For Number and AbstractTime we generate order/span/sampling
 # We need to include `Missing` in unions in case `_FillValue` is used
 # on coordinate variables in a file and propagates here.
 function _cdmlookup(
-    ds::AbstractDataset, dimname, D::Type, index::AbstractArray{<:Union{Missing,Number,Dates.AbstractTime}},
+    ds::AbstractDataset, var, attr, dimname,
+    D::Type, index::AbstractArray{<:Union{Missing,Number,Dates.AbstractTime}},
     metadata, crs, mappedcrs
 )
     # Assume the locus is at the center of the cell if boundaries aren't provided.
@@ -391,10 +409,15 @@ function _parse_period(period_str::String)
     if mtch === nothing
         return nothing
     else
-        vals = Tuple(parse.(Int, mtch.captures))
-        periods = (Year, Month, Day, Hour, Minute, Second)
-        if length(vals) == length(periods)
-            compound = sum(map((p, v) -> p(v), periods, vals))
+        vals = map(x -> parse(Int, x), mtch.captures)
+        if length(vals) == 6
+            # Keep Month and Minute in separate variables and fold with `+` (`sum` has no 6-argument method)
+            y = Year(vals[1])
+            m = Month(vals[2])
+            d = Day(vals[3])
+            h = Hour(vals[4])
+            mi = Minute(vals[5])
+            s = Second(vals[6])
+            compound = y + m + d + h + mi + s
             if length(compound.periods) == 1
                 return compound.periods[1]
             else
@@ -413,7 +436,7 @@ _attribdict(md) = Dict{String,Any}()
 # We need to get better at guaranteeing if X/Y is actually measured in `longitude/latitude`
 # CF standards requires that we specify "units" if we use these standard names
 _cdm_set_axis_attrib!(atr, dim::X) = atr["axis"] = "X" # at["standard_name"] = "longitude";
-_cdm_set_axis_attrib!(atr, dim::Y) = atr["axis"] = "Y" # at["standard_name"] = "latitude";
+_cdm_set_axis_attrib!(atr, dim::Y) = atr["axis"] = "Y" # at["standard_name"] = "latitude";
 _cdm_set_axis_attrib!(atr, dim::Z) = (atr["axis"] = "Z"; atr["standard_name"] = "depth")
 _cdm_set_axis_attrib!(atr, dim::Ti) = (atr["axis"] = "T"; atr["standard_name"] = "time")
 _cdm_set_axis_attrib!(atr, dim) = nothing
diff --git a/src/sources/grd.jl b/src/sources/grd.jl
index d9b0d0cf..f68a52ac 100644
--- a/src/sources/grd.jl
+++ b/src/sources/grd.jl
@@ -40,7 +40,7 @@ attrib(grd::GRDattrib) = grd.attrib
 filename(grd::GRDattrib) = grd.filename
 filekey(grd::GRDattrib, key::Nothing) = get(attrib(grd), "layername", Symbol(""))

-function DD.dims(grd::GRDattrib, crs=nothing, mappedcrs=nothing)
+function _dims(grd::GRDattrib, crs=nothing, mappedcrs=nothing)
     attrib = grd.attrib
     crs = crs isa Nothing ?
ProjString(attrib["projection"]) : crs @@ -54,7 +54,7 @@ function DD.dims(grd::GRDattrib, crs=nothing, mappedcrs=nothing) yspan = (ybounds[1] - ybounds[2]) / ysize # Not fully implemented yet - xy_metadata = _metadatadict(GRDsource) + xy_metadata = _metadatadict(GRDsource()) xindex = LinRange(xbounds[1], xbounds[2] - xspan, xsize) yindex = LinRange(ybounds[2] + yspan, ybounds[1], ysize) @@ -84,10 +84,10 @@ function DD.dims(grd::GRDattrib, crs=nothing, mappedcrs=nothing) return x, y, band end -DD.name(grd::GRDattrib) = Symbol(get(grd.attrib, "layername", "")) +_name(grd::GRDattrib) = Symbol(get(grd.attrib, "layername", "")) -function DD.metadata(grd::GRDattrib, args...) - metadata = _metadatadict(GRDsource) +function _metadata(grd::GRDattrib, args...) + metadata = _metadatadict(GRDsource()) for key in ("creator", "created", "history") val = get(grd.attrib, key, "") if val != "" @@ -149,11 +149,11 @@ The extension of `filename` will be ignored. Returns `filename`. """ -function Base.write(filename::String, ::Type{GRDsource}, A::AbstractRaster; +function Base.write(filename::String, ::GRDsource, A::AbstractRaster; force=false, verbose=true, kw... ) check_can_write(filename, force) - A = _maybe_use_type_missingval(A, GRDsource) + A = _maybe_use_type_missingval(A, GRDsource()) if hasdim(A, Band) correctedA = permutedims(A, (X, Y, Band)) |> a -> reorder(a, (X(GRD_X_ORDER), Y(GRD_Y_ORDER), Band(GRD_BAND_ORDER))) @@ -222,7 +222,7 @@ function _write_grd(filename, T, dims, missingval, minvalue, maxvalue, name) end -function create(filename, ::Type{GRDsource}, T::Type, dims::DD.DimTuple; +function create(filename, ::GRDsource, T::Type, dims::DD.DimTuple; name="layer", metadata=nothing, missingval=nothing, keys=(name,), lazy=true, ) # Remove extension @@ -237,7 +237,7 @@ function create(filename, ::Type{GRDsource}, T::Type, dims::DD.DimTuple; open(basename * ".gri", write=true) do IO write(IO, FillArrays.Zeros(sze)) end - return Raster(filename; source=GRDsource, lazy) + return Raster(filename; source=GRDsource(), lazy) end # AbstractRasterStack methods @@ -248,13 +248,13 @@ function Base.open(f::Function, A::FileArray{GRDsource}, key...; write=A.write) _mmapgrd(mm -> f(RasterDiskArray{GRDsource}(mm, A.eachchunk, A.haschunks)), A; write) end -function _open(f, ::Type{GRDsource}, filename::AbstractString; key=nothing, write=false) +function _open(f, ::GRDsource, filename::AbstractString; key=nothing, write=false) isfile(filename) || _filenotfound_error(filename) - _open(f, GRDsource, GRDattrib(filename; write)) + _open(f, GRDsource(), GRDattrib(filename; write)) end -_open(f, ::Type{GRDsource}, attrib::GRDattrib; kw...) = f(attrib) +_open(f, ::GRDsource, attrib::GRDattrib; kw...) = f(attrib) -haslayers(::Type{GRDsource}) = false +haslayers(::GRDsource) = false # Utils ######################################################################## diff --git a/src/sources/sources.jl b/src/sources/sources.jl index 07505a8f..ea850095 100644 --- a/src/sources/sources.jl +++ b/src/sources/sources.jl @@ -17,35 +17,35 @@ const GDALfile = GDALsource const SMAPfile = SMAPsource const SYMBOL2SOURCE = Dict( - :gdal => GDALsource, - :grd => GRDsource, - :netcdf => NCDsource, - :grib => GRIBsource, - :smap => SMAPsource, + :gdal => GDALsource(), + :grd => GRDsource(), + :netcdf => NCDsource(), + :grib => GRIBsource(), + :smap => SMAPsource(), ) const SOURCE2SYMBOL = Dict(map(reverse, collect(pairs(SYMBOL2SOURCE)))) # File extensions. 
GDAL is the catch-all for everything else const SOURCE2EXT = Dict( - GRDsource => (".grd", ".gri"), - NCDsource => (".nc",), - GRIBsource => (".grib",), - SMAPsource => (".h5",), + GRDsource() => (".grd", ".gri"), + NCDsource() => (".nc",), + GRIBsource() => (".grib",), + SMAPsource() => (".h5",), ) const SOURCE2PACAKGENAME = Dict( - GDALsource => "ArchGDAL", - NCDsource => "NCDatasets", - GRIBsource => "GRIBDatasets", - SMAPsource => "HDF5", + GDALsource() => "ArchGDAL", + NCDsource() => "NCDatasets", + GRIBsource() => "GRIBDatasets", + SMAPsource() => "HDF5", ) const EXT2SOURCE = Dict( - ".grd" => GRDsource, - ".gri" => GRDsource, - ".nc" => NCDsource, - ".grib" => GRIBsource, - ".h5" => SMAPsource + ".grd" => GRDsource(), + ".gri" => GRDsource(), + ".nc" => NCDsource(), + ".grib" => GRIBsource(), + ".h5" => SMAPsource(), ) # exception to be raised when backend extension is not satisfied @@ -59,13 +59,15 @@ function Base.showerror(io::IO, e::BackendException) end # Get the source backend for a file extension, falling back to GDALsource -_sourcetype(filename::AbstractString) = get(EXT2SOURCE, splitext(filename)[2], GDALsource) -_sourcetype(filenames::NamedTuple) = _sourcetype(first(filenames)) -_sourcetype(filename, ext::Nothing) = _sourcetype(filename) -_sourcetype(filename, ext) = get(EXT2SOURCE, ext, GDALsource) -_sourcetype(source::Source) = typeof(source) -_sourcetype(source::Type{<:Source}) = source -function _sourcetype(name::Symbol) +_sourcetrait(filename::AbstractString, s::Source) = s +_sourcetrait(filename::AbstractString, s) = _sourcetrait(s) +_sourcetrait(filename::AbstractString, ::Nothing) = _sourcetrait(filename) +_sourcetrait(filename::AbstractString) = get(EXT2SOURCE, splitext(filename)[2], GDALsource()) +_sourcetrait(filenames::NamedTuple) = _sourcetrait(first(filenames)) +_sourcetrait(filename, ext) = get(EXT2SOURCE, ext, GDALsource()) +_sourcetrait(source::Source) = source +_sourcetrait(source::Type{<:Source}) = source() +function _sourcetrait(name::Symbol) if haskey(SYMBOL2SOURCE, name) SYMBOL2SOURCE[name] else @@ -74,10 +76,10 @@ function _sourcetype(name::Symbol) end # Internal read method -function _open(f, filename::AbstractString; source=_sourcetype(filename), kw...) +function _open(f, filename::AbstractString; source=_sourcetrait(filename), kw...) _open(f, source, filename; kw...) end -function _open(f, T::Type, filename::AbstractString; kw...) - packagename = SOURCE2PACAKGENAME[T] +function _open(f, s::Source, filename::AbstractString; kw...) + packagename = SOURCE2PACAKGENAME[s] throw(BackendException(packagename)) end diff --git a/src/stack.jl b/src/stack.jl index 5ad7a2e7..c63c4bc0 100644 --- a/src/stack.jl +++ b/src/stack.jl @@ -169,27 +169,27 @@ function RasterStack( filenames::Union{AbstractArray{<:AbstractString},Tuple{<:AbstractString,Vararg}}; name=map(filekey, filenames), keys=name, kw... ) - RasterStack(NamedTuple{cleankeys(Tuple(keys))}(Tuple(filenames)); kw...) + RasterStack(NamedTuple{cleankeys(Tuple(keys))}(filenames); kw...) end function RasterStack(filenames::NamedTuple{K,<:Tuple{<:AbstractString,Vararg}}; crs=nothing, mappedcrs=nothing, source=nothing, lazy=false, dropband=true, kw... ) where K layers = map(keys(filenames), values(filenames)) do key, fn - source = source isa Nothing ? 
_sourcetype(fn) : _sourcetype(source) + source = _sourcetrait(fn, source) crs = defaultcrs(source, crs) mappedcrs = defaultmappedcrs(source, mappedcrs) _open(source, fn; key) do ds - dims = DD.dims(ds, crs, mappedcrs) + dims = _dims(ds, crs, mappedcrs) prod(map(length, dims)) data = if lazy - FileArray{source}(ds, fn; key) + FileArray{typeof(source)}(ds, fn; key) else _open(source, ds; key) do A _checkmem(A) Array(A) end end - md = metadata(ds) + md = _metadata(ds) mv = missingval(ds) raster = Raster(data, dims; name=key, metadata=md, missingval=mv) return dropband ? _drop_single_band(raster, lazy) : raster @@ -237,7 +237,7 @@ end function RasterStack(filename::AbstractString; source=nothing, name=nothing, keys=name, lazy=false, dropband=true, kw... ) - source = isnothing(source) ? _sourcetype(filename) : _sourcetype(source) + source = _sourcetrait(filename, source) st = if isdir(filename) # Load as a whole directory filenames = readdir(filename) @@ -253,9 +253,9 @@ function RasterStack(filename::AbstractString; RasterStack(joinpath.(Ref(filename), filenames); lazy, kw...) else # Load as a single file - st = if haslayers(source) + if haslayers(source) # With multiple named layers - l_st = _layer_stack(filename; source, name, keys, kw...) + l_st = _layer_stack(filename; source, name, keys, lazy, kw...) # Maybe split the stack into separate arrays to remove extra dims. if !(keys isa Nothing) @@ -269,41 +269,89 @@ function RasterStack(filename::AbstractString; end end - # Maybe read the lazy stack to memory - st1 = lazy ? st : read(st) - # Maybe drop the Band dimension - if dropband && hasdim(st1, Band()) && size(st1, Band()) == 1 + if dropband && hasdim(st, Band()) && size(st, Band()) == 1 if lazy - return view(st1, Band(1)) # TODO fix dropdims in DiskArrays + return view(st, Band(1)) # TODO fix dropdims in DiskArrays else - return dropdims(st1; dims=Band()) + return dropdims(st; dims=Band()) end else - return st1 + return st end end function _layer_stack(filename; dims=nothing, refdims=(), metadata=nothing, crs=nothing, mappedcrs=nothing, layerdims=nothing, layermetadata=nothing, missingval=nothing, - source=nothing, name=nothing, keys=name, resize=nothing, kw... + source=nothing, name=nothing, keys=name, resize=nothing, lazy=false, kw... ) crs = defaultcrs(source, crs) mappedcrs = defaultmappedcrs(source, mappedcrs) data, field_kw = _open(filename; source) do ds - dims = dims isa Nothing ? DD.dims(ds, crs, mappedcrs) : dims + layers = _layers(ds, keys) + # Create a Dict of dimkey => Dimension to use in `dim` and `layerdims` + dimdict = _dimdict(ds, crs, mappedcrs) refdims = refdims == () || refdims isa Nothing ? () : refdims - layerdims = layerdims isa Nothing ? DD.layerdims(ds) : layerdims - metadata = metadata isa Nothing ? DD.metadata(ds) : metadata - layermetadata = layermetadata isa Nothing ? DD.layermetadata(ds) : layermetadata + metadata = metadata isa Nothing ? _metadata(ds) : metadata + layerdims = layerdims isa Nothing ? _layerdims(ds; layers, dimdict) : layerdims + dims = _sort_by_layerdims(dims isa Nothing ? _dims(ds, dimdict) : dims, layerdims) + layermetadata = layermetadata isa Nothing ? _layermetadata(ds; layers) : layermetadata missingval = missingval isa Nothing ? 
Rasters.missingval(ds) : missingval
-        data = FileStack{source}(ds, filename; keys)
-        data, (; dims, refdims, layerdims, metadata, layermetadata, missingval)
+        tuplekeys = Tuple(map(Symbol, layers.keys))
+        data = if lazy
+            FileStack{typeof(source)}(ds, filename; keys=tuplekeys, vars=Tuple(layers.vars))
+        else
+            NamedTuple{tuplekeys}(map(Array, layers.vars))
+        end
+        data, (; dims, refdims, layerdims=NamedTuple{tuplekeys}(layerdims), metadata, layermetadata=NamedTuple{tuplekeys}(layermetadata), missingval)
     end
     return RasterStack(data; field_kw..., kw...)
 end

+# Try to sort the dimensions by layer dimension into a sensible
+# order that applies without permutation, preferring the layers
+# with most dimensions, and those that come first.
+# Intentionally not type-stable
+function _sort_by_layerdims(dims, layerdims)
+    dimlist = union(layerdims)
+    currentorder = nothing
+    for i in length(dims):-1:1
+        for ldims in dimlist
+            length(ldims) == i || continue
+            currentorder = _merge_dimorder(ldims, currentorder)
+        end
+    end
+    return DD.dims(dims, currentorder)
+end
+
+_merge_dimorder(neworder, ::Nothing) = neworder
+function _merge_dimorder(neworder, currentorder)
+    ods = otherdims(neworder, currentorder)
+    outorder = currentorder
+    for od in ods
+        # Get the dims position in current order
+        i = findfirst(d -> d == od, neworder)
+        found = false
+        # Find the next dimension that is in the outorder
+        for j in 1:length(ods)
+            if length(neworder) >= (i + j)
+                nextd = neworder[i + j]
+                if nextd in outorder
+                    n = dimnum(outorder, nextd)
+                    outorder = (outorder[1:n-1]..., od, outorder[n:end]...)
+                    found = true
+                    break
+                end
+            end
+        end
+        if !found
+            outorder = (outorder..., od)
+        end
+    end
+    return outorder
+end
+
 # Stack from a Raster
 function RasterStack(A::Raster;
     layersfrom=nothing, name=nothing, keys=name, metadata=metadata(A), refdims=refdims(A), kw...
@@ -362,16 +410,21 @@ function RasterStack(s::AbstractDimStack; name=cleankeys(Base.keys(s)), keys=nam
     return set(st, dims...)
 end

-function DD.modify(f, s::AbstractRasterStack{<:FileStack})
+function DD.modify(f, s::AbstractRasterStack{<:FileStack{<:Any,K}}) where K
     open(s) do o
-        map(a -> modify(f, a), o)
+        layers = map(K) do k
+            f(Array(parent(o)[k]))
+        end |> NamedTuple{K}
+        rebuild(s; data=layers)
     end
 end

 # Open a single file stack
-function Base.open(f::Function, st::AbstractRasterStack{<:FileStack}; kw...)
+function Base.open(f::Function, st::AbstractRasterStack{<:FileStack{<:Any,K}}; kw...) where K
     ost = OpenStack(parent(st))
-    out = f(rebuild(st; data=ost))
+    layers = map(K) do k
+        ost[k]
+    end |> NamedTuple{K}
+    out = f(rebuild(st; data=layers))
     close(ost)
     return out
 end
@@ -418,9 +471,9 @@ Base.convert(::Type{RasterStack}, src::AbstractDimStack) = RasterStack(src)

 Raster(stack::RasterStack) = cat(values(stack)...; dims=Band([keys(stack)...]))

-defaultcrs(T::Type, crs) = crs
-defaultcrs(T::Type, ::Nothing) = defaultcrs(T)
-defaultcrs(T::Type) = nothing
-defaultmappedcrs(T::Type, crs) = crs
-defaultmappedcrs(T::Type, ::Nothing) = defaultmappedcrs(T)
-defaultmappedcrs(T::Type) = nothing
+defaultcrs(::Source, crs) = crs
+defaultcrs(s::Source, ::Nothing) = defaultcrs(s)
+defaultcrs(x) = nothing
+defaultmappedcrs(::Source, crs) = crs
+defaultmappedcrs(s::Source, ::Nothing) = defaultmappedcrs(s)
+defaultmappedcrs(::Source) = nothing
diff --git a/src/utils.jl b/src/utils.jl
index 4c434ef4..0ba449b9 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -22,7 +22,7 @@ nolookup_to_sampled(dims::DimTuple) = map(nolookup_to_sampled, dims)
 nolookup_to_sampled(d::Dimension) =
     lookup(d) isa NoLookup ?
set(d, Sampled(; sampling=Points())) : d -function _maybe_use_type_missingval(A::AbstractRaster{T}, source::Type{<:Source}) where T +function _maybe_use_type_missingval(A::AbstractRaster{T}, source::Source) where T if ismissing(missingval(A)) newmissingval = _type_missingval(Missings.nonmissingtype(T)) A1 = replace_missing(A, newmissingval) @@ -34,14 +34,15 @@ function _maybe_use_type_missingval(A::AbstractRaster{T}, source::Type{<:Source} end # Create a standardisted Metadata object of source T, containing a `Dict{String,Any}` -_metadatadict(T::Type, p1::Pair, pairs::Pair...) = _metadatadict(T, (p1, pairs...)) -_metadatadict(::Type{T}) where T = Metadata{T}(Dict{String,Any}()) -function _metadatadict(::Type{T}, pairs) where T +_metadatadict(s::Source, p1::Pair, pairs::Pair...) = + _metadatadict(s, (p1, pairs...)) +_metadatadict(::S) where S<:Source = Metadata{S}(Dict{String,Any}()) +function _metadatadict(::S, pairs) where S<:Source dict = Dict{String,Any}() for (k, v) in pairs dict[String(k)] = v end - return Metadata{T}(dict) + return Metadata{S}(dict) end # We often need to convert the locus and the lookup in the same step, diff --git a/src/write.jl b/src/write.jl index 78d6d275..a1041b85 100644 --- a/src/write.jl +++ b/src/write.jl @@ -6,13 +6,13 @@ Write an [`AbstractRaster`](@ref) to file, guessing the backend from the file ex Keyword arguments are passed to the `write` method for the backend. """ function Base.write( - filename::AbstractString, A::AbstractRaster; source=_sourcetype(filename), kw... + filename::AbstractString, A::AbstractRaster; source=_sourcetrait(filename), kw... ) write(filename, source, A; kw...) end Base.write(A::AbstractRaster; kw...) = write(filename(A), A; kw...) function Base.write( - filename::AbstractString, source::Type{<:Source}, A::Union{AbstractRaster,AbstractRasterStack}; kw... + filename::AbstractString, source::Source, A::Union{AbstractRaster,AbstractRasterStack}; kw... ) missing_package = SOURCE2PACAKGENAME[source] error("Missing package extension for $source. Run `using $missing_package` before useing `write` for this file.") @@ -32,7 +32,7 @@ Other keyword arguments are passed to the `write` method for the backend. If the source can't be saved as a stack-like object, individual array layers will be saved. """ function Base.write(path::AbstractString, s::AbstractRasterStack; - suffix=nothing, ext=nothing, source=_sourcetype(path, ext), verbose=true, kw... + suffix=nothing, ext=nothing, source=_sourcetrait(path, ext), verbose=true, kw... ) if haslayers(source) write(path, source, s; kw...) @@ -75,7 +75,7 @@ See other docs for `write`. All keywords are passed through to `Raster` and `Ras """ function Base.write( path::AbstractString, A::AbstractRasterSeries; - ext=nothing, source=_sourcetype(path, ext), kw... + ext=nothing, source=_sourcetrait(path, ext), kw... ) base, name_ext = splitext(path) ext = isnothing(ext) ? 
name_ext : ext diff --git a/test/sources/gdal.jl b/test/sources/gdal.jl index a79597d1..db069d54 100644 --- a/test/sources/gdal.jl +++ b/test/sources/gdal.jl @@ -10,7 +10,7 @@ gdalpath = maybedownload(url) @testset "Raster" begin @test_throws ArgumentError Raster("notafile.tif") - @time gdalarray = Raster(gdalpath; name=:test) + @time gdalarray = Raster(gdalpath; name=:test); @time lazyarray = Raster(gdalpath; lazy=true); @time eagerarray = Raster(gdalpath; lazy=false); diff --git a/test/sources/grd.jl b/test/sources/grd.jl index 6abc210e..f9dca821 100644 --- a/test/sources/grd.jl +++ b/test/sources/grd.jl @@ -201,7 +201,7 @@ grdpath = stem * ".gri" @testset "3d with subset" begin geoA = grdarray[1:100, 1:50, 1:2] filename = tempname() * ".grd" - write(filename, GRDsource, geoA) + write(filename, GRDsource(), geoA) saved = Raster(filename) @test size(saved) == size(geoA) @test refdims(saved) == () @@ -239,7 +239,7 @@ grdpath = stem * ".gri" @testset "to gdal" begin # No Band gdalfilename = tempname() * ".tif" - write(gdalfilename, GDALsource, grdarray[Band(1)]) + write(gdalfilename, GDALsource(), grdarray[Band(1)]) gdalarray = Raster(gdalfilename) # @test convert(ProjString, crs(gdalarray)) == convert(ProjString, EPSG(4326)) @test val(dims(gdalarray, X)) ≈ val(dims(grdarray, X)) diff --git a/test/sources/gribdatasets.jl b/test/sources/gribdatasets.jl index b7770feb..3779e68d 100644 --- a/test/sources/gribdatasets.jl +++ b/test/sources/gribdatasets.jl @@ -1,5 +1,5 @@ using Rasters, Test, GRIBDatasets -using Rasters: FileArray, CDMsource +using Rasters: FileArray, FileStack, GRIBsource using Rasters.Lookups, Rasters.Dimensions using Statistics using Dates @@ -27,15 +27,18 @@ era5 = joinpath(gribexamples_dir, "era5-levels-members.grib") @testset "Raster" begin @time gribarray = Raster(era5) - @time lazyarray = Raster(era5; lazy=true); + @time lazyarray = Raster(era5; lazy=true) @time lazystack = RasterStack(era5; lazy=true); - @time eagerstack = RasterStack(era5; lazy=false); + @time eagerstack = RasterStack(era5; lazy=false) @time ds = GRIBDataset(era5); @testset "lazyness" begin - @time Raster(era5); @test parent(gribarray) isa Array @test parent(lazyarray) isa FileArray + @test parent(eagerstack) isa NamedTuple + @test parent(lazystack) isa FileStack + @test parent(eagerstack[:t]) isa Array + @test parent(lazystack[:t]) isa FileArray end @testset "read" begin @@ -52,26 +55,26 @@ era5 = joinpath(gribexamples_dir, "era5-levels-members.grib") end @testset "stack, compare to GRIBDataset" begin - stack = RasterStack(era5; lazy = true) - ds = GRIBDataset(era5) - - diff = stack[:z][:,:,1,1,1] - ds["z"][:,:,1,1,1] - - @test all(diff .== 0.) 
+ @test eagerstack[:z][Z=1, Ti=1, number=1] == + lazystack[:z][Z=1, Ti=1, number=1] == + ds["z"][:, :, 1, 1, 1] end - @testset "eager stack" begin - t = eagerstack[:t] - @test t[:,:,2,3,1] isa AbstractMatrix + @testset "stack properties" begin + @test size(eagerstack) == size(lazystack) == (120, 61, 2, 10, 4) + @test keys(eagerstack) == keys(lazystack) == (:z, :t) + @test dims(eagerstack) isa Tuple{<:X,<:Y,<:Z,<:Dim{:number},<:Ti} + @test dims(lazystack) isa Tuple{<:X,<:Y,<:Z,<:Dim{:number},<:Ti} + @test missingval(eagerstack) === missingval(lazystack) === missing end @testset "array properties" begin dsvar = ds["z"] @test size(gribarray) == size(dsvar) @test gribarray isa Raster - @test index(gribarray, Ti) == DateTime(2017, 1, 1):Hour(12):DateTime(2017, 1, 2, 12) - @test index(gribarray, Y) == 90.0:-3.0:-90.0 - @test index(gribarray, X) == 0.0:3.0:357.0 + @test lookup(gribarray, Ti) == DateTime(2017, 1, 1):Hour(12):DateTime(2017, 1, 2, 12) + @test lookup(gribarray, Y) == 90.0:-3.0:-90.0 + @test lookup(gribarray, X) == 0.0:3.0:357.0 end @testset "dimensions" begin @@ -94,7 +97,7 @@ era5 = joinpath(gribexamples_dir, "era5-levels-members.grib") @testset "other fields" begin @test ismissing(missingval(gribarray)) - @test metadata(gribarray) isa Metadata{CDMsource,Dict{String,Any}} + @test metadata(gribarray) isa Metadata{GRIBsource,Dict{String,Any}} end @testset "indexing" begin diff --git a/test/sources/ncdatasets.jl b/test/sources/ncdatasets.jl index 11474d5d..1196f857 100644 --- a/test/sources/ncdatasets.jl +++ b/test/sources/ncdatasets.jl @@ -8,6 +8,7 @@ include(joinpath(testdir, "test_utils.jl")) ncexamples = "https://www.unidata.ucar.edu/software/netcdf/examples/" ncsingle = maybedownload(joinpath(ncexamples, "tos_O1_2001-2002.nc")) ncmulti = maybedownload(joinpath(ncexamples, "test_echam_spectral.nc")) +maybedownload(joinpath(ncexamples, "test_echam_spectral.nc")) stackkeys = ( :abso4, :aclcac, :aclcov, :ahfcon, :ahfice, :ahfl, :ahfliac, :ahfllac, @@ -42,9 +43,10 @@ stackkeys = ( end @testset "Raster" begin - @time ncarray = Raster(ncsingle) - @time lazyarray = Raster(ncsingle; lazy=true); - @time eagerarray = Raster(ncsingle; lazy=false); + @time ncarray = Raster(ncsingle); + + @time lazyarray = Raster(ncsingle; lazy=true) + @time eagerarray = Raster(ncsingle; lazy=false) @test_throws ArgumentError Raster("notafile.nc") @testset "lazyness" begin @@ -67,7 +69,7 @@ end end @testset "read" begin - @time A = read(ncarray); + @time A = read(lazyarray); @test A isa Raster @test parent(A) isa Array A2 = copy(A) .= 0 @@ -357,12 +359,12 @@ end end @testset "Single file stack" begin - @time ncstack = RasterStack(ncmulti) + @time ncstack = RasterStack(ncmulti); @testset "lazyness" begin @time read(RasterStack(ncmulti)); @time lazystack = RasterStack(ncmulti; lazy=true) - @time eagerstack = RasterStack(ncmulti; lazy=false); + @time eagerstack = RasterStack(ncmulti; lazy=false) # Lazy is the default @test parent(ncstack[:xi]) isa Array @test parent(lazystack[:xi]) isa FileArray @@ -378,6 +380,11 @@ end rm(no_ext) end + @testset "size and dim order" begin + @test size(ncstack) == (192, 96, 2, 2080, 47, 8, 48) + @test dims(ncstack) isa Tuple{<:X,<:Y,<:Dim{:complex},<:Dim{:spc},<:Z,<:Ti,<:Dim{:ilev}} + end + @testset "crs" begin st = RasterStack(ncmulti; crs=EPSG(3857), mappedcrs=EPSG(3857)) @test crs(st) == EPSG(3857) @@ -398,7 +405,7 @@ end @testset "load ncstack" begin @test ncstack isa RasterStack - @test all(ismissing, missingval(ncstack)) + @test ismissing(missingval(ncstack)) @test 
dims(ncstack[:abso4]) == dims(ncstack, (X, Y, Ti)) @test refdims(ncstack) == () # Loads child as a regular Raster
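# A hedged end-to-end sketch of the instance-based API these tests exercise,
# assuming Rasters.jl with this patch plus ArchGDAL are available (the path is a
# temporary file created here, not a fixture from the test suite).
using Rasters, ArchGDAL
A = Raster(rand(Float32, 10, 10), (X(1:10), Y(1:10)); missingval=typemin(Float32))
tif = tempname() * ".tif"
write(tif, Rasters.GDALsource(), A)        # sources are passed as instances now
B = Raster(tif; source=:gdal, lazy=true)   # Symbols still resolve via _sourcetrait
parent(read(B)) isa Array                  # lazy until explicitly read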