diff --git a/src/JLD.jl b/src/JLD.jl index 1950e7e..d882b5d 100644 --- a/src/JLD.jl +++ b/src/JLD.jl @@ -138,8 +138,8 @@ function close(f::JldFile) isdefined(f, :gref) && close(f.gref) # Ensure that all other datasets, groups, and datatypes are closed (ref #176) - for obj_id in HDF5.h5f_get_obj_ids(f.plain.id, HDF5.H5F_OBJ_DATASET | HDF5.H5F_OBJ_GROUP | HDF5.H5F_OBJ_DATATYPE) - HDF5.h5o_close(obj_id) + for obj_id in HDF5.API.h5f_get_obj_ids(f.plain.id, HDF5.API.H5F_OBJ_DATASET | HDF5.API.H5F_OBJ_GROUP | HDF5.API.H5F_OBJ_DATATYPE) + HDF5.API.h5o_close(obj_id) end # Close file @@ -168,20 +168,20 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo error("File ", filename, " cannot be found") end version = version_current - pa = create_property(HDF5.H5P_FILE_ACCESS) - # HDF5.h5p_set_libver_bounds(pa, HDF5.H5F_LIBVER_18, HDF5.H5F_LIBVER_18) + fapl = HDF5.FileAccessProperties() # fapl + # HDF5.h5p_set_libver_bounds(fapl, HDF5.H5F_LIBVER_18, HDF5.H5F_LIBVER_18) try - pa[:fclose_degree] = HDF5.H5F_CLOSE_STRONG + fapl.fclose_degree = HDF5.API.H5F_CLOSE_STRONG if cr && (tr || !isfile(filename)) # We're truncating, so we don't have to check the format of an existing file # Set the user block to 512 bytes, to save room for the header - p = create_property(HDF5.H5P_FILE_CREATE) + fcpl = HDF5.FileCreateProperties() # fcpl local f try - p[:userblock] = 512 - f = HDF5.h5f_create(filename, HDF5.H5F_ACC_TRUNC, p.id, pa.id) + fcpl.userblock = 512 + f = HDF5.API.h5f_create(filename, HDF5.API.H5F_ACC_TRUNC, fcpl, fapl) finally - close(p) + close(fcpl) end fj = JldFile(HDF5.File(f, filename, false), version, true, true, mmaparrays, compatible, compress) # Record creator information. Don't use any fancy types here, @@ -216,7 +216,7 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo if version < v"0.1.0" fj = JLD00.jldopen(filename, rd, wr, cr, tr, ff; mmaparrays=mmaparrays) else - f = HDF5.h5f_open(filename, wr ? 
HDF5.H5F_ACC_RDWR : HDF5.H5F_ACC_RDONLY, pa.id) + f = HDF5.API.h5f_open(filename, wr ? HDF5.API.H5F_ACC_RDWR : HDF5.API.H5F_ACC_RDONLY, fapl) fj = JldFile(HDF5.File(f, filename, false), version, true, cr|wr, mmaparrays, compatible, compress) # Load any required files/packages if haskey(fj, pathrequire) @@ -241,7 +241,7 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo end end finally - close(pa) + close(fapl) end return fj end @@ -286,20 +286,20 @@ function creator(file::JldFile, key::AbstractString) end end -function jldobject(obj_id::HDF5.hid_t, parent) - obj_type = HDF5.h5i_get_type(obj_id) - obj_type == HDF5.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) : - obj_type == HDF5.H5I_DATATYPE ? HDF5.Datatype(obj_id) : - obj_type == HDF5.H5I_DATASET ? JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) : +function jldobject(obj_id::HDF5.API.hid_t, parent) + obj_type = HDF5.API.h5i_get_type(obj_id) + obj_type == HDF5.API.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) : + obj_type == HDF5.API.H5I_DATATYPE ? HDF5.Datatype(obj_id) : + obj_type == HDF5.API.H5I_DATASET ? 
JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) : error("Invalid object type for path ", path) end getindex(parent::Union{JldFile, JldGroup}, path::String) = - jldobject(HDF5.h5o_open(parent.plain.id, path, HDF5.H5P_DEFAULT), parent) + jldobject(HDF5.API.h5o_open(parent.plain.id, path, HDF5.API.H5P_DEFAULT), parent) function getindex(parent::Union{JldFile, JldGroup, JldDataset}, r::HDF5.Reference) r == HDF5.Reference() && error("Reference is null") - obj_id = HDF5.h5r_dereference(parent.plain.id, HDF5.H5P_DEFAULT, HDF5.H5R_OBJECT, r) + obj_id = HDF5.API.h5r_dereference(parent.plain.id, HDF5.API.H5P_DEFAULT, HDF5.API.H5R_OBJECT, r) jldobject(obj_id, parent) end @@ -378,17 +378,17 @@ end function read(obj::JldDataset) dtype = datatype(obj.plain) - dspace_id = HDF5.h5d_get_space(obj.plain) - extent_type = HDF5.h5s_get_simple_extent_type(dspace_id) + dspace_id = HDF5.API.h5d_get_space(obj.plain) + extent_type = HDF5.API.h5s_get_simple_extent_type(dspace_id) try - if extent_type == HDF5.H5S_SCALAR + if extent_type == HDF5.API.H5S_SCALAR # Scalar value return read_scalar(obj, dtype, jldatatype(file(obj), dtype)) - elseif extent_type == HDF5.H5S_SIMPLE - return read_array(obj, dtype, dspace_id, HDF5.H5S_ALL) - elseif extent_type == HDF5.H5S_NULL + elseif extent_type == HDF5.API.H5S_SIMPLE + return read_array(obj, dtype, dspace_id, HDF5.API.H5S_ALL) + elseif extent_type == HDF5.API.H5S_NULL # Empty array - if HDF5.h5t_get_class(dtype) == HDF5.H5T_REFERENCE + if HDF5.API.h5t_get_class(dtype) == HDF5.API.H5T_REFERENCE T = refarray_eltype(obj) else T = jldatatype(file(obj), dtype) @@ -401,7 +401,7 @@ function read(obj::JldDataset) end end finally - HDF5.h5s_close(dspace_id) + HDF5.API.h5s_close(dspace_id) end end @@ -424,9 +424,9 @@ end ## Arrays # Read an array -function read_array(obj::JldDataset, dtype::HDF5.Datatype, dspace_id::HDF5.hid_t, dsel_id::HDF5.hid_t, - dims::Tuple{Vararg{Int}} = 
(Int.(reverse!(HDF5.h5s_get_simple_extent_dims(dspace_id)[1]))...,)) - if HDF5.h5t_get_class(dtype) == HDF5.H5T_REFERENCE +function read_array(obj::JldDataset, dtype::HDF5.Datatype, dspace_id::HDF5.API.hid_t, dsel_id::HDF5.API.hid_t, + dims::Tuple{Vararg{Int}} = (Int.(reverse!(HDF5.API.h5s_get_simple_extent_dims(dspace_id)[1]))...,)) + if HDF5.API.h5t_get_class(dtype) == HDF5.API.H5T_REFERENCE val = read_refs(obj, refarray_eltype(obj), dspace_id, dsel_id, dims) else val = read_vals(obj, dtype, jldatatype(file(obj), dtype), dspace_id, dsel_id, dims) @@ -436,27 +436,27 @@ end # Arrays of basic HDF5 kinds function read_vals(obj::JldDataset, dtype::HDF5.Datatype, T::Union{Type{S}, Type{Complex{S}}}, - dspace_id::HDF5.hid_t, dsel_id::HDF5.hid_t, dims::Tuple{Vararg{Int}}) where {S<:HDF5.BitsType} + dspace_id::HDF5.API.hid_t, dsel_id::HDF5.API.hid_t, dims::Tuple{Vararg{Int}}) where {S<:HDF5.BitsType} if S === Bool && obj.file.version < v"0.1.3" return read_vals_default(obj, dtype, T, dspace_id, dsel_id, dims) end - if obj.file.mmaparrays && HDF5.iscontiguous(obj.plain) && dsel_id == HDF5.H5S_ALL + if obj.file.mmaparrays && HDF5.iscontiguous(obj.plain) && dsel_id == HDF5.API.H5S_ALL readmmap(obj.plain, T) else out = Array{T}(undef, dims) - HDF5.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.H5P_DEFAULT, out) + HDF5.API.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.API.H5P_DEFAULT, out) out end end # Arrays of immutables/bitstypes -function read_vals(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.hid_t, - dsel_id::HDF5.hid_t, dims::Tuple{Vararg{Int}}) +function read_vals(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.API.hid_t, + dsel_id::HDF5.API.hid_t, dims::Tuple{Vararg{Int}}) return read_vals_default(obj, dtype, T, dspace_id, dsel_id, dims) end -function read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.hid_t, - dsel_id::HDF5.hid_t, dims::Tuple{Vararg{Int}}) +function 
read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.API.hid_t, + dsel_id::HDF5.API.hid_t, dims::Tuple{Vararg{Int}}) out = Array{T}(undef, dims) # Empty objects don't need to be read at all T.size == 0 && !ismutabletype(T) && return out @@ -465,7 +465,7 @@ function read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspac n = prod(dims) h5sz = sizeof(dtype) buf = Vector{UInt8}(undef, h5sz*n) - HDF5.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.H5P_DEFAULT, buf) + HDF5.API.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.API.H5P_DEFAULT, buf) f = file(obj) h5offset = pointer(buf) @@ -492,10 +492,10 @@ function read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspac end # Arrays of references -function read_refs(obj::JldDataset, ::Type{T}, dspace_id::HDF5.hid_t, dsel_id::HDF5.hid_t, +function read_refs(obj::JldDataset, ::Type{T}, dspace_id::HDF5.API.hid_t, dsel_id::HDF5.API.hid_t, dims::Tuple{Vararg{Int}}) where T refs = Array{HDF5.Reference}(undef, dims) - HDF5.h5d_read(obj.plain.id, HDF5.H5T_STD_REF_OBJ, dspace_id, dsel_id, HDF5.H5P_DEFAULT, refs) + HDF5.API.h5d_read(obj.plain.id, HDF5.API.H5T_STD_REF_OBJ, dspace_id, dsel_id, HDF5.API.H5P_DEFAULT, refs) out = Array{T}(undef, dims) f = file(obj) @@ -549,16 +549,17 @@ function dset_create_properties(parent, sz::Int, obj, chunk=Int[]; mmap::Bool=fa return compact_properties(), false end if iscompressed(parent) && !isempty(chunk) - p = create_property(HDF5.H5P_DATASET_CREATE, chunk = chunk) + p = HDF5.DatasetCreateProperties() + p.chunk = chunk if iscompatible(parent) - p[:shuffle] = () - p[:compress] = 5 + p.shuffle = true + p.deflate = 5 else - p[:blosc] = 5 + p.blosc = 5 end return p, true else - return HDF5.DEFAULT_PROPERTIES, false + return HDF5.DatasetCreateProperties(), false end end @@ -570,9 +571,9 @@ function _write(parent::Union{JldFile, JldGroup}, chunk = T <: String ? 
Int[] : HDF5.heuristic_chunk(data) dprop, dprop_close = dset_create_properties(parent, sizeof(data), data, chunk; kargs...) dtype = datatype(data) - dset = HDF5.Dataset(HDF5.h5d_create(parent.plain, String(name), dtype, dataspace(data), + dset = HDF5.Dataset(HDF5.API.h5d_create(parent.plain, String(name), dtype, dataspace(data), HDF5._link_properties(name), dprop, - HDF5.DEFAULT_PROPERTIES), file(parent.plain)) + HDF5.DatasetAccessProperties()), file(parent.plain)) try # Write the attribute isa(data, Array) && isempty(data) && write_attribute(dset, "dims", [size(data)...]) @@ -592,14 +593,14 @@ function _write(parent::Union{JldFile, JldGroup}, f = file(parent) dtype = h5fieldtype(f, T, true) buf = h5convert_array(f, data, dtype, wsession) - dims = convert(Vector{HDF5.hsize_t}, [reverse(size(data))...]) + dims = convert(Vector{HDF5.API.hsize_t}, [reverse(size(data))...]) dspace = dataspace(data) chunk = HDF5.heuristic_chunk(dtype, size(data)) dprop, dprop_close = dset_create_properties(parent, sizeof(buf),buf, chunk; kargs...) try - dset = HDF5.Dataset(HDF5.h5d_create(parent.plain, path, dtype.dtype, dspace, + dset = HDF5.Dataset(HDF5.API.h5d_create(parent.plain, path, dtype.dtype, dspace, HDF5._link_properties(path), dprop, - HDF5.DEFAULT_PROPERTIES), file(parent.plain)) + HDF5.DatasetAccessProperties()), file(parent.plain)) if dtype == JLD_REF_TYPE write_attribute(dset, "julia eltype", full_typename(f, T)) end @@ -654,7 +655,7 @@ end # HDF5 compound objects. A separate function so that it is specialized. 
@noinline function _h5convert_vals(f::JldFile, data::Array, dtype::JldDatatype, wsession::JldWriteSession) - sz = HDF5.h5t_get_size(dtype) + sz = HDF5.API.h5t_get_size(dtype) n = length(data) buf = Vector{UInt8}(undef, sz*n) offset = pointer(buf) @@ -692,7 +693,7 @@ function write_ref(parent::JldFile, data, wsession::JldWriteSession) dset = _write(gref, name, writeas(data), wsession) # Add reference to reference list - ref = HDF5.Reference(HDF5.hobj_ref_t(object_info(dset).addr)) + ref = HDF5.Reference(HDF5.API.hobj_ref_t(object_info(dset).addr)) close(dset) if !isa(data, Tuple) && ismutable(data) wsession.h5ref[data] = ref @@ -730,16 +731,16 @@ function write_compound(parent::Union{JldFile, JldGroup}, name::String, dtype = h5type(f, T, true) gen_h5convert(f, T) - buf = Vector{UInt8}(undef, HDF5.h5t_get_size(dtype)) + buf = Vector{UInt8}(undef, HDF5.API.h5t_get_size(dtype)) h5convert!(pointer(buf), file(parent), s, wsession) gcuse(buf) - dspace = HDF5.Dataspace(HDF5.h5s_create(HDF5.H5S_SCALAR)) + dspace = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_SCALAR)) dprop, dprop_close = dset_create_properties(parent, length(buf), buf; kargs...) 
try - dset = HDF5.Dataset(HDF5.h5d_create(parent.plain, name, dtype.dtype, dspace, + dset = HDF5.Dataset(HDF5.API.h5d_create(parent.plain, name, dtype.dtype, dspace, HDF5._link_properties(name), dprop, - HDF5.DEFAULT_PROPERTIES), file(parent.plain)) + HDF5.DatasetAccessProperties()), file(parent.plain)) write_dataset(dset, dtype.dtype, buf) return dset finally @@ -779,7 +780,7 @@ function setindex!(dset::JldDataset, X::AbstractArray{T,N}, indices::Union{Abstr dtype = datatype(dset.plain) try # Convert array to writeable buffer - if HDF5.h5t_get_class(dtype) == HDF5.H5T_REFERENCE + if HDF5.API.h5t_get_class(dtype) == HDF5.API.H5T_REFERENCE written_eltype = refarray_eltype(dset) jldtype = JLD_REF_TYPE else @@ -792,7 +793,7 @@ function setindex!(dset::JldDataset, X::AbstractArray{T,N}, indices::Union{Abstr dspace = HDF5._dataspace(sz) try - HDF5.h5d_write(dset.plain.id, dtype, dspace, dsel_id, HDF5.H5P_DEFAULT, buf) + HDF5.API.h5d_write(dset.plain.id, dtype, dspace, dsel_id, HDF5.API.H5P_DEFAULT, buf) finally close(dspace) end @@ -800,7 +801,7 @@ function setindex!(dset::JldDataset, X::AbstractArray{T,N}, indices::Union{Abstr close(dtype) end finally - HDF5.h5s_close(dsel_id) + HDF5.API.h5s_close(dsel_id) end end function setindex!(dset::JldDataset, x::Number, indices::Union{AbstractRange{Int},Integer}...) 
@@ -1336,11 +1337,10 @@ const _runtime_properties = Ref{HDF5.Properties}() compact_properties() = _runtime_properties[] function __init__() - COMPACT_PROPERTIES = create_property(HDF5.H5P_DATASET_CREATE) - HDF5.h5p_set_layout(COMPACT_PROPERTIES.id, HDF5.H5D_COMPACT) + prop = HDF5.DatasetCreateProperties(layout=:compact) global _runtime_properties - _runtime_properties[] = COMPACT_PROPERTIES + _runtime_properties[] = prop Base.rehash!(_typedict, length(_typedict.keys)) Base.rehash!(BUILTIN_TYPES.dict, length(BUILTIN_TYPES.dict.keys)) diff --git a/src/JLD00.jl b/src/JLD00.jl index aa57c3c..156a094 100644 --- a/src/JLD00.jl +++ b/src/JLD00.jl @@ -95,17 +95,17 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo error("File ", filename, " cannot be found") end version = version_current - pa = create_property(HDF5.H5P_FILE_ACCESS) + pa = create_property(HDF5.API.H5P_FILE_ACCESS) try - pa[:fclose_degree] = HDF5.H5F_CLOSE_STRONG + pa[:fclose_degree] = HDF5.API.H5F_CLOSE_STRONG if cr && (tr || !isfile(filename)) # We're truncating, so we don't have to check the format of an existing file # Set the user block to 512 bytes, to save room for the header - p = create_property(HDF5.H5P_FILE_CREATE) + p = create_property(HDF5.API.H5P_FILE_CREATE) local f try p[:userblock] = 512 - f = HDF5.h5f_create(filename, HDF5.H5F_ACC_TRUNC, p.id, pa.id) + f = HDF5.API.h5f_create(filename, HDF5.API.H5F_ACC_TRUNC, p.id, pa.id) finally close(p) end @@ -126,7 +126,7 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo close(rawfid) end if length(magic) ≥ ncodeunits(magic_base) && view(magic, 1:ncodeunits(magic_base)) == Vector{UInt8}(codeunits(magic_base)) - f = HDF5.h5f_open(filename, wr ? HDF5.H5F_ACC_RDWR : HDF5.H5F_ACC_RDONLY, pa.id) + f = HDF5.API.h5f_open(filename, wr ?
HDF5.API.H5F_ACC_RDWR : HDF5.API.H5F_ACC_RDONLY, pa.id) version = unsafe_string(pointer(magic) + length(magic_base)) fj = JldFile(HDF5.File(f, filename), version, true, true, mmaparrays) # Load any required files/packages @@ -174,20 +174,20 @@ function jldopen(f::Function, args...) end end -function jldobject(obj_id::HDF5.hid_t, parent) - obj_type = HDF5.h5i_get_type(obj_id) - obj_type == HDF5.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) : - obj_type == HDF5.H5I_DATATYPE ? HDF5.Datatype(obj_id) : - obj_type == HDF5.H5I_DATASET ? JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) : +function jldobject(obj_id::HDF5.API.hid_t, parent) + obj_type = HDF5.API.h5i_get_type(obj_id) + obj_type == HDF5.API.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) : + obj_type == HDF5.API.H5I_DATATYPE ? HDF5.Datatype(obj_id) : + obj_type == HDF5.API.H5I_DATASET ? JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) : error("Invalid object type for path ", path) end getindex(parent::Union{JldFile, JldGroup}, path::String) = - jldobject(HDF5.h5o_open(parent.plain.id, path, HDF5.H5P_DEFAULT), parent) + jldobject(HDF5.API.h5o_open(parent.plain.id, path, HDF5.API.H5P_DEFAULT), parent) function getindex(parent::Union{JldFile, JldGroup, JldDataset}, r::HDF5.Reference) r == HDF5.Reference() && error("Reference is null") - obj_id = HDF5.h5r_dereference(parent.plain.id, HDF5.H5P_DEFAULT, HDF5.H5R_OBJECT, r) + obj_id = HDF5.API.h5r_dereference(parent.plain.id, HDF5.API.H5P_DEFAULT, HDF5.API.H5R_OBJECT, r) jldobject(obj_id, parent) end @@ -591,7 +591,7 @@ function write(parent::Union{JldFile, JldGroup}, name::String, n::Nothing, astyp try dspace = dataspace(nothing) dset = HDF5.Dataset(HDF5.h5d_create(HDF5.parents_create(HDF5.checkvalid(parent.plain), name, HDF5.H5T_NATIVE_UINT8, dspace.id, - HDF5.H5P_DEFAULT, HDF5.H5P_DEFAULT, HDF5.H5P_DEFAULT)...), file(parent.plain)) + HDF5.API.H5P_DEFAULT, 
HDF5.API.H5P_DEFAULT, HDF5.API.H5P_DEFAULT)...), file(parent.plain)) write_attribute(dset, name_type_attr, astype) finally close(dspace) diff --git a/src/jld_types.jl b/src/jld_types.jl index d4cea93..d25f3f2 100644 --- a/src/jld_types.jl +++ b/src/jld_types.jl @@ -5,7 +5,7 @@ const INLINE_TUPLE = false const INLINE_POINTER_IMMUTABLE = false -const JLD_REF_TYPE = JldDatatype(HDF5.Datatype(HDF5.H5T_STD_REF_OBJ, false), 0) +const JLD_REF_TYPE = JldDatatype(HDF5.Datatype(HDF5.API.H5T_STD_REF_OBJ, false), 0) const BUILTIN_TYPES = Set([Symbol, Type, BigFloat, BigInt]) const JL_TYPENAME_TRANSLATE = Dict{String, String}() const JLCONVERT_INFO = Dict{Any, Any}() @@ -36,7 +36,7 @@ function JldTypeInfo(parent::JldFile, types::TypesType, commit::Bool) for i = 1:length(types) dtype = dtypes[i] = h5fieldtype(parent, types[i], commit) offsets[i] = offset - offset += HDF5.h5t_get_size(dtype) + offset += HDF5.API.h5t_get_size(dtype) end JldTypeInfo(dtypes, offsets, offset) end @@ -75,7 +75,7 @@ commit_datatype(parent::Nothing, dtype::HDF5.Datatype, @nospecialize(T)) = # later. mangle_name(jtype::JldDatatype, jlname) = jtype.index <= 0 ? 
string(jlname, "_") : string(jlname, "_", jtype.index) -Base.convert(::Type{HDF5.hid_t}, x::JldDatatype) = x.dtype.id +Base.convert(::Type{HDF5.API.hid_t}, x::JldDatatype) = x.dtype.id ## Serialization of datatypes to JLD ## @@ -155,9 +155,9 @@ h5fieldtype(parent::JldFile, ::Type{T}, ::Bool) where {T<:String} = # Stored as variable-length strings function h5type(::JldFile, ::Type{T}, ::Bool) where T<:String - type_id = HDF5.h5t_copy(HDF5.hdf5_type_id(T)) - HDF5.h5t_set_size(type_id, HDF5.H5T_VARIABLE) - HDF5.h5t_set_cset(type_id, HDF5.cset(T)) + type_id = HDF5.API.h5t_copy(HDF5.hdf5_type_id(T)) + HDF5.API.h5t_set_size(type_id, HDF5.API.H5T_VARIABLE) + HDF5.API.h5t_set_cset(type_id, HDF5.cset(T)) JldDatatype(HDF5.Datatype(type_id, false), 0) end @@ -180,8 +180,8 @@ h5fieldtype(parent::JldFile, ::Type{Symbol}, commit::Bool) = # Stored as a compound type that contains a variable length string function h5type(parent::JldFile, ::Type{Symbol}, commit::Bool) haskey(parent.jlh5type, Symbol) && return parent.jlh5type[Symbol] - id = HDF5.h5t_create(HDF5.H5T_COMPOUND, 8) - HDF5.h5t_insert(id, "symbol_", 0, h5fieldtype(parent, String, commit)) + id = HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, 8) + HDF5.API.h5t_insert(id, "symbol_", 0, h5fieldtype(parent, String, commit)) dtype = HDF5.Datatype(id, parent.plain) commit ? 
commit_datatype(parent, dtype, Symbol) : JldDatatype(dtype, -1) end @@ -203,8 +203,8 @@ h5fieldtype(parent::JldFile, T::Union{Type{BigInt}, Type{BigFloat}}, commit::Boo # Stored as a compound type that contains a variable length string function h5type(parent::JldFile, T::Union{Type{BigInt}, Type{BigFloat}}, commit::Bool) haskey(parent.jlh5type, T) && return parent.jlh5type[T] - id = HDF5.h5t_create(HDF5.H5T_COMPOUND, 8) - HDF5.h5t_insert(id, "data_", 0, h5fieldtype(parent, String, commit)) + id = HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, 8) + HDF5.API.h5t_insert(id, "data_", 0, h5fieldtype(parent, String, commit)) dtype = HDF5.Datatype(id, parent.plain) commit ? commit_datatype(parent, dtype, T) : JldDatatype(dtype, -1) end @@ -233,8 +233,8 @@ h5fieldtype(parent::JldFile, ::Type{T}, commit::Bool) where {T<:Type} = # Stored as a compound type that contains a variable length string function h5type(parent::JldFile, ::Type{T}, commit::Bool) where T<:Type haskey(parent.jlh5type, Type) && return parent.jlh5type[Type] - id = HDF5.h5t_create(HDF5.H5T_COMPOUND, 8) - HDF5.h5t_insert(id, "typename_", 0, h5fieldtype(parent, String, commit)) + id = HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, 8) + HDF5.API.h5t_insert(id, "typename_", 0, h5fieldtype(parent, String, commit)) dtype = HDF5.Datatype(id, parent.plain) out = commit ?
commit_datatype(parent, dtype, Type) : JldDatatype(dtype, -1) end @@ -280,13 +280,13 @@ function h5type(parent::JldFile, T::TupleType, commit::Bool) typeinfo = JldTypeInfo(parent, T, commit) if isopaque(T) - id = HDF5.h5t_create(HDF5.H5T_OPAQUE, opaquesize(T)) + id = HDF5.API.h5t_create(HDF5.API.H5T_OPAQUE, opaquesize(T)) else - id = HDF5.h5t_create(HDF5.H5T_COMPOUND, typeinfo.size) + id = HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, typeinfo.size) end for i = 1:length(typeinfo.offsets) fielddtype = typeinfo.dtypes[i] - HDF5.h5t_insert(id, mangle_name(fielddtype, i), typeinfo.offsets[i], fielddtype) + HDF5.API.h5t_insert(id, mangle_name(fielddtype, i), typeinfo.offsets[i], fielddtype) end dtype = HDF5.Datatype(id, parent.plain) @@ -333,14 +333,14 @@ function h5type_default(parent::JldFile, @nospecialize(T), commit::Bool) if isopaque(T) # Empty type or non-basic bitstype - id = HDF5.h5t_create(HDF5.H5T_OPAQUE, opaquesize(T)) + id = HDF5.API.h5t_create(HDF5.API.H5T_OPAQUE, opaquesize(T)) else # Compound type typeinfo = JldTypeInfo(parent, T.types, commit) - id = HDF5.h5t_create(HDF5.H5T_COMPOUND, typeinfo.size) + id = HDF5.API.h5t_create(HDF5.API.H5T_COMPOUND, typeinfo.size) for i = 1:length(typeinfo.offsets) fielddtype = typeinfo.dtypes[i] - HDF5.h5t_insert(id, mangle_name(fielddtype, fieldnames(T)[i]), typeinfo.offsets[i], fielddtype) + HDF5.API.h5t_insert(id, mangle_name(fielddtype, fieldnames(T)[i]), typeinfo.offsets[i], fielddtype) end end @@ -365,7 +365,7 @@ function _gen_jlconvert_type(typeinfo::JldTypeInfo, @nospecialize(T)) for i = 1:length(typeinfo.dtypes) h5offset = typeinfo.offsets[i] - if HDF5.h5t_get_class(typeinfo.dtypes[i]) == HDF5.H5T_REFERENCE + if HDF5.API.h5t_get_class(typeinfo.dtypes[i]) == HDF5.API.H5T_REFERENCE push!(args, quote ref = unsafe_load(convert(Ptr{HDF5.Reference}, ptr)+$h5offset) if ref != HDF5.Reference() @@ -415,7 +415,7 @@ function _gen_jlconvert_immutable(typeinfo::JldTypeInfo, @nospecialize(T)) ninit += 1 end end) - elseif 
HDF5.h5t_get_class(typeinfo.dtypes[i]) == HDF5.H5T_REFERENCE + elseif HDF5.API.h5t_get_class(typeinfo.dtypes[i]) == HDF5.API.H5T_REFERENCE push!(args, quote ref = unsafe_load(convert(Ptr{HDF5.Reference}, ptr)+$h5offset) if ref != HDF5.Reference() @@ -456,7 +456,7 @@ function _gen_jlconvert_immutable!(typeinfo::JldTypeInfo, @nospecialize(T)) unsafe_store!(convert(Ptr{$(T.types[i])}, out)+$jloffset, read_ref(file, ref)) end end) - elseif HDF5.h5t_get_class(typeinfo.dtypes[i]) == HDF5.H5T_REFERENCE + elseif HDF5.API.h5t_get_class(typeinfo.dtypes[i]) == HDF5.API.H5T_REFERENCE error("reference encountered in pointerfree immutable; this is a bug") else push!(args, :(jlconvert!(out+$jloffset, $(T.types[i]), file, ptr+$h5offset))) @@ -482,7 +482,7 @@ function _gen_jlconvert_tuple(typeinfo::JldTypeInfo, @nospecialize(T)) h5offset = typeinfo.offsets[i] field = Symbol(string("field", i)) - if HDF5.h5t_get_class(typeinfo.dtypes[i]) == HDF5.H5T_REFERENCE + if HDF5.API.h5t_get_class(typeinfo.dtypes[i]) == HDF5.API.H5T_REFERENCE push!(args, :($field = read_ref(file, unsafe_load(convert(Ptr{HDF5.Reference}, ptr)+$h5offset)))) else push!(args, :($field = jlconvert($(types[i]), file, ptr+$h5offset))) @@ -594,9 +594,9 @@ function gen_h5convert(parent::JldFile, @nospecialize(T)) types = (T::DataType).types end - n = HDF5.h5t_get_nmembers(dtype.id) + n = HDF5.API.h5t_get_nmembers(dtype.id) for i = 1:n - if HDF5.h5t_get_member_class(dtype.id, i-1) != HDF5.H5T_REFERENCE + if HDF5.API.h5t_get_member_class(dtype.id, i-1) != HDF5.API.H5T_REFERENCE gen_h5convert(parent, types[i]) end end @@ -626,10 +626,10 @@ function _gen_h5convert!(@nospecialize(T)) getindex_fn = istuple ? 
(:getindex) : (:getfield) ex = Expr(:block) args = ex.args - n = HDF5.h5t_get_nmembers(dtype.id) + n = HDF5.API.h5t_get_nmembers(dtype.id) for i = 1:n - offset = HDF5.h5t_get_member_offset(dtype.id, i-1) - if HDF5.h5t_get_member_class(dtype.id, i-1) == HDF5.H5T_REFERENCE + offset = HDF5.API.h5t_get_member_offset(dtype.id, i-1) + if HDF5.API.h5t_get_member_class(dtype.id, i-1) == HDF5.API.H5T_REFERENCE if istuple push!(args, :(unsafe_store!(convert(Ptr{HDF5.Reference}, out)+$offset, write_ref(file, $getindex_fn(x, $i), wsession)))) @@ -660,33 +660,33 @@ end # Type mapping function. Given an HDF5.Datatype, find (or construct) the # corresponding Julia type. function jldatatype(parent::JldFile, dtype::HDF5.Datatype) - class_id = HDF5.h5t_get_class(dtype.id) - if class_id == HDF5.H5T_STRING - cset = HDF5.h5t_get_cset(dtype.id) - if cset == HDF5.H5T_CSET_ASCII + class_id = HDF5.API.h5t_get_class(dtype.id) + if class_id == HDF5.API.H5T_STRING + cset = HDF5.API.h5t_get_cset(dtype.id) + if cset == HDF5.API.H5T_CSET_ASCII return String - elseif cset == HDF5.H5T_CSET_UTF8 + elseif cset == HDF5.API.H5T_CSET_UTF8 return String else error("character set ", cset, " not recognized") end - elseif class_id == HDF5.H5T_INTEGER || class_id == HDF5.H5T_FLOAT || class_id == HDF5.H5T_BITFIELD + elseif class_id == HDF5.API.H5T_INTEGER || class_id == HDF5.API.H5T_FLOAT || class_id == HDF5.API.H5T_BITFIELD # This can be a performance hotspot - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_DOUBLE) > 0 && return Float64 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_INT64) > 0 && return Int64 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_FLOAT) > 0 && return Float32 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_INT32) > 0 && return Int32 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_UINT8) > 0 && return UInt8 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_UINT64) > 0 && return UInt64 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_UINT32) > 0 && return UInt32 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_INT8) > 0 && 
return Int8 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_INT16) > 0 && return Int16 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_UINT16) > 0 && return UInt16 - HDF5.h5t_equal(dtype.id, HDF5.H5T_NATIVE_B8) > 0 && return Bool + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_DOUBLE) > 0 && return Float64 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_INT64) > 0 && return Int64 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_FLOAT) > 0 && return Float32 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_INT32) > 0 && return Int32 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_UINT8) > 0 && return UInt8 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_UINT64) > 0 && return UInt64 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_UINT32) > 0 && return UInt32 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_INT8) > 0 && return Int8 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_INT16) > 0 && return Int16 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_UINT16) > 0 && return UInt16 + HDF5.API.h5t_equal(dtype.id, HDF5.API.H5T_NATIVE_B8) > 0 && return Bool error("unrecognized integer or float type") - elseif class_id == HDF5.H5T_BITFIELD + elseif class_id == HDF5.API.H5T_BITFIELD Bool - elseif class_id == HDF5.H5T_COMPOUND || class_id == HDF5.H5T_OPAQUE + elseif class_id == HDF5.API.H5T_COMPOUND || class_id == HDF5.API.H5T_OPAQUE addr = object_info(dtype).addr haskey(parent.h5jltype, addr) && return parent.h5jltype[addr] @@ -703,9 +703,9 @@ function jldatatype(parent::JldFile, dtype::HDF5.Datatype) if !(T in BUILTIN_TYPES) # Call jldatatype on dependent types to validate them and # define jlconvert - if class_id == HDF5.H5T_COMPOUND - for i = 0:HDF5.h5t_get_nmembers(dtype.id)-1 - member_name = HDF5.h5t_get_member_name(dtype.id, i) + if class_id == HDF5.API.H5T_COMPOUND + for i = 0:HDF5.API.h5t_get_nmembers(dtype.id)-1 + member_name = HDF5.API.h5t_get_member_name(dtype.id, i) idx = first(something(findlast("_", member_name), 0:-1)) if idx != 
sizeof(member_name) member_dtype = open_datatype(parent.plain, string(pathtypes, '/', lpad(member_name[idx+1:end], 8, '0'))) @@ -724,14 +724,14 @@ function jldatatype(parent::JldFile, dtype::HDF5.Datatype) # If the types compare equal, continue else # Otherwise the types may be compatible yet still naively compare as - # unequal. This seems to be related to H5T_REFERENCE members in the compound + # unequal. This seems to be related to API.H5T_REFERENCE members in the compound # datatype (starting with libhdf5 v1.12) comparing unequal if one half has # been committed while the other is still transient. # # Both dtype and newtype may be committed (former) or contain committed member # types loaded from the cache (latter), so copy both for comparison. - dtype′ = HDF5.Datatype(HDF5.h5t_copy(dtype.id)) - newtype = HDF5.Datatype(HDF5.h5t_copy(newtype.id)) + dtype′ = HDF5.Datatype(HDF5.API.h5t_copy(dtype.id)) + newtype = HDF5.Datatype(HDF5.API.h5t_copy(newtype.id)) dtype′ == newtype || throw(TypeMismatchException(typename)) end end @@ -750,21 +750,21 @@ end # when the type is no longer available. 
function reconstruct_type(parent::JldFile, dtype::HDF5.Datatype, savedname::AbstractString) name = gensym(savedname) - class_id = HDF5.h5t_get_class(dtype.id) - if class_id == HDF5.H5T_OPAQUE + class_id = HDF5.API.h5t_get_class(dtype.id) + if class_id == HDF5.API.H5T_OPAQUE if haskey(dtype, "empty") @eval (struct $name; end; $name) else - sz = Int(HDF5.h5t_get_size(dtype.id))*8 + sz = Int(HDF5.API.h5t_get_size(dtype.id))*8 @eval (primitive type $name $sz end; $name) end else # Figure out field names and types - nfields = HDF5.h5t_get_nmembers(dtype.id) + nfields = HDF5.API.h5t_get_nmembers(dtype.id) fieldnames = Vector{Symbol}(undef, nfields) fieldtypes = Vector{Type}(undef, nfields) for i = 1:nfields - membername = HDF5.h5t_get_member_name(dtype.id, i-1) + membername = HDF5.API.h5t_get_member_name(dtype.id, i-1) idx = first(something(findlast("_", membername), 0:-1)) fieldname = fieldnames[i] = Symbol(membername[1:prevind(membername,idx)]) @@ -774,13 +774,13 @@ function reconstruct_type(parent::JldFile, dtype::HDF5.Datatype, savedname::Abst memberdtype = open_datatype(parent.plain, string(pathtypes, '/', lpad(membername[idx+1:end], 8, '0'))) fieldtypes[i] = jldatatype(parent, memberdtype) else - memberclass = HDF5.h5t_get_member_class(dtype.id, i-1) - if memberclass == HDF5.H5T_REFERENCE + memberclass = HDF5.API.h5t_get_member_class(dtype.id, i-1) + if memberclass == HDF5.API.H5T_REFERENCE # Field is a reference, so use Any fieldtypes[i] = Any else # Type is built-in - memberdtype = HDF5.Datatype(HDF5.h5t_get_member_type(dtype.id, i-1), parent.plain) + memberdtype = HDF5.Datatype(HDF5.API.h5t_get_member_type(dtype.id, i-1), parent.plain) fieldtypes[i] = jldatatype(parent, memberdtype) end end