Fix deprecations for HDF5 v0.16 (#311)
musm authored Feb 4, 2022
1 parent ec58d45 commit 7c449a1
Showing 3 changed files with 134 additions and 134 deletions.
122 changes: 61 additions & 61 deletions src/JLD.jl
@@ -138,8 +138,8 @@ function close(f::JldFile)
isdefined(f, :gref) && close(f.gref)

# Ensure that all other datasets, groups, and datatypes are closed (ref #176)
-for obj_id in HDF5.h5f_get_obj_ids(f.plain.id, HDF5.H5F_OBJ_DATASET | HDF5.H5F_OBJ_GROUP | HDF5.H5F_OBJ_DATATYPE)
-    HDF5.h5o_close(obj_id)
+for obj_id in HDF5.API.h5f_get_obj_ids(f.plain.id, HDF5.API.H5F_OBJ_DATASET | HDF5.API.H5F_OBJ_GROUP | HDF5.API.H5F_OBJ_DATATYPE)
+    HDF5.API.h5o_close(obj_id)
end

# Close file
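For context: this hunk moves the raw object-handle calls into the `HDF5.API` submodule, where HDF5.jl v0.16 relocated all low-level `h5*` bindings and constants. A minimal sketch of the same cleanup pattern, using only calls that appear in the diff (the helper name is illustrative, not part of the commit):

```julia
using HDF5

# Close any dataset/group/datatype handles still open on a file
# before closing the file itself, as close(f::JldFile) does above.
function close_stray_objects(fid::HDF5.File)
    flags = HDF5.API.H5F_OBJ_DATASET | HDF5.API.H5F_OBJ_GROUP | HDF5.API.H5F_OBJ_DATATYPE
    for obj_id in HDF5.API.h5f_get_obj_ids(fid.id, flags)
        HDF5.API.h5o_close(obj_id)  # release the raw identifier
    end
end
```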
@@ -168,20 +168,20 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo
error("File ", filename, " cannot be found")
end
version = version_current
-pa = create_property(HDF5.H5P_FILE_ACCESS)
-# HDF5.h5p_set_libver_bounds(pa, HDF5.H5F_LIBVER_18, HDF5.H5F_LIBVER_18)
+fapl = HDF5.FileAccessProperties() # fapl
+# HDF5.h5p_set_libver_bounds(fapl, HDF5.H5F_LIBVER_18, HDF5.H5F_LIBVER_18)
try
-pa[:fclose_degree] = HDF5.H5F_CLOSE_STRONG
+fapl.fclose_degree = HDF5.API.H5F_CLOSE_STRONG
if cr && (tr || !isfile(filename))
# We're truncating, so we don't have to check the format of an existing file
# Set the user block to 512 bytes, to save room for the header
-p = create_property(HDF5.H5P_FILE_CREATE)
+fcpl = HDF5.FileCreateProperties() # fcpl
local f
try
-p[:userblock] = 512
-f = HDF5.h5f_create(filename, HDF5.H5F_ACC_TRUNC, p.id, pa.id)
+fcpl.userblock = 512
+f = HDF5.API.h5f_create(filename, HDF5.API.H5F_ACC_TRUNC, fcpl, fapl)
finally
-close(p)
+close(fcpl)
end
fj = JldFile(HDF5.File(f, filename, false), version, true, true, mmaparrays, compatible, compress)
# Record creator information. Don't use any fancy types here,
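The hunk above also swaps `create_property` plus symbol indexing for v0.16's typed property structs with field access, and passes the structs themselves instead of their `.id` fields. A hedged sketch of the new style, built only from calls in this diff (the file name is illustrative):

```julia
using HDF5

fapl = HDF5.FileAccessProperties()
fapl.fclose_degree = HDF5.API.H5F_CLOSE_STRONG  # was pa[:fclose_degree] = ...

fcpl = HDF5.FileCreateProperties()
fcpl.userblock = 512                            # was p[:userblock] = 512

# Property structs are passed directly; no more p.id / pa.id.
fid = HDF5.API.h5f_create("example.jld", HDF5.API.H5F_ACC_TRUNC, fcpl, fapl)
HDF5.API.h5f_close(fid)
close(fcpl); close(fapl)
```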
@@ -216,7 +216,7 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo
if version < v"0.1.0"
fj = JLD00.jldopen(filename, rd, wr, cr, tr, ff; mmaparrays=mmaparrays)
else
-f = HDF5.h5f_open(filename, wr ? HDF5.H5F_ACC_RDWR : HDF5.H5F_ACC_RDONLY, pa.id)
+f = HDF5.API.h5f_open(filename, wr ? HDF5.API.H5F_ACC_RDWR : HDF5.API.H5F_ACC_RDONLY, fapl)
fj = JldFile(HDF5.File(f, filename, false), version, true, cr|wr, mmaparrays, compatible, compress)
# Load any required files/packages
if haskey(fj, pathrequire)
@@ -241,7 +241,7 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo
end
end
finally
-close(pa)
+close(fapl)
end
return fj
end
@@ -286,20 +286,20 @@ function creator(file::JldFile, key::AbstractString)
end
end

-function jldobject(obj_id::HDF5.hid_t, parent)
-obj_type = HDF5.h5i_get_type(obj_id)
-obj_type == HDF5.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) :
-obj_type == HDF5.H5I_DATATYPE ? HDF5.Datatype(obj_id) :
-obj_type == HDF5.H5I_DATASET ? JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) :
+function jldobject(obj_id::HDF5.API.hid_t, parent)
+obj_type = HDF5.API.h5i_get_type(obj_id)
+obj_type == HDF5.API.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) :
+obj_type == HDF5.API.H5I_DATATYPE ? HDF5.Datatype(obj_id) :
+obj_type == HDF5.API.H5I_DATASET ? JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) :
error("Invalid object type for path ", path)
end

getindex(parent::Union{JldFile, JldGroup}, path::String) =
-jldobject(HDF5.h5o_open(parent.plain.id, path, HDF5.H5P_DEFAULT), parent)
+jldobject(HDF5.API.h5o_open(parent.plain.id, path, HDF5.API.H5P_DEFAULT), parent)

function getindex(parent::Union{JldFile, JldGroup, JldDataset}, r::HDF5.Reference)
r == HDF5.Reference() && error("Reference is null")
-obj_id = HDF5.h5r_dereference(parent.plain.id, HDF5.H5P_DEFAULT, HDF5.H5R_OBJECT, r)
+obj_id = HDF5.API.h5r_dereference(parent.plain.id, HDF5.API.H5P_DEFAULT, HDF5.API.H5R_OBJECT, r)
jldobject(obj_id, parent)
end
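`jldobject` above dispatches on the raw identifier type, so the `H5I_*` constants and `h5i_get_type` move under `HDF5.API` too. A small illustrative sketch of the same dispatch (the function name is hypothetical):

```julia
using HDF5

# Classify a raw HDF5 identifier, mirroring jldobject's ternary chain.
function id_kind(obj_id::HDF5.API.hid_t)
    t = HDF5.API.h5i_get_type(obj_id)
    t == HDF5.API.H5I_GROUP    ? :group    :
    t == HDF5.API.H5I_DATATYPE ? :datatype :
    t == HDF5.API.H5I_DATASET  ? :dataset  : :other
end
```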

@@ -378,17 +378,17 @@ end

function read(obj::JldDataset)
dtype = datatype(obj.plain)
-dspace_id = HDF5.h5d_get_space(obj.plain)
-extent_type = HDF5.h5s_get_simple_extent_type(dspace_id)
+dspace_id = HDF5.API.h5d_get_space(obj.plain)
+extent_type = HDF5.API.h5s_get_simple_extent_type(dspace_id)
try
-if extent_type == HDF5.H5S_SCALAR
+if extent_type == HDF5.API.H5S_SCALAR
# Scalar value
return read_scalar(obj, dtype, jldatatype(file(obj), dtype))
-elseif extent_type == HDF5.H5S_SIMPLE
-return read_array(obj, dtype, dspace_id, HDF5.H5S_ALL)
-elseif extent_type == HDF5.H5S_NULL
+elseif extent_type == HDF5.API.H5S_SIMPLE
+return read_array(obj, dtype, dspace_id, HDF5.API.H5S_ALL)
+elseif extent_type == HDF5.API.H5S_NULL
# Empty array
-if HDF5.h5t_get_class(dtype) == HDF5.H5T_REFERENCE
+if HDF5.API.h5t_get_class(dtype) == HDF5.API.H5T_REFERENCE
T = refarray_eltype(obj)
else
T = jldatatype(file(obj), dtype)
Expand All @@ -401,7 +401,7 @@ function read(obj::JldDataset)
end
end
finally
-HDF5.h5s_close(dspace_id)
+HDF5.API.h5s_close(dspace_id)
end
end
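The dataspace calls (`h5d_get_space`, `h5s_get_simple_extent_type`, `h5s_close`) follow the same relocation. A minimal sketch of the extent dispatch in `read(::JldDataset)`, assuming `dset` is an open `HDF5.Dataset` (helper name illustrative):

```julia
using HDF5

function extent_kind(dset::HDF5.Dataset)
    dspace_id = HDF5.API.h5d_get_space(dset)
    try
        et = HDF5.API.h5s_get_simple_extent_type(dspace_id)
        et == HDF5.API.H5S_SCALAR ? :scalar :
        et == HDF5.API.H5S_SIMPLE ? :array  :
        et == HDF5.API.H5S_NULL   ? :empty  : :unknown
    finally
        HDF5.API.h5s_close(dspace_id)  # raw ids still need manual closing
    end
end
```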

@@ -424,9 +424,9 @@ end
## Arrays

# Read an array
-function read_array(obj::JldDataset, dtype::HDF5.Datatype, dspace_id::HDF5.hid_t, dsel_id::HDF5.hid_t,
-dims::Tuple{Vararg{Int}} = (Int.(reverse!(HDF5.h5s_get_simple_extent_dims(dspace_id)[1]))...,))
-if HDF5.h5t_get_class(dtype) == HDF5.H5T_REFERENCE
+function read_array(obj::JldDataset, dtype::HDF5.Datatype, dspace_id::HDF5.API.hid_t, dsel_id::HDF5.API.hid_t,
+dims::Tuple{Vararg{Int}} = (Int.(reverse!(HDF5.API.h5s_get_simple_extent_dims(dspace_id)[1]))...,))
+if HDF5.API.h5t_get_class(dtype) == HDF5.API.H5T_REFERENCE
val = read_refs(obj, refarray_eltype(obj), dspace_id, dsel_id, dims)
else
val = read_vals(obj, dtype, jldatatype(file(obj), dtype), dspace_id, dsel_id, dims)
@@ -436,27 +436,27 @@ end

# Arrays of basic HDF5 kinds
function read_vals(obj::JldDataset, dtype::HDF5.Datatype, T::Union{Type{S}, Type{Complex{S}}},
-dspace_id::HDF5.hid_t, dsel_id::HDF5.hid_t, dims::Tuple{Vararg{Int}}) where {S<:HDF5.BitsType}
+dspace_id::HDF5.API.hid_t, dsel_id::HDF5.API.hid_t, dims::Tuple{Vararg{Int}}) where {S<:HDF5.BitsType}
if S === Bool && obj.file.version < v"0.1.3"
return read_vals_default(obj, dtype, T, dspace_id, dsel_id, dims)
end
-if obj.file.mmaparrays && HDF5.iscontiguous(obj.plain) && dsel_id == HDF5.H5S_ALL
+if obj.file.mmaparrays && HDF5.iscontiguous(obj.plain) && dsel_id == HDF5.API.H5S_ALL
readmmap(obj.plain, T)
else
out = Array{T}(undef, dims)
-HDF5.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.H5P_DEFAULT, out)
+HDF5.API.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.API.H5P_DEFAULT, out)
out
end
end
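`h5d_read` keeps its six-argument order (dataset id, memory type, memory space, file space, transfer plist, buffer); only the namespace changes. A hedged sketch reading a whole dataset of bits type `T` into a preallocated array (the helper name and the use of `H5S_ALL` for both spaces are illustrative assumptions):

```julia
using HDF5

function read_whole(dset::HDF5.Dataset, ::Type{T}, dims::Dims) where {T}
    out = Array{T}(undef, dims)
    dtype = datatype(T)  # in-memory HDF5 type for T
    try
        HDF5.API.h5d_read(dset.id, dtype.id, HDF5.API.H5S_ALL,
                          HDF5.API.H5S_ALL, HDF5.API.H5P_DEFAULT, out)
    finally
        close(dtype)
    end
    return out
end
```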

# Arrays of immutables/bitstypes
-function read_vals(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.hid_t,
-dsel_id::HDF5.hid_t, dims::Tuple{Vararg{Int}})
+function read_vals(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.API.hid_t,
+dsel_id::HDF5.API.hid_t, dims::Tuple{Vararg{Int}})
return read_vals_default(obj, dtype, T, dspace_id, dsel_id, dims)
end

-function read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.hid_t,
-dsel_id::HDF5.hid_t, dims::Tuple{Vararg{Int}})
+function read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspace_id::HDF5.API.hid_t,
+dsel_id::HDF5.API.hid_t, dims::Tuple{Vararg{Int}})
out = Array{T}(undef, dims)
# Empty objects don't need to be read at all
T.size == 0 && !ismutabletype(T) && return out
@@ -465,7 +465,7 @@ function read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspac
n = prod(dims)
h5sz = sizeof(dtype)
buf = Vector{UInt8}(undef, h5sz*n)
-HDF5.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.H5P_DEFAULT, buf)
+HDF5.API.h5d_read(obj.plain.id, dtype.id, dspace_id, dsel_id, HDF5.API.H5P_DEFAULT, buf)

f = file(obj)
h5offset = pointer(buf)
@@ -492,10 +492,10 @@ function read_vals_default(obj::JldDataset, dtype::HDF5.Datatype, T::Type, dspac
end

# Arrays of references
-function read_refs(obj::JldDataset, ::Type{T}, dspace_id::HDF5.hid_t, dsel_id::HDF5.hid_t,
+function read_refs(obj::JldDataset, ::Type{T}, dspace_id::HDF5.API.hid_t, dsel_id::HDF5.API.hid_t,
dims::Tuple{Vararg{Int}}) where T
refs = Array{HDF5.Reference}(undef, dims)
-HDF5.h5d_read(obj.plain.id, HDF5.H5T_STD_REF_OBJ, dspace_id, dsel_id, HDF5.H5P_DEFAULT, refs)
+HDF5.API.h5d_read(obj.plain.id, HDF5.API.H5T_STD_REF_OBJ, dspace_id, dsel_id, HDF5.API.H5P_DEFAULT, refs)

out = Array{T}(undef, dims)
f = file(obj)
@@ -549,16 +549,17 @@ function dset_create_properties(parent, sz::Int, obj, chunk=Int[]; mmap::Bool=fa
return compact_properties(), false
end
if iscompressed(parent) && !isempty(chunk)
-p = create_property(HDF5.H5P_DATASET_CREATE, chunk = chunk)
+p = HDF5.DatasetCreateProperties()
+p.chunk = chunk
if iscompatible(parent)
-p[:shuffle] = ()
-p[:compress] = 5
+p.shuffle = true
+p.deflate = 5
else
-p[:blosc] = 5
+p.blosc = 5
end
return p, true
else
-return HDF5.DEFAULT_PROPERTIES, false
+return HDF5.DatasetCreateProperties(), false
end
end
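Likewise for dataset-creation properties: chunking, shuffle, deflate, and Blosc become struct fields rather than `create_property` keywords or symbol indices. A sketch of both branches above (the chunk sizes are placeholders; the Blosc line assumes the Blosc filter is available):

```julia
using HDF5

p = HDF5.DatasetCreateProperties()
p.chunk = [100, 100]  # was create_property(HDF5.H5P_DATASET_CREATE, chunk=chunk)
p.shuffle = true      # was p[:shuffle] = ()
p.deflate = 5         # was p[:compress] = 5
# p.blosc = 5         # non-compatible branch; requires the Blosc filter
```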

@@ -570,9 +570,9 @@ function _write(parent::Union{JldFile, JldGroup},
chunk = T <: String ? Int[] : HDF5.heuristic_chunk(data)
dprop, dprop_close = dset_create_properties(parent, sizeof(data), data, chunk; kargs...)
dtype = datatype(data)
-dset = HDF5.Dataset(HDF5.h5d_create(parent.plain, String(name), dtype, dataspace(data),
+dset = HDF5.Dataset(HDF5.API.h5d_create(parent.plain, String(name), dtype, dataspace(data),
HDF5._link_properties(name), dprop,
-HDF5.DEFAULT_PROPERTIES), file(parent.plain))
+HDF5.DatasetAccessProperties()), file(parent.plain))
try
# Write the attribute
isa(data, Array) && isempty(data) && write_attribute(dset, "dims", [size(data)...])
@@ -592,14 +592,14 @@
f = file(parent)
dtype = h5fieldtype(f, T, true)
buf = h5convert_array(f, data, dtype, wsession)
-dims = convert(Vector{HDF5.hsize_t}, [reverse(size(data))...])
+dims = convert(Vector{HDF5.API.hsize_t}, [reverse(size(data))...])
dspace = dataspace(data)
chunk = HDF5.heuristic_chunk(dtype, size(data))
dprop, dprop_close = dset_create_properties(parent, sizeof(buf),buf, chunk; kargs...)
try
-dset = HDF5.Dataset(HDF5.h5d_create(parent.plain, path, dtype.dtype, dspace,
+dset = HDF5.Dataset(HDF5.API.h5d_create(parent.plain, path, dtype.dtype, dspace,
HDF5._link_properties(path), dprop,
-HDF5.DEFAULT_PROPERTIES), file(parent.plain))
+HDF5.DatasetAccessProperties()), file(parent.plain))
if dtype == JLD_REF_TYPE
write_attribute(dset, "julia eltype", full_typename(f, T))
end
@@ -654,7 +654,7 @@ end
# HDF5 compound objects. A separate function so that it is specialized.
@noinline function _h5convert_vals(f::JldFile, data::Array,
dtype::JldDatatype, wsession::JldWriteSession)
-sz = HDF5.h5t_get_size(dtype)
+sz = HDF5.API.h5t_get_size(dtype)
n = length(data)
buf = Vector{UInt8}(undef, sz*n)
offset = pointer(buf)
@@ -692,7 +692,7 @@ function write_ref(parent::JldFile, data, wsession::JldWriteSession)
dset = _write(gref, name, writeas(data), wsession)

# Add reference to reference list
-ref = HDF5.Reference(HDF5.hobj_ref_t(object_info(dset).addr))
+ref = HDF5.Reference(HDF5.API.hobj_ref_t(object_info(dset).addr))
close(dset)
if !isa(data, Tuple) && ismutable(data)
wsession.h5ref[data] = ref
@@ -730,16 +730,16 @@ function write_compound(parent::Union{JldFile, JldGroup}, name::String,
dtype = h5type(f, T, true)
gen_h5convert(f, T)

-buf = Vector{UInt8}(undef, HDF5.h5t_get_size(dtype))
+buf = Vector{UInt8}(undef, HDF5.API.h5t_get_size(dtype))
h5convert!(pointer(buf), file(parent), s, wsession)
gcuse(buf)

-dspace = HDF5.Dataspace(HDF5.h5s_create(HDF5.H5S_SCALAR))
+dspace = HDF5.Dataspace(HDF5.API.h5s_create(HDF5.API.H5S_SCALAR))
dprop, dprop_close = dset_create_properties(parent, length(buf), buf; kargs...)
try
-dset = HDF5.Dataset(HDF5.h5d_create(parent.plain, name, dtype.dtype, dspace,
+dset = HDF5.Dataset(HDF5.API.h5d_create(parent.plain, name, dtype.dtype, dspace,
HDF5._link_properties(name), dprop,
-HDF5.DEFAULT_PROPERTIES), file(parent.plain))
+HDF5.DatasetAccessProperties()), file(parent.plain))
write_dataset(dset, dtype.dtype, buf)
return dset
finally
@@ -779,7 +779,7 @@ function setindex!(dset::JldDataset, X::AbstractArray{T,N}, indices::Union{Abstr
dtype = datatype(dset.plain)
try
# Convert array to writeable buffer
-if HDF5.h5t_get_class(dtype) == HDF5.H5T_REFERENCE
+if HDF5.API.h5t_get_class(dtype) == HDF5.API.H5T_REFERENCE
written_eltype = refarray_eltype(dset)
jldtype = JLD_REF_TYPE
else
@@ -792,15 +792,15 @@

dspace = HDF5._dataspace(sz)
try
-HDF5.h5d_write(dset.plain.id, dtype, dspace, dsel_id, HDF5.H5P_DEFAULT, buf)
+HDF5.API.h5d_write(dset.plain.id, dtype, dspace, dsel_id, HDF5.API.H5P_DEFAULT, buf)
finally
close(dspace)
end
finally
close(dtype)
end
finally
-HDF5.h5s_close(dsel_id)
+HDF5.API.h5s_close(dsel_id)
end
end
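`h5d_write` mirrors `h5d_read`'s argument order. A minimal sketch of a whole-dataset write in the new namespace, assuming `dset` is an open `HDF5.Dataset` and `buf` a contiguous `Array` whose element type matches the stored type (helper name illustrative):

```julia
using HDF5

function write_whole(dset::HDF5.Dataset, buf::Array)
    dtype = datatype(dset)  # datatype of the on-disk dataset
    try
        HDF5.API.h5d_write(dset.id, dtype.id, HDF5.API.H5S_ALL,
                           HDF5.API.H5S_ALL, HDF5.API.H5P_DEFAULT, buf)
    finally
        close(dtype)
    end
end
```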
function setindex!(dset::JldDataset, x::Number, indices::Union{AbstractRange{Int},Integer}...)
@@ -1336,11 +1336,10 @@ const _runtime_properties = Ref{HDF5.Properties}()
compact_properties() = _runtime_properties[]

function __init__()
-COMPACT_PROPERTIES = create_property(HDF5.H5P_DATASET_CREATE)
-HDF5.h5p_set_layout(COMPACT_PROPERTIES.id, HDF5.H5D_COMPACT)
+prop = HDF5.DatasetCreateProperties(layout=:compact)

global _runtime_properties
-_runtime_properties[] = COMPACT_PROPERTIES
+_runtime_properties[] = prop

Base.rehash!(_typedict, length(_typedict.keys))
Base.rehash!(BUILTIN_TYPES.dict, length(BUILTIN_TYPES.dict.keys))
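The `__init__` simplification relies on the keyword constructor, which replaces `create_property` plus `h5p_set_layout` in one line. In short:

```julia
using HDF5

# was: p = create_property(HDF5.H5P_DATASET_CREATE); HDF5.h5p_set_layout(p.id, HDF5.H5D_COMPACT)
prop = HDF5.DatasetCreateProperties(layout = :compact)  # datasets use compact layout
```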
26 changes: 13 additions & 13 deletions src/JLD00.jl
@@ -95,17 +95,17 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo
error("File ", filename, " cannot be found")
end
version = version_current
-pa = create_property(HDF5.H5P_FILE_ACCESS)
+pa = create_property(HDF5.API.H5P_FILE_ACCESS)
try
-pa[:fclose_degree] = HDF5.H5F_CLOSE_STRONG
+pa[:fclose_degree] = HDF5.API.H5F_CLOSE_STRONG
if cr && (tr || !isfile(filename))
# We're truncating, so we don't have to check the format of an existing file
# Set the user block to 512 bytes, to save room for the header
-p = create_property(HDF5.H5P_FILE_CREATE)
+p = create_property(HDF5.API.H5P_FILE_CREATE)
local f
try
p[:userblock] = 512
-f = HDF5.h5f_create(filename, HDF5.H5F_ACC_TRUNC, p.id, pa.id)
+f = HDF5.API.h5f_create(filename, HDF5.API.H5F_ACC_TRUNC, p.id, pa.id)
finally
close(p)
end
@@ -126,7 +126,7 @@ function jldopen(filename::AbstractString, rd::Bool, wr::Bool, cr::Bool, tr::Boo
close(rawfid)
end
if length(magic) ≥ ncodeunits(magic_base) && view(magic, 1:ncodeunits(magic_base)) == Vector{UInt8}(codeunits(magic_base))
-f = HDF5.h5f_open(filename, wr ? HDF5.H5F_ACC_RDWR : HDF5.H5F_ACC_RDONLY, pa.id)
+f = HDF5.API.h5f_open(filename, wr ? HDF5.API.H5F_ACC_RDWR : HDF5.API.H5F_ACC_RDONLY, pa.id)
version = unsafe_string(pointer(magic) + length(magic_base))
fj = JldFile(HDF5.File(f, filename), version, true, true, mmaparrays)
# Load any required files/packages
@@ -174,20 +174,20 @@ function jldopen(f::Function, args...)
end
end

-function jldobject(obj_id::HDF5.hid_t, parent)
-obj_type = HDF5.h5i_get_type(obj_id)
-obj_type == HDF5.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) :
-obj_type == HDF5.H5I_DATATYPE ? HDF5.Datatype(obj_id) :
-obj_type == HDF5.H5I_DATASET ? JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) :
+function jldobject(obj_id::HDF5.API.hid_t, parent)
+obj_type = HDF5.API.h5i_get_type(obj_id)
+obj_type == HDF5.API.H5I_GROUP ? JldGroup(HDF5.Group(obj_id, file(parent.plain)), file(parent)) :
+obj_type == HDF5.API.H5I_DATATYPE ? HDF5.Datatype(obj_id) :
+obj_type == HDF5.API.H5I_DATASET ? JldDataset(HDF5.Dataset(obj_id, file(parent.plain)), file(parent)) :
error("Invalid object type for path ", path)
end

getindex(parent::Union{JldFile, JldGroup}, path::String) =
-jldobject(HDF5.h5o_open(parent.plain.id, path, HDF5.H5P_DEFAULT), parent)
+jldobject(HDF5.API.h5o_open(parent.plain.id, path, HDF5.API.H5P_DEFAULT), parent)

function getindex(parent::Union{JldFile, JldGroup, JldDataset}, r::HDF5.Reference)
r == HDF5.Reference() && error("Reference is null")
-obj_id = HDF5.h5r_dereference(parent.plain.id, HDF5.H5P_DEFAULT, HDF5.H5R_OBJECT, r)
+obj_id = HDF5.API.h5r_dereference(parent.plain.id, HDF5.API.H5P_DEFAULT, HDF5.API.H5R_OBJECT, r)
jldobject(obj_id, parent)
end

@@ -591,7 +591,7 @@ function write(parent::Union{JldFile, JldGroup}, name::String, n::Nothing, astyp
try
dspace = dataspace(nothing)
dset = HDF5.Dataset(HDF5.h5d_create(HDF5.parents_create(HDF5.checkvalid(parent.plain), name, HDF5.H5T_NATIVE_UINT8, dspace.id,
-HDF5.H5P_DEFAULT, HDF5.H5P_DEFAULT, HDF5.H5P_DEFAULT)...), file(parent.plain))
+HDF5.API.H5P_DEFAULT, HDF5.API.H5P_DEFAULT, HDF5.API.H5P_DEFAULT)...), file(parent.plain))
write_attribute(dset, name_type_attr, astype)
finally
close(dspace)

2 comments on commit 7c449a1

@musm (Member, Author) commented on 7c449a1, Feb 4, 2022


@JuliaRegistrator commented:

Registration pull request created: JuliaRegistries/General/53854

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.13.0 -m "<description of version>" 7c449a179a9e18f4aa0835b896a6f7402bcb0364
git push origin v0.13.0
