@@ -682,18 +682,18 @@ void read_hdf5_tree_into_conduit_node(hid_t hdf5_id,
682682// / @param inopts Input options
683683// / @param dataspace_id ID of the dataspace of the dataset in question
684684// / @param filled_opts[out] The options, filled out with defaults
685- // /
685+ // /
686686// / This routine uses the following children of \a inopts:
687687// / - sizes (or size)
688688// / - offsets (or offset)
689689// / - strides (or stride)
690690// / .
691691// / All are optional. All should be numeric and of the same length
692692// / (the dimensionality of the dataset \a dataspace_id).
693- // /
693+ // /
694694// / This routine does the following:
695695// / 1. makes a deep copy of \a inopts into \a filled_opts
696- // / 2. retrieves metadata from \a dataspace_id. If it's not a dataspace,
696+ // / 2. retrieves metadata from \a dataspace_id. If it's not a dataspace,
697697// / throw an error.
698698// / 3. sets filled_opts["slabparams/rank"] as a scalar, the rank (number
699699// / of dimensions) of the dataset
@@ -708,7 +708,7 @@ void read_hdf5_tree_into_conduit_node(hid_t hdf5_id,
708708// / the user provided in inopts
709709// / 7. sets filled_opts["slabparams/readcount"] as a scalar, the number
710710// / of values to read, as specified by offset, stride, and size
711- void
711+ void
712712fill_dataset_opts (const std::string & ref_path, const Node& inopts,
713713 hid_t dataspace_id, Node& filled_opts);
714714
@@ -2507,7 +2507,7 @@ struct h5_read_opdata
25072507#else
25082508 haddr_t addr; /* Group address */
25092509#endif
2510-
2510+
25112511 // pointer to conduit node, anchors traversal to
25122512 Node *node;
25132513 const Node *opts;
@@ -2565,7 +2565,7 @@ int
25652565h5_group_check (h5_read_opdata *od,
25662566 haddr_t target_addr)
25672567{
2568-
2568+
25692569 if (od->addr == target_addr)
25702570 {
25712571 /* Addresses match */
@@ -2810,7 +2810,7 @@ read_hdf5_group_into_conduit_node(hid_t hdf5_group_id,
28102810#else
28112811 herr_t h5_status = H5Oget_info (hdf5_group_id,
28122812 &h5_info_buf);
2813- #endif
2813+ #endif
28142814
28152815 // Check if this is a list or an object case
28162816 if (check_if_hdf5_group_has_conduit_list_attribute (hdf5_group_id,
@@ -2928,14 +2928,14 @@ conduit_node_to_argarray(Node& n,
29282928 p_ary[d] = dft;
29292929 }
29302930 }
2931-
2931+
29322932 return hdf5array.as_index_t_array ();
29332933}
29342934
29352935
29362936// ---------------------------------------------------------------------------//
29372937index_t
2938- calculate_readsize (index_t_array readsize, index_t rank,
2938+ calculate_readsize (index_t_array readsize, index_t rank,
29392939 const index_t_array dataset_sizes, const index_t_array offsets, const index_t_array strides)
29402940{
29412941 index_t readtotal = 1 ;
@@ -2979,7 +2979,7 @@ fill_dataset_opts(const std::string & ref_path, const Node & inopts,
29792979 // - Each element of offset >= 0
29802980
29812981 // If dataspace_id is a scalar, then H5Sget_simple_extent_ndims will
2982- // return zero. Setting rank to 0 makes the following code create
2982+ // return zero. Setting rank to 0 makes the following code create
29832983 // zero-length arrays for offset, stride, and size, which is unhealthy.
29842984 bool is_scalar = false ;
29852985 if (rank < 1 )
@@ -3077,7 +3077,7 @@ read_hdf5_dataset_into_conduit_node(hid_t hdf5_dset_id,
30773077 std::vector<hsize_t > readsize_vec;
30783078 std::vector<hsize_t > offset_vec;
30793079 std::vector<hsize_t > stride_vec;
3080-
3080+
30813081 make_dataset_opt_copy (filled_opts, " sizes" ,readsize_vec);
30823082 make_dataset_opt_copy (filled_opts, " offsets" , offset_vec);
30833083 make_dataset_opt_copy (filled_opts, " strides" , stride_vec);
@@ -3267,7 +3267,7 @@ read_hdf5_tree_into_conduit_node(hid_t hdf5_id,
32673267 herr_t h5_status = H5Oget_info (hdf5_id,&h5_info_buf,H5O_INFO_ALL);
32683268#else
32693269 herr_t h5_status = H5Oget_info (hdf5_id,&h5_info_buf);
3270- #endif
3270+ #endif
32713271
32723272
32733273 CONDUIT_CHECK_HDF5_ERROR_WITH_FILE_AND_REF_PATH (h5_status,
@@ -3391,6 +3391,20 @@ create_hdf5_file_access_plist()
33913391 {
33923392 h5_status = H5Pset_libver_bounds (h5_fa_props, H5F_LIBVER_V18, H5F_LIBVER_V110);
33933393 }
3394+ // nested case for hdf5 >= 1.12
3395+ #if H5_VERSION_GE(1, 12, 0)
3396+ else if (HDF5Options::libver == " v1120" )
3397+ {
3398+ h5_status = H5Pset_libver_bounds (h5_fa_props, H5F_LIBVER_V18, H5F_LIBVER_V112);
3399+ }
3400+ #endif
3401+ // nested case for hdf5 >= 1.14
3402+ #if H5_VERSION_GE(1, 14, 0)
3403+ else if (HDF5Options::libver == " v1140" )
3404+ {
3405+ h5_status = H5Pset_libver_bounds (h5_fa_props, H5F_LIBVER_V18, H5F_LIBVER_V114);
3406+ }
3407+ #endif
33943408 else if (HDF5Options::libver == " latest" )
33953409 {
33963410 h5_status = H5Pset_libver_bounds (h5_fa_props, H5F_LIBVER_V18, H5F_LIBVER_LATEST);
@@ -3403,7 +3417,7 @@ create_hdf5_file_access_plist()
34033417 {
34043418 // unknown or unsupported libver
34053419 CONDUIT_ERROR (" HDF5 libver option: '"
3406- << HDF5Options::libver
3420+ << HDF5Options::libver
34073421 << " ' is unknown or unsupported with HDF5 v"
34083422 << major_num << " ." << minor_num << " ." << release_num
34093423 );
@@ -3424,7 +3438,7 @@ create_hdf5_file_access_plist()
34243438 {
34253439 // unknown or unsupported libver
34263440 CONDUIT_ERROR (" HDF5 libver option: '"
3427- << HDF5Options::libver
3441+ << HDF5Options::libver
34283442 << " ' is unknown or unsupported with HDF5 v"
34293443 << major_num << " ." << minor_num << " ." << release_num
34303444 );