Storing a large number of floating point numbers that evolve over time

Hello HDF community

I want to store a large number of floating point numbers (20 000) that evolve over time.
At each time step, I want to save my 20 000 numbers in an HDF5 file.

I have tried to store them individually, by creating 20 000 datasets and extending each dataset when adding new data.
It works, but it creates a large HDF5 file even for a small number of records (48.8 MB to store the first iteration of 20 000 64-bit floating point numbers).

As I could not find a reasonable solution, I now try to save them in a compound dataset that I create dynamically.
I succeeded; however, I am facing a limitation on the number of columns I can insert in my compound dataset.

Apparently I cannot create more than 1092 columns, which seems to be due to the 64 KB limit on an object header message:

H5O_alloc(): object header message is too large

1) Is there a way to fix this?

2) Or is the only solution:
- to store the raw 64-bit floating point numbers in an extendible numeric 2D array?
- to store the column names in a separate dataset?

I am using HDF5 1.8.16 and 1.10.1.

Below is the program I use to create my dynamic compound dataset, followed by the error message I get.

    module hdf5_module
      !
      use iso_c_binding
      use hdf5
      implicit none
      !
      integer, parameter :: r_k8 = selected_real_kind(10)
      !
      integer, parameter :: HDF5_RANK = 1
      !
      integer(hid_t) :: hdf5_s1_tid
      integer(hid_t) :: hdf5_file_id ! File identifier
      integer(hid_t) :: hdf5_dset_id1 ! Dataset identifier
      integer(hid_t) :: hdf5_dset_id1_prop ! Dataset creation property identifier
    contains
      subroutine hdf5_init(filename)
        !
        character(len=*), intent(in) :: filename
        !
        integer :: error
        integer(hid_t) :: fapl_id
        integer(hid_t) :: dataspace
        integer(hid_t) :: memspace
        character(len=15), parameter :: dsetname = "results"
        integer(hsize_t), dimension(1), parameter :: dimsc = (/1/)
        integer(hsize_t), dimension(1) :: dims = (/0/)
        integer(hsize_t), dimension(1) :: maxdims = (/0/)
        integer(size_t) :: structure_size
        integer :: i
        character(len=6) :: char_i

        call h5open_f(error)
        !
        ! H5F_LIBVER_EARLIEST_F
        ! H5F_LIBVER_LATEST_F
        call h5pcreate_f(H5P_FILE_ACCESS_F, fapl_id, error)
        call h5pset_libver_bounds_f(fapl_id, H5F_LIBVER_LATEST_F, H5F_LIBVER_LATEST_F, error)

        ! Pass the access property list so the libver bounds actually take effect.
        call h5fcreate_f(filename, H5F_ACC_TRUNC_F, hdf5_file_id, error, access_prp=fapl_id)
        call h5pclose_f(fapl_id, error)

        maxdims = H5S_UNLIMITED_F
        call h5screate_simple_f(HDF5_RANK, dims, dataspace, error, maxdims)

        call h5pcreate_f(H5P_DATASET_CREATE_F, hdf5_dset_id1_prop, error)
        call h5pset_chunk_f(hdf5_dset_id1_prop, HDF5_RANK, dimsc, error)

        structure_size = 1092 * 8
        call H5Tcreate_f(H5T_COMPOUND_F, structure_size, hdf5_s1_tid, error)
        do i=1,1092
            write(char_i, '(I6.6)') i
            call H5Tinsert_f(hdf5_s1_tid, "x"//char_i, int(8*(i-1), size_t), h5kind_to_type(r_k8, H5_REAL_KIND), error)
        end do

        call h5dcreate_f(hdf5_file_id, dsetname, hdf5_s1_tid, dataspace, hdf5_dset_id1, error, hdf5_dset_id1_prop)
        call h5sclose_f(dataspace, error)
      end subroutine

      subroutine hdf5_close()
        integer :: error
        call h5tclose_f(hdf5_s1_tid, error) ! also release the compound datatype
        call h5pclose_f(hdf5_dset_id1_prop, error)
        call h5dclose_f(hdf5_dset_id1, error)
        call h5fclose_f(hdf5_file_id, error)
      end subroutine
    end module

    program toto
        use hdf5_module
        character(LEN=12), parameter :: filename = "resultats.h5"
        call hdf5_init(filename)
        call hdf5_close()
    end program

    HDF5-DIAG: Error detected in HDF5 (1.10.1) thread 0:
      #000: hdf5-1.10.1/src/H5D.c line 145 in H5Dcreate2(): unable to create dataset
        major: Dataset
        minor: Unable to initialize object
      #001: hdf5-1.10.1/src/H5Dint.c line 490 in H5D__create_named(): unable to create and link to dataset
        major: Dataset
        minor: Unable to initialize object
      #002: hdf5-1.10.1/src/H5L.c line 1695 in H5L_link_object(): unable to create new link to object
        major: Links
        minor: Unable to initialize object
      #003: hdf5-1.10.1/src/H5L.c line 1939 in H5L_create_real(): can't insert link
        major: Symbol table
        minor: Unable to insert object
      #004: hdf5-1.10.1/src/H5Gtraverse.c line 867 in H5G_traverse(): internal path traversal failed
        major: Symbol table
        minor: Object not found
      #005: hdf5-1.10.1/src/H5Gtraverse.c line 639 in H5G_traverse_real(): traversal operator failed
        major: Symbol table
        minor: Callback failed
      #006: hdf5-1.10.1/src/H5L.c line 1742 in H5L_link_cb(): unable to create object
        major: Object header
        minor: Unable to initialize object
      #007: hdf5-1.10.1/src/H5O.c line 3178 in H5O_obj_create(): unable to open object
        major: Object header
        minor: Can't open object
      #008: hdf5-1.10.1/src/H5Doh.c line 291 in H5O__dset_create(): unable to create dataset
        major: Dataset
        minor: Unable to initialize object
      #009: hdf5-1.10.1/src/H5Dint.c line 1256 in H5D__create(): can't update the metadata cache
        major: Dataset
        minor: Unable to initialize object
      #010: hdf5-1.10.1/src/H5Dint.c line 916 in H5D__update_oh_info(): unable to update datatype header message
        major: Dataset
        minor: Unable to initialize object
      #011: hdf5-1.10.1/src/H5Omessage.c line 183 in H5O_msg_append_oh(): unable to create new message in header
        major: Attribute
        minor: Unable to insert object
      #012: hdf5-1.10.1/src/H5Omessage.c line 223 in H5O_msg_append_real(): unable to create new message
        major: Object header
        minor: No space available for allocation
      #013: hdf5-1.10.1/src/H5Omessage.c line 1933 in H5O_msg_alloc(): unable to allocate space for message
        major: Object header
        minor: Unable to initialize object
      #014: hdf5-1.10.1/src/H5Oalloc.c line 1314 in H5O_alloc(): object header message is too large
        major: Object header
        minor: Unable to initialize object

What about a 2D dataset with dimensions (unlimited, 20 000)?
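
For example, something along these lines (an untested sketch; the dataset name, the one-time-step chunk, and the already-open file_id are just illustrative choices). Keep in mind the Fortran wrapper reverses the dimension order, so the (20000, unlimited) dataspace declared here shows up as (unlimited, 20000) in h5dump:

    ! Rough sketch (untested): one chunked 2D dataset of 64-bit reals,
    ! 20 000 values per time step, unlimited along the time axis.
    subroutine create_results_dataset(file_id, dset_id)
      use hdf5
      implicit none
      integer(hid_t), intent(in)  :: file_id   ! assumed already opened/created
      integer(hid_t), intent(out) :: dset_id
      integer :: error
      integer(hid_t) :: space_id, dcpl_id
      integer, parameter :: rank = 2
      integer(hsize_t), dimension(2) :: dims  = (/20000_hsize_t, 0_hsize_t/)
      integer(hsize_t), dimension(2) :: chunk = (/20000_hsize_t, 1_hsize_t/)
      integer(hsize_t), dimension(2) :: maxdims

      ! Second (slowest-varying) dimension is the time axis and is unlimited.
      maxdims = (/20000_hsize_t, H5S_UNLIMITED_F/)
      call h5screate_simple_f(rank, dims, space_id, error, maxdims)

      ! Chunking is mandatory for an extendible dataset; one time step per chunk here.
      call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl_id, error)
      call h5pset_chunk_f(dcpl_id, rank, chunk, error)

      call h5dcreate_f(file_id, "results", H5T_NATIVE_DOUBLE, space_id, &
                       dset_id, error, dcpl_id)

      call h5pclose_f(dcpl_id, error)
      call h5sclose_f(space_id, error)
    end subroutine

A chunk of exactly one time step keeps appends simple; packing several time steps per chunk would lower the per-chunk overhead and help compression, if either becomes a concern.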


FWIW and IMHO, and at the risk of sounding overly critical though that is not my intention, I am just going to say it...

...there is a right way and a wrong way to use HDF5.

Creating 20,000 datasets to store 20,000 floating point numbers, each dataset storing a single float, is not the right way to use HDF5. I mean, if that actually makes sense in some context, then why stop there? Why not just create 20,000 HDF5 *files*, each file storing a single floating point number?

A dataset involves on the order of ¼ to ½ kilobytes of metadata (symbol table entry, file offset, name string and name heap, object header, type information, etc.). So that represents a space (memory/file) performance hit. Next, there is the time to open, read, and close each dataset simply to access any given float. That represents a massive time performance hit.

Worst of all, there are no tools in the HDF5 "eco-system" that will be able to treat your 20,000 floats stored in this way in any useful way. Can't plot 'em, can't read a hyperslab of them, can't create a numpy array out of them, can't compress them, etc., etc., etc.

What you want to use is, as someone else suggested, a *single* 2D dataset that is 20,000 X unlimited of type float<64>.
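
For instance, appending one record per time step to such a dataset could look roughly like this (untested sketch, assuming the 20,000 x unlimited chunked dataset of doubles described above; the name append_step and the 1-based step index are just for illustration):

    ! Rough sketch (untested): extend the time axis by one and write the new record.
    subroutine append_step(dset_id, step, values)
      use hdf5
      implicit none
      integer, parameter :: rk = selected_real_kind(10)
      integer(hid_t), intent(in) :: dset_id
      integer, intent(in) :: step                          ! 1-based time index
      real(kind=rk), dimension(20000), intent(in) :: values
      integer :: error
      integer(hid_t) :: filespace, memspace
      integer(hsize_t), dimension(2) :: new_size, offset, count
      integer(hsize_t), dimension(1) :: mem_dims = (/20000_hsize_t/)

      ! Grow the time axis so the dataset holds 'step' records.
      new_size = (/20000_hsize_t, int(step, hsize_t)/)
      call h5dset_extent_f(dset_id, new_size, error)

      ! Select the freshly added time step in the file.
      offset = (/0_hsize_t, int(step - 1, hsize_t)/)
      count  = (/20000_hsize_t, 1_hsize_t/)
      call h5dget_space_f(dset_id, filespace, error)
      call h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, offset, count, error)

      ! In memory the record is a plain vector of 20 000 reals.
      call h5screate_simple_f(1, mem_dims, memspace, error)
      call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, values, mem_dims, error, &
                      mem_space_id=memspace, file_space_id=filespace)

      call h5sclose_f(memspace, error)
      call h5sclose_f(filespace, error)
    end subroutine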

If string names for the column headers are also part of the problem, store those as a second dataset, or perhaps as an attribute on the first dataset.

There are many examples of using HDF5 available that can help. Here are some...

https://support.hdfgroup.org/HDF5/examples/intro.html#fortran

"Hdf-forum on behalf of Guillaume Jacquenot" wrote:

Hello HDF community
I want to store a large number of floating point numbers (20 000) , that evolves over time.
At each time step, I want to save my 20 000 numbers in a HDF5 file.

I have tried to store them individually in datasets, (by creating 20 000 datasets), and extend each dataset when adding new data.
It works but it creates a large HDF5 file for small number of records (48.8 Mo to store the first iteration, with 20 000 * 64 bit floating numbers)

As I could not find a reasonnable solution, I now try to save them in a compound dataset that I create dynamically.
I succeeded, however I am facing a limitation on the number of columns I can insert in my compound dataset.
Apparently I can't create more than 1092 columns, and it is due to a limit on an attribute's size that is 64K

H5O_alloc(): object header message is too large

1)
Is there a way to fix this?

2)
Is the only solution is:
- to store raw 64 bit floating number in an extendible numeric 2D array?
- to store columns name in a separate dataset?

I am using Hdf5 1.8.16 and 1.10.1

Below are the programs I use to create my dynamic compound dataset, and the error message I have.

    module hdf5_module
      !
      use iso_c_binding
      use hdf5
      implicit none
      !
      integer, parameter :: r_k8 = selected_real_kind(10)
      !
      integer, parameter :: HDF5_RANK = 1
      !
      integer(hid_t) :: hdf5_s1_tid
      integer(hid_t) :: hdf5_file_id ! File identifier
      integer(hid_t) :: hdf5_dset_id1 ! Dataset identifier
      integer(hid_t) :: hdf5_dset_id1_prop ! Dataset creation property identifier
    contains
      subroutine hdf5_init(filename)
        !
        character(len=*), intent(in) :: filename
        !
        integer :: error
        integer(hid_t) :: fapl_id
        integer(hid_t) :: dataspace
        integer(hid_t) :: memspace
        character(len=15), parameter :: dsetname = "results"
        integer(hsize_t), dimension(1), parameter :: dimsc = (/1/)
        integer(hsize_t), dimension(1) :: dims = (/0/)
        integer(hsize_t), dimension(1) :: maxdims = (/0/)
        integer(size_t) :: structure_size
        integer :: i
        character(len=6) :: char_i

        call h5open_f(error)
        !
        ! H5F_LIBVER_EARLIEST_F
        ! H5F_LIBVER_LATEST_F
        call h5pcreate_f( H5P_FILE_ACCESS_F, fapl_id, error)
        call h5pset_libver_bounds_f(fapl_id, H5F_LIBVER_LATEST_F, H5F_LIBVER_LATEST_F, error)

        call h5fcreate_f(filename, H5F_ACC_TRUNC_F, hdf5_file_id, error)

        maxdims = H5S_UNLIMITED_F
        call h5screate_simple_f(HDF5_RANK, dims, dataspace, error, maxdims)

        call h5pcreate_f(H5P_DATASET_CREATE_F, hdf5_dset_id1_prop, error)
        call h5pset_chunk_f(hdf5_dset_id1_prop, HDF5_RANK, dimsc, error)

        structure_size = 1092 * 8
        call H5Tcreate_f(H5T_COMPOUND_F, structure_size, hdf5_s1_tid, error)
        do i=1,1092
            write(char_i, '(I6.6)') i
            call H5Tinsert_f(hdf5_s1_tid, "x"//char_i, int (8*(i-1), size_t), h5kind_to_type(r_k8,H5_REAL_KIND), error)
        end do

        call h5dcreate_f(hdf5_file_id, dsetname, hdf5_s1_tid, dataspace, hdf5_dset_id1, error, hdf5_dset_id1_prop)
        call h5sclose_f(dataspace, error)
      end subroutine

      subroutine hdf5_close()
        integer :: error
        call h5pclose_f(hdf5_dset_id1_prop, error)
        call h5dclose_f(hdf5_dset_id1, error)
        call h5fclose_f(hdf5_file_id, error)
      end subroutine
    end module

    program toto
        use hdf5_module
        character(LEN=12), parameter :: filename = "resultats.h5"
        call hdf5_init(filename)
        call hdf5_close()
    end program

    HDF5-DIAG: Error detected in HDF5 (1.10.1) thread 0:
      #000: hdf5-1.10.1/src/H5D.c line 145 in H5Dcreate2(): unable to create dataset
        major: Dataset
        minor: Unable to initialize object
      #001: hdf5-1.10.1/src/H5Dint.c line 490 in H5D__create_named(): unable to create and link to dataset
        major: Dataset
        minor: Unable to initialize object
      #002: hdf5-1.10.1/src/H5L.c line 1695 in H5L_link_object(): unable to create new link to object
        major: Links
        minor: Unable to initialize object
      #003: hdf5-1.10.1/src/H5L.c line 1939 in H5L_create_real(): can't insert link
        major: Symbol table
        minor: Unable to insert object
      #004: hdf5-1.10.1/src/H5Gtraverse.c line 867 in H5G_traverse(): internal path traversal failed
        major: Symbol table
        minor: Object not found
      #005: hdf5-1.10.1/src/H5Gtraverse.c line 639 in H5G_traverse_real(): traversal operator failed
        major: Symbol table
        minor: Callback failed
      #006: hdf5-1.10.1/src/H5L.c line 1742 in H5L_link_cb(): unable to create object
        major: Object header
        minor: Unable to initialize object
      #007: hdf5-1.10.1/src/H5O.c line 3178 in H5O_obj_create(): unable to open object
        major: Object header
        minor: Can't open object
      #008: hdf5-1.10.1/src/H5Doh.c line 291 in H5O__dset_create(): unable to create dataset
        major: Dataset
        minor: Unable to initialize object
      #009: hdf5-1.10.1/src/H5Dint.c line 1256 in H5D__create(): can't update the metadata cache
        major: Dataset
        minor: Unable to initialize object
      #010: hdf5-1.10.1/src/H5Dint.c line 916 in H5D__update_oh_info(): unable to update datatype header message
        major: Dataset
        minor: Unable to initialize object
      #011: hdf5-1.10.1/src/H5Omessage.c line 183 in H5O_msg_append_oh(): unable to create new message in header
        major: Attribute
        minor: Unable to insert object
      #012: hdf5-1.10.1/src/H5Omessage.c line 223 in H5O_msg_append_real(): unable to create new message
        major: Object header
        minor: No space available for allocation
      #013: hdf5-1.10.1/src/H5Omessage.c line 1933 in H5O_msg_alloc(): unable to allocate space for message
        major: Object header
        minor: Unable to initialize object
      #014: hdf5-1.10.1/src/H5Oalloc.c line 1314 in H5O_alloc(): object header message is too large
        major: Object header
        minor: Unable to initialize object

Storing the column names in a string-array dataset and the data in an unlimited-length, 20 000-wide array (or its transpose) seems to make sense. The 64 KB limitation on attributes applies only to attributes that reside in the object header: https://support.hdfgroup.org/HDF5/faq/limits.html

With attributes you don't have the option of reading only part of the array, so you would be reading all 20 000 column names whenever you open a file; with a dataset you can read specific indices. Also, if you are concerned about size overhead, storing the column headers in a dataset lets you add compression, and I imagine your column header text would compress well.
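
As a rough, untested sketch of that idea (the dataset name "column_names", the 32-character fixed-length strings, the chunk size, and the deflate level are all arbitrary choices):

    ! Rough sketch (untested): store the 20 000 column names in their own
    ! fixed-length string dataset, chunked and deflate-compressed.
    subroutine write_column_names(file_id, names)
      use hdf5
      implicit none
      integer(hid_t), intent(in) :: file_id
      character(len=32), dimension(20000), intent(in) :: names
      integer :: error
      integer(hid_t) :: strtype, space_id, dcpl_id, dset_id
      integer(hsize_t), dimension(1) :: dims  = (/20000_hsize_t/)
      integer(hsize_t), dimension(1) :: chunk = (/1000_hsize_t/)

      ! Fixed-length 32-character string type.
      call h5tcopy_f(H5T_FORTRAN_S1, strtype, error)
      call h5tset_size_f(strtype, 32_size_t, error)

      call h5screate_simple_f(1, dims, space_id, error)

      ! Chunking enables compression; text headers usually deflate well.
      call h5pcreate_f(H5P_DATASET_CREATE_F, dcpl_id, error)
      call h5pset_chunk_f(dcpl_id, 1, chunk, error)
      call h5pset_deflate_f(dcpl_id, 6, error)

      call h5dcreate_f(file_id, "column_names", strtype, space_id, &
                       dset_id, error, dcpl_id)
      call h5dwrite_f(dset_id, strtype, names, dims, error)

      call h5dclose_f(dset_id, error)
      call h5pclose_f(dcpl_id, error)
      call h5sclose_f(space_id, error)
      call h5tclose_f(strtype, error)
    end subroutine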

Jarom
