string, double data set

Hey guys,

I am a total newbie to HDF5. I want to use it via Java.

Now I need a suggestion about how best to structure my output file. I have
many cells with coordinates x and y. For each cell I need a cell code, and
furthermore I have many attributes.

I have:
x-coordinate: double or float (f.e. 4582436.9081 [m])
y-coordinate: double or float (f.e. 3116378.8266 [m])
cell code: string (f.e. 1kmN3116E4582)
attribute1: double or float (f.e. 10.1)
attribute2: double or float
attribute3: double or float
...

I have an array which should be written into HDF5 format, but I do not know
how many attributes there are for each cell.

Any suggestions? I could use some help!

···

--
View this message in context: http://hdf-forum.184993.n3.nabble.com/string-double-data-set-tp981166p981166.html
Sent from the hdf-forum mailing list archive at Nabble.com.

hey guys,

I have a problem.^^ I tried to solve it with the code snippet below, but I
do not get the output I want.

I wanted something like this:
cell code E-coordinate [m] N-coordinate [m] attribute
1kmN3116E4582 4582436.9081 3116378.8266 10.1
1kmN3112E4584 4584523.7417 3112367.3833 10.3

but I got something like this:

objectheader cell code E-coordinate [m]
N-coordinate [m]
29303 1027mN4460E3035 -6.065937113828252E66 2.954988390438623E237

attribute
-6.065988000076415E66

What am I doing wrong? Besides, I tried to remove the column "object header"
that came from the example I followed, but it does not work.

does anybody have any suggestions?

public class HDF5 {
  private static String fname;
  private static String DATASETNAME = "Grid";
  private static int DIM0;
  private static final int RANK = 1;

  /**
   * compoundDatatype class is used to capture basic externalization
information,
   * strings need to have a Maximum Size specified
   * @author Linda
   */
  private static class CompoundDatatype {
    protected static final int OBJHEADERSIZE = 2;
    protected static final int MAGICNUMBER = 4;
    protected static final int DOUBLESIZE = 8;
    protected final static int MAXSTRINGSIZE = 90;
  }

  /**
   * compound type class includes supporting Grid_Datatype class,
Grid_Datatype class could be external as well
   * @author Linda
   */
  private static class Grid {
    
    static Grid_Datatype datatypes;

    String cellcode;
    double x_coordinate;
    double y_coordinate;
    double attribute;

    Grid() {
      datatypes = new Grid_Datatype();
    }

    public void writeExternal(ObjectOutput out) throws IOException {
      for (int indx = 0; indx < Grid_Datatype.MAXSTRINGSIZE; indx++) {
        if (indx < cellcode.length())
          out.writeByte(cellcode.charAt(indx));
        else
          out.writeByte(0);
      }
      out.writeDouble(roundScale2(x_coordinate));
      out.writeDouble(roundScale2(y_coordinate));
      out.writeDouble(roundScale2(attribute));
    }
  }

  /**
   * method to create data set and write it to file
   * @param gridList
   * @param resolution
   */
  public void createDataset(double[][] gridList, double resolution, String
name) {

    DIM0 = gridList.length;

    int file_id = -1;
    int strtype_id = -1;
    int memtype_id = -1;
    int filetype_id = -1;
    int dataspace_id = -1;
    int dataset_id = -1;
    long[] dims = {DIM0};
    Grid[] grid_data = new Grid[DIM0];
    byte[] dset_data = null;

    // initialize data, initialize data via for loop
    for (int i = 0; i < DIM0; i++) {
      int north = (int) (gridList[i][0]/resolution);
      int east = (int) (gridList[i][1]/resolution);
      int intResolution = (int) resolution;
      grid_data[i] = new Grid();
      grid_data[i].cellcode = new String(intResolution + "m" + "N" +
          north + "E" + east);
      grid_data[i].x_coordinate = gridList[i][0];
      grid_data[i].y_coordinate = gridList[i][1];
      grid_data[i].attribute = gridList[i][2];
    }
    
    fname = name + ".hdf5";
    
    // create a new file using default properties
    try {
      file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC,
          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create string datatype
    try {
      strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
      if (strtype_id >= 0)
        H5.H5Tset_size(strtype_id, Grid_Datatype.MAXSTRINGSIZE);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create compound datatype for memory
    try {
      memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Grid_Datatype
          .getDataSize());
      if (memtype_id >= 0) {
        for (int indx = 0; indx < Grid_Datatype.numberMembers; indx++) {
          int type_id = Grid.datatypes.memberMemTypes[indx];
          if (type_id == HDF5Constants.H5T_C_S1)
            type_id = strtype_id;
          H5.H5Tinsert(memtype_id, Grid.datatypes.memberNames[indx],
              Grid_Datatype.getOffset(indx), type_id);
        }
      }
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create compound datatype for the file because standard
    // types we are using for file may have different sizes than
    // corresponding native types, we must manually calculate
    // offset of each member
    try {
      filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Grid_Datatype
          .getDataSize());
      if (filetype_id >= 0) {
        for (int indx = 0; indx < Grid_Datatype.numberMembers; indx++) {
          int type_id = Grid.datatypes.memberFileTypes[indx];
          if (type_id == HDF5Constants.H5T_C_S1)
            type_id = strtype_id;
          H5.H5Tinsert(filetype_id, Grid.datatypes.memberNames[indx],
              Grid_Datatype.getOffset(indx), type_id);
        }
      }
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create dataspace, setting maximum size to NULL sets maximum
    // size to be current size
    try {
      dataspace_id = H5.H5Screate_simple(RANK, dims, null);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create dataset
    try {
      if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
        dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id,
            dataspace_id, HDF5Constants.H5P_DEFAULT);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // write compound data to dataset
    try {
      ByteArrayOutputStream baos = new ByteArrayOutputStream(Grid_Datatype
          .getTotalDataSize(DIM0));
      ObjectOutputStream oout = new ObjectOutputStream(baos);
      for (int indx = 0; indx < DIM0; indx++) {
        grid_data[indx].writeExternal(oout);
        oout.flush();
      }
      oout.close();
      baos.close();
      dset_data = baos.toByteArray();
      byte[] write_data = new byte[dset_data.length-Grid_Datatype.MAGICNUMBER];
      for(int indx=0; indx<dset_data.length-Grid_Datatype.MAGICNUMBER;indx++)
        write_data[indx] = dset_data[indx+Grid_Datatype.MAGICNUMBER];
      if ((dataset_id >= 0) && (memtype_id >= 0))
        H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL,
            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, write_data);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // end access to dataset and release resources used by it
    try {
      if (dataset_id >= 0)
        H5.H5Dclose(dataset_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // terminate access to data space
    try {
      if (dataspace_id >= 0)
        H5.H5Sclose(dataspace_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // terminate access to file type
    try {
      if (filetype_id >= 0)
        H5.H5Tclose(filetype_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // terminate access to mem type
    try {
      if (memtype_id >= 0)
        H5.H5Tclose(memtype_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    try {
      if (strtype_id >= 0)
        H5.H5Tclose(strtype_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // close file
    try {
      if (file_id >= 0)
        H5.H5Fclose(file_id);
      JOptionPane.showMessageDialog(null, "Resampling and reprojecting to
Geographical Grid Systems complete!");
    }
    catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * using Java Externalization will add a two-byte object header in
   * stream, which needs to be called out in datatypes
   * @author Linda
   */
  private static class Grid_Datatype extends CompoundDatatype {
    static int numberMembers = 5;
    static int[] memberDims = {1, 1, 1, 1, 1};

    String[] memberNames = {"ObjectHeader", "Cell code",
        "x-Coordinate [m]", "y-Coordinate [m]", "Attribute"};
    int[] memberMemTypes = {HDF5Constants.H5T_NATIVE_SHORT,
HDF5Constants.H5T_C_S1,
        HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE,
HDF5Constants.H5T_NATIVE_DOUBLE};
    int[] memberFileTypes = {HDF5Constants.H5T_STD_I16BE,
HDF5Constants.H5T_C_S1,
        HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE,
HDF5Constants.H5T_IEEE_F64BE};
    static int[] memberStorage = {OBJHEADERSIZE, MAXSTRINGSIZE,
      DOUBLESIZE, DOUBLESIZE, DOUBLESIZE};

    /**
     * data size is storage size for members not object,
     * Java Externalization also adds a 4-byte "Magic Number" to beginning
     * of data stream
     * @param DIM0
     * @return
     */
    static int getTotalDataSize(int DIM0) {
      int data_size = 0;
      for (int indx = 0; indx < numberMembers; indx++)
        data_size += memberStorage[indx] * memberDims[indx];
      return DIM0 * data_size + MAGICNUMBER;
    }

    /**
     * method to get data size
     * @return
     */
    static int getDataSize() {
      int data_size = 0;
      for (int indx = 0; indx < numberMembers; indx++)
        data_size += memberStorage[indx] * memberDims[indx];
      return data_size;
    }

    /**
     * method to get offset
     * @param memberItem
     * @return
     */
    static int getOffset(int memberItem) {
      int data_offset = 0;
      for (int indx = 0; indx < memberItem; indx++)
        data_offset += memberStorage[indx];
      return data_offset;
    }
  }

  /**
   * method to round double values at two decimal places
   * @param d
   * @return
   */
  public static double roundScale2(double d) {
      return Math.rint(d*100)/100.;
    }
}

···

--
View this message in context: http://hdf-forum.184993.n3.nabble.com/string-double-data-set-tp981166p987711.html
Sent from the hdf-forum mailing list archive at Nabble.com.

I don't see anything obviously wrong in your code. Check if "write_data" buffer has the correct data before
you call H5Dwrite().

Another way you can try is to write data field by field.

Thanks
--pc

Adnila wrote:

···

hey guys,

I have a problem.^^ I tried to solve it with the code snippet below. but I
do not get the output I want to.

I wanted something like this:
cell code E-coordinate [m] N-coordinate [m] attribute
1kmN3116E4582 4582436.9081 3116378.8266 10.1
1kmN3112E4584 4584523.7417 3112367.3833 10.3

but I got something like this:

objectheader cell code E-coordinate [m] N-coordinate [m] 29303 1027mN4460E3035 -6.065937113828252E66 2.954988390438623E237

attribute
-6.065988000076415E66

what am I doing wrong? besides I tried to reject the column "object header"
because I used an example. But it does not work.

does anybody have any suggestions?

// NOTE(review): code quoted from the earlier message, kept verbatim
// (including lines broken by e-mail wrapping); review notes added as
// comments only.
public class HDF5 {
  private static String fname;
  private static String DATASETNAME = "Grid";
  private static int DIM0;
  private static final int RANK = 1;

  /**
   * Byte-size constants for one externalized record: ObjectOutputStream
   * writes a 4-byte stream header ("magic number") once at the start and a
   * 2-byte block-data header per flushed record; strings are written as a
   * fixed, NUL-padded MAXSTRINGSIZE-byte field.
   * @author Linda
   */
  private static class CompoundDatatype {
    protected static final int OBJHEADERSIZE = 2;
    protected static final int MAGICNUMBER = 4;
    protected static final int DOUBLESIZE = 8;
    protected final static int MAXSTRINGSIZE = 90;
  }

  /**
   * One grid record plus its externalization; the matching HDF5 layout is
   * described by the supporting Grid_Datatype class, which could be
   * external as well.
   * @author Linda
   */
  private static class Grid {
    
    static Grid_Datatype datatypes;

    String cellcode;
    double x_coordinate;
    double y_coordinate;
    double attribute;

    Grid() {
      datatypes = new Grid_Datatype();
    }

    // Writes one record: MAXSTRINGSIZE NUL-padded cell-code bytes followed
    // by three rounded doubles. NOTE(review): DataOutput always writes
    // BIG-endian bytes, regardless of the machine's native order.
    public void writeExternal(ObjectOutput out) throws IOException {
      for (int indx = 0; indx < Grid_Datatype.MAXSTRINGSIZE; indx++) {
        if (indx < cellcode.length())
          out.writeByte(cellcode.charAt(indx));
        else
          out.writeByte(0);
      }
      out.writeDouble(roundScale2(x_coordinate));
      out.writeDouble(roundScale2(y_coordinate));
      out.writeDouble(roundScale2(attribute));
    }
  }

  /**
   * method to create data set and write it to file
   * @param gridList rows of {x-coordinate, y-coordinate, attribute}
   * @param resolution cell size in metres, used to build the cell code
   * @param name output file name without the ".hdf5" extension
   */
  public void createDataset(double[][] gridList, double resolution, String
name) {

    DIM0 = gridList.length;

    int file_id = -1;
    int strtype_id = -1;
    int memtype_id = -1;
    int filetype_id = -1;
    int dataspace_id = -1;
    int dataset_id = -1;
    long[] dims = {DIM0};
    Grid[] grid_data = new Grid[DIM0];
    byte[] dset_data = null;

    // initialize data, initialize data via for loop
    for (int i = 0; i < DIM0; i++) {
      int north = (int) (gridList[i][0]/resolution);
      int east = (int) (gridList[i][1]/resolution);
      int intResolution = (int) resolution;
      grid_data[i] = new Grid();
      grid_data[i].cellcode = new String(intResolution + "m" + "N" + north + "E" + east);
      grid_data[i].x_coordinate = gridList[i][0];
      grid_data[i].y_coordinate = gridList[i][1];
      grid_data[i].attribute = gridList[i][2];
    }
    
    fname = name + ".hdf5";
    
    // create a new file using default properties
    try {
      file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC,
          HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create string datatype (fixed length, NUL padded)
    try {
      strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
      if (strtype_id >= 0)
        H5.H5Tset_size(strtype_id, Grid_Datatype.MAXSTRINGSIZE);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create compound datatype for memory
    // NOTE(review): the buffer written below comes from ObjectOutputStream
    // and is therefore BIG-endian, but memberMemTypes declares H5T_NATIVE_*
    // types; on a little-endian machine the short and the doubles are
    // byte-swapped when HDF5 interprets the buffer — which matches the
    // garbage values reported above. Declaring big-endian memory types
    // (H5T_STD_I16BE / H5T_IEEE_F64BE) should fix it.
    try {
      memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Grid_Datatype
          .getDataSize());
      if (memtype_id >= 0) {
        for (int indx = 0; indx < Grid_Datatype.numberMembers; indx++) {
          int type_id = Grid.datatypes.memberMemTypes[indx];
          if (type_id == HDF5Constants.H5T_C_S1)
            type_id = strtype_id;
          H5.H5Tinsert(memtype_id, Grid.datatypes.memberNames[indx],
              Grid_Datatype.getOffset(indx), type_id);
        }
      }
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create compound datatype for the file because standard
    // types we are using for file may have different sizes than
    // corresponding native types, we must manually calculate
    // offset of each member
    try {
      filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Grid_Datatype
          .getDataSize());
      if (filetype_id >= 0) {
        for (int indx = 0; indx < Grid_Datatype.numberMembers; indx++) {
          int type_id = Grid.datatypes.memberFileTypes[indx];
          if (type_id == HDF5Constants.H5T_C_S1)
            type_id = strtype_id;
          H5.H5Tinsert(filetype_id, Grid.datatypes.memberNames[indx],
              Grid_Datatype.getOffset(indx), type_id);
        }
      }
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create dataspace, setting maximum size to NULL sets maximum
    // size to be current size
    try {
      dataspace_id = H5.H5Screate_simple(RANK, dims, null);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // create dataset
    try {
      if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
        dataset_id = H5.H5Dcreate(file_id, DATASETNAME, filetype_id,
            dataspace_id, HDF5Constants.H5P_DEFAULT);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // write compound data to dataset
    // NOTE(review): the per-record flush makes ObjectOutputStream emit one
    // 2-byte block-data header (0x77 + length) per record, which is what
    // the "ObjectHeader" member accounts for; the 4-byte stream header is
    // stripped below via MAGICNUMBER. Fragile — packing a plain byte buffer
    // by hand would avoid all of this bookkeeping.
    try {
      ByteArrayOutputStream baos = new ByteArrayOutputStream(Grid_Datatype
          .getTotalDataSize(DIM0));
      ObjectOutputStream oout = new ObjectOutputStream(baos);
      for (int indx = 0; indx < DIM0; indx++) {
        grid_data[indx].writeExternal(oout);
        oout.flush();
      }
      oout.close();
      baos.close();
      dset_data = baos.toByteArray();
      byte[] write_data = new byte[dset_data.length-Grid_Datatype.MAGICNUMBER];
      for(int indx=0; indx<dset_data.length-Grid_Datatype.MAGICNUMBER;indx++)
        write_data[indx] = dset_data[indx+Grid_Datatype.MAGICNUMBER];
      if ((dataset_id >= 0) && (memtype_id >= 0))
        H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL,
            HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, write_data);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // end access to dataset and release resources used by it
    try {
      if (dataset_id >= 0)
        H5.H5Dclose(dataset_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // terminate access to data space
    try {
      if (dataspace_id >= 0)
        H5.H5Sclose(dataspace_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // terminate access to file type
    try {
      if (filetype_id >= 0)
        H5.H5Tclose(filetype_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // terminate access to mem type
    try {
      if (memtype_id >= 0)
        H5.H5Tclose(memtype_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    try {
      if (strtype_id >= 0)
        H5.H5Tclose(strtype_id);
    }
    catch (Exception e) {
      e.printStackTrace();
    }

    // close file
    // NOTE(review): the string literal below was broken across two lines by
    // e-mail wrapping; it must be rejoined onto one line to compile.
    try {
      if (file_id >= 0)
        H5.H5Fclose(file_id);
      JOptionPane.showMessageDialog(null, "Resampling and reprojecting to
Geographical Grid Systems complete!");
    }
    catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * using Java Externalization will add a two-byte object header in
   * stream, which needs to be called out in datatypes
   * @author Linda
   */
  private static class Grid_Datatype extends CompoundDatatype {
    static int numberMembers = 5;
    static int[] memberDims = {1, 1, 1, 1, 1};

    String[] memberNames = {"ObjectHeader", "Cell code",
        "x-Coordinate [m]", "y-Coordinate [m]", "Attribute"};
    // NOTE(review): NATIVE types here do not match the big-endian bytes
    // produced by writeExternal — see the note at the memory-type creation.
    int[] memberMemTypes = {HDF5Constants.H5T_NATIVE_SHORT,
HDF5Constants.H5T_C_S1,
        HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE,
HDF5Constants.H5T_NATIVE_DOUBLE};
    int[] memberFileTypes = {HDF5Constants.H5T_STD_I16BE,
HDF5Constants.H5T_C_S1,
        HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE,
HDF5Constants.H5T_IEEE_F64BE};
    static int[] memberStorage = {OBJHEADERSIZE, MAXSTRINGSIZE,
      DOUBLESIZE, DOUBLESIZE, DOUBLESIZE};

    /**
     * data size is storage size for members, not the object; Java
     * Externalization also adds a 4-byte "Magic Number" to the beginning
     * of the data stream
     * @param DIM0 number of records
     * @return total buffer size in bytes, including the stream header
     */
    static int getTotalDataSize(int DIM0) {
      int data_size = 0;
      for (int indx = 0; indx < numberMembers; indx++)
        data_size += memberStorage[indx] * memberDims[indx];
      return DIM0 * data_size + MAGICNUMBER;
    }

    /**
     * method to get data size
     * @return size in bytes of one compound record
     */
    static int getDataSize() {
      int data_size = 0;
      for (int indx = 0; indx < numberMembers; indx++)
        data_size += memberStorage[indx] * memberDims[indx];
      return data_size;
    }

    /**
     * method to get offset
     * @param memberItem index of the member whose byte offset is wanted
     * @return byte offset of the member within one record
     */
    static int getOffset(int memberItem) {
      int data_offset = 0;
      for (int indx = 0; indx < memberItem; indx++)
        data_offset += memberStorage[indx];
      return data_offset;
    }
  }

  /**
   * method to round double values at two decimal places
   * @param d value to round
   * @return d rounded to two decimal places (half-to-even via Math.rint)
   */
  public static double roundScale2(double d) { return Math.rint(d*100)/100.; } }