I have a 4D array that I need to write in chunks because I am receiving the data from a data acquisition system. I will generally know the size of the data space before receiving any of the data, so I am hoping to exploit that information. For example, the data coming off the microscope is a 512w x 384h "map" where each scan point on the map is actually a 1280w x 960h UInt8 image. There are also other scalar and vector values for each scan point, but I figure that if I can get the "image" part working then I can get the rest working as well.
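In HDF5 terms, the layout I have in mind is a single 4D dataset whose first two dimensions index the scan point and whose last two hold the pattern, chunked so that one chunk holds exactly one pattern. Just a sketch of the shapes, using the example sizes above:

  // Dataset dims: {scanWidth, scanHeight, patternWidth, patternHeight} = {512, 384, 1280, 960}
  // Chunk dims:   {1, 1, patternWidth, patternHeight}                  = {1, 1, 1280, 960}
  // One write per scan point, selecting the hyperslab that starts at {x, y, 0, 0}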
I have written some code to try to initialize a chunked dataset so that I can write one pattern at a time using hyperslab selection, but I am getting an error when trying to write the file. My code is below, followed by a sketch of the per-pattern write I intend to do; hopefully someone can show me where I have gone wrong.
void Start()
{
  // Open the HDF5 file, overwriting any existing file
  hid_t fid = H5Utilities::createFile(outputFileArg.toString());
  std::string patternName("KikuchiPatterns");

  int scanWidth = scanWidthArg.getValue();         // 512
  int scanHeight = scanHeightArg.getValue();       // 384
  int patternWidth = patternWidthArg.getValue();   // 1280
  int patternHeight = patternHeightArg.getValue(); // 960

  // Describe our data space: scan dimensions first, then pattern dimensions
  int rank = 4;
  std::vector<hsize_t> dims(4, 0);
  dims[0] = scanWidth;
  dims[1] = scanHeight;
  dims[2] = patternWidth;
  dims[3] = patternHeight;

  // Generate some dummy data that mimics a Kikuchi pattern coming from the scope
  std::vector<uint8_t> data(patternWidth * patternHeight, 255);

  herr_t err = H5ChunkUtils::Create2DExpandableDataset(fid, patternName, H5T_NATIVE_UINT8,
                                                       rank, &(dims.front()), &(data.front()));

  // Close the file now that the initial pattern has been written
  H5Fclose(fid);
}
herr_t H5ChunkUtils::Create2DExpandableDataset(hid_t file, const std::string &DATASETNAME,
hid_t hdf_type, int RANK, hsize_t* dims,
uint8_t* initialData)
{
  uint8_t fillvalue = 0; // fill value stored with the same type passed as hdf_type (UInt8 here)
  hsize_t maxdims[4] = {H5S_UNLIMITED, H5S_UNLIMITED, H5S_UNLIMITED, H5S_UNLIMITED};
  hsize_t count[4] = {1, 1, dims[2], dims[3]}; // Pull out the pattern size

  /* Create the data space with unlimited maximum dimensions. */
  hid_t dataspace = H5Screate_simple(RANK, dims, maxdims);

  hsize_t chunkDims[4] = {1, 1, dims[2], dims[3]}; // one chunk holds exactly one pattern
  int chunkRank = 4;

  /* Modify the dataset creation properties, i.e. enable chunking. */
  hid_t cparms = H5Pcreate(H5P_DATASET_CREATE);
  herr_t status = H5Pset_chunk(cparms, chunkRank, chunkDims);
  status = H5Pset_fill_value(cparms, hdf_type, &fillvalue);
  if (status < 0) { std::cout << "Error setting fill value" << std::endl; return status; }

  /* Create a new dataset within the file using the cparms creation properties. */
  hid_t dataset = H5Dcreate2(file, DATASETNAME.c_str(), hdf_type, dataspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
  if (dataset < 0) { std::cout << "Error creating dataset" << std::endl; return status; }

  /* Extend the dataset. This call assures that the dataset is at least one pattern in size. */
  hsize_t size[4] = {1, 1, dims[2], dims[3]};
  status = H5Dset_extent(dataset, size);
  if (status < 0) { std::cout << "Error setting extent" << std::endl; return status; }

  /* Select a hyperslab covering the first pattern. */
  hid_t filespace = H5Dget_space(dataset);
  hsize_t start[4] = {0, 0, 0, 0}; // write the initial data at the start of the data space
  hsize_t* stride = NULL; // NULL means a stride of 1 in every dimension
  hsize_t* block = NULL;  // NULL means a block size of 1 in every dimension
  status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, stride, count, block);
  if (status < 0) { std::cout << "Error selecting hyperslab" << std::endl; return status; }

  /* Write the data to the hyperslab. */
  status = H5Dwrite(dataset, hdf_type, dataspace, filespace, H5P_DEFAULT, initialData);
  if (status < 0) { std::cout << "Error writing data to hyperslab" << std::endl; return status; }

  H5Dclose(dataset);
  H5Sclose(dataspace);
  H5Sclose(filespace);
  H5Pclose(cparms);
  return status;
}
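For context, this is roughly the per-pattern write I intend to do once the dataset is set up (the sketch mentioned above). It is an untested sketch rather than working code: writeOnePattern() is just a stand-in name, and it assumes the dataset's current extent already covers the scan point (x, y) being written.

herr_t writeOnePattern(hid_t dataset, hsize_t x, hsize_t y,
                       hsize_t patternWidth, hsize_t patternHeight,
                       const uint8_t* pattern)
{
  // Memory dataspace describing exactly one pattern's worth of data
  hsize_t memDims[2] = {patternWidth, patternHeight};
  hid_t memspace = H5Screate_simple(2, memDims, NULL);

  // Select the {1, 1, patternWidth, patternHeight} hyperslab for scan point (x, y)
  hid_t filespace = H5Dget_space(dataset);
  hsize_t start[4] = {x, y, 0, 0};
  hsize_t count[4] = {1, 1, patternWidth, patternHeight};
  herr_t status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, count, NULL);
  if (status >= 0)
  {
    status = H5Dwrite(dataset, H5T_NATIVE_UINT8, memspace, filespace, H5P_DEFAULT, pattern);
  }

  H5Sclose(filespace);
  H5Sclose(memspace);
  return status;
}

Is that the right general shape for writing one pattern at a time, or am I misunderstanding how the memory and file dataspaces are supposed to line up?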
Thanks for any help
___________________________________________________________
Mike Jackson Principal Software Engineer
BlueQuartz Software Dayton, Ohio
mike.jackson@bluequartz.net www.bluequartz.net