help

There is one dataspace in the file.
The dataspace is 5185*4967 in size.
The file size is 150 MByte.
I’d like to read this data.

Metadata that checked by HDFView
Band 1 BB Image Pixel Values (800)
32-bit floating-point, 5185 x 4967
Number of attributes = 1

I made out the below code.

//////////
public static void main(String arag[]){
H5FileRead h5FileRead = new H5FileRead();

String fileName = File.separator + “COMS_GOCI_L2B_GA_20100820051641.he5”;
String grpPath = “/HDFEOS/GRIDS/Image Data/Data Fields”;

int DIM_X = 1000;
int DIM_Y = 1000;
Float[][] dset_data = new Float[DIM_X][DIM_Y];

int file_id = -1;
int filespace_id = -1;
int dataset_id = -1;
int dcpl_id = -1;

h5FileRead.setFileName(fileName);

H5File file = null;
Dataset dataSet = null;

file = new H5File(h5FileRead.getFileName(), H5File.READ);

// Open an existing file.
try {
file_id = H5.H5Fopen(h5FileRead.getFileName(), HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
}catch (Exception e) {
e.printStackTrace();
}

try{
Object rootObj = (Object)file.get(grpPath);
System.out.println(file.get(grpPath));
int count = 0;
Group rootGropu = null;

if(rootObj instanceof Group){
System.out.println(“Group”);

rootGropu = (Group)rootObj;
int gid = rootGropu.open();
count = rootGropu.getMemberList().size();
}else if(rootObj == null){
System.out.println(“null”);
}else{
System.out.println(“else”);
}

if(count > 0){
for(int i=0 ; i<count ; i++){
dataSet = (Dataset)rootGropu.getMemberList().get(i);
dataSet.init();

long hepSize = Runtime.getRuntime().totalMemory();
long[] dims = dataSet.getDims();

System.out.println(“dataSet:” + dataSet);
System.out.println(“size:”+ dataSet.getHeight() + “::” + dataSet.getWidth());

// Open an existing dataset.
try{
if(file_id >= 0){
dataset_id = H5.H5Dopen(file_id, grpPath + “/” + dataSet.toString());
}
}catch(Exception e){
e.printStackTrace();
}

int[] data = null;

try{
if(dataset_id >= 0){
filespace_id = H5.H5Dget_space(dataset_id);

 int rank = dataSet.getRank();   
 int[] selectedIndex = dataSet.getSelectedIndex();   
 long[] start = dataSet.getStartDims();   
 long[] stride = dataSet.getStride();   
 long[] cnt = dataSet.getSelectedDims();   
 long[] block = stride;   

 if(filespace_id >= 0){   
  /*---------------important*/   
  H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, cnt, block);   

  // Read the data using the previously defined hyperslab.   
  if((dataset_id >= 0) && (filespace_id >= 0)){   
   H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_FLOAT, HDF5Constants.H5S_ALL, filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);   
  }   
  /*--------------------------*/   
 }   
}   

 // Output the data to the screen.   
 System.out.println("Data as read from disk by hyberslab:");   
 for (int indx = 0; indx < DIM_X; indx++) {   
 System.out.print(indx + ": [ ");   
 for (int jndx = 0; jndx < DIM_Y; jndx++)   
  System.out.print(dset_data[indx][jndx] + " ");   
  System.out.println("]");   
 }   
 System.out.println();   
}catch(OutOfMemoryError err){   
 err.printStackTrace();   
 throw new RuntimeException("Out of memory error.") ;   
}   

}
}

}catch(Exception ex){
log.error(ex.toString());
}finally{
try{
if(filespace_id >= 0)
H5.H5Sclose(filespace_id);
}catch(Exception e){
e.printStackTrace();
}

try{
if(dataset_id >= 0)
H5.H5Dclose(dataset_id);
}catch(Exception e){
e.printStackTrace();
}

// Close the file.
try{
if(file_id >= 0)
H5.H5Fclose(file_id);
}catch(Exception e){
e.printStackTrace();
}

if(file != null){
file.close();
}
}
}
/////////////////////////////////////////////////////
like this.

I get the error from the "important" part marked above.
How do we fix it?

Thanks

What error message did you get?

You may want to use 1D array instead of 2D array, e.g.
Float[] dset_data = new Float[DIM_X*DIM_Y];

2D+ arrays require more memory and have bad performance since
Java arrays and C arrays of 2D or more do not match well in the JNI
level.

After you get the 1D array from the file, you can either directly use the
1D array by calculating the location in the 2D space, e.g.
A2D[i][j] = A1D[i*DIM_Y + j]; (each row has DIM_Y elements)

Or you can use System.arraycopy() to copy the 1D array to a 2D array.

Thanks
--pc

···

On 10/3/2010 9:04 PM, 몰라 wrote:

There are one dataspace in the one file.
The dataspace is 5185*4967 size.
The file size is 150 MByte.
I'd like to read this data.

Metadata that checked by HDFView
Band 1 BB Image Pixel Values (800)
32-bit floating-point, 5185 x 4967
Number of attributes = 1

I made out the below code.

//////////
// NOTE(review): quoted from the original post. Reads a 2-D 32-bit float
// dataset from an HDF-EOS5 file using the HDF object layer (H5File,
// Group, Dataset) for browsing plus raw JNI calls (H5.*) for the read.
// Reading into a boxed Float[][] buffer is what triggers the
// OutOfMemoryError discussed in this thread.
public static void main(String arag[]){
// Project helper that carries the file name.
H5FileRead h5FileRead = new H5FileRead();

String fileName = File.separator + "COMS_GOCI_L2B_GA_20100820051641.he5";
String grpPath = "/HDFEOS/GRIDS/Image Data/Data Fields";

// Size of the subset the author intends to read.
int DIM_X = 1000;
int DIM_Y = 1000;
// Boxed 2-D buffer: every element is a Float object. 2-D object arrays
// do not map to the flat C buffers used at the JNI level.
Float[][] dset_data = new Float[DIM_X][DIM_Y];

// JNI handles; -1 means "not opened".
int file_id = -1;
int filespace_id = -1;
int dataset_id = -1;
int dcpl_id = -1; // NOTE(review): never used

h5FileRead.setFileName(fileName);

H5File file = null;
Dataset dataSet = null;

// Object-layer handle, used only to browse the group structure.
file = new H5File(h5FileRead.getFileName(), H5File.READ);

// Open an existing file.
try {
file_id = H5.H5Fopen(h5FileRead.getFileName(),
HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
}catch (Exception e) {
e.printStackTrace();
}

try{
// Look up the group that holds the data fields.
Object rootObj = (Object)file.get(grpPath);
System.out.println(file.get(grpPath));
int count = 0;
Group rootGropu = null;

if(rootObj instanceof Group){
System.out.println("Group");

rootGropu = (Group)rootObj;
int gid = rootGropu.open(); // NOTE(review): gid is never closed
count = rootGropu.getMemberList().size();
}else if(rootObj == null){
System.out.println("null");
}else{
System.out.println("else");
}

if(count > 0){
for(int i=0 ; i<count ; i++){
// Assumes every member is a Dataset -- TODO confirm; a sub-group here
// would throw ClassCastException.
dataSet = (Dataset)rootGropu.getMemberList().get(i);
dataSet.init();

long hepSize = Runtime.getRuntime().totalMemory(); // NOTE(review): unused
long[] dims = dataSet.getDims(); // NOTE(review): unused

System.out.println("dataSet:" + dataSet);
System.out.println("size:"+ dataSet.getHeight() + "::" +
dataSet.getWidth());

// Open an existing dataset.
try{
if(file_id >= 0){
dataset_id = H5.H5Dopen(file_id, grpPath + "/" + dataSet.toString());
}
}catch(Exception e){
e.printStackTrace();
}

int[] data = null; // NOTE(review): unused

try{
if(dataset_id >= 0){
filespace_id = H5.H5Dget_space(dataset_id);

// Selection parameters taken from the object layer's defaults;
// presumably these cover the full 5185 x 4967 extent -- TODO confirm.
int rank = dataSet.getRank();
int[] selectedIndex = dataSet.getSelectedIndex();
long[] start = dataSet.getStartDims();
long[] stride = dataSet.getStride();
long[] cnt = dataSet.getSelectedDims();
long[] block = stride; // NOTE(review): block == stride is unusual; block is normally null (unit blocks)

if(filespace_id >= 0){
/*---------------important*/
H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET,
start, stride, cnt, block);

// Read the data using the previously defined hyperslab.
// NOTE(review): the memory space is H5S_ALL while the file space is the
// hyperslab, and the destination is a boxed Float[][]; this is where
// the OutOfMemoryError is reported.
if((dataset_id >= 0) && (filespace_id >= 0)){
H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_FLOAT,
HDF5Constants.H5S_ALL, filespace_id, HDF5Constants.H5P_DEFAULT,
dset_data);
}
/*--------------------------*/
}
}

// Output the data to the screen.
System.out.println("Data as read from disk by hyberslab:");
for (int indx = 0; indx < DIM_X; indx++) {
System.out.print(indx + ": [ ");
for (int jndx = 0; jndx < DIM_Y; jndx++)
System.out.print(dset_data[indx][jndx] + " ");
System.out.println("]");
}
System.out.println();
}catch(OutOfMemoryError err){
err.printStackTrace();
// NOTE(review): the cause (err) is dropped here, losing the trace.
throw new RuntimeException("Out of memory error.") ;
}
}
}

}catch(Exception ex){
log.error(ex.toString());
}finally{
// Release JNI handles; each close is guarded so one failure does not
// skip the rest.
try{
if(filespace_id >= 0)
H5.H5Sclose(filespace_id);
}catch(Exception e){
e.printStackTrace();
}

try{
if(dataset_id >= 0)
H5.H5Dclose(dataset_id);
}catch(Exception e){
e.printStackTrace();
}

// Close the file.
try{
if(file_id >= 0)
H5.H5Fclose(file_id);
}catch(Exception e){
e.printStackTrace();
}

if(file != null){
file.close();
}
}
}
/////////////////////////////////////////////////////
like this.

I get the error from the "important" part marked above.
How do we fix it?

Thanks

<mailto:ji-hoang@hanmail.net>

_______________________________________________
Hdf-forum is for HDF software users discussion.
Hdf-forum@hdfgroup.org
http://mail.hdfgroup.org/mailman/listinfo/hdf-forum_hdfgroup.org

Thanks Thanks

But what I want, I want to read the data have already been created.

It is a 2D type data.

Error messages are out-of-memory.

Should I read it as a 1D array?

Should I first read the data and then copy it with System.arraycopy()?

It does not matter how many dimensions you have in the file. You can always
read your data into a 1D memory buffer. A 1D array requires a lot less
memory.

Also try to increase your java heap size.

Thanks
--pc

···

On 10/4/2010 10:12 PM, 몰라 wrote:

Thanks Thanks

But what I want, I want to read the data have already been created.

It is a 2D type data.

Error messages are out-of-memory.

Does it read 1D array type?

System.arraycopy () to copy the first data I do not not read?

<mailto:ji-hoang@hanmail.net>

_______________________________________________
Hdf-forum is for HDF software users discussion.
Hdf-forum@hdfgroup.org
http://mail.hdfgroup.org/mailman/listinfo/hdf-forum_hdfgroup.org