/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5. The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/************************************************************
  This example shows how to create a chunked dataset. The
  program first writes integers in a hyperslab selection to
  a chunked dataset with dataspace dimensions of DIM_XxDIM_Y
  and chunk size of CHUNK_XxCHUNK_Y, then closes the file.
  Next, it reopens the file, reads back the data, and
  outputs it to the screen. Finally it reads the data again
  using a different hyperslab selection, and outputs
  the result to the screen.
23 ************************************************************/ 24 package examples.datasets; 25 26 import java.util.EnumSet; 27 import java.util.HashMap; 28 import java.util.Map; 29 30 import hdf.hdf5lib.H5; 31 import hdf.hdf5lib.HDF5Constants; 32 33 public class H5Ex_D_Chunk { 34 private static String FILENAME = "H5Ex_D_Chunk.h5"; 35 private static String DATASETNAME = "DS1"; 36 private static final int DIM_X = 6; 37 private static final int DIM_Y = 8; 38 private static final int CHUNK_X = 4; 39 private static final int CHUNK_Y = 4; 40 private static final int RANK = 2; 41 private static final int NDIMS = 2; 42 43 // Values for the status of space allocation 44 enum H5D_layout { 45 H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_VIRTUAL(3), H5D_NLAYOUTS(4); 46 private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>(); 47 48 static { 49 for (H5D_layout s : EnumSet.allOf(H5D_layout.class)) s.getCode()50 lookup.put(s.getCode(), s); 51 } 52 53 private int code; 54 H5D_layout(int layout_type)55 H5D_layout(int layout_type) { 56 this.code = layout_type; 57 } 58 getCode()59 public int getCode() { 60 return this.code; 61 } 62 get(int code)63 public static H5D_layout get(int code) { 64 return lookup.get(code); 65 } 66 } 67 writeChunk()68 private static void writeChunk() { 69 long file_id = -1; 70 long filespace_id = -1; 71 long dataset_id = -1; 72 long dcpl_id = -1; 73 long[] dims = { DIM_X, DIM_Y }; 74 long[] chunk_dims = { CHUNK_X, CHUNK_Y }; 75 int[][] dset_data = new int[DIM_X][DIM_Y]; 76 77 // Initialize data to "1", to make it easier to see the selections. 78 for (int indx = 0; indx < DIM_X; indx++) 79 for (int jndx = 0; jndx < DIM_Y; jndx++) 80 dset_data[indx][jndx] = 1; 81 82 // Print the data to the screen. 
83 System.out.println("Original Data:"); 84 for (int indx = 0; indx < DIM_X; indx++) { 85 System.out.print(" [ "); 86 for (int jndx = 0; jndx < DIM_Y; jndx++) 87 System.out.print(dset_data[indx][jndx] + " "); 88 System.out.println("]"); 89 } 90 System.out.println(); 91 92 // Create a new file using default properties. 93 try { 94 file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, 95 HDF5Constants.H5P_DEFAULT); 96 } 97 catch (Exception e) { 98 e.printStackTrace(); 99 } 100 101 // Create dataspace. Setting maximum size to NULL sets the maximum 102 // size to be the current size. 103 try { 104 filespace_id = H5.H5Screate_simple(RANK, dims, null); 105 } 106 catch (Exception e) { 107 e.printStackTrace(); 108 } 109 110 // Create the dataset creation property list. 111 try { 112 dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE); 113 } 114 catch (Exception e) { 115 e.printStackTrace(); 116 } 117 118 // Set the chunk size. 119 try { 120 if (dcpl_id >= 0) 121 H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims); 122 } 123 catch (Exception e) { 124 e.printStackTrace(); 125 } 126 127 // Create the chunked dataset. 128 try { 129 if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0)) 130 dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id, 131 HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT); 132 } 133 catch (Exception e) { 134 e.printStackTrace(); 135 } 136 137 // Define and select the first part of the hyperslab selection. 
138 long[] start = { 0, 0 }; 139 long[] stride = { 3, 3 }; 140 long[] count = { 2, 3 }; 141 long[] block = { 2, 2 }; 142 try { 143 if ((filespace_id >= 0)) 144 H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block); 145 } 146 catch (Exception e) { 147 e.printStackTrace(); 148 } 149 // Define and select the second part of the hyperslab selection, 150 // which is subtracted from the first selection by the use of 151 // H5S_SELECT_NOTB 152 block[0] = 1; 153 block[1] = 1; 154 try { 155 if ((filespace_id >= 0)) { 156 H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count, block); 157 158 // Write the data to the dataset. 159 if (dataset_id >= 0) 160 H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id, 161 HDF5Constants.H5P_DEFAULT, dset_data); 162 } 163 } 164 catch (Exception e) { 165 e.printStackTrace(); 166 } 167 168 // End access to the dataset and release resources used by it. 169 try { 170 if (dcpl_id >= 0) 171 H5.H5Pclose(dcpl_id); 172 } 173 catch (Exception e) { 174 e.printStackTrace(); 175 } 176 177 try { 178 if (dataset_id >= 0) 179 H5.H5Dclose(dataset_id); 180 } 181 catch (Exception e) { 182 e.printStackTrace(); 183 } 184 185 try { 186 if (filespace_id >= 0) 187 H5.H5Sclose(filespace_id); 188 } 189 catch (Exception e) { 190 e.printStackTrace(); 191 } 192 193 // Close the file. 194 try { 195 if (file_id >= 0) 196 H5.H5Fclose(file_id); 197 } 198 catch (Exception e) { 199 e.printStackTrace(); 200 } 201 } 202 readChunk()203 private static void readChunk() { 204 long file_id = -1; 205 long filespace_id = -1; 206 long dataset_id = -1; 207 long dcpl_id = -1; 208 int[][] dset_data = new int[DIM_X][DIM_Y]; 209 210 // Open an existing file. 211 try { 212 file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT); 213 } 214 catch (Exception e) { 215 e.printStackTrace(); 216 } 217 218 // Open an existing dataset. 
219 try { 220 if (file_id >= 0) 221 dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT); 222 } 223 catch (Exception e) { 224 e.printStackTrace(); 225 } 226 227 // Retrieve the dataset creation property list. 228 try { 229 if (dataset_id >= 0) 230 dcpl_id = H5.H5Dget_create_plist(dataset_id); 231 } 232 catch (Exception e) { 233 e.printStackTrace(); 234 } 235 236 // Print the storage layout. 237 try { 238 if (dcpl_id >= 0) { 239 int layout_type = H5.H5Pget_layout(dcpl_id); 240 System.out.print("Storage layout for " + DATASETNAME + " is: "); 241 switch (H5D_layout.get(layout_type)) { 242 case H5D_COMPACT: 243 System.out.println("H5D_COMPACT"); 244 break; 245 case H5D_CONTIGUOUS: 246 System.out.println("H5D_CONTIGUOUS"); 247 break; 248 case H5D_CHUNKED: 249 System.out.println("H5D_CHUNKED"); 250 break; 251 case H5D_VIRTUAL: 252 System.out.println("H5D_VIRTUAL"); 253 break; 254 case H5D_LAYOUT_ERROR: 255 break; 256 case H5D_NLAYOUTS: 257 break; 258 default: 259 break; 260 } 261 System.out.println(); 262 } 263 } 264 catch (Exception e) { 265 e.printStackTrace(); 266 } 267 268 // Read the data using the default properties. 269 try { 270 if (dataset_id >= 0) 271 H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL, 272 HDF5Constants.H5P_DEFAULT, dset_data); 273 } 274 catch (Exception e) { 275 e.printStackTrace(); 276 } 277 278 // Output the data to the screen. 279 System.out.println("Data as written to disk by hyberslabs:"); 280 for (int indx = 0; indx < DIM_X; indx++) { 281 System.out.print(" [ "); 282 for (int jndx = 0; jndx < DIM_Y; jndx++) 283 System.out.print(dset_data[indx][jndx] + " "); 284 System.out.println("]"); 285 } 286 System.out.println(); 287 288 // Initialize the read array. 289 for (int indx = 0; indx < DIM_X; indx++) 290 for (int jndx = 0; jndx < DIM_Y; jndx++) 291 dset_data[indx][jndx] = 0; 292 293 // Define and select the hyperslab to use for reading. 
294 try { 295 if (dataset_id >= 0) { 296 filespace_id = H5.H5Dget_space(dataset_id); 297 298 long[] start = { 0, 1 }; 299 long[] stride = { 4, 4 }; 300 long[] count = { 2, 2 }; 301 long[] block = { 2, 3 }; 302 303 if (filespace_id >= 0) { 304 H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block); 305 306 // Read the data using the previously defined hyperslab. 307 if ((dataset_id >= 0) && (filespace_id >= 0)) 308 H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id, 309 HDF5Constants.H5P_DEFAULT, dset_data); 310 } 311 } 312 } 313 catch (Exception e) { 314 e.printStackTrace(); 315 } 316 317 // Output the data to the screen. 318 System.out.println("Data as read from disk by hyberslab:"); 319 for (int indx = 0; indx < DIM_X; indx++) { 320 System.out.print(" [ "); 321 for (int jndx = 0; jndx < DIM_Y; jndx++) 322 System.out.print(dset_data[indx][jndx] + " "); 323 System.out.println("]"); 324 } 325 System.out.println(); 326 327 // End access to the dataset and release resources used by it. 328 try { 329 if (dcpl_id >= 0) 330 H5.H5Pclose(dcpl_id); 331 } 332 catch (Exception e) { 333 e.printStackTrace(); 334 } 335 336 try { 337 if (dataset_id >= 0) 338 H5.H5Dclose(dataset_id); 339 } 340 catch (Exception e) { 341 e.printStackTrace(); 342 } 343 344 try { 345 if (filespace_id >= 0) 346 H5.H5Sclose(filespace_id); 347 } 348 catch (Exception e) { 349 e.printStackTrace(); 350 } 351 352 // Close the file. 353 try { 354 if (file_id >= 0) 355 H5.H5Fclose(file_id); 356 } 357 catch (Exception e) { 358 e.printStackTrace(); 359 } 360 } 361 main(String[] args)362 public static void main(String[] args) { 363 H5Ex_D_Chunk.writeChunk(); 364 H5Ex_D_Chunk.readChunk(); 365 } 366 367 } 368