/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5. The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/************************************************************
  This example shows how to read and write data to a compact
  dataset. The program first writes integers to a compact
  dataset with dataspace dimensions of DIM_XxDIM_Y, then
  closes the file. Next, it reopens the file, reads back
  the data, and outputs it to the screen.
 ************************************************************/
package examples.datasets;

import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class H5Ex_D_Compact {
    private static final String FILENAME = "H5Ex_D_Compact.h5";
    private static final String DATASETNAME = "DS1";
    private static final int DIM_X = 4;
    private static final int DIM_Y = 7;
    private static final int RANK = 2;

    // Mirrors the native HDF5 H5D_layout_t enumeration so the integer code
    // returned by H5.H5Pget_layout can be mapped back to a symbolic name.
    enum H5D_layout {
        H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_VIRTUAL(3), H5D_NLAYOUTS(4);

        // Reverse lookup table: native layout code -> enum constant.
        private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();

        static {
            for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
                lookup.put(s.getCode(), s);
        }

        private final int code;

        H5D_layout(int layout_type) {
            this.code = layout_type;
        }

        public int getCode() {
            return this.code;
        }

        /**
         * Maps a native layout code to its enum constant.
         *
         * @param code the integer layout code from the HDF5 library
         * @return the matching constant, or null if the code is unknown
         */
        public static H5D_layout get(int code) {
            return lookup.get(code);
        }
    }

    /**
     * Creates FILENAME and writes a DIM_X x DIM_Y integer dataset using the
     * compact storage layout (data stored in the object header rather than
     * in separate chunks/contiguous storage). Each HDF5 call is wrapped in
     * its own try/catch so a failure in one step still allows the remaining
     * handles to be released.
     */
    private static void writeCompact() {
        long file_id = -1;
        long filespace_id = -1;
        long dataset_id = -1;
        long dcpl_id = -1;
        long[] dims = { DIM_X, DIM_Y };
        int[][] dset_data = new int[DIM_X][DIM_Y];

        // Initialize data.
        for (int indx = 0; indx < DIM_X; indx++)
            for (int jndx = 0; jndx < DIM_Y; jndx++)
                dset_data[indx][jndx] = indx * jndx - jndx;

        // Create a new file using default properties.
        try {
            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataspace. Setting maximum size to NULL sets the maximum
        // size to be the current size.
        try {
            filespace_id = H5.H5Screate_simple(RANK, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset creation property list.
        try {
            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Set the layout to compact.
        try {
            if (dcpl_id >= 0)
                H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset. We will use all default properties for this example.
        try {
            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Write the data to the dataset.
        try {
            if (dataset_id >= 0)
                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // End access to the dataset and release resources used by it.
        try {
            if (dcpl_id >= 0)
                H5.H5Pclose(dcpl_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (filespace_id >= 0)
                H5.H5Sclose(filespace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Reopens FILENAME read-only, prints the storage layout of DATASETNAME
     * (expected to be H5D_COMPACT), reads the integer data back, and prints
     * it to standard output. Handles are released in reverse order of
     * acquisition at the end.
     */
    private static void readCompact() {
        long file_id = -1;
        long dataset_id = -1;
        long dcpl_id = -1;
        int[][] dset_data = new int[DIM_X][DIM_Y];

        // Open file and dataset using the default properties.
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Open an existing dataset.
        try {
            if (file_id >= 0)
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Retrieve the dataset creation property list.
        try {
            if (dataset_id >= 0)
                dcpl_id = H5.H5Dget_create_plist(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Print the storage layout.
        try {
            if (dcpl_id >= 0) {
                int layout_type = H5.H5Pget_layout(dcpl_id);
                System.out.print("Storage layout for " + DATASETNAME + " is: ");
                switch (H5D_layout.get(layout_type)) {
                case H5D_COMPACT:
                    System.out.println("H5D_COMPACT");
                    break;
                case H5D_CONTIGUOUS:
                    System.out.println("H5D_CONTIGUOUS");
                    break;
                case H5D_CHUNKED:
                    System.out.println("H5D_CHUNKED");
                    break;
                case H5D_VIRTUAL:
                    System.out.println("H5D_VIRTUAL");
                    break;
                case H5D_LAYOUT_ERROR:
                    break;
                case H5D_NLAYOUTS:
                    break;
                default:
                    break;
                }
                System.out.println();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Read the data using the default properties.
        try {
            if (dataset_id >= 0)
                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Output the data to the screen.
        System.out.println("Data for " + DATASETNAME + " is: ");
        for (int indx = 0; indx < DIM_X; indx++) {
            System.out.print(" [ ");
            for (int jndx = 0; jndx < DIM_Y; jndx++)
                System.out.print(dset_data[indx][jndx] + " ");
            System.out.println("]");
        }
        System.out.println();

        // End access to the dataset and release resources used by it.
        try {
            if (dcpl_id >= 0)
                H5.H5Pclose(dcpl_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        H5Ex_D_Compact.writeCompact();
        H5Ex_D_Compact.readCompact();
    }

}