/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/************************************************************
  This example shows how to read and write data to a dataset
  using the Scale-Offset filter.  The program first checks
  whether the Scale-Offset filter is available.  If it is,
  the program writes integers to a dataset using the filter
  and closes the file.  Next, it reopens the file, reads
  back the data, and outputs the filter type and the maximum
  value in the dataset to the screen.
 ************************************************************/
package examples.datasets;

import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class H5Ex_D_Soint {

    private static String FILENAME = "H5Ex_D_Soint.h5";
    private static String DATASETNAME = "DS1";
    private static final int DIM_X = 32;
    private static final int DIM_Y = 64;
    private static final int CHUNK_X = 4;
    private static final int CHUNK_Y = 8;
    private static final int RANK = 2;
    private static final int NDIMS = 2;

    // Values for the filter identifiers defined by H5Z.
    enum H5Z_filter {
        H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
        H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
        H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
        H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
        H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
        H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
        H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
        H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
        H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
        H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);

        private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();

        static {
            for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
                lookup.put(s.getCode(), s);
        }

        private int code;

        H5Z_filter(int filter_type) {
            this.code = filter_type;
        }

        public int getCode() {
            return this.code;
        }

        public static H5Z_filter get(int code) {
            return lookup.get(code);
        }
    }

    private static boolean checkScaleoffsetFilter() {
        try {
            int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
            if (available == 0) {
                System.out.println("Scale-Offset filter not available.");
                return false;
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

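        // H5Zget_filter_info returns a bit field; both the encode and decode
        // bits must be set for the filter to be usable for writing and reading.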
        try {
            int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
            if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0)
                    || ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
                System.out.println("Scale-Offset filter not available for encoding and decoding.");
                return false;
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        return true;
    }

    private static void writeData() {
        long file_id = -1;
        long filespace_id = -1;
        long dataset_id = -1;
        long dcpl_id = -1;
        long[] dims = { DIM_X, DIM_Y };
        long[] chunk_dims = { CHUNK_X, CHUNK_Y };
        int[][] dset_data = new int[DIM_X][DIM_Y];

        // Initialize data.
        for (int indx = 0; indx < DIM_X; indx++)
            for (int jndx = 0; jndx < DIM_Y; jndx++)
                dset_data[indx][jndx] = indx * jndx - jndx;
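        // Each element is jndx * (indx - 1), so the largest value should be
        // 63 * 30 = 1890; readData() prints the maximum as a sanity check.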

        // Create a new file using the default properties.
        try {
            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
        try {
            filespace_id = H5.H5Screate_simple(RANK, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset creation property list, add the Scale-Offset
        // filter and set the chunk size.
        try {
            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
            if (dcpl_id >= 0) {
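                // H5Z_SO_INT selects the integer form of the Scale-Offset
                // filter; H5Z_SO_INT_MINBITS_DEFAULT lets the library compute
                // the minimum number of bits needed for each chunk.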
                H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT, HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT);
                H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset.
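        // The on-disk type is little-endian 32-bit integer (H5T_STD_I32LE);
        // HDF5 converts from the native in-memory integer type on write.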
        try {
            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Write the data to the dataset.
        try {
            if (dataset_id >= 0)
                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close and release resources.
        try {
            if (dcpl_id >= 0)
                H5.H5Pclose(dcpl_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (filespace_id >= 0)
                H5.H5Sclose(filespace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close file
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static void readData() {
        long file_id = -1;
        long dataset_id = -1;
        long dcpl_id = -1;
        int[][] dset_data = new int[DIM_X][DIM_Y];

        // Open file using the default properties.
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Open dataset using the default properties.
        try {
            if (file_id >= 0)
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Retrieve dataset creation property list.
        try {
            if (dataset_id >= 0)
                dcpl_id = H5.H5Dget_create_plist(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Retrieve and print the filter type. Here we only retrieve the
        // first filter because we know that we only added one filter.
        try {
            if (dcpl_id >= 0) {
                // The Java library requires valid filter_name and cd_values
                // arrays even when their contents are not used.
                int[] flags = { 0 };
                long[] cd_nelmts = { 1 };
                int[] cd_values = { 0 };
                String[] filter_name = { "" };
                int[] filter_config = { 0 };
                int filter_type = -1;

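                // The sixth argument (120) is the size of the buffer used for
                // the returned filter name.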
                filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
                        filter_config);
                System.out.print("Filter type is: ");
                switch (H5Z_filter.get(filter_type)) {
                case H5Z_FILTER_DEFLATE:
                    System.out.println("H5Z_FILTER_DEFLATE");
                    break;
                case H5Z_FILTER_SHUFFLE:
                    System.out.println("H5Z_FILTER_SHUFFLE");
                    break;
                case H5Z_FILTER_FLETCHER32:
                    System.out.println("H5Z_FILTER_FLETCHER32");
                    break;
                case H5Z_FILTER_SZIP:
                    System.out.println("H5Z_FILTER_SZIP");
                    break;
                case H5Z_FILTER_NBIT:
                    System.out.println("H5Z_FILTER_NBIT");
                    break;
                case H5Z_FILTER_SCALEOFFSET:
                    System.out.println("H5Z_FILTER_SCALEOFFSET");
                    break;
                default:
                    System.out.println("H5Z_FILTER_ERROR");
                }
                System.out.println();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Read the data using the default properties.
        try {
            if (dataset_id >= 0)
                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Find the maximum value in the dataset, to verify that it was read correctly.
        int max = dset_data[0][0];
        for (int indx = 0; indx < DIM_X; indx++)
            for (int jndx = 0; jndx < DIM_Y; jndx++) {
                if (max < dset_data[indx][jndx])
                    max = dset_data[indx][jndx];
            }

        // Print the maximum value.
        System.out.println("Maximum value in " + DATASETNAME + " is: " + max);

        // End access to the dataset and release resources used by it.
        try {
            if (dcpl_id >= 0)
                H5.H5Pclose(dcpl_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {

        // Check if Scale-Offset compression is available and can be used
        // for both compression and decompression. Normally we do not
        // perform error checking in these examples for the sake of
        // clarity, but in this case we will make an exception because this
        // filter is an optional part of the HDF5 library.
        if (H5Ex_D_Soint.checkScaleoffsetFilter()) {
            H5Ex_D_Soint.writeData();
            H5Ex_D_Soint.readData();
        }
    }

}