/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/************************************************************
  This example shows how to read and write compound
  datatypes to an attribute.  The program first writes
  compound structures to an attribute with a dataspace of
  DIM0, then closes the file.  Next, it reopens the file,
  reads back the data, and outputs it to the screen.
 ************************************************************/

package examples.datatypes;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;

public class H5Ex_T_CompoundAttribute {
    private static String FILENAME = "H5Ex_T_CompoundAttribute.h5";
    private static String DATASETNAME = "DS1";
    private static String ATTRIBUTENAME = "A1";
    private static final int DIM0 = 4;
    private static final int RANK = 1;
    protected static final int INTEGERSIZE = 4;
    protected static final int DOUBLESIZE = 8;
    protected static final int MAXSTRINGSIZE = 80;

    // Using Java Externalization will add a two-byte object header in
    // the stream, which needs to be called out in the datatypes.
    static class Sensor_Datatype {
        static int numberMembers = 4;
        static int[] memberDims = { 1, 1, 1, 1 };

        static String[] memberNames = { "Serial number", "Location", "Temperature (F)", "Pressure (inHg)" };
        static long[] memberMemTypes = { HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
                HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE };
        static long[] memberFileTypes = { HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
                HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE };
        static int[] memberStorage = { INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE };
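        // memberStorage holds the packed byte size of each member; writeBuffer
        // and readBuffer rely on these sizes (through getOffset) to lay out one record.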

        // Data size is the storage size for the members, not the object.
        static long getTotalDataSize() {
            long data_size = 0;
            for (int indx = 0; indx < numberMembers; indx++)
                data_size += memberStorage[indx] * memberDims[indx];
            return DIM0 * data_size;
        }

        static long getDataSize() {
            long data_size = 0;
            for (int indx = 0; indx < numberMembers; indx++)
                data_size += memberStorage[indx] * memberDims[indx];
            return data_size;
        }

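        // Returns the byte offset of member memberItem within one packed record.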
        static int getOffset(int memberItem) {
            int data_offset = 0;
            for (int indx = 0; indx < memberItem; indx++)
                data_offset += memberStorage[indx];
            return data_offset;
        }
    }

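    // In-memory representation of one element of the compound type. Instances
    // are serialized to and from the packed byte layout described by Sensor_Datatype.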
    static class Sensor {
        public int serial_no;
        public String location;
        public double temperature;
        public double pressure;

        Sensor(int serial_no, String location, double temperature, double pressure) {
            this.serial_no = serial_no;
            this.location = location;
            this.temperature = temperature;
            this.pressure = pressure;
        }

        Sensor(ByteBuffer databuf, int dbposition) {
            readBuffer(databuf, dbposition);
        }

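        // Pack this record into the byte buffer at the given offset, truncating
        // the location string to MAXSTRINGSIZE bytes and zero-padding the rest
        // of the string field.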
        void writeBuffer(ByteBuffer databuf, int dbposition) {
            databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
            byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
            int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
            for (int ndx = 0; ndx < arraylen; ndx++)
                databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
            for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
                databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, (byte) 0);
            databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
            databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
        }

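        // Unpack one record from the byte buffer at the given offset; the
        // fixed-length string field is trimmed of its padding bytes.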
        void readBuffer(ByteBuffer databuf, int dbposition) {
            this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
            ByteBuffer stringbuf = databuf.duplicate();
            stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
            stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
            byte[] bytearr = new byte[stringbuf.remaining()];
            stringbuf.get(bytearr);
            this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
            this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
            this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
        }

        @Override
        public String toString() {
            return String.format("Serial number   : " + serial_no + "%n" +
                                 "Location        : " + location + "%n" +
                                 "Temperature (F) : " + temperature + "%n" +
                                 "Pressure (inHg) : " + pressure + "%n");
        }
    }

    private static void CreateDataset() {
        long file_id = -1;
        long strtype_id = -1;
        long memtype_id = -1;
        long filetype_id = -1;
        long dataspace_id = -1;
        long dataset_id = -1;
        long attribute_id = -1;
        long[] dims = { DIM0 };
        Sensor[] object_data = new Sensor[DIM0];
        byte[] dset_data = null;

        // Initialize data.
        object_data[0] = new Sensor(1153, "Exterior (static)", 53.23, 24.57);
        object_data[1] = new Sensor(1184, "Intake", 55.12, 22.95);
        object_data[2] = new Sensor(1027, "Intake manifold", 103.55, 31.23);
        object_data[3] = new Sensor(1313, "Exhaust manifold", 1252.89, 84.11);

        // Create a new file using default properties.
        try {
            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create string datatype.
        try {
            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
            if (strtype_id >= 0)
                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

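        // In both the memory and file compound types below, members declared
        // as H5T_C_S1 are replaced with the fixed-size string type created above.
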
        // Create the compound datatype for memory.
        try {
            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
            if (memtype_id >= 0) {
                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
                    long type_id = Sensor_Datatype.memberMemTypes[indx];
                    if (type_id == HDF5Constants.H5T_C_S1)
                        type_id = strtype_id;
                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
                            type_id);
                }
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the compound datatype for the file. Because the standard
        // types we are using for the file may have different sizes than
        // the corresponding native types, we must manually calculate the
        // offset of each member.
        try {
            filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
            if (filetype_id >= 0) {
                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
                    long type_id = Sensor_Datatype.memberFileTypes[indx];
                    if (type_id == HDF5Constants.H5T_C_S1)
                        type_id = strtype_id;
                    H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
                            type_id);
                }
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataset with a scalar dataspace.
        try {
            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
            if (dataspace_id >= 0) {
                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                H5.H5Sclose(dataspace_id);
                dataspace_id = -1;
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataspace. Setting maximum size to null sets the maximum
        // size to be the current size.
        try {
            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the attribute.
        try {
            if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
                attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Write the compound data.
        dset_data = new byte[(int) dims[0] * (int) Sensor_Datatype.getDataSize()];
        ByteBuffer outBuf = ByteBuffer.wrap(dset_data);
        outBuf.order(ByteOrder.nativeOrder());
        for (int indx = 0; indx < (int) dims[0]; indx++) {
            object_data[indx].writeBuffer(outBuf, indx * (int) Sensor_Datatype.getDataSize());
        }
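        // Write the packed buffer to the attribute using the in-memory compound type.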
        try {
            if ((attribute_id >= 0) && (memtype_id >= 0))
                H5.H5Awrite(attribute_id, memtype_id, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // End access to the attribute and release resources used by it.
        try {
            if (attribute_id >= 0)
                H5.H5Aclose(attribute_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the data space.
        try {
            if (dataspace_id >= 0)
                H5.H5Sclose(dataspace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the file type.
        try {
            if (filetype_id >= 0)
                H5.H5Tclose(filetype_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the mem type.
        try {
            if (memtype_id >= 0)
                H5.H5Tclose(memtype_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (strtype_id >= 0)
                H5.H5Tclose(strtype_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static void ReadDataset() {
        long file_id = -1;
        long strtype_id = -1;
        long memtype_id = -1;
        long dataspace_id = -1;
        long dataset_id = -1;
        long attribute_id = -1;
        long[] dims = { DIM0 };
        Sensor[] object_data2;
        byte[] dset_data;

        // Open an existing file.
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Open an existing dataset.
        try {
            if (file_id >= 0)
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Open the attribute attached to the dataset.
        try {
            if (dataset_id >= 0)
                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
                        HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Get the dataspace so the read buffer can be sized from the
        // attribute's extent and the packed record size.
        try {
            if (attribute_id >= 0)
                dataspace_id = H5.H5Aget_space(attribute_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataspace_id >= 0)
                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create string datatype.
        try {
            strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
            if (strtype_id >= 0)
                H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the compound datatype for memory.
        try {
            memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
            if (memtype_id >= 0) {
                for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
                    long type_id = Sensor_Datatype.memberMemTypes[indx];
                    if (type_id == HDF5Constants.H5T_C_S1)
                        type_id = strtype_id;
                    H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx], Sensor_Datatype.getOffset(indx),
                            type_id);
                }
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Allocate memory for the read buffer.
        dset_data = new byte[(int) dims[0] * (int) Sensor_Datatype.getDataSize()];

        object_data2 = new Sensor[(int) dims[0]];

        // Read data.
        try {
            if ((attribute_id >= 0) && (memtype_id >= 0))
                H5.H5Aread(attribute_id, memtype_id, dset_data);

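            // Wrap the raw attribute bytes in a native-order buffer and unpack
            // each fixed-size record into a Sensor object.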
            ByteBuffer inBuf = ByteBuffer.wrap(dset_data);
            inBuf.order(ByteOrder.nativeOrder());
            for (int indx = 0; indx < (int) dims[0]; indx++) {
                object_data2[indx] = new Sensor(inBuf, indx * (int) Sensor_Datatype.getDataSize());
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Output the data to the screen.
        for (int indx = 0; indx < dims[0]; indx++) {
            System.out.println(ATTRIBUTENAME + " [" + indx + "]:");
            System.out.println(object_data2[indx].toString());
        }
        System.out.println();

        try {
            if (attribute_id >= 0)
                H5.H5Aclose(attribute_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the data space.
        try {
            if (dataspace_id >= 0)
                H5.H5Sclose(dataspace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the mem type.
        try {
            if (memtype_id >= 0)
                H5.H5Tclose(memtype_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (strtype_id >= 0)
                H5.H5Tclose(strtype_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        H5Ex_T_CompoundAttribute.CreateDataset();
        // Now we begin the read section of this example. Here we assume
        // the attribute has the same name and rank, but can have any size.
        // Therefore we must allocate a new array to read in the data
        // dynamically.
        H5Ex_T_CompoundAttribute.ReadDataset();
    }

}