/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/************************************************************
  This example shows how to read and write bitfield
  datatypes to an attribute.  The program first writes bit
  fields to an attribute with a dataspace of DIM0xDIM1, then
  closes the file.  Next, it reopens the file, reads back
  the data, and outputs it to the screen.
 ************************************************************/

package examples.datatypes;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class H5Ex_T_BitAttribute {
    private static String FILENAME = "H5Ex_T_BitAttribute.h5";
    private static String DATASETNAME = "DS1";
    private static String ATTRIBUTENAME = "A1";
    private static final int DIM0 = 4;
    private static final int DIM1 = 7;
    private static final int RANK = 2;

    private static void CreateDataset() {
        long file_id = -1;
        long dataspace_id = -1;
        long dataset_id = -1;
        long attribute_id = -1;
        long[] dims = { DIM0, DIM1 };
        int[][] dset_data = new int[DIM0][DIM1];

        // Initialize data.
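        // Pack four 2-bit fields into each byte:
        //   bits 0-1 = field "A", bits 2-3 = field "B",
        //   bits 4-5 = field "C", bits 6-7 = field "D".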
        for (int indx = 0; indx < DIM0; indx++)
            for (int jndx = 0; jndx < DIM1; jndx++) {
                dset_data[indx][jndx] = 0;
                dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
                dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
                dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
                dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
            }

        // Create a new file using default properties.
        try {
            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataset with a scalar dataspace.
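        // The dataset itself holds no bitfield data; it exists only as the
        // object to which the bitfield attribute is attached.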
        try {
            dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
            if (dataspace_id >= 0) {
                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
                H5.H5Sclose(dataspace_id);
                dataspace_id = -1;
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataspace. Setting maximum size to NULL sets the maximum
        // size to be the current size.
        try {
            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the attribute and write the array data to it.
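        // The attribute's file datatype is an 8-bit big-endian bitfield
        // (H5T_STD_B8BE); the write below supplies data as the native
        // bitfield type (H5T_NATIVE_B8) and the library converts it.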
        try {
            if ((dataset_id >= 0) && (dataspace_id >= 0))
                attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Write the attribute data.
        try {
            if (attribute_id >= 0)
                H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // End access to the attribute and release resources used by it.
        try {
            if (attribute_id >= 0)
                H5.H5Aclose(attribute_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the data space.
        try {
            if (dataspace_id >= 0)
                H5.H5Sclose(dataspace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

    }

    private static void ReadDataset() {
        long file_id = -1;
        long dataspace_id = -1;
        long dataset_id = -1;
        long attribute_id = -1;
        long[] dims = { DIM0, DIM1 };
        int[][] dset_data;
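        // dims is refreshed below with the attribute's actual extents
        // via H5Sget_simple_extent_dims.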

        // Open an existing file.
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Open an existing dataset.
        try {
            if (file_id >= 0)
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

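        // Open the attribute attached to the dataset.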
        try {
            if (dataset_id >= 0)
                attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
                        HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Get dataspace and allocate memory for read buffer.
        try {
            if (attribute_id >= 0)
                dataspace_id = H5.H5Aget_space(attribute_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataspace_id >= 0)
                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Allocate a two-dimensional array to hold the elements of the
        // attribute, using the dimensions read from its dataspace.
        dset_data = new int[(int) dims[0]][(int) (dims[1])];

        // Read data.
        try {
            if (attribute_id >= 0)
                H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Output the data to the screen.
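        // Unpack the four 2-bit fields from each byte by shifting and masking.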
        System.out.println(ATTRIBUTENAME + ":");
        for (int indx = 0; indx < dims[0]; indx++) {
            System.out.print(" [");
            for (int jndx = 0; jndx < dims[1]; jndx++) {
                System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", ");
                System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", ");
                System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", ");
                System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}");
            }
            System.out.println("]");
        }
        System.out.println();

        // End access to the attribute and release resources used by it.
        try {
            if (attribute_id >= 0)
                H5.H5Aclose(attribute_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the data space.
        try {
            if (dataspace_id >= 0)
                H5.H5Sclose(dataspace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

    }

    public static void main(String[] args) {
        H5Ex_T_BitAttribute.CreateDataset();
        // Now we begin the read section of this example. Here we assume
        // the dataset and attribute have the same name and rank, but can
        // have any size. Therefore we must allocate a new array to read
        // in the data dynamically.
        H5Ex_T_BitAttribute.ReadDataset();
    }

}