/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/************************************************************
  This example shows how to read and write opaque datatypes
  to a dataset.  The program first writes opaque data to a
  dataset with a dataspace of DIM0, then closes the file.
  Next, it reopens the file, reads back the data, and
  outputs it to the screen.
 ************************************************************/

package examples.datatypes;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class H5Ex_T_Opaque {
    private static String FILENAME = "H5Ex_T_Opaque.h5";
    private static String DATASETNAME = "DS1";
    private static final int DIM0 = 4;
    private static final int LEN = 7;
    private static final int RANK = 1;

    private static void CreateDataset() {
        long file_id = -1;
        long dataspace_id = -1;
        long datatype_id = -1;
        long dataset_id = -1;
        long[] dims = { DIM0 };
        byte[] dset_data = new byte[DIM0 * LEN];
        byte[] str_data = { 'O', 'P', 'A', 'Q', 'U', 'E' };

        // Initialize data. Each element holds the string "OPAQUE" followed
        // by the element's index as an ASCII digit.
        for (int indx = 0; indx < DIM0; indx++) {
            for (int jndx = 0; jndx < LEN - 1; jndx++)
                dset_data[jndx + indx * LEN] = str_data[jndx];
            dset_data[LEN - 1 + indx * LEN] = (byte) (indx + '0');
        }

        // Create a new file using default properties.
        try {
            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create opaque datatype and set the tag to something appropriate.
        // For this example we will write and view the data as a character
        // array.
        try {
            datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
            if (datatype_id >= 0)
                H5.H5Tset_tag(datatype_id, "Character array");
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataspace. Setting maximum size to null sets the maximum
        // size to be the current size.
        try {
            dataspace_id = H5.H5Screate_simple(RANK, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset and write the opaque data to it.
        try {
            if ((file_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
                        HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Write the opaque data to the dataset.
        try {
            if ((dataset_id >= 0) && (datatype_id >= 0))
                H5.H5Dwrite(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // End access to the dataset and release resources used by it.
        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the data space.
        try {
            if (dataspace_id >= 0)
                H5.H5Sclose(dataspace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

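        // Terminate access to the datatype.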
        try {
            if (datatype_id >= 0)
                H5.H5Tclose(datatype_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

    }

    private static void ReadDataset() {
        long file_id = -1;
        long datatype_id = -1;
        long dataspace_id = -1;
        long dataset_id = -1;
        long type_len = -1;
        long[] dims = { DIM0 };
        byte[] dset_data;
        String tag_name = null;

        // Open an existing file.
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Open an existing dataset.
        try {
            if (file_id >= 0)
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Get datatype and properties for the datatype.
        try {
            if (dataset_id >= 0)
                datatype_id = H5.H5Dget_type(dataset_id);
            if (datatype_id >= 0) {
                type_len = H5.H5Tget_size(datatype_id);
                tag_name = H5.H5Tget_tag(datatype_id);
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Get dataspace and allocate memory for read buffer.
        try {
            if (dataset_id >= 0)
                dataspace_id = H5.H5Dget_space(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataspace_id >= 0)
                H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Allocate buffer.
        dset_data = new byte[(int) (dims[0] * type_len)];

        // Read data.
        try {
            if ((dataset_id >= 0) && (datatype_id >= 0))
                H5.H5Dread(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Output the data to the screen.
        System.out.println("Datatype tag for " + DATASETNAME + " is: \"" + tag_name + "\"");
        for (int indx = 0; indx < dims[0]; indx++) {
            System.out.print(DATASETNAME + "[" + indx + "]: ");
            for (int jndx = 0; jndx < type_len; jndx++) {
                char temp = (char) dset_data[jndx + indx * (int)type_len];
                System.out.print(temp);
            }
            System.out.println("");
        }
        System.out.println();


        // End access to the dataset and release resources used by it.
        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Terminate access to the data space.
        try {
            if (dataspace_id >= 0)
                H5.H5Sclose(dataspace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

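        // Terminate access to the datatype.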
        try {
            if (datatype_id >= 0)
                H5.H5Tclose(datatype_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

    }

    public static void main(String[] args) {
        H5Ex_T_Opaque.CreateDataset();
        // Now we begin the read section of this example. Here we assume
        // the dataset has the same name and rank, but can have any size.
        // Therefore we must allocate a new array to hold the data that
        // is read in.
        H5Ex_T_Opaque.ReadDataset();
    }

}