1 /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
2 * Copyright by The HDF Group. *
3 * Copyright by the Board of Trustees of the University of Illinois. *
4 * All rights reserved. *
5 * *
6 * This file is part of HDF5. The full HDF5 copyright notice, including *
7 * terms governing use, modification, and redistribution, is contained in *
8 * the COPYING file, which can be found at the root of the source code *
9 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
10 * If you do not have access to either file, you may request a copy from *
11 * help@hdfgroup.org. *
12 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
13
14 /*
15 * Programmer: Robb Matzke <matzke@llnl.gov>
16 * Tuesday, December 9, 1997
17 *
18 * Purpose: Tests the dataset interface (H5D)
19 */
20
21 #include <stdlib.h>
22 #include <time.h>
23
24 #include "h5test.h"
25 #include "H5srcdir.h"
26 #ifdef H5_HAVE_SZLIB_H
27 # include "szlib.h"
28 #endif
29
30 /*
31 * This file needs to access private datatypes from the H5Z package.
32 */
33 #define H5Z_PACKAGE
34 #include "H5Zpkg.h"
35
36
37 const char *FILENAME[] = {
38 "dataset",
39 "compact_dataset",
40 "dset_offset",
41 "max_compact_dataset",
42 "simple",
43 "set_local",
44 "random_chunks",
45 "huge_chunks",
46 "chunk_cache",
47 "big_chunk",
48 "chunk_expand",
49 "copy_dcpl_newfile",
50 "layout_extend",
51 "zero_chunk",
52 NULL
53 };
54 #define FILENAME_BUF_SIZE 1024
55 #define KB 1024
56
57 #define FILE_DEFLATE_NAME "deflate.h5"
58
59 /* Dataset names for testing filters */
60 #define DSET_DEFAULT_NAME "default"
61 #define DSET_CHUNKED_NAME "chunked"
62 #define DSET_COMPACT_NAME "compact"
63 #define DSET_SIMPLE_IO_NAME "simple_io"
64 #define DSET_USERBLOCK_IO_NAME "userblock_io"
65 #define DSET_COMPACT_IO_NAME "compact_io"
66 #define DSET_COMPACT_MAX_NAME "max_compact"
67 #define DSET_COMPACT_MAX2_NAME "max_compact_2"
68 #define DSET_CONV_BUF_NAME "conv_buf"
69 #define DSET_TCONV_NAME "tconv"
70 #define DSET_DEFLATE_NAME "deflate"
71 #define DSET_SHUFFLE_NAME "shuffle"
72 #define DSET_FLETCHER32_NAME "fletcher32"
73 #define DSET_FLETCHER32_NAME_2 "fletcher32_2"
74 #define DSET_FLETCHER32_NAME_3 "fletcher32_3"
75 #define DSET_SHUF_DEF_FLET_NAME "shuffle+deflate+fletcher32"
76 #define DSET_SHUF_DEF_FLET_NAME_2 "shuffle+deflate+fletcher32_2"
77 #ifdef H5_HAVE_FILTER_SZIP
78 #define DSET_SZIP_NAME "szip"
79 #define DSET_SHUF_SZIP_FLET_NAME "shuffle+szip+fletcher32"
80 #define DSET_SHUF_SZIP_FLET_NAME_2 "shuffle+szip+fletcher32_2"
81 #endif /* H5_HAVE_FILTER_SZIP */
82
83 #define DSET_BOGUS_NAME "bogus"
84 #define DSET_MISSING_NAME "missing"
85 #define DSET_CAN_APPLY_NAME "can_apply"
86 #define DSET_CAN_APPLY_NAME2 "can_apply2"
87 #ifdef H5_HAVE_FILTER_SZIP
88 #define DSET_CAN_APPLY_SZIP_NAME "can_apply_szip"
89 #endif /* H5_HAVE_FILTER_SZIP */
90 #define DSET_SET_LOCAL_NAME "set_local"
91 #define DSET_SET_LOCAL_NAME_2 "set_local_2"
92 #define DSET_ONEBYTE_SHUF_NAME "onebyte_shuffle"
93 #define DSET_NBIT_INT_NAME "nbit_int"
94 #define DSET_NBIT_FLOAT_NAME "nbit_float"
95 #define DSET_NBIT_DOUBLE_NAME "nbit_double"
96 #define DSET_NBIT_ARRAY_NAME "nbit_array"
97 #define DSET_NBIT_COMPOUND_NAME "nbit_compound"
98 #define DSET_NBIT_COMPOUND_NAME_2 "nbit_compound_2"
99 #define DSET_NBIT_COMPOUND_NAME_3 "nbit_compound_3"
100 #define DSET_NBIT_INT_SIZE_NAME "nbit_int_size"
101 #define DSET_NBIT_FLT_SIZE_NAME "nbit_flt_size"
102 #define DSET_SCALEOFFSET_INT_NAME "scaleoffset_int"
103 #define DSET_SCALEOFFSET_INT_NAME_2 "scaleoffset_int_2"
104 #define DSET_SCALEOFFSET_FLOAT_NAME "scaleoffset_float"
105 #define DSET_SCALEOFFSET_FLOAT_NAME_2 "scaleoffset_float_2"
106 #define DSET_SCALEOFFSET_DOUBLE_NAME "scaleoffset_double"
107 #define DSET_SCALEOFFSET_DOUBLE_NAME_2 "scaleoffset_double_2"
108 #define DSET_COMPARE_DCPL_NAME "compare_dcpl"
109 #define DSET_COMPARE_DCPL_NAME_2 "compare_dcpl_2"
110 #define DSET_COPY_DCPL_NAME_1 "copy_dcpl_1"
111 #define DSET_COPY_DCPL_NAME_2 "copy_dcpl_2"
112 #define COPY_DCPL_EXTFILE_NAME "ext_file"
113 #define DSET_DEPREC_NAME "deprecated"
114 #define DSET_DEPREC_NAME_CHUNKED "deprecated_chunked"
115 #define DSET_DEPREC_NAME_COMPACT "deprecated_compact"
116 #define DSET_DEPREC_NAME_FILTER "deprecated_filter"
117
118 #define USER_BLOCK 1024
119 #define SIXTY_FOUR_KB 65536
120
121 /* Temporary filter IDs used for testing */
122 #define H5Z_FILTER_BOGUS 305
123 #define H5Z_FILTER_CORRUPT 306
124 #define H5Z_FILTER_CAN_APPLY_TEST 307
125 #define H5Z_FILTER_SET_LOCAL_TEST 308
126 #define H5Z_FILTER_DEPREC 309
127 #define H5Z_FILTER_EXPAND 310
128 #define H5Z_FILTER_CAN_APPLY_TEST2 311
129
130 /* Flags for testing filters */
131 #define DISABLE_FLETCHER32 0
132 #define ENABLE_FLETCHER32 1
133 #define DATA_CORRUPTED 1
134 #define DATA_NOT_CORRUPTED 0
135
136 /* Parameters for the "set local" test */
137 #define BOGUS2_PERM_NPARMS 2 /* Number of "permanent" parameters */
138 #define BOGUS2_PARAM_1 13 /* (No particular meaning, just for checking value) */
139 #define BOGUS2_PARAM_2 35 /* (No particular meaning, just for checking value) */
140 #define BOGUS2_ALL_NPARMS 4 /* Total number of parameter = permanent + "local" parameters */
141
142 /* Dimensionality for conversion buffer test */
143 #define DIM1 100 /* Dim. Size of data member # 1 */
144 #define DIM2 5000 /* Dim. Size of data member # 2 */
145 #define DIM3 10 /* Dim. Size of data member # 3 */
146
147 /* Parameters for internal filter test */
148 #define FILTER_CHUNK_DIM1 2
149 #define FILTER_CHUNK_DIM2 25
150 #define FILTER_HS_OFFSET1 7
151 #define FILTER_HS_OFFSET2 30
152 #define FILTER_HS_SIZE1 4
153 #define FILTER_HS_SIZE2 50
154
155 /* Names for noencoder test */
156 #ifdef H5_HAVE_FILTER_SZIP
157 #define NOENCODER_FILENAME "noencoder.h5"
158 #define NOENCODER_COPY_FILENAME "noencoder.h5.copy"
159 #define NOENCODER_TEST_DATASET "noencoder_tdset.h5"
160 #define NOENCODER_SZIP_DATASET "noencoder_szip_dset.h5"
161 #define NOENCODER_SZIP_SHUFF_FLETCH_DATASET "noencoder_szip_shuffle_fletcher_dset.h5"
162 #endif /* H5_HAVE_FILTER_SZIP */
163
164 /* Names for zero-dim test */
165 #define ZERODIM_DATASET "zerodim"
166
167 /* Parameters for zero-dim test */
168 #define MISSING_CHUNK_DATASET "missing_chunk"
169 #define MISSING_CHUNK_DIM 100
170
171 /* Names for random chunks test */
172 #define NPOINTS 50
173
174 /* Parameters for huge chunks test */
175 #define HUGE_DATASET "Dataset"
176 #define HUGE_DIM ((hsize_t)16 * 1024 * 1024 * 1024)
177 #define HUGE_CHUNK_DIM ((hsize_t)2 * 1024 * 1024 * 1024)
178 #define TOO_HUGE_CHUNK_DIM ((hsize_t)4 * 1024 * 1024 * 1024)
179 #define HUGE_DATASET2 "Dataset2"
180 #define HUGE_DIM2_0 ((hsize_t)16 * 1024)
181 #define HUGE_DIM2_1 ((hsize_t)16 * 1024)
182 #define HUGE_DIM2_2 ((hsize_t)16 * 1024)
183 #define HUGE_CHUNK_DIM2_0 ((hsize_t)2 * 1024)
184 #define HUGE_CHUNK_DIM2_1 ((hsize_t)1024)
185 #define HUGE_CHUNK_DIM2_2 ((hsize_t)1024)
186 #define TOO_HUGE_CHUNK_DIM2_0 ((hsize_t)4 * 1024)
187 #define TOO_HUGE_CHUNK_DIM2_1 ((hsize_t)1024)
188 #define TOO_HUGE_CHUNK_DIM2_2 ((hsize_t)1024)
189
190 /* Parameters for testing bypassing chunk cache */
191 #define BYPASS_DATASET1 "Dset1"
192 #define BYPASS_DATASET2 "Dset2"
193 #define BYPASS_DIM 1000
194 #define BYPASS_CHUNK_DIM 500
195 #define BYPASS_FILL_VALUE 7
196
197 /* Declarations for test_idx_compatible() */
198 #define FIXED_IDX_FILE "fixed_idx.h5"
199 #define DSET "dset"
200 #define DSET_FILTER "dset_filter"
201
202 /* Shared global arrays */
203 #define DSET_DIM1 100
204 #define DSET_DIM2 200
205 int points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
206 double points_dbl[DSET_DIM1][DSET_DIM2], check_dbl[DSET_DIM1][DSET_DIM2];
207
208 /* Local prototypes for filter functions */
209 static size_t filter_bogus(unsigned int flags, size_t cd_nelmts,
210 const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf);
211 static htri_t can_apply_bogus(hid_t dcpl_id, hid_t type_id, hid_t space_id);
212 static herr_t set_local_bogus2(hid_t dcpl_id, hid_t type_id, hid_t space_id);
213 static size_t filter_bogus2(unsigned int flags, size_t cd_nelmts,
214 const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf);
215 static size_t filter_bogus3(unsigned int flags, size_t cd_nelmts,
216 const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf);
217 static size_t filter_corrupt(unsigned int flags, size_t cd_nelmts,
218 const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf);
219 static size_t filter_expand(unsigned int flags, size_t cd_nelmts,
220 const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf);
221
222
223 /*-------------------------------------------------------------------------
224 * Function: test_create
225 *
226 * Purpose: Attempts to create a dataset.
227 *
228 * Return: Success: 0
229 *
230 * Failure: -1
231 *
232 * Programmer: Robb Matzke
233 * Tuesday, December 9, 1997
234 *
235 *-------------------------------------------------------------------------
236 */
237 static herr_t
test_create(hid_t file)238 test_create(hid_t file)
239 {
240 hid_t dataset, space, small_space, create_parms;
241 hsize_t dims[2], small_dims[2];
242 herr_t status;
243 hsize_t csize[2];
244
245 TESTING("create, open, close");
246
247 /* Create the data space */
248 dims[0] = 256;
249 dims[1] = 512;
250 space = H5Screate_simple(2, dims, NULL);
251 assert(space>=0);
252
253 /* Create a small data space for compact dataset */
254 small_dims[0] = 16;
255 small_dims[1] = 8;
256 small_space = H5Screate_simple(2, small_dims, NULL);
257 assert(space>=0);
258
259 /*
260 * Create a dataset using the default dataset creation properties. We're
261 * not sure what they are, so we won't check.
262 */
263 dataset = H5Dcreate2(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
264 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
265 if(dataset < 0) goto error;
266
267 /* Close the dataset */
268 if(H5Dclose(dataset) < 0) goto error;
269
270 /* Add a comment to the dataset */
271 status = H5Oset_comment_by_name(file, DSET_DEFAULT_NAME, "This is a dataset", H5P_DEFAULT);
272 if(status < 0) goto error;
273
274 /*
275 * Try creating a dataset that already exists. This should fail since a
276 * dataset can only be created once. Temporarily turn off error
277 * reporting.
278 */
279 H5E_BEGIN_TRY {
280 dataset = H5Dcreate2(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
281 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
282 } H5E_END_TRY;
283 if(dataset >= 0) {
284 H5_FAILED();
285 puts(" Library allowed overwrite of existing dataset.");
286 goto error;
287 }
288
289 /*
290 * Open the dataset we created above and then close it. This is how
291 * existing datasets are accessed.
292 */
293 if(H5Fflush(file, H5F_SCOPE_GLOBAL) < 0) goto error;
294 if((dataset = H5Dopen2(file, DSET_DEFAULT_NAME, H5P_DEFAULT)) < 0) goto error;
295 if(H5Dclose(dataset) < 0) goto error;
296
297 /*
298 * Try opening a non-existent dataset. This should fail since new datasets
299 * cannot be created with this function. Temporarily turn off error
300 * reporting.
301 */
302 H5E_BEGIN_TRY {
303 dataset = H5Dopen2(file, "does_not_exist", H5P_DEFAULT);
304 } H5E_END_TRY;
305 if(dataset >= 0) {
306 H5_FAILED();
307 puts(" Opened a non-existent dataset.");
308 goto error;
309 }
310
311 /*
312 * Create a new dataset that uses chunked storage instead of the default
313 * layout.
314 */
315 create_parms = H5Pcreate(H5P_DATASET_CREATE);
316 assert(create_parms >= 0);
317
318 /* Attempt to create a dataset with invalid chunk sizes */
319 csize[0] = dims[0]*2;
320 csize[1] = dims[1]*2;
321 status = H5Pset_chunk(create_parms, 2, csize);
322 assert(status >= 0);
323 H5E_BEGIN_TRY {
324 dataset = H5Dcreate2(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space,
325 H5P_DEFAULT, create_parms, H5P_DEFAULT);
326 } H5E_END_TRY;
327 if(dataset >= 0) {
328 H5_FAILED();
329 puts(" Opened a dataset with incorrect chunking parameters.");
330 goto error;
331 }
332
333 csize[0] = 5;
334 csize[1] = 100;
335 status = H5Pset_chunk(create_parms, 2, csize);
336 assert(status >= 0);
337
338 dataset = H5Dcreate2(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space,
339 H5P_DEFAULT, create_parms, H5P_DEFAULT);
340 if(dataset < 0) goto error;
341 H5Pclose(create_parms);
342
343 /* Test dataset address. Should be undefined. */
344 if(H5Dget_offset(dataset)!=HADDR_UNDEF) goto error;
345
346 /*
347 * Close the chunked dataset.
348 */
349 if(H5Dclose(dataset) < 0) goto error;
350
351 /*
352 * Create a compact dataset, then close it.
353 */
354 create_parms = H5Pcreate(H5P_DATASET_CREATE);
355 assert(create_parms >= 0);
356 status = H5Pset_layout(create_parms, H5D_COMPACT);
357 assert(status >= 0);
358 status = H5Pset_alloc_time(create_parms, H5D_ALLOC_TIME_EARLY);
359 assert(status >= 0);
360
361 dataset = H5Dcreate2(file, DSET_COMPACT_NAME, H5T_NATIVE_DOUBLE,
362 small_space, H5P_DEFAULT, create_parms, H5P_DEFAULT);
363 if(dataset < 0) goto error;
364 H5Pclose(create_parms);
365 if(H5Dclose(dataset) < 0) goto error;
366
367 PASSED();
368 return 0;
369
370 error:
371 return -1;
372 }
373
374
375 /*-------------------------------------------------------------------------
376 * Function: test_simple_io
377 *
378 * Purpose: Tests simple I/O. That is, reading and writing a complete
379 * multi-dimensional array without data type or data space
380 * conversions, without compression, and stored contiguously.
381 *
382 * Return: Success: 0
383 *
384 * Failure: -1
385 *
386 * Programmer: Robb Matzke
387 * Wednesday, December 10, 1997
388 *
389 *-------------------------------------------------------------------------
390 */
391 static herr_t
test_simple_io(const char * env_h5_drvr,hid_t fapl)392 test_simple_io(const char *env_h5_drvr, hid_t fapl)
393 {
394 char filename[FILENAME_BUF_SIZE];
395 hid_t file, dataset, space, xfer;
396 int i, j, n;
397 hsize_t dims[2];
398 void *tconv_buf = NULL;
399 int f;
400 haddr_t offset;
401 int rdata[DSET_DIM1][DSET_DIM2];
402
403 TESTING("simple I/O");
404
405 /* Can't run this test with multi-file VFDs */
406 if(HDstrcmp(env_h5_drvr, "split") && HDstrcmp(env_h5_drvr, "multi") && HDstrcmp(env_h5_drvr, "family")) {
407 h5_fixname(FILENAME[4], fapl, filename, sizeof filename);
408
409 /* Initialize the dataset */
410 for(i = n = 0; i < DSET_DIM1; i++)
411 for(j = 0; j < DSET_DIM2; j++)
412 points[i][j] = n++;
413
414 if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
415 goto error;
416
417 /* Create the data space */
418 dims[0] = DSET_DIM1;
419 dims[1] = DSET_DIM2;
420 if((space = H5Screate_simple(2, dims, NULL)) < 0) goto error;
421
422 /* Create a small conversion buffer to test strip mining */
423 tconv_buf = HDmalloc((size_t)1000);
424 xfer = H5Pcreate(H5P_DATASET_XFER);
425 assert(xfer>=0);
426 if(H5Pset_buffer (xfer, (size_t)1000, tconv_buf, NULL) < 0) goto error;
427
428 /* Create the dataset */
429 if((dataset = H5Dcreate2(file, DSET_SIMPLE_IO_NAME, H5T_NATIVE_INT, space,
430 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
431
432 /* Test dataset address. Should be undefined. */
433 if(H5Dget_offset(dataset) != HADDR_UNDEF) goto error;
434
435 /* Write the data to the dataset */
436 if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, points) < 0)
437 goto error;
438
439 /* Test dataset address in file. Open the same file as a C file, seek
440 * the data position as H5Dget_offset points to, read the dataset, and
441 * compare it with the data written in.*/
442 if((offset=H5Dget_offset(dataset))==HADDR_UNDEF) goto error;
443
444 /* Read the dataset back */
445 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, check) < 0)
446 goto error;
447
448 /* Check that the values read are the same as the values written */
449 for(i = 0; i < DSET_DIM1; i++) {
450 for(j = 0; j < DSET_DIM2; j++) {
451 if(points[i][j] != check[i][j]) {
452 H5_FAILED();
453 printf(" Read different values than written.\n");
454 printf(" At index %d,%d\n", i, j);
455 goto error;
456 }
457 }
458 }
459
460 if(H5Pclose (xfer) < 0) goto error;
461 if(H5Dclose(dataset) < 0) goto error;
462 if(H5Fclose(file) < 0) goto error;
463
464 f = HDopen(filename, O_RDONLY, 0);
465 HDlseek(f, (off_t)offset, SEEK_SET);
466 HDread(f, rdata, sizeof(int)*DSET_DIM1*DSET_DIM2);
467
468 /* Check that the values read are the same as the values written */
469 for(i = 0; i < DSET_DIM1; i++) {
470 for(j = 0; j < DSET_DIM2; j++) {
471 if(points[i][j] != rdata[i][j]) {
472 H5_FAILED();
473 printf(" Read different values than written.\n");
474 printf(" At index %d,%d\n", i, j);
475 goto error;
476 }
477 }
478 }
479
480 HDclose(f);
481
482 HDfree (tconv_buf);
483 PASSED();
484 } /* end if */
485 else {
486 SKIPPED();
487 puts(" Current VFD doesn't support continuous address space");
488 } /* end else */
489
490 return 0;
491
492 error:
493 return -1;
494 }
495
496
497 /*-------------------------------------------------------------------------
498 * Function: test_userblock_offset
499 *
500 * Purpose: Tests H5Dget_offset when user block exists.
501 *
502 * Return: Success: 0
503 *
504 * Failure: -1
505 *
506 * Programmer: Raymond Lu
507 * Wednesday, November 27, 2002
508 *
509 *-------------------------------------------------------------------------
510 */
511 static herr_t
test_userblock_offset(const char * env_h5_drvr,hid_t fapl)512 test_userblock_offset(const char *env_h5_drvr, hid_t fapl)
513 {
514 char filename[FILENAME_BUF_SIZE];
515 hid_t file, fcpl, dataset, space;
516 int i, j;
517 hsize_t dims[2];
518 int f;
519 haddr_t offset;
520 int rdata[DSET_DIM1][DSET_DIM2];
521
522 TESTING("dataset offset with user block");
523
524 /* Can't run this test with multi-file VFDs */
525 if(HDstrcmp(env_h5_drvr, "split") && HDstrcmp(env_h5_drvr, "multi") && HDstrcmp(env_h5_drvr, "family")) {
526 h5_fixname(FILENAME[2], fapl, filename, sizeof filename);
527
528 if((fcpl=H5Pcreate(H5P_FILE_CREATE)) < 0) goto error;
529 if(H5Pset_userblock(fcpl, (hsize_t)USER_BLOCK) < 0) goto error;
530
531 if((file=H5Fcreate(filename, H5F_ACC_TRUNC, fcpl, fapl)) < 0)
532 goto error;
533
534 /* Create the data space */
535 dims[0] = DSET_DIM1;
536 dims[1] = DSET_DIM2;
537 if((space = H5Screate_simple(2, dims, NULL)) < 0) goto error;
538
539 /* Create the dataset */
540 if((dataset = H5Dcreate2(file, DSET_USERBLOCK_IO_NAME, H5T_NATIVE_INT, space,
541 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
542
543 /* Write the data to the dataset */
544 if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0)
545 goto error;
546
547 /* Test dataset address in file. Open the same file as a C file, seek
548 * the data position as H5Dget_offset points to, read the dataset, and
549 * compare it with the data written in.*/
550 if((offset = H5Dget_offset(dataset)) == HADDR_UNDEF) goto error;
551
552 if(H5Dclose(dataset) < 0) goto error;
553 if(H5Fclose(file) < 0) goto error;
554
555 f = HDopen(filename, O_RDONLY, 0);
556 HDlseek(f, (off_t)offset, SEEK_SET);
557 HDread(f, rdata, sizeof(int)*DSET_DIM1*DSET_DIM2);
558
559 /* Check that the values read are the same as the values written */
560 for(i = 0; i < DSET_DIM1; i++) {
561 for(j = 0; j < DSET_DIM2; j++) {
562 if(points[i][j] != rdata[i][j]) {
563 H5_FAILED();
564 printf(" Read different values than written.\n");
565 printf(" At index %d,%d\n", i, j);
566 goto error;
567 }
568 }
569 }
570
571 HDclose(f);
572
573 PASSED();
574 } /* end if */
575 else {
576 SKIPPED();
577 puts(" Current VFD doesn't support continuous address space");
578 } /* end else */
579
580 return 0;
581
582 error:
583 return -1;
584 }
585
586
587 /*-------------------------------------------------------------------------
588 * Function: test_compact_io
589 *
590 * Purpose: Tests compact dataset I/O. That is, reading and writing a
591 * complete multi-dimensional array without data type or data
592 * space conversions, without compression, and store in
593 * compact dataset.
594 *
595 * Return: Success: 0
596 *
597 * Failure: -1
598 *
599 * Programmer: Raymond Lu
600 * August 8, 2002
601 *
602 *-------------------------------------------------------------------------
603 */
604 static herr_t
test_compact_io(hid_t fapl)605 test_compact_io(hid_t fapl)
606 {
607 hid_t file, dataset, space, plist;
608 hsize_t dims[2];
609 int wbuf[16][8], rbuf[16][8];
610 char filename[FILENAME_BUF_SIZE];
611 int i, j, n;
612
613 TESTING("compact dataset I/O");
614
615 /* Initialize data */
616 n = 0;
617 for(i = 0; i < 16; i++)
618 for(j = 0; j < 8; j++)
619 wbuf[i][j] = n++;
620
621 /* Create a small data space for compact dataset */
622 dims[0] = 16;
623 dims[1] = 8;
624 if((space = H5Screate_simple(2, dims, NULL)) < 0) TEST_ERROR
625
626 /* Create a file */
627 h5_fixname(FILENAME[1], fapl, filename, sizeof filename);
628 if((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
629 TEST_ERROR
630
631 /* Create property list for compact dataset creation */
632 if((plist = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR
633 if(H5Pset_layout(plist, H5D_COMPACT) < 0) TEST_ERROR
634 if(H5Pset_alloc_time(plist, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR
635
636 /* Create and write to a compact dataset */
637 if((dataset = H5Dcreate2(file, DSET_COMPACT_IO_NAME, H5T_NATIVE_INT, space, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0)
638 TEST_ERROR
639
640 /* Test dataset address. Should be undefined. */
641 if(H5Dget_offset(dataset) != HADDR_UNDEF) TEST_ERROR
642
643 if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0)
644 TEST_ERROR
645
646 /* Test dataset address. Should be undefined. */
647 if(H5Dget_offset(dataset)!=HADDR_UNDEF) TEST_ERROR
648
649 /* Close file */
650 if(H5Sclose(space) < 0) TEST_ERROR
651 if(H5Pclose(plist) < 0) TEST_ERROR
652 if(H5Dclose(dataset) < 0) TEST_ERROR
653 if(H5Fclose(file) < 0) TEST_ERROR
654
655 /*
656 * Open the file and check data
657 */
658 if((file = H5Fopen(filename, H5F_ACC_RDONLY, fapl)) < 0)
659 TEST_ERROR
660 if((dataset = H5Dopen2(file, DSET_COMPACT_IO_NAME, H5P_DEFAULT)) < 0)
661 TEST_ERROR
662 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
663 TEST_ERROR
664
665 /* Check that the values read are the same as the values written */
666 for(i = 0; i < 16; i++)
667 for(j = 0; j < 8; j++)
668 if(rbuf[i][j] != wbuf[i][j]) {
669 H5_FAILED();
670 printf(" Read different values than written.\n");
671 printf(" At index %d,%d\n", i, j);
672 printf(" wbuf[%d][%d]=%d\n", i, j, wbuf[i][j]);
673 printf(" rbuf[%d][%d]=%d\n", i, j, rbuf[i][j]);
674 goto error;
675 } /* end if */
676
677 if(H5Dclose(dataset) < 0) TEST_ERROR
678 if(H5Fclose(file) < 0) TEST_ERROR
679
680 PASSED();
681 return 0;
682
683 error:
684 return -1;
685 }
686
687
688 /*-------------------------------------------------------------------------
689 * Function: test_max_compact
690 *
691 * Purpose: Tests compact dataset of maximal size.
692 *
693 * Return: Success: 0
694 * Failure: -1
695 *
696 * Programmer: Raymond Lu
697 * August 8, 2002
698 *
699 *-------------------------------------------------------------------------
700 */
701 static herr_t
test_max_compact(hid_t fapl)702 test_max_compact(hid_t fapl)
703 {
704 hid_t file = -1;
705 hid_t dataset = -1;
706 hid_t space = -1;
707 hid_t plist = -1;
708 hsize_t dims[1];
709 size_t compact_size;
710 int *wbuf = NULL;
711 int *rbuf = NULL;
712 char filename[FILENAME_BUF_SIZE];
713 int n;
714 size_t u;
715
716 TESTING("compact dataset of maximal size");
717
718 /* Test compact dataset of size 64KB-64 */
719
720 /* Initialize data */
721 compact_size = (SIXTY_FOUR_KB - 64) / sizeof(int);
722
723 if(NULL == (wbuf = (int *)HDmalloc(sizeof(int) * compact_size)))
724 TEST_ERROR
725 if(NULL == (rbuf = (int *)HDmalloc(sizeof(int) * compact_size)))
726 TEST_ERROR
727
728 n = 0;
729 for(u = 0; u < compact_size; u++)
730 wbuf[u] = n++;
731
732 /* Create a small data space for compact dataset */
733 dims[0] = (hsize_t)compact_size;
734 if((space = H5Screate_simple(1, dims, NULL)) < 0)
735 FAIL_STACK_ERROR
736
737 /* Create a file */
738 h5_fixname(FILENAME[3], fapl, filename, sizeof filename);
739 if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
740 FAIL_STACK_ERROR
741
742 /* Create property list for compact dataset creation */
743 if((plist = H5Pcreate(H5P_DATASET_CREATE)) < 0)
744 FAIL_STACK_ERROR
745 if(H5Pset_layout(plist, H5D_COMPACT) < 0)
746 FAIL_STACK_ERROR
747
748 /* Create and write to a compact dataset */
749 if((dataset = H5Dcreate2(file, DSET_COMPACT_MAX_NAME, H5T_NATIVE_INT, space, H5P_DEFAULT, plist, H5P_DEFAULT)) < 0)
750 FAIL_STACK_ERROR
751
752 if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0)
753 FAIL_STACK_ERROR
754
755 /* Close file */
756 if(H5Sclose(space) < 0)
757 FAIL_STACK_ERROR
758 if(H5Pclose(plist) < 0)
759 FAIL_STACK_ERROR
760 if(H5Dclose(dataset) < 0)
761 FAIL_STACK_ERROR
762 if(H5Fclose(file) < 0)
763 FAIL_STACK_ERROR
764
765 /*
766 * Open the file and check data
767 */
768 if((file = H5Fopen(filename, H5F_ACC_RDONLY, fapl)) < 0)
769 FAIL_STACK_ERROR
770 if((dataset = H5Dopen2(file, DSET_COMPACT_MAX_NAME, H5P_DEFAULT)) < 0)
771 FAIL_STACK_ERROR
772 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
773 FAIL_STACK_ERROR
774
775 /* Check that the values read are the same as the values written */
776 for(u = 0; u < compact_size; u++)
777 if(rbuf[u] != wbuf[u]) {
778 H5_FAILED();
779 printf(" Read different values than written.\n");
780 printf(" At index %u\n", (unsigned)u);
781 goto error;
782 } /* end if */
783
784 if(H5Dclose(dataset) < 0)
785 FAIL_STACK_ERROR
786 if(H5Fclose(file) < 0)
787 FAIL_STACK_ERROR
788 HDfree(wbuf);
789 wbuf = NULL;
790 HDfree(rbuf);
791 rbuf = NULL;
792
793 /* Test compact dataset of size 64KB */
794
795 /* Create a data space for compact dataset */
796 compact_size = SIXTY_FOUR_KB / sizeof(int);
797 dims[0] = (hsize_t)compact_size;
798 if((space = H5Screate_simple(1, dims, NULL)) < 0)
799 FAIL_STACK_ERROR
800
801 /* Open file */
802 if((file = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0)
803 goto error;
804
805 /* Create property list for compact dataset creation */
806 if((plist = H5Pcreate(H5P_DATASET_CREATE)) < 0)
807 FAIL_STACK_ERROR
808 if(H5Pset_layout(plist, H5D_COMPACT) < 0)
809 FAIL_STACK_ERROR
810
811 /* Create and write to a compact dataset */
812 H5E_BEGIN_TRY {
813 H5Dcreate2(file, DSET_COMPACT_MAX2_NAME, H5T_NATIVE_INT, space, H5P_DEFAULT, plist, H5P_DEFAULT);
814 } H5E_END_TRY;
815
816 /* Close file */
817 if(H5Sclose(space) < 0)
818 FAIL_STACK_ERROR
819 if(H5Pclose(plist) < 0)
820 FAIL_STACK_ERROR
821 if(H5Fclose(file) < 0)
822 FAIL_STACK_ERROR
823
824 PASSED();
825 return 0;
826
827 error:
828 if(wbuf)
829 HDfree(wbuf);
830 if(rbuf)
831 HDfree(rbuf);
832
833 H5E_BEGIN_TRY {
834 /* Close file */
835 H5Sclose(space);
836 H5Pclose(plist);
837 H5Dclose(dataset);
838 H5Fclose(file);
839 } H5E_END_TRY;
840
841 return -1;
842 } /* end test_max_compact() */
843
844
845 /*-------------------------------------------------------------------------
846 * Function: test_layout_extend
847 *
848 * Purpose: Verify that the creation of extendible dataset with dataspace:
849 * cur_dims < max_dims (max_dims can be fixed size or H5S_UNLIMITED)
850 * will behave as follows:
851 * H5D_COMPACT layout: fail
852 * H5D_CONTIGUOUS layout: fail
853 * H5D_CHUNKED layout: succeed
854 *
855 * Return: Success: 0
856 * Failure: -1
857 *
858 * Programmer: Vailin Choi; August 2010
859 *
860 *-------------------------------------------------------------------------
861 */
862 static herr_t
test_layout_extend(hid_t fapl)863 test_layout_extend(hid_t fapl)
864 {
865 char filename[FILENAME_BUF_SIZE]; /* File name */
866 hid_t fid = -1; /* File id */
867 hid_t sid_fix = -1, sid_unlim = -1; /* Dataspace id */
868 hid_t dcpl_compact = -1, dcpl_contig = -1, dcpl_chunked = -1; /* Dataset creation property list id */
869 hid_t did_fixed = -1, did_unlim = -1; /* Dataset id */
870 hsize_t cur_size[1] = {10}; /* Current size of dataspace */
871 hsize_t max_unlim[1] = {H5S_UNLIMITED}; /* Maximum size of dataspace (unlimited) */
872 hsize_t max_fix[1] = {100}; /* Maximum size of dataspace (fixed) */
873 hsize_t chunk_dim[1] = {10}; /* Chunk size */
874
875 TESTING("extendible dataset with various layout");
876
877 /* Create a file */
878 h5_fixname(FILENAME[12], fapl, filename, sizeof filename);
879 if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
880 FAIL_STACK_ERROR
881
882 /* Create dataspace */
883 if((sid_fix = H5Screate_simple(1, cur_size, max_fix)) < 0)
884 FAIL_STACK_ERROR
885 if((sid_unlim = H5Screate_simple(1, cur_size, max_unlim)) < 0)
886 FAIL_STACK_ERROR
887
888 /* Create property list for compact dataset creation */
889 if((dcpl_compact = H5Pcreate(H5P_DATASET_CREATE)) < 0)
890 FAIL_STACK_ERROR
891 if(H5Pset_layout(dcpl_compact, H5D_COMPACT) < 0)
892 FAIL_STACK_ERROR
893
894 /* Create dataset with extendible dataspace (fixed max_dims) should fail */
895 H5E_BEGIN_TRY {
896 if(H5Dcreate2(fid, "compact", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_compact, H5P_DEFAULT) != FAIL)
897 TEST_ERROR
898 } H5E_END_TRY;
899
900 /* Create dataset with extendible dataspace (unlimited max_dims) should fail */
901 H5E_BEGIN_TRY {
902 if(H5Dcreate2(fid, "compact", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_compact, H5P_DEFAULT) != FAIL)
903 TEST_ERROR
904 } H5E_END_TRY;
905
906 /* Create property list for contiguous dataset creation */
907 if((dcpl_contig = H5Pcreate(H5P_DATASET_CREATE)) < 0)
908 FAIL_STACK_ERROR
909 if((H5Pset_layout(dcpl_contig, H5D_CONTIGUOUS)) < 0)
910 FAIL_STACK_ERROR
911
912 /* Create dataset with extendible dataspace (fixed max_dims) should fail */
913 H5E_BEGIN_TRY {
914 if(H5Dcreate2(fid, "contig", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_contig, H5P_DEFAULT) != FAIL)
915 TEST_ERROR
916 } H5E_END_TRY;
917
918 /* Create dataset with extendible dataspace (unlimited max_dims) should fail*/
919 H5E_BEGIN_TRY {
920 if(H5Dcreate2(fid, "contig", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_contig, H5P_DEFAULT) != FAIL)
921 TEST_ERROR
922 } H5E_END_TRY;
923
924 /* Create property list for chunked dataset creation */
925 if((dcpl_chunked = H5Pcreate(H5P_DATASET_CREATE)) < 0)
926 FAIL_STACK_ERROR
927 if(H5Pset_layout(dcpl_chunked, H5D_CHUNKED) < 0)
928 FAIL_STACK_ERROR
929 if(H5Pset_chunk(dcpl_chunked, 1, chunk_dim) < 0) FAIL_STACK_ERROR
930
931 /* Create dataset with extendible dataspace (fixed max_dims) should succeed */
932 if((did_fixed = H5Dcreate2(fid, "chunked_fixed", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_chunked, H5P_DEFAULT)) < 0)
933 FAIL_STACK_ERROR
934
935 /* Create dataset with extendible dataspace (unlimited max_dims) should succeed */
936 if((did_unlim = H5Dcreate2(fid, "chunked_unlim", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_chunked, H5P_DEFAULT)) < 0)
937 FAIL_STACK_ERROR
938
939 /* Closing */
940 if(H5Sclose(sid_fix) < 0) FAIL_STACK_ERROR
941 if(H5Sclose(sid_unlim) < 0) FAIL_STACK_ERROR
942
943 if(H5Pclose(dcpl_compact) < 0) FAIL_STACK_ERROR
944 if(H5Pclose(dcpl_contig) < 0) FAIL_STACK_ERROR
945 if(H5Pclose(dcpl_chunked) < 0) FAIL_STACK_ERROR
946
947 if(H5Dclose(did_fixed) < 0) FAIL_STACK_ERROR
948 if(H5Dclose(did_unlim) < 0) FAIL_STACK_ERROR
949
950 if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
951
952 PASSED();
953 return 0;
954
955 error:
956 H5E_BEGIN_TRY {
957 H5Sclose(sid_fix);
958 H5Sclose(sid_unlim);
959 H5Pclose(dcpl_compact);
960 H5Pclose(dcpl_contig);
961 H5Pclose(dcpl_chunked);
962 H5Dclose(did_fixed);
963 H5Dclose(did_unlim);
964 H5Fclose(fid);
965 } H5E_END_TRY;
966
967 return -1;
968 } /* end test_layout_extend() */
969
970
971 /*-------------------------------------------------------------------------
972 * Function: test_conv_buffer
973 *
974 * Purpose: Test size of data type conversion buffer.
975 *
976 * Return: Success: 0
977 *
978 * Failure: -1
979 *
980 * Programmer: Raymond Lu
981 * Monday, May 12, 2003
982 *
983 *-------------------------------------------------------------------------
984 */
985 static herr_t
test_conv_buffer(hid_t fid)986 test_conv_buffer(hid_t fid)
987 {
988 typedef struct
989 {
990 int a[DIM1][DIM2][DIM3];
991 float b[DIM2];
992 double c[DIM3];
993 } CmpField;
994
995 typedef struct
996 {
997 float b[DIM2];
998 double c[DIM3];
999 } CmpFieldR;
1000
1001 herr_t status = -1;
1002 int j, k, l;
1003
1004 CmpField *cf = NULL;
1005 CmpFieldR *cfrR = NULL;
1006
1007 hid_t dataset = -1; /* dataset ID */
1008 hid_t space = -1; /* data space ID */
1009 hid_t ctype1, ctype2; /* data type ID */
1010 hid_t arr_type1, arr_type2, arr_type3, arr_type4, arr_type5;
1011 hsize_t dimsa[3];
1012 hsize_t dimsb[1];
1013 hsize_t dimsc[1];
1014 hid_t xfer_list;
1015 size_t size;
1016
1017 TESTING("data type conversion buffer size");
1018
1019 if ((cf = (CmpField *)HDcalloc((size_t)1, sizeof(CmpField))) == 0) goto error;
1020
1021 /* Populate the data members */
1022 for(j = 0; j < DIM1; j++)
1023 for(k = 0; k < DIM2; k++)
1024 for(l = 0; l < DIM3; l++)
1025 cf->a[j][k][l] = 10*(j+1) + l + k;
1026
1027 for(j = 0; j < DIM2; j++)
1028 cf->b[j] = (float)(100.0f*(j+1) + 0.01f*j);
1029
1030 for(j = 0; j < DIM3; j++)
1031 cf->c[j] = 100.0f*(j+1) + 0.02f*j;
1032
1033
1034 /* Create data space */
1035 if((space=H5Screate(H5S_SCALAR)) < 0) goto error;
1036
1037 /* Add members to the compound data type */
1038 dimsa[0] = DIM1;
1039 dimsa[1] = DIM2;
1040 dimsa[2] = DIM3;
1041 dimsb[0] = DIM2;
1042 dimsc[0] = DIM3;
1043
1044 /* Create the memory data type */
1045 if((ctype1 = H5Tcreate(H5T_COMPOUND, sizeof (CmpField))) < 0) goto error;
1046
1047 if((arr_type1 = H5Tarray_create2(H5T_NATIVE_INT, 3, dimsa)) < 0) goto error;
1048 if((arr_type2 = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, dimsb)) < 0) goto error;
1049 if((arr_type3 = H5Tarray_create2(H5T_NATIVE_DOUBLE, 1, dimsc)) < 0) goto error;
1050
1051 if(H5Tinsert(ctype1, "A", HOFFSET(CmpField, a), arr_type1) < 0) goto error;
1052 if(H5Tinsert(ctype1, "B", HOFFSET(CmpField, b), arr_type2) < 0) goto error;
1053 if(H5Tinsert(ctype1, "C", HOFFSET(CmpField, c), arr_type3) < 0) goto error;
1054
1055 /* Create the dataset */
1056 if((dataset = H5Dcreate2(fid, DSET_CONV_BUF_NAME, ctype1, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
1057 if(H5Dwrite(dataset, ctype1, H5S_ALL, H5S_ALL, H5P_DEFAULT, cf) < 0) goto error;
1058
1059 if((ctype2 = H5Tcreate(H5T_COMPOUND, sizeof (CmpFieldR))) < 0) goto error;
1060
1061 if((arr_type4 = H5Tarray_create2(H5T_NATIVE_FLOAT, 1, dimsb)) < 0) goto error;
1062 if((arr_type5 = H5Tarray_create2(H5T_NATIVE_DOUBLE, 1, dimsc)) < 0) goto error;
1063
1064 if(H5Tinsert(ctype2, "B", HOFFSET(CmpFieldR, b), arr_type4) < 0) goto error;
1065 if(H5Tinsert(ctype2, "C", HOFFSET(CmpFieldR, c), arr_type5) < 0) goto error;
1066
1067 /* Read should succeed since library will set conversion buffer big enough */
1068 if ((cfrR = (CmpFieldR *)HDcalloc((size_t)1, sizeof(CmpFieldR))) == 0) goto error;
1069 if(H5Dread(dataset, ctype2, H5S_ALL, H5S_ALL, H5P_DEFAULT, cfrR) < 0) goto error;
1070
1071 /* Read should fail since conversion buffer isn't big enough */
1072 xfer_list = H5Pcreate(H5P_DATASET_XFER);
1073 size = (DIM2 * DIM3 * (sizeof(int))+ DIM2 * (sizeof(float))+
1074 DIM3 * (sizeof(double)));
1075 if(H5Pset_buffer(xfer_list, size, NULL, NULL) < 0) goto error;
1076
1077 H5E_BEGIN_TRY {
1078 status = H5Dread(dataset, ctype2, H5S_ALL, H5S_ALL, xfer_list, cfrR);
1079 } H5E_END_TRY;
1080 if(status >= 0) {
1081 H5_FAILED();
1082 puts(" Library shouldn't allow conversion buffer too small");
1083 goto error;
1084 }
1085
1086 /* Read will succeed since conversion buffer is big enough */
1087 size = (DIM1 * DIM2 * DIM3 * (sizeof(int))+ DIM2 * (sizeof(float))+
1088 DIM3 * (sizeof(double)));
1089 if(H5Pset_buffer(xfer_list, size, NULL, NULL) < 0) goto error;
1090
1091 if(H5Dread(dataset, ctype2, H5S_ALL, H5S_ALL, xfer_list, cfrR) < 0) goto error;
1092
1093
1094 if(H5Pclose(xfer_list) < 0) goto error;
1095 if(H5Sclose(space) < 0) goto error;
1096 if(H5Tclose(arr_type1) < 0) goto error;
1097 if(H5Tclose(arr_type2) < 0) goto error;
1098 if(H5Tclose(arr_type3) < 0) goto error;
1099 if(H5Tclose(ctype1) < 0) goto error;
1100 if(H5Tclose(ctype2) < 0) goto error;
1101 if(H5Tclose(arr_type4) < 0) goto error;
1102 if(H5Tclose(arr_type5) < 0) goto error;
1103 if(H5Dclose(dataset) < 0) goto error;
1104
1105 HDfree(cf);
1106 HDfree(cfrR);
1107 puts(" PASSED");
1108 return(0);
1109
1110 error:
1111 return -1;
1112 }
1113
1114
1115 /*-------------------------------------------------------------------------
1116 * Function: test_tconv
1117 *
1118 * Purpose: Test some simple data type conversion stuff.
1119 *
1120 * Return: Success: 0
1121 *
1122 * Failure: -1
1123 *
1124 * Programmer: Robb Matzke
1125 * Wednesday, January 14, 1998
1126 *
1127 *-------------------------------------------------------------------------
1128 */
1129 static herr_t
test_tconv(hid_t file)1130 test_tconv(hid_t file)
1131 {
1132 char *out = NULL, *in = NULL;
1133 hsize_t dims[1];
1134 hid_t space = -1, dataset = -1;
1135 int i;
1136
1137 if ((out = (char *)HDmalloc((size_t)(4 * 1000 * 1000))) == NULL)
1138 goto error;
1139 if ((in = (char *)HDmalloc((size_t)(4 * 1000 * 1000))) == NULL)
1140 goto error;
1141
1142 TESTING("data type conversion");
1143
1144 /* Initialize the dataset */
1145 for(i = 0; i < 1000000; i++) {
1146 out[i * 4 + 0] = 0x11;
1147 out[i * 4 + 1] = 0x22;
1148 out[i * 4 + 2] = 0x33;
1149 out[i * 4 + 3] = 0x44;
1150 } /* end for */
1151
1152 /* Create the data space */
1153 dims[0] = 1000000;
1154 if((space = H5Screate_simple (1, dims, NULL)) < 0) goto error;
1155
1156 /* Create the data set */
1157 if((dataset = H5Dcreate2(file, DSET_TCONV_NAME, H5T_STD_I32LE, space,
1158 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
1159 goto error;
1160
1161 /* Write the data to the dataset */
1162 if(H5Dwrite(dataset, H5T_STD_I32LE, H5S_ALL, H5S_ALL, H5P_DEFAULT, out) < 0)
1163 goto error;
1164
1165 /* Read data with byte order conversion */
1166 if(H5Dread(dataset, H5T_STD_I32BE, H5S_ALL, H5S_ALL, H5P_DEFAULT, in) < 0)
1167 goto error;
1168
1169 /* Check */
1170 for(i = 0; i < 1000000; i++) {
1171 if(in[4 * i + 0] != out[4 * i + 3] ||
1172 in[4 * i + 1] != out[4 * i + 2] ||
1173 in[4 * i + 2] != out[4 * i + 1] ||
1174 in[4 * i + 3] != out[4 * i + 0]) {
1175 H5_FAILED();
1176 puts(" Read with byte order conversion failed.");
1177 goto error;
1178 }
1179 }
1180
1181 if(H5Dclose(dataset) < 0) goto error;
1182 if(H5Sclose(space) < 0) goto error;
1183 HDfree(out);
1184 HDfree(in);
1185
1186 puts(" PASSED");
1187 return 0;
1188
1189 error:
1190 if(out)
1191 HDfree(out);
1192 if(in)
1193 HDfree(in);
1194
1195 H5E_BEGIN_TRY {
1196 H5Dclose(dataset);
1197 H5Sclose(space);
1198 } H5E_END_TRY;
1199
1200 return -1;
1201 }
1202
/* This message derives from H5Z */
/* Filter class for a do-nothing ("identity") test filter; registered with
 * the library so chunked-dataset I/O can be tested with a filter present */
const H5Z_class2_t H5Z_BOGUS[1] = {{
    H5Z_CLASS_T_VERS,       /* H5Z_class_t version */
    H5Z_FILTER_BOGUS,       /* Filter id number */
    1, 1,                   /* Encoding and decoding enabled */
    "bogus",                /* Filter name for debugging */
    NULL,                   /* The "can apply" callback */
    NULL,                   /* The "set local" callback */
    filter_bogus,           /* The actual filter function */
}};
1213
1214
1215 /*-------------------------------------------------------------------------
1216 * Function: can_apply_bogus
1217 *
 * Purpose:     A bogus 'can apply' callback that returns 0 for the
 *              H5T_NATIVE_DOUBLE datatype, 1 for the H5T_NATIVE_INT
 *              datatype, and -1 (error) for all other datatypes
1220 *
1221 * Return: Success: Described above
1222 * Failure: 0
1223 *
1224 * Programmer: Quincey Koziol
1225 * Friday, April 5, 2003
1226 *
1227 *-------------------------------------------------------------------------
1228 */
1229 static htri_t
can_apply_bogus(hid_t H5_ATTR_UNUSED dcpl_id,hid_t type_id,hid_t H5_ATTR_UNUSED space_id)1230 can_apply_bogus(hid_t H5_ATTR_UNUSED dcpl_id, hid_t type_id, hid_t H5_ATTR_UNUSED space_id)
1231 {
1232 if(H5Tequal(type_id,H5T_NATIVE_DOUBLE))
1233 return 0;
1234 else if(H5Tequal(type_id,H5T_NATIVE_INT))
1235 return 1;
1236 else
1237 return -1;
1238 }
1239
1240
1241 /*-------------------------------------------------------------------------
1242 * Function: filter_bogus
1243 *
1244 * Purpose: A bogus compression method that doesn't do anything.
1245 *
1246 * Return: Success: Data chunk size
1247 *
1248 * Failure: 0
1249 *
1250 * Programmer: Robb Matzke
1251 * Tuesday, April 21, 1998
1252 *
1253 *-------------------------------------------------------------------------
1254 */
static size_t
filter_bogus(unsigned int H5_ATTR_UNUSED flags, size_t H5_ATTR_UNUSED cd_nelmts,
      const unsigned int H5_ATTR_UNUSED *cd_values, size_t nbytes,
      size_t H5_ATTR_UNUSED *buf_size, void H5_ATTR_UNUSED **buf)
{
    /* Identity filter: leave the chunk untouched and report its original
     * size, which the filter pipeline interprets as success. */
    return nbytes;
}
1262
1263
1264 /*-------------------------------------------------------------------------
1265 * Function: set_local_bogus2
1266 *
1267 * Purpose: A 'set local' callback that stores the size of the datatype
1268 * and adds it to all the H5T_NATIVE_INT values during
1269 * filter operation.
1270 *
1271 * Return: Success: non-negative
1272 * Failure: negative
1273 *
1274 * Programmer: Quincey Koziol
1275 * Friday, April 5, 2003
1276 *
1277 *-------------------------------------------------------------------------
1278 */
1279 static herr_t
set_local_bogus2(hid_t dcpl_id,hid_t type_id,hid_t H5_ATTR_UNUSED space_id)1280 set_local_bogus2(hid_t dcpl_id, hid_t type_id, hid_t H5_ATTR_UNUSED space_id)
1281 {
1282 unsigned add_on=0; /* Value to add to data going through */
1283 unsigned flags; /* Filter flags */
1284 size_t cd_nelmts=BOGUS2_PERM_NPARMS; /* Number of filter parameters */
1285 unsigned cd_values[4]; /* Filter parameters */
1286
1287 /* Check for native integer datatype and set private property */
1288 if(H5Tequal(type_id,H5T_NATIVE_INT)>0)
1289 add_on=(unsigned)H5Tget_size(type_id);
1290
1291 /* Get the filter's current parameters */
1292 if(H5Pget_filter_by_id2(dcpl_id, H5Z_FILTER_SET_LOCAL_TEST, &flags, &cd_nelmts, cd_values, (size_t)0, NULL, NULL) < 0)
1293 return(FAIL);
1294
1295 /* Check that the parameter values were passed along correctly */
1296 if(cd_values[0]!=BOGUS2_PARAM_1)
1297 return(FAIL);
1298 if(cd_values[1]!=BOGUS2_PARAM_2)
1299 return(FAIL);
1300
1301 /* Set "local" parameters for this dataset */
1302 cd_values[2]=(unsigned)(add_on>0); /* Flag to indicate data is modified */
1303 cd_values[3]=add_on; /* Amount the data was modified by */
1304
1305 /* Modify the filter's parameters for this dataset */
1306 if(H5Pmodify_filter(dcpl_id, H5Z_FILTER_SET_LOCAL_TEST, flags, (size_t)BOGUS2_ALL_NPARMS,
1307 cd_values) < 0)
1308 return(FAIL);
1309
1310 return(SUCCEED);
1311 } /* end set_local_bogus2() */
1312
1313
1314 /*-------------------------------------------------------------------------
1315 * Function: filter_bogus2
1316 *
1317 * Purpose: A filter method that adds a value to data values on writing
1318 * (if the parameter is set), but does not modify data values on
1319 * reading (so that correct operation of the filter can be
1320 * checked).
1321 *
1322 * Return: Success: Data chunk size
1323 * Failure: 0
1324 *
1325 * Programmer: Quincey Koziol
1326 * Monday, April 7, 2003
1327 *
1328 *-------------------------------------------------------------------------
1329 */
1330 static size_t
filter_bogus2(unsigned int flags,size_t cd_nelmts,const unsigned int * cd_values,size_t nbytes,size_t * buf_size,void ** buf)1331 filter_bogus2(unsigned int flags, size_t cd_nelmts,
1332 const unsigned int *cd_values, size_t nbytes,
1333 size_t *buf_size, void **buf)
1334 {
1335 /* Check for the correct number of parameters */
1336 if(cd_nelmts!=BOGUS2_ALL_NPARMS)
1337 return(0);
1338
1339 /* Check that permanent parameters are set correctly */
1340 if(cd_values[0]!=BOGUS2_PARAM_1)
1341 return(0);
1342 if(cd_values[1]!=BOGUS2_PARAM_2)
1343 return(0);
1344
1345 /* Check if this filter is supposed to do something */
1346 if(cd_values[2]>0) {
1347 /* Check whether we are "uncompressing" */
1348 if(flags & H5Z_FLAG_REVERSE) {
1349 /* Do nothing */
1350 } /* end if */
1351 /* "Compressing" */
1352 else {
1353 unsigned add_on=cd_values[3]; /* Get "add on" value */
1354 int *int_ptr=(int *)*buf; /* Pointer to the data values */
1355 size_t buf_left=*buf_size; /* Amount of data buffer left to process */
1356
1357 /* Add the "add on" value to all the data values */
1358 while(buf_left>0) {
1359 *int_ptr++ += (int)add_on;
1360 buf_left -= sizeof(int);
1361 } /* end while */
1362 } /* end else */
1363
1364 return(nbytes);
1365 } /* end if */
1366 /* Filter is "no op" */
1367 else
1368 return(nbytes);
1369 }
1370
1371
1372 /*-------------------------------------------------------------------------
1373 * Function: filter_bogus3
1374 *
1375 * Purpose: A bogus compression method that returns a failure.
1376 *
1377 * Return: Success: Data chunk size
1378 *
1379 * Failure: 0
1380 *
1381 * Programmer: Raymond Lu
1382 * 4 August 2010
1383 *
1384 *-------------------------------------------------------------------------
1385 */
static size_t
filter_bogus3(unsigned int H5_ATTR_UNUSED flags, size_t H5_ATTR_UNUSED cd_nelmts,
      const unsigned int H5_ATTR_UNUSED *cd_values, size_t H5_ATTR_UNUSED nbytes,
      size_t H5_ATTR_UNUSED *buf_size, void H5_ATTR_UNUSED **buf)
{
    /* A return value of 0 signals failure to the H5Z filter pipeline; this
     * filter is used to test the library's handling of failing filters. */
    return 0;
}
1393
/* This message derives from H5Z */
/* Filter class for the data-corrupting test filter used to exercise
 * Fletcher32 checksum (EDC) failure handling */
const H5Z_class2_t H5Z_CORRUPT[1] = {{
    H5Z_CLASS_T_VERS,       /* H5Z_class_t version */
    H5Z_FILTER_CORRUPT,     /* Filter id number */
    1, 1,                   /* Encoding and decoding enabled */
    "corrupt",              /* Filter name for debugging */
    NULL,                   /* The "can apply" callback */
    NULL,                   /* The "set local" callback */
    filter_corrupt,         /* The actual filter function */
}};
1404
1405
1406 /*-------------------------------------------------------------------------
1407 * Function: filter_corrupt
1408 *
1409 * Purpose: For testing Fletcher32 checksum. modify data slightly during
1410 * writing so that when data is read back, the checksum should
1411 * fail.
1412 *
1413 * Return: Success: Data chunk size
1414 *
1415 * Failure: 0
1416 *
1417 * Programmer: Raymond Lu
1418 * Jan 14, 2003
1419 *
1420 *-------------------------------------------------------------------------
1421 */
static size_t
filter_corrupt(unsigned int flags, size_t cd_nelmts,
      const unsigned int *cd_values, size_t nbytes,
      size_t *buf_size, void **buf)
{
    void *data = NULL;          /* Scratch buffer filled with the corruption pattern */
    unsigned char *dst = (unsigned char*)(*buf);
    unsigned int offset;        /* cd_values[0]: byte offset of the region to corrupt */
    unsigned int length;        /* cd_values[1]: length in bytes of that region */
    unsigned int value;         /* cd_values[2]: byte value written over the region */
    size_t ret_value = 0;       /* 0 = filter failure; set non-zero on success */

    /* Expect exactly three auxiliary values: offset, length, fill value */
    if(cd_nelmts != 3 || !cd_values)
        TEST_ERROR
    offset = cd_values[0];
    length = cd_values[1];
    value = cd_values[2];
    /* The corrupted region must lie entirely within the chunk and be at
     * least one unsigned int wide */
    if(offset > nbytes || (offset + length) > nbytes || length < sizeof(unsigned int))
        TEST_ERROR

    if(NULL == (data = HDmalloc((size_t)length)))
        TEST_ERROR
    HDmemset(data, (int)value, (size_t)length);

    if(flags & H5Z_FLAG_REVERSE) { /* Verify data is actually corrupted during read */
        dst += offset;
        if(HDmemcmp(data, dst, (size_t)length) != 0)
            TEST_ERROR
        else {
            *buf_size = nbytes;
            ret_value = nbytes;
        } /* end else */
    } /* end if */
    else { /* Write corrupted data */
        dst += offset;
        HDmemcpy(dst, data, (size_t)length);
        *buf_size = nbytes;
        ret_value = *buf_size;
    } /* end else */

/* NOTE: success deliberately falls through into the error label below so the
 * scratch buffer is freed on every path; TEST_ERROR jumps here with
 * ret_value still 0 (failure). */
error:
    if(data)
        HDfree(data);

    return ret_value;
} /* end filter_corrupt() */
1468
1469
1470 /*-------------------------------------------------------------------------
1471 * Function: filter_cb_cont
1472 *
1473 * Purpose: Callback function to handle checksum failure. Let it continue.
1474 *
1475 * Return: continue
1476 *
1477 * Programmer: Raymond Lu
1478 * Jan 14, 2003
1479 *
1480 *-------------------------------------------------------------------------
1481 */
1482 static H5Z_cb_return_t
filter_cb_cont(H5Z_filter_t filter,void H5_ATTR_UNUSED * buf,size_t H5_ATTR_UNUSED buf_size,void H5_ATTR_UNUSED * op_data)1483 filter_cb_cont(H5Z_filter_t filter, void H5_ATTR_UNUSED *buf, size_t H5_ATTR_UNUSED buf_size,
1484 void H5_ATTR_UNUSED *op_data)
1485 {
1486 if(H5Z_FILTER_FLETCHER32==filter)
1487 return H5Z_CB_CONT;
1488 else
1489 return H5Z_CB_FAIL;
1490 }
1491
1492
1493 /*-------------------------------------------------------------------------
1494 * Function: filter_cb_fail
1495 *
1496 * Purpose: Callback function to handle checksum failure. Let it fail.
1497 *
1498 * Return: fail
1499 *
1500 * Programmer: Raymond Lu
1501 * Jan 14, 2003
1502 *
1503 *-------------------------------------------------------------------------
1504 */
1505 static H5Z_cb_return_t
filter_cb_fail(H5Z_filter_t filter,void H5_ATTR_UNUSED * buf,size_t H5_ATTR_UNUSED buf_size,void H5_ATTR_UNUSED * op_data)1506 filter_cb_fail(H5Z_filter_t filter, void H5_ATTR_UNUSED *buf, size_t H5_ATTR_UNUSED buf_size,
1507 void H5_ATTR_UNUSED *op_data)
1508 {
1509 if(H5Z_FILTER_FLETCHER32==filter)
1510 return H5Z_CB_FAIL;
1511 else
1512 return H5Z_CB_CONT;
1513 }
1514
1515
1516 /*-------------------------------------------------------------------------
1517 * Function: test_filter_internal
1518 *
1519 * Purpose: Tests dataset compression. If compression is requested when
1520 * it hasn't been compiled into the library (such as when
1521 * updating an existing compressed dataset) then data is sent to
1522 * the file uncompressed but no errors are returned.
1523 *
1524 * Return: Success: 0
1525 * Failure: -1
1526 *
1527 * Programmer: Robb Matzke
1528 * Wednesday, April 15, 1998
1529 *
1530 *-------------------------------------------------------------------------
1531 */
1532 static herr_t
test_filter_internal(hid_t fid,const char * name,hid_t dcpl,int if_fletcher32,int corrupted,hsize_t * dset_size)1533 test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
1534 int corrupted, hsize_t *dset_size)
1535 {
1536 hid_t dataset; /* Dataset ID */
1537 hid_t dxpl; /* Dataset xfer property list ID */
1538 hid_t write_dxpl; /* Dataset xfer property list ID for writing */
1539 hid_t sid; /* Dataspace ID */
1540 const hsize_t size[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */
1541 const hsize_t hs_offset[2] = {FILTER_HS_OFFSET1, FILTER_HS_OFFSET2}; /* Hyperslab offset */
1542 const hsize_t hs_size[2] = {FILTER_HS_SIZE1, FILTER_HS_SIZE2}; /* Hyperslab size */
1543 void *tconv_buf = NULL; /* Temporary conversion buffer */
1544 size_t i, j, n; /* Local index variables */
1545 herr_t status; /* Error status */
1546
1547 /* Create the data space */
1548 if((sid = H5Screate_simple(2, size, NULL)) < 0) goto error;
1549
1550 /*
1551 * Create a small conversion buffer to test strip mining. We
1552 * might as well test all we can!
1553 */
1554 if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0) goto error;
1555 tconv_buf = HDmalloc((size_t)1000);
1556 if(H5Pset_buffer(dxpl, (size_t)1000, tconv_buf, NULL) < 0) goto error;
1557 if((write_dxpl = H5Pcopy(dxpl)) < 0) TEST_ERROR;
1558
1559 if(if_fletcher32==DISABLE_FLETCHER32) {
1560 if(H5Pset_edc_check(dxpl, H5Z_DISABLE_EDC) < 0)
1561 goto error;
1562 if(H5Z_DISABLE_EDC != H5Pget_edc_check(dxpl))
1563 goto error;
1564 }
1565
1566 TESTING(" filters (setup)");
1567
1568 /* Check if all the filters are available */
1569 if(H5Pall_filters_avail(dcpl)!=TRUE) {
1570 H5_FAILED();
1571 printf(" Line %d: Incorrect filter availability\n",__LINE__);
1572 goto error;
1573 } /* end if */
1574
1575 /* Create the dataset */
1576 if((dataset = H5Dcreate2(fid, name, H5T_NATIVE_INT, sid, H5P_DEFAULT,
1577 dcpl, H5P_DEFAULT)) < 0) goto error;
1578
1579 PASSED();
1580
1581 /*----------------------------------------------------------------------
1582 * STEP 1: Read uninitialized data. It should be zero.
1583 *----------------------------------------------------------------------
1584 */
1585 TESTING(" filters (uninitialized read)");
1586
1587 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1588 TEST_ERROR;
1589
1590 for(i=0; i<(size_t)size[0]; i++) {
1591 for(j=0; j<(size_t)size[1]; j++) {
1592 if(0!=check[i][j]) {
1593 H5_FAILED();
1594 printf(" Read a non-zero value.\n");
1595 printf(" At index %lu,%lu\n",
1596 (unsigned long)i, (unsigned long)j);
1597 goto error;
1598 }
1599 }
1600 }
1601 PASSED();
1602
1603 /*----------------------------------------------------------------------
1604 * STEP 2: Test filters by setting up a chunked dataset and writing
1605 * to it.
1606 *----------------------------------------------------------------------
1607 */
1608 TESTING(" filters (write)");
1609
1610 for(i=n=0; i<size[0]; i++) {
1611 for(j=0; j<size[1]; j++) {
1612 points[i][j] = (int)(n++);
1613 }
1614 }
1615
1616 if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points) < 0)
1617 TEST_ERROR;
1618
1619 if((*dset_size=H5Dget_storage_size(dataset))==0) TEST_ERROR;
1620
1621 PASSED();
1622
1623 /*----------------------------------------------------------------------
1624 * STEP 3: Try to read the data we just wrote.
1625 *----------------------------------------------------------------------
1626 */
1627 TESTING(" filters (read)");
1628
1629 /* Read the dataset back */
1630 if(corrupted) {
1631 /* Default behavior is failure when data is corrupted. */
1632 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1633 H5E_BEGIN_TRY {
1634 status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1635 } H5E_END_TRY;
1636 if(status>=0) TEST_ERROR;
1637
1638 /* Callback decides to continue inspite data is corrupted. */
1639 if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
1640 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1641 TEST_ERROR;
1642
1643 /* Callback decides to fail when data is corrupted. */
1644 if(H5Pset_filter_callback(write_dxpl, filter_cb_fail, NULL) < 0) TEST_ERROR;
1645 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1646 H5E_BEGIN_TRY {
1647 status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1648 } H5E_END_TRY;
1649 if(status>=0) TEST_ERROR;
1650 } else {
1651 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1652 TEST_ERROR;
1653
1654 /* Check that the values read are the same as the values written */
1655 for(i=0; i<size[0]; i++) {
1656 for(j=0; j<size[1]; j++) {
1657 if(points[i][j] != check[i][j]) {
1658 H5_FAILED();
1659 fprintf(stderr," Read different values than written.\n");
1660 fprintf(stderr," At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
1661 fprintf(stderr," At original: %d\n", (int)points[i][j]);
1662 fprintf(stderr," At returned: %d\n", (int)check[i][j]);
1663 goto error;
1664 }
1665 }
1666 }
1667 }
1668
1669 PASSED();
1670
1671 /*----------------------------------------------------------------------
1672 * STEP 4: Write new data over the top of the old data. The new data is
1673 * random thus not very compressible, and will cause the chunks to move
1674 * around as they grow. We only change values for the left half of the
1675 * dataset although we rewrite the whole thing.
1676 *----------------------------------------------------------------------
1677 */
1678 TESTING(" filters (modify)");
1679
1680 for(i=0; i<size[0]; i++) {
1681 for(j=0; j<size[1]/2; j++) {
1682 points[i][j] = (int)HDrandom ();
1683 }
1684 }
1685 if(H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points) < 0)
1686 TEST_ERROR;
1687
1688 if(corrupted) {
1689 /* Default behavior is failure when data is corrupted. */
1690 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1691 H5E_BEGIN_TRY {
1692 status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1693 } H5E_END_TRY;
1694 if(status>=0) TEST_ERROR;
1695
1696 /* Callback decides to continue inspite data is corrupted. */
1697 if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
1698 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1699 TEST_ERROR;
1700
1701 /* Callback decides to fail when data is corrupted. */
1702 if(H5Pset_filter_callback(write_dxpl, filter_cb_fail, NULL) < 0) TEST_ERROR;
1703 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1704 H5E_BEGIN_TRY {
1705 status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1706 } H5E_END_TRY;
1707 if(status>=0) TEST_ERROR;
1708 } else {
1709 /* Read the dataset back and check it */
1710 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1711 TEST_ERROR;
1712
1713 /* Check that the values read are the same as the values written */
1714 for(i=0; i<size[0]; i++) {
1715 for(j=0; j<size[1]; j++) {
1716 if(points[i][j] != check[i][j]) {
1717 H5_FAILED();
1718 printf(" Read different values than written.\n");
1719 printf(" At index %lu,%lu\n",
1720 (unsigned long)i, (unsigned long)j);
1721 goto error;
1722 }
1723 }
1724 }
1725 }
1726
1727 if((*dset_size=H5Dget_storage_size(dataset))==0) TEST_ERROR;
1728 PASSED();
1729
1730 /*----------------------------------------------------------------------
1731 * STEP 5: Close the dataset and then open it and read it again. This
1732 * insures that the filters message is picked up properly from the
1733 * object header.
1734 *----------------------------------------------------------------------
1735 */
1736 TESTING(" filters (re-open)");
1737
1738 if(H5Dclose(dataset) < 0) TEST_ERROR;
1739 if((dataset = H5Dopen2(fid, name, H5P_DEFAULT)) < 0) TEST_ERROR;
1740
1741 if(corrupted) {
1742 /* Default behavior is failure when data is corrupted. */
1743 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1744 H5E_BEGIN_TRY {
1745 status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1746 } H5E_END_TRY;
1747 if(status >= 0) TEST_ERROR;
1748
1749 /* Callback decides to continue inspite data is corrupted. */
1750 if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
1751 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1752 TEST_ERROR;
1753
1754 /* Callback decides to fail when data is corrupted. */
1755 if(H5Pset_filter_callback(write_dxpl, filter_cb_fail, NULL) < 0) TEST_ERROR;
1756
1757 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1758 H5E_BEGIN_TRY {
1759 status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1760 } H5E_END_TRY;
1761 if(status >= 0) TEST_ERROR;
1762 } /* end if */
1763 else {
1764 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1765 TEST_ERROR;
1766
1767 /* Check that the values read are the same as the values written */
1768 for(i = 0; i < size[0]; i++)
1769 for(j = 0; j < size[1]; j++)
1770 if(points[i][j] != check[i][j]) {
1771 H5_FAILED();
1772 printf(" Read different values than written.\n");
1773 printf(" At index %lu,%lu\n",
1774 (unsigned long)i, (unsigned long)j);
1775 goto error;
1776 } /* end if */
1777 } /* end else */
1778
1779 PASSED();
1780
1781
1782 /*----------------------------------------------------------------------
1783 * STEP 6: Test partial I/O by writing to and then reading from a
1784 * hyperslab of the dataset. The hyperslab does not line up on chunk
1785 * boundaries (we know that case already works from above tests).
1786 *----------------------------------------------------------------------
1787 */
1788 TESTING(" filters (partial I/O)");
1789
1790 for(i=0; i<(size_t)hs_size[0]; i++) {
1791 for(j=0; j<(size_t)hs_size[1]; j++) {
1792 points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] = (int)HDrandom();
1793 }
1794 }
1795 if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size,
1796 NULL) < 0) TEST_ERROR;
1797 /* (Use the "read" DXPL because partial I/O on corrupted data test needs to ignore errors during writing) */
1798 if(H5Dwrite (dataset, H5T_NATIVE_INT, sid, sid, dxpl, points) < 0)
1799 TEST_ERROR;
1800
1801 if(corrupted) {
1802 /* Default behavior is failure when data is corrupted. */
1803 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1804 H5E_BEGIN_TRY {
1805 status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1806 } H5E_END_TRY;
1807 if(status>=0) TEST_ERROR;
1808
1809 /* Callback decides to continue inspite data is corrupted. */
1810 if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
1811 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
1812 TEST_ERROR;
1813
1814 /* Callback decides to fail when data is corrupted. */
1815 if(H5Pset_filter_callback(write_dxpl, filter_cb_fail, NULL) < 0) TEST_ERROR;
1816 /* (Use the "write" DXPL in order to make certain corruption is seen) */
1817 H5E_BEGIN_TRY {
1818 status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
1819 } H5E_END_TRY;
1820 if(status>=0) TEST_ERROR;
1821 } else {
1822 if(H5Dread (dataset, H5T_NATIVE_INT, sid, sid, dxpl, check) < 0)
1823 TEST_ERROR;
1824
1825 /* Check that the values read are the same as the values written */
1826 for(i=0; i<(size_t)hs_size[0]; i++) {
1827 for(j=0; j<(size_t)hs_size[1]; j++) {
1828 if(points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] !=
1829 check[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]) {
1830 H5_FAILED();
1831 fprintf(stderr," Read different values than written.\n");
1832 fprintf(stderr," At index %lu,%lu\n",
1833 (unsigned long)((size_t)hs_offset[0]+i),
1834 (unsigned long)((size_t)hs_offset[1]+j));
1835 fprintf(stderr," At original: %d\n",
1836 (int)points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]);
1837 fprintf(stderr," At returned: %d\n",
1838 (int)check[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]);
1839 goto error;
1840 }
1841 }
1842 }
1843 }
1844
1845 PASSED();
1846
1847 /* Get the storage size of the dataset */
1848 if((*dset_size=H5Dget_storage_size(dataset))==0) goto error;
1849 /* Clean up objects used for this test */
1850 if(H5Dclose (dataset) < 0) goto error;
1851 if(H5Sclose (sid) < 0) goto error;
1852 if(H5Pclose (dxpl) < 0) goto error;
1853 HDfree (tconv_buf);
1854
1855 return(0);
1856
1857 error:
1858 if(tconv_buf)
1859 HDfree (tconv_buf);
1860 return -1;
1861 }
1862
1863 /*-------------------------------------------------------------------------
1864 * Function: test_filter_noencoder
1865 *
1866 * Purpose: Tests filters with no encoder present. Ensures that data
1867 * can still be decoded correctly and that errors are thrown
1868 * when the application tries to write.
1869 *
1870 * Return: Success: 0
1871 * Failure: -1
1872 *
1873 * Programmer: Nat Furrer and James Laird
1874 * Monday, June 7, 2004
1875 *
1876 *-------------------------------------------------------------------------
1877 */
#ifdef H5_HAVE_FILTER_SZIP
/*
 * Verify behavior of a decode-only filter (szip built without its encoder):
 * the pre-compressed dataset must still be readable, while every operation
 * that would require encoding (creating a dataset with the same DCPL,
 * extending, writing) must fail cleanly.
 *
 * dset_name: name of the pre-compressed dataset inside NOENCODER_FILENAME.
 * Returns 0 on success, -1 on failure.
 */
static herr_t
test_filter_noencoder(const char *dset_name)
{
    hid_t file_id = -1;
    hid_t dset_id = -1;
    hid_t test_dset_id = -1;
    hid_t dcpl_id = -1;
    hid_t space_id = -1;
    hsize_t dims = 10;
    herr_t err;
    int test_ints[10] = { 12 };
    int read_buf[10];
    int i;

    /* Make a local copy of the file since this test writes to the data file
       from svn. */
    if (h5_make_local_copy(NOENCODER_FILENAME, NOENCODER_COPY_FILENAME) < 0)
        goto error;

    /* Open file */
    file_id = H5Fopen(NOENCODER_COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
    if(file_id < 0) goto error;

    dset_id = H5Dopen2(file_id, dset_name, H5P_DEFAULT);
    if(dset_id < 0) goto error;

    space_id = H5Screate_simple(1, &dims, NULL);
    if(space_id < 0) goto error;

    TESTING(" decoding without encoder");

    /* Read the dataset and make sure the decoder is working correctly */
    err = H5Dread(dset_id, H5T_NATIVE_INT, space_id, space_id, H5P_DEFAULT, read_buf);
    if(err < 0) goto error;

    for(i = 0; i < 10; i++)
        if(read_buf[i] != i)
            goto error;

    /* Close and invalidate the handle so a failure below doesn't make the
     * error path close this dataspace a second time. */
    if(H5Sclose(space_id) < 0) goto error;
    space_id = -1;

    PASSED();

    /* Attempt to copy the DCPL and use it to create a new dataset.
     * Since the filter does not have an encoder, the creation
     * should fail.
     */
    TESTING(" trying to write without encoder");

    dcpl_id = H5Dget_create_plist(dset_id);
    if(dcpl_id < 0) goto error;

    space_id = H5Screate_simple(1, &dims, NULL);
    if(space_id < 0) goto error;

    H5E_BEGIN_TRY {
        test_dset_id = H5Dcreate2(file_id, NOENCODER_TEST_DATASET, H5T_NATIVE_INT, space_id, H5P_DEFAULT, dcpl_id, H5P_DEFAULT);
    } H5E_END_TRY;

    if(test_dset_id >= 0) goto error;

    /* Attempt to extend the dataset. This should fail because
     * the dataset has a fill value and is instructed to fill on
     * allocation.
     */
    dims = 20; /* Dataset is originally of size 10 */
    H5E_BEGIN_TRY {
        err = H5Dset_extent(dset_id, &dims);
    } H5E_END_TRY;

    if(err >= 0) goto error;

    /* Attempt to write to the dataset. This should fail because
     * the filter does not have an encoder.
     */
    H5E_BEGIN_TRY {
        err = H5Dwrite(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, test_ints);
    } H5E_END_TRY;

    if(err >= 0) goto error;

    /* Close objects before the file that owns them, checking each close. */
    if(H5Dclose(dset_id) < 0) goto error;
    dset_id = -1;
    if(H5Sclose(space_id) < 0) goto error;
    space_id = -1;
    if(H5Pclose(dcpl_id) < 0) goto error;
    dcpl_id = -1;
    if(H5Fclose(file_id) < 0) goto error;
    file_id = -1;

    PASSED();

    return 0;

error:
    H5_FAILED();
    if(dset_id != -1)
        H5Dclose(dset_id);
    if(test_dset_id != -1)
        H5Dclose(test_dset_id);
    if(space_id != -1)
        H5Sclose(space_id);
    if(dcpl_id != -1)
        H5Pclose(dcpl_id);
    if(file_id != -1)
        H5Fclose(file_id);

    return -1;
}
#endif /* H5_HAVE_FILTER_SZIP */
1985
1986 /*-------------------------------------------------------------------------
1987 * Function: test_get_filter_info
1988 *
1989 * Purpose: Tests the H5Zget_filter_info function.
1990 *
1991 * Return: Success: 0
1992 * Failure: -1
1993 *
1994 * Programmer: Nat Furrer and James Laird
1995 * Thursday, June 10, 2004
1996 *
1997 *-------------------------------------------------------------------------
1998 */
1999 static herr_t
test_get_filter_info(void)2000 test_get_filter_info(void)
2001 {
2002 unsigned int flags; /* flags returned from H5Zget_filter_info */
2003 herr_t err;
2004
2005 TESTING("H5Zget_filter_info");
2006
2007 /* Verify that each filter is reported as having the right combination
2008 * of encoder and decoder.
2009 */
2010 if(H5Zget_filter_info(H5Z_FILTER_FLETCHER32, &flags) < 0) TEST_ERROR
2011
2012 if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
2013 ((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
2014 TEST_ERROR
2015
2016 if(H5Zget_filter_info(H5Z_FILTER_SHUFFLE, &flags) < 0) TEST_ERROR
2017
2018 if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
2019 ((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
2020 TEST_ERROR
2021
2022 #ifdef H5_HAVE_FILTER_DEFLATE
2023 if(H5Zget_filter_info(H5Z_FILTER_DEFLATE, &flags) < 0) TEST_ERROR
2024
2025 if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
2026 ((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
2027 TEST_ERROR
2028 #endif
2029
2030 #ifdef H5_HAVE_FILTER_SZIP
2031 if(H5Zget_filter_info(H5Z_FILTER_SZIP, &flags) < 0) TEST_ERROR
2032
2033 if(H5Z_SZIP->encoder_present) {
2034 if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
2035 ((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
2036 TEST_ERROR
2037 } /* end if */
2038 else {
2039 if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0) ||
2040 ((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
2041 TEST_ERROR
2042 } /* end else */
2043 #endif /* H5_HAVE_FILTER_SZIP */
2044
2045 /* Verify that get_filter_info throws an error when given a bad filter */
2046 /* (Depends on 1.6 compatibility flag) */
2047 H5E_BEGIN_TRY {
2048 err = H5Zget_filter_info(-1, &flags);
2049 } H5E_END_TRY;
2050 if(err >= 0) TEST_ERROR
2051
2052 PASSED();
2053 return 0;
2054
2055 error:
2056 return -1;
2057 }
2058
2059 /*-------------------------------------------------------------------------
2060 * Function: test_filters
2061 *
2062 * Purpose: Tests dataset filter.
2063 *
2064 * Return: Success: 0
2065 * Failure: -1
2066 *
2067 * Programmer: Robb Matzke
2068 * Wednesday, April 15, 1998
2069 *
2070 *-------------------------------------------------------------------------
2071 */
static herr_t
test_filters(hid_t file, hid_t
#ifndef H5_HAVE_FILTER_SZIP
H5_ATTR_UNUSED
#endif /* H5_HAVE_FILTER_SZIP */
fapl)
/* Exercises each I/O filter alone and in combination on chunked datasets in
 * 'file', comparing the resulting storage sizes against the unfiltered
 * ('null') baseline.  Returns 0 on success, -1 on failure.
 * NOTE(review): 'fapl' is not referenced anywhere in this body; presumably
 * kept for signature compatibility with callers -- confirm. */
{
    hid_t dc;                 /* Dataset creation property list ID */
    const hsize_t chunk_size[2] = {FILTER_CHUNK_DIM1, FILTER_CHUNK_DIM2}; /* Chunk dimensions */
    hsize_t null_size;        /* Size of dataset with null filter */

    hsize_t fletcher32_size;  /* Size of dataset with Fletcher32 checksum */
    unsigned data_corrupt[3]; /* position and length of data to be corrupted */

#ifdef H5_HAVE_FILTER_DEFLATE
    hsize_t deflate_size;     /* Size of dataset with deflate filter */
#endif /* H5_HAVE_FILTER_DEFLATE */

#ifdef H5_HAVE_FILTER_SZIP
    hsize_t szip_size;        /* Size of dataset with szip filter */
    unsigned szip_options_mask=H5_SZIP_NN_OPTION_MASK;
    unsigned szip_pixels_per_block=4;
#endif /* H5_HAVE_FILTER_SZIP */

    hsize_t shuffle_size;     /* Size of dataset with shuffle filter */

/* NOTE(review): bitwise '|' happens to work here because 'defined'
 * evaluates to 0 or 1, but logical '||' is the conventional operator. */
#if(defined H5_HAVE_FILTER_DEFLATE | defined H5_HAVE_FILTER_SZIP)
    hsize_t combo_size;       /* Size of dataset with multiple filters */
#endif /* defined H5_HAVE_FILTER_DEFLATE | defined H5_HAVE_FILTER_SZIP */

    /* test the H5Zget_filter_info function */
    if(test_get_filter_info() < 0) goto error;

    /*----------------------------------------------------------
     * STEP 0: Test null I/O filter by itself.
     * (Establishes 'null_size', the baseline the later checksum
     * sizes are compared against.)
     *----------------------------------------------------------
     */
    puts("Testing 'null' filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Zregister (H5Z_BOGUS) < 0) goto error;
    if(H5Pset_filter(dc, H5Z_FILTER_BOGUS, 0, (size_t)0, NULL) < 0) goto error;

    if(test_filter_internal(file,DSET_BOGUS_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&null_size) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;

    /*----------------------------------------------------------
     * STEP 1: Test Fletcher32 Checksum by itself.
     * (A checksum adds bytes, so the filtered size must exceed
     * the baseline in each of the three sub-cases.)
     *----------------------------------------------------------
     */
    puts("Testing Fletcher32 checksum(enabled for read)");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_filter(dc, H5Z_FILTER_FLETCHER32, 0, (size_t)0, NULL) < 0) goto error;

    /* Enable checksum during read */
    if(test_filter_internal(file,DSET_FLETCHER32_NAME,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&fletcher32_size) < 0) goto error;
    if(fletcher32_size<=null_size) {
        H5_FAILED();
        puts(" Size after checksumming is incorrect.");
        goto error;
    } /* end if */

    /* Disable checksum during read */
    puts("Testing Fletcher32 checksum(disabled for read)");
    if(test_filter_internal(file,DSET_FLETCHER32_NAME_2,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&fletcher32_size) < 0) goto error;
    if(fletcher32_size<=null_size) {
        H5_FAILED();
        puts(" Size after checksumming is incorrect.");
        goto error;
    } /* end if */

    /* Try to corrupt data and see if checksum fails */
    puts("Testing Fletcher32 checksum(when data is corrupted)");
    data_corrupt[0] = 52;
    data_corrupt[1] = 33;
    data_corrupt[2] = 27;

    /* H5Z_CORRUPT deliberately damages the stored bytes so the checksum
     * verification is expected to catch it (DATA_CORRUPTED). */
    if(H5Zregister (H5Z_CORRUPT) < 0) goto error;
    if(H5Pset_filter(dc, H5Z_FILTER_CORRUPT, 0, (size_t)3, data_corrupt) < 0) goto error;
    if(test_filter_internal(file,DSET_FLETCHER32_NAME_3,dc,DISABLE_FLETCHER32,DATA_CORRUPTED,&fletcher32_size) < 0) goto error;
    if(fletcher32_size<=null_size) {
        H5_FAILED();
        puts(" Size after checksumming is incorrect.");
        goto error;
    } /* end if */

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;


    /*----------------------------------------------------------
     * STEP 2: Test deflation by itself.
     *----------------------------------------------------------
     */
#ifdef H5_HAVE_FILTER_DEFLATE
    puts("Testing deflate filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_deflate (dc, 6) < 0) goto error;

    if(test_filter_internal(file,DSET_DEFLATE_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&deflate_size) < 0) goto error;
    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;
#else /* H5_HAVE_FILTER_DEFLATE */
    TESTING("deflate filter");
    SKIPPED();
    puts(" Deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */

    /*----------------------------------------------------------
     * STEP 3: Test szip compression by itself.
     * (Szip may be present decode-only; the two sub-tests are
     * mutually exclusive based on h5_szip_can_encode().)
     *----------------------------------------------------------
     */
#ifdef H5_HAVE_FILTER_SZIP
    TESTING("szip filter (with encoder)");
    if( h5_szip_can_encode() == 1) {
        if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
        if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;

        puts("");
        if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
        if(test_filter_internal(file,DSET_SZIP_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&szip_size) < 0) goto error;
        if(H5Pclose (dc) < 0) goto error;
    } else {
        SKIPPED();
    }

    TESTING("szip filter (without encoder)");

    if( h5_szip_can_encode() != 1) {
        puts("");
        if(test_filter_noencoder(NOENCODER_SZIP_DATASET) < 0) goto error;
    } else {
        SKIPPED();
    }

#else /* H5_HAVE_FILTER_SZIP */
    TESTING("szip filter");
    SKIPPED();
    puts(" Szip filter not enabled");
#endif /* H5_HAVE_FILTER_SZIP */

    /*----------------------------------------------------------
     * STEP 4: Test shuffling by itself.
     * (Shuffle only permutes bytes, so the stored size must be
     * exactly the unfiltered size.)
     *----------------------------------------------------------
     */
    puts("Testing shuffle filter");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_shuffle (dc) < 0) goto error;

    if(test_filter_internal(file,DSET_SHUFFLE_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&shuffle_size) < 0) goto error;
    if(shuffle_size!=null_size) {
        H5_FAILED();
        puts(" Shuffled size not the same as uncompressed size.");
        goto error;
    } /* end if */

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;

    /*----------------------------------------------------------
     * STEP 5: Test shuffle + deflate + checksum in any order.
     * (Filter pipeline order follows the H5Pset_* call order.)
     *----------------------------------------------------------
     */
#ifdef H5_HAVE_FILTER_DEFLATE
    puts("Testing shuffle+deflate+checksum filters(checksum first)");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_fletcher32 (dc) < 0) goto error;
    if(H5Pset_shuffle (dc) < 0) goto error;
    if(H5Pset_deflate (dc, 6) < 0) goto error;

    if(test_filter_internal(file,DSET_SHUF_DEF_FLET_NAME,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;

    puts("Testing shuffle+deflate+checksum filters(checksum last)");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_shuffle (dc) < 0) goto error;
    if(H5Pset_deflate (dc, 6) < 0) goto error;
    if(H5Pset_fletcher32 (dc) < 0) goto error;

    if(test_filter_internal(file,DSET_SHUF_DEF_FLET_NAME_2,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;
#else /* H5_HAVE_FILTER_DEFLATE */
    TESTING("shuffle+deflate+fletcher32 filters");
    SKIPPED();
    puts(" Deflate filter not enabled");
#endif /* H5_HAVE_FILTER_DEFLATE */

    /*----------------------------------------------------------
     * STEP 6: Test shuffle + szip + checksum in any order.
     *----------------------------------------------------------
     */
#ifdef H5_HAVE_FILTER_SZIP

    TESTING("shuffle+szip+checksum filters(checksum first, with encoder)");
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_fletcher32 (dc) < 0) goto error;
    if(H5Pset_shuffle (dc) < 0) goto error;

    /* Make sure encoding is enabled */
    if( h5_szip_can_encode() == 1) {
        puts("");
        if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
        if(test_filter_internal(file,DSET_SHUF_SZIP_FLET_NAME,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;
    } else {
        SKIPPED();
    }

    TESTING("shuffle+szip+checksum filters(checksum first, without encoder)");

    if( h5_szip_can_encode() != 1) {
        puts("");
        if(test_filter_noencoder(NOENCODER_SZIP_SHUFF_FLETCH_DATASET) < 0) goto error;
    } else {
        SKIPPED();
    }

    /* Clean up objects used for this test */
    if(H5Pclose (dc) < 0) goto error;

    TESTING("shuffle+szip+checksum filters(checksum last, with encoder)");

    /* Make sure encoding is enabled */
    if( h5_szip_can_encode() == 1) {
        puts("");
        if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
        if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
        if(H5Pset_shuffle (dc) < 0) goto error;
        if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
        if(H5Pset_fletcher32 (dc) < 0) goto error;

        if(test_filter_internal(file,DSET_SHUF_SZIP_FLET_NAME_2,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;

        /* Clean up objects used for this test */
        if(H5Pclose (dc) < 0) goto error;

    } else {
        SKIPPED();
    }

#else /* H5_HAVE_FILTER_SZIP */
    TESTING("shuffle+szip+fletcher32 filters");
    SKIPPED();
    puts(" szip filter not enabled");
#endif /* H5_HAVE_FILTER_SZIP */
    return 0;

error:
    return -1;
}
2333
2334
2335 /*-------------------------------------------------------------------------
2336 * Function: test_missing_filter
2337 *
2338 * Purpose: Tests library behavior when filter is missing
2339 *
2340 * Return: Success: 0
2341 * Failure: -1
2342 *
2343 * Programmer: Quincey Koziol
2344 * Thursday, November 14, 2002
2345 *
2346 *-------------------------------------------------------------------------
2347 */
2348 static herr_t
test_missing_filter(hid_t file)2349 test_missing_filter(hid_t file)
2350 {
2351 hid_t fid; /* File ID */
2352 hid_t dsid; /* Dataset ID */
2353 hid_t sid; /* Dataspace ID */
2354 hid_t dcpl; /* Dataspace creation property list ID */
2355 const hsize_t dims[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */
2356 const hsize_t chunk_dims[2] = {2, 25}; /* Chunk dimensions */
2357 hsize_t dset_size; /* Dataset size */
2358 size_t i,j; /* Local index variables */
2359 herr_t ret; /* Generic return value */
2360 char testfile[512]=""; /* Buffer to hold name of existing test file */
2361 char *srcdir = HDgetenv("srcdir"); /* The source directory, if we are using the --srcdir configure option */
2362
2363 TESTING("dataset access with missing filter");
2364
2365 /* Unregister the deflate filter */
2366 #ifdef H5_HAVE_FILTER_DEFLATE
2367 /* Verify deflate filter is registered currently */
2368 if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=TRUE) {
2369 H5_FAILED();
2370 printf(" Line %d: Deflate filter not available\n",__LINE__);
2371 goto error;
2372 } /* end if */
2373
2374 /* Unregister deflate filter (use internal function) */
2375 if(H5Z_unregister(H5Z_FILTER_DEFLATE) < 0) {
2376 H5_FAILED();
2377 printf(" Line %d: Can't unregister deflate filter\n",__LINE__);
2378 goto error;
2379 } /* end if */
2380 #endif /* H5_HAVE_FILTER_DEFLATE */
2381 /* Verify deflate filter is not registered currently */
2382 if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=FALSE) {
2383 H5_FAILED();
2384 printf(" Line %d: Deflate filter available\n",__LINE__);
2385 goto error;
2386 } /* end if */
2387
2388 /* Create dcpl with deflate filter */
2389 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
2390 H5_FAILED();
2391 printf(" Line %d: Can't create dcpl\n",__LINE__);
2392 goto error;
2393 } /* end if */
2394 if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) {
2395 H5_FAILED();
2396 printf(" Line %d: Can't set chunk sizes\n",__LINE__);
2397 goto error;
2398 } /* end if */
2399 if(H5Pset_deflate(dcpl, 9) < 0) {
2400 H5_FAILED();
2401 printf(" Line %d: Can't set deflate filter\n",__LINE__);
2402 goto error;
2403 } /* end if */
2404
2405 /* Check if all the filters are available */
2406 ret=H5Pall_filters_avail(dcpl);
2407 if(ret<0) {
2408 H5_FAILED();
2409 printf(" Line %d: Can't check filter availability\n",__LINE__);
2410 goto error;
2411 } /* end if */
2412 if(ret!=FALSE) {
2413 H5_FAILED();
2414 printf(" Line %d: Filter shouldn't be available\n",__LINE__);
2415 goto error;
2416 } /* end if */
2417
2418 /* Create the data space */
2419 if((sid = H5Screate_simple(2, dims, NULL)) < 0) {
2420 H5_FAILED();
2421 printf(" Line %d: Can't open dataspace\n",__LINE__);
2422 goto error;
2423 } /* end if */
2424
2425 /* Create new dataset */
2426 if((dsid = H5Dcreate2(file, DSET_MISSING_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) {
2427 H5_FAILED();
2428 printf(" Line %d: Can't create dataset\n",__LINE__);
2429 goto error;
2430 } /* end if */
2431
2432 /* Write data */
2433 if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
2434 H5_FAILED();
2435 printf(" Line %d: Error writing dataset data\n",__LINE__);
2436 goto error;
2437 } /* end if */
2438
2439 /* Flush the file (to clear the cache) */
2440 if(H5Fflush(file, H5F_SCOPE_GLOBAL) < 0) {
2441 H5_FAILED();
2442 printf(" Line %d: Error flushing file\n",__LINE__);
2443 goto error;
2444 } /* end if */
2445
2446 /* Query the dataset's size on disk */
2447 if(0 == (dset_size = H5Dget_storage_size(dsid))) {
2448 H5_FAILED();
2449 printf(" Line %d: Error querying dataset size, dset_size=%lu\n",__LINE__,(unsigned long)dset_size);
2450 goto error;
2451 } /* end if */
2452
2453 /* Verify that the size indicates data is uncompressed */
2454 /* (i.e. the deflation filter we asked for was silently ignored) */
2455 if((H5Tget_size(H5T_NATIVE_INT) * DSET_DIM1 * DSET_DIM2) != dset_size) {
2456 H5_FAILED();
2457 printf(" Line %d: Incorrect dataset size: %lu\n",__LINE__,(unsigned long)dset_size);
2458 goto error;
2459 } /* end if */
2460
2461 /* Read data */
2462 if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
2463 H5_FAILED();
2464 printf(" Line %d: Error reading dataset data\n",__LINE__);
2465 goto error;
2466 } /* end if */
2467
2468 /* Compare data */
2469 /* Check that the values read are the same as the values written */
2470 for(i=0; i<(size_t)dims[0]; i++) {
2471 for(j=0; j<(size_t)dims[1]; j++) {
2472 if(points[i][j] != check[i][j]) {
2473 H5_FAILED();
2474 printf(" Line %d: Read different values than written.\n",__LINE__);
2475 printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
2476 printf(" At original: %d\n",points[i][j]);
2477 printf(" At returned: %d\n",check[i][j]);
2478 goto error;
2479 } /* end if */
2480 } /* end for */
2481 } /* end for */
2482
2483 /* Close dataset */
2484 if(H5Dclose(dsid) < 0) {
2485 H5_FAILED();
2486 printf(" Line %d: Can't close dataset\n",__LINE__);
2487 goto error;
2488 } /* end if */
2489
2490 /* Close dataspace */
2491 if(H5Sclose(sid) < 0) {
2492 H5_FAILED();
2493 printf(" Line %d: Can't close dataspace\n",__LINE__);
2494 goto error;
2495 } /* end if */
2496
2497 /* Close dataset creation property list */
2498 if(H5Pclose(dcpl) < 0) {
2499 H5_FAILED();
2500 printf(" Line %d: Can't close dcpl\n",__LINE__);
2501 goto error;
2502 } /* end if */
2503
2504
2505 /* Try reading existing dataset with deflate filter */
2506
2507 /* Compose the name of the file to open, using the srcdir, if appropriate */
2508 if(srcdir && ((HDstrlen(srcdir) + HDstrlen(FILE_DEFLATE_NAME) + 1) < sizeof(testfile))){
2509 HDstrcpy(testfile, srcdir);
2510 HDstrcat(testfile, "/");
2511 }
2512 HDstrcat(testfile, FILE_DEFLATE_NAME);
2513
2514 /* Open existing file */
2515 if((fid = H5Fopen(testfile, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) {
2516 H5_FAILED();
2517 printf(" Line %d: Can't open existing deflated file\n", __LINE__);
2518 goto error;
2519 } /* end if */
2520
2521 /* Open dataset */
2522 if((dsid = H5Dopen2(fid, "Dataset1", H5P_DEFAULT)) < 0) {
2523 H5_FAILED();
2524 printf(" Line %d: Can't open dataset\n", __LINE__);
2525 goto error;
2526 } /* end if */
2527
2528 /* Read data (should fail, since deflate filter is missing) */
2529 H5E_BEGIN_TRY {
2530 ret = H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check);
2531 } H5E_END_TRY;
2532 if(ret>=0) {
2533 H5_FAILED();
2534 printf(" Line %d: Should not be able to read dataset data\n", __LINE__);
2535 goto error;
2536 } /* end if */
2537
2538 /* Close dataset */
2539 if(H5Dclose(dsid) < 0) {
2540 H5_FAILED();
2541 printf(" Line %d: Can't close dataset\n", __LINE__);
2542 goto error;
2543 } /* end if */
2544
2545 /* Close existing file */
2546 if(H5Fclose(fid) < 0) {
2547 H5_FAILED();
2548 printf(" Line %d: Can't close file\n", __LINE__);
2549 goto error;
2550 } /* end if */
2551
2552 /* Re-register the deflate filter */
2553 /* Verify deflate filter is not registered currently */
2554 if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=FALSE) {
2555 H5_FAILED();
2556 printf(" Line %d: Deflate filter available\n",__LINE__);
2557 goto error;
2558 } /* end if */
2559 #ifdef H5_HAVE_FILTER_DEFLATE
2560 /* Register deflate filter (use internal function to avoid range checks) */
2561 if(H5Z_register(H5Z_DEFLATE) < 0) {
2562 H5_FAILED();
2563 printf(" Line %d: Can't unregister deflate filter\n",__LINE__);
2564 goto error;
2565 } /* end if */
2566
2567 /* Verify deflate filter is registered currently */
2568 if(H5Zfilter_avail(H5Z_FILTER_DEFLATE)!=TRUE) {
2569 H5_FAILED();
2570 printf(" Line %d: Deflate filter not available\n",__LINE__);
2571 goto error;
2572 } /* end if */
2573 #endif /* H5_HAVE_FILTER_DEFLATE */
2574
2575 PASSED();
2576 return 0;
2577
2578 error:
2579 return -1;
2580 }
2581
2582
2583 /*-------------------------------------------------------------------------
2584 * Function: test_onebyte_shuffle
2585 *
2586 * Purpose: Tests the 8-bit array with shuffling algorithm.
2587 * The shuffled array should be the same result as
2588 * that before the shuffling.
2589 *
2590 * Return: Success: 0
2591 *
2592 * Failure: -1
2593 *
2594 * Programmer: Kent Yang
2595 * Wednesday, Nov. 13th, 2002
2596 *
2597 *-------------------------------------------------------------------------
2598 */
2599 static herr_t
test_onebyte_shuffle(hid_t file)2600 test_onebyte_shuffle(hid_t file)
2601 {
2602 hid_t dataset, space,dc;
2603 const hsize_t size[2] = {10, 20};
2604 const hsize_t chunk_size[2] = {10, 20};
2605 unsigned char orig_data[10][20];
2606 unsigned char new_data[10][20];
2607 size_t i, j;
2608
2609 TESTING("8-bit shuffling (setup)");
2610
2611 /* Create the data space */
2612 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
2613
2614 /* Use shuffling algorithm with 8-bit */
2615 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
2616 if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
2617 if(H5Pset_shuffle (dc) < 0) goto error;
2618
2619 /* Create the dataset */
2620 if((dataset = H5Dcreate2(file, DSET_ONEBYTE_SHUF_NAME, H5T_NATIVE_UCHAR,
2621 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
2622
2623 for(i= 0;i< 10; i++)
2624 for(j = 0; j < 20; j++)
2625 orig_data[i][j] = (unsigned char)HDrandom();
2626
2627 PASSED();
2628
2629 /*----------------------------------------------------------------------
2630 * STEP 1: Test shuffling by setting up a chunked dataset and writing
2631 * to it.
2632 *----------------------------------------------------------------------
2633 */
2634 TESTING("8-bit shuffling (write)");
2635
2636 if(H5Dwrite(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT,
2637 orig_data) < 0)
2638 goto error;
2639
2640 PASSED();
2641
2642 /*----------------------------------------------------------------------
2643 * STEP 2: Try to read the data we just wrote.
2644 *----------------------------------------------------------------------
2645 */
2646 TESTING("8-bit shuffling (read)");
2647
2648 /* Read the dataset back */
2649 if(H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT,
2650 new_data) < 0)
2651 goto error;
2652
2653 /* Check that the values read are the same as the values written */
2654 for(i=0; i<(size_t)size[0]; i++) {
2655 for(j=0; j<(size_t)size[1]; j++) {
2656 if(new_data[i][j] != orig_data[i][j]) {
2657 H5_FAILED();
2658 printf(" Read different values than written.\n");
2659 printf(" At index %lu,%lu\n",
2660 (unsigned long)i, (unsigned long)j);
2661 goto error;
2662 }
2663 }
2664 }
2665
2666 /*----------------------------------------------------------------------
2667 * Cleanup
2668 *----------------------------------------------------------------------
2669 */
2670 if(H5Pclose (dc) < 0) goto error;
2671 if(H5Dclose(dataset) < 0) goto error;
2672
2673 PASSED();
2674
2675 return 0;
2676
2677 error:
2678 return -1;
2679 }
2680
2681
2682 /*-------------------------------------------------------------------------
2683 * Function: test_nbit_int
2684 *
2685 * Purpose: Tests the integer datatype for nbit filter
2686 *
2687 * Return: Success: 0
2688 *
2689 * Failure: -1
2690 *
2691 * Programmer: Xiaowen Wu
2692 * Wednesday, Dec. 23th, 2004
2693 *
2694 *-------------------------------------------------------------------------
2695 */
2696 static herr_t
test_nbit_int(hid_t file)2697 test_nbit_int(hid_t file)
2698 {
2699 hid_t dataset, datatype, mem_datatype, space, dc;
2700 hsize_t size[2] = {2, 5};
2701 hsize_t chunk_size[2] = {2,5};
2702 int orig_data[2][5];
2703 int new_data[2][5];
2704 unsigned int mask;
2705 size_t precision, offset;
2706 size_t i, j;
2707
2708 puts("Testing nbit filter");
2709 TESTING(" nbit int (setup)");
2710
2711 /* Define dataset datatype (integer), and set precision, offset */
2712 datatype = H5Tcopy(H5T_NATIVE_INT);
2713 precision = 17; /* precision includes sign bit */
2714 if(H5Tset_precision(datatype,precision) < 0) goto error;
2715 offset = 4;
2716 if(H5Tset_offset(datatype,offset) < 0) goto error;
2717
2718 /* Copy to memory datatype before setting order */
2719 mem_datatype = H5Tcopy(datatype);
2720
2721 /* Set order of dataset datatype */
2722 if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) goto error;
2723
2724 /* Create the data space */
2725 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
2726
2727 /* Use nbit filter */
2728 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
2729 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
2730 if(H5Pset_nbit(dc) < 0) goto error;
2731
2732 /* Create the dataset */
2733 if((dataset = H5Dcreate2(file, DSET_NBIT_INT_NAME, datatype,
2734 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
2735
2736 /* Initialize data, assuming size of long long >= size of int */
2737 for(i= 0;i< (size_t)size[0]; i++)
2738 for(j = 0; j < (size_t)size[1]; j++) {
2739 orig_data[i][j] = (int)(((long long)HDrandom() %
2740 (long long)HDpow(2.0f, (double)(precision - 1))) << offset);
2741
2742 /* even-numbered values are negtive */
2743 if((i*size[1]+j+1)%2 == 0)
2744 orig_data[i][j] = -orig_data[i][j];
2745 }
2746
2747 PASSED();
2748
2749 /*----------------------------------------------------------------------
2750 * STEP 1: Test nbit by setting up a chunked dataset and writing
2751 * to it.
2752 *----------------------------------------------------------------------
2753 */
2754 TESTING(" nbit int (write)");
2755
2756 if(H5Dwrite(dataset, mem_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
2757 orig_data) < 0)
2758 goto error;
2759 PASSED();
2760
2761 /*----------------------------------------------------------------------
2762 * STEP 2: Try to read the data we just wrote.
2763 *----------------------------------------------------------------------
2764 */
2765 TESTING(" nbit int (read)");
2766
2767 /* Read the dataset back */
2768 if(H5Dread(dataset, mem_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
2769 new_data) < 0)
2770 goto error;
2771
2772 /* Check that the values read are the same as the values written
2773 * Use mask for checking the significant bits, ignoring the padding bits
2774 */
2775 mask = ~((unsigned)~0 << (precision + offset)) & ((unsigned)~0 << offset);
2776 for(i=0; i<(size_t)size[0]; i++) {
2777 for(j=0; j<(size_t)size[1]; j++) {
2778 if((new_data[i][j] & mask) != (orig_data[i][j] & mask)) {
2779 H5_FAILED();
2780 printf(" Read different values than written.\n");
2781 printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
2782 goto error;
2783 }
2784 }
2785 }
2786
2787 /*----------------------------------------------------------------------
2788 * Cleanup
2789 *----------------------------------------------------------------------
2790 */
2791 if(H5Tclose(datatype) < 0) goto error;
2792 if(H5Tclose(mem_datatype) < 0) goto error;
2793 if(H5Pclose(dc) < 0) goto error;
2794 if(H5Sclose(space) < 0) goto error;
2795 if(H5Dclose(dataset) < 0) goto error;
2796
2797 PASSED();
2798
2799 return 0;
2800 error:
2801 return -1;
2802 }
2803
2804
2805 /*-------------------------------------------------------------------------
2806 * Function: test_nbit_float
2807 *
2808 * Purpose: Tests the float datatype of nbit filter
2809 *
2810 * Return: Success: 0
2811 *
2812 * Failure: -1
2813 *
2814 * Programmer: Xiaowen Wu
2815 * Friday, Jan. 21th, 2005
2816 *
2817 *-------------------------------------------------------------------------
2818 */
2819 static herr_t
test_nbit_float(hid_t file)2820 test_nbit_float(hid_t file)
2821 {
2822 hid_t dataset, datatype, space, dc;
2823 const hsize_t size[2] = {2, 5};
2824 const hsize_t chunk_size[2] = {2, 5};
2825 /* orig_data[] are initialized to be within the range that can be represented by
2826 * dataset datatype (no precision loss during datatype conversion)
2827 */
2828 float orig_data[2][5] = {{188384.0f, 19.103516f, -1.0831790e9f, -84.242188f, 5.2045898f},
2829 {-49140.0f, 2350.25f, -3.2110596e-1f, 6.4998865e-5f, -0.0f}};
2830 float new_data[2][5];
2831 size_t precision, offset;
2832 size_t i, j;
2833
2834 TESTING(" nbit float (setup)");
2835
2836 /* Define user-defined single-precision floating-point type for dataset */
2837 datatype = H5Tcopy(H5T_IEEE_F32BE);
2838 if(H5Tset_fields(datatype, (size_t)26, (size_t)20, (size_t)6, (size_t)7, (size_t)13) < 0) goto error;
2839 offset = 7;
2840 if(H5Tset_offset(datatype,offset) < 0) goto error;
2841 precision = 20;
2842 if(H5Tset_precision(datatype,precision) < 0) goto error;
2843 if(H5Tset_size(datatype, (size_t)4) < 0) goto error;
2844 if(H5Tset_ebias(datatype, (size_t)31) < 0) goto error;
2845
2846 /* Create the data space */
2847 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
2848
2849 /* Use nbit filter */
2850 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
2851 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
2852 if(H5Pset_nbit(dc) < 0) goto error;
2853
2854 /* Create the dataset */
2855 if((dataset = H5Dcreate2(file, DSET_NBIT_FLOAT_NAME, datatype,
2856 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
2857 PASSED();
2858
2859 /*----------------------------------------------------------------------
2860 * STEP 1: Test nbit by setting up a chunked dataset and writing
2861 * to it.
2862 *----------------------------------------------------------------------
2863 */
2864 TESTING(" nbit float (write)");
2865
2866 if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
2867 orig_data) < 0)
2868 goto error;
2869
2870 PASSED();
2871
2872 /*----------------------------------------------------------------------
2873 * STEP 2: Try to read the data we just wrote.
2874 *----------------------------------------------------------------------
2875 */
2876 TESTING(" nbit float (read)");
2877
2878 /* Read the dataset back */
2879 if(H5Dread(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
2880 new_data) < 0)
2881 goto error;
2882
2883 /* Check that the values read are the same as the values written
2884 * Assume size of int = size of float
2885 */
2886 for(i=0; i<(size_t)size[0]; i++) {
2887 for(j=0; j<(size_t)size[1]; j++) {
2888 if(!(orig_data[i][j]==orig_data[i][j])) continue; /* skip if value is NaN */
2889 if(new_data[i][j] != orig_data[i][j]) {
2890 H5_FAILED();
2891 printf(" Read different values than written.\n");
2892 printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
2893 goto error;
2894 }
2895 }
2896 }
2897
2898 /*----------------------------------------------------------------------
2899 * Cleanup
2900 *----------------------------------------------------------------------
2901 */
2902 if(H5Tclose(datatype) < 0) goto error;
2903 if(H5Pclose(dc) < 0) goto error;
2904 if(H5Sclose(space) < 0) goto error;
2905 if(H5Dclose(dataset) < 0) goto error;
2906
2907 PASSED();
2908
2909 return 0;
2910
2911 error:
2912 return -1;
2913 }
2914
2915
2916 /*-------------------------------------------------------------------------
2917 * Function: test_nbit_double
2918 *
2919 * Purpose: Tests the double datatype of nbit filter
2920 *
2921 * Return: Success: 0
2922 *
2923 * Failure: -1
2924 *
2925 * Programmer: Xiaowen Wu
2926 * Wednesday, Jan. 26th, 2005
2927 *
2928 *-------------------------------------------------------------------------
2929 */
2930 static herr_t
test_nbit_double(hid_t file)2931 test_nbit_double(hid_t file)
2932 {
2933 /* assume unsigned int and float has the same number of bytes */
2934 hid_t dataset, datatype, space, dc;
2935 const hsize_t size[2] = {2, 5};
2936 const hsize_t chunk_size[2] = {2, 5};
2937 /* orig_data[] are initialized to be within the range that can be represented by
2938 * dataset datatype (no precision loss during datatype conversion)
2939 */
2940 double orig_data[2][5] = {
2941 {
2942 H5_DOUBLE(1.6081706885101836e+60),
2943 H5_DOUBLE(-255.32099170994480),
2944 H5_DOUBLE(1.2677579992621376e-61),
2945 H5_DOUBLE(64568.289448797700),
2946 H5_DOUBLE(-1.0619721778839084e-75)
2947 },
2948 {
2949 H5_DOUBLE(2.1499497833454840e+56),
2950 H5_DOUBLE(6.6562295504670740e-3),
2951 H5_DOUBLE(-1.5747263393432150),
2952 H5_DOUBLE(1.0711093225222612),
2953 H5_DOUBLE(-9.8971679387636870e-1)
2954 }};
2955 double new_data[2][5];
2956 size_t precision, offset;
2957 size_t i, j;
2958
2959 TESTING(" nbit double (setup)");
2960
2961 /* Define user-defined doule-precision floating-point type for dataset */
2962 datatype = H5Tcopy(H5T_IEEE_F64BE);
2963 if(H5Tset_fields(datatype, (size_t)55, (size_t)46, (size_t)9, (size_t)5, (size_t)41) < 0) goto error;
2964 offset = 5;
2965 if(H5Tset_offset(datatype,offset) < 0) goto error;
2966 precision = 51;
2967 if(H5Tset_precision(datatype,precision) < 0) goto error;
2968 if(H5Tset_size(datatype, (size_t)8) < 0) goto error;
2969 if(H5Tset_ebias(datatype, (size_t)255) < 0) goto error;
2970
2971 /* Create the data space */
2972 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
2973
2974 /* Use nbit filter */
2975 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
2976 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
2977 if(H5Pset_nbit(dc) < 0) goto error;
2978
2979 /* Create the dataset */
2980 if((dataset = H5Dcreate2(file, DSET_NBIT_DOUBLE_NAME, datatype,
2981 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
2982
2983 PASSED();
2984
2985 /*----------------------------------------------------------------------
2986 * STEP 1: Test nbit by setting up a chunked dataset and writing
2987 * to it.
2988 *----------------------------------------------------------------------
2989 */
2990 TESTING(" nbit double (write)");
2991
2992 if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
2993 orig_data) < 0)
2994 goto error;
2995 PASSED();
2996
2997 /*----------------------------------------------------------------------
2998 * STEP 2: Try to read the data we just wrote.
2999 *----------------------------------------------------------------------
3000 */
3001 TESTING(" nbit double (read)");
3002
3003 /* Read the dataset back */
3004 if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
3005 new_data) < 0)
3006 goto error;
3007
3008 /* Check that the values read are the same as the values written
3009 * Assume size of long long = size of double
3010 */
3011 for(i=0; i<(size_t)size[0]; i++) {
3012 for(j=0; j<(size_t)size[1]; j++) {
3013 if(!(orig_data[i][j]==orig_data[i][j])) continue; /* skip if value is NaN */
3014 if(new_data[i][j] != orig_data[i][j]) {
3015 H5_FAILED();
3016 printf(" Read different values than written.\n");
3017 printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
3018 goto error;
3019 }
3020 }
3021 }
3022
3023 /*----------------------------------------------------------------------
3024 * Cleanup
3025 *----------------------------------------------------------------------
3026 */
3027 if(H5Tclose(datatype) < 0) goto error;
3028 if(H5Pclose(dc) < 0) goto error;
3029 if(H5Sclose(space) < 0) goto error;
3030 if(H5Dclose(dataset) < 0) goto error;
3031
3032 PASSED();
3033
3034 return 0;
3035
3036 error:
3037 return -1;
3038 }
3039
3040
3041 /*-------------------------------------------------------------------------
3042 * Function: test_nbit_array
3043 *
3044 * Purpose: Tests the simple version array datatype for nbit filter
3045 *
3046 * Return: Success: 0
3047 *
3048 * Failure: -1
3049 *
3050 * Programmer: Xiaowen Wu
3051 * Tuesday, Jan. 18th, 2005
3052 *
3053 *-------------------------------------------------------------------------
3054 */
3055 static herr_t
test_nbit_array(hid_t file)3056 test_nbit_array(hid_t file)
3057 {
3058 hid_t dataset, base_datatype, array_datatype, space, dc;
3059 hid_t mem_base_datatype, mem_array_datatype;
3060 const hsize_t size[2] = {2, 5};
3061 const hsize_t adims[2] = {3, 2};
3062 const hsize_t chunk_size[2] = {2,5};
3063 unsigned int orig_data[2][5][3][2];
3064 unsigned int new_data[2][5][3][2];
3065 size_t precision, offset;
3066 size_t i, j, m, n;
3067
3068 TESTING(" nbit array (setup)");
3069
3070 /* Define dataset array datatype's base datatype and set precision, offset */
3071 base_datatype = H5Tcopy(H5T_NATIVE_UINT);
3072 precision = 22;
3073 if(H5Tset_precision(base_datatype,precision) < 0) goto error;
3074 offset = 7;
3075 if(H5Tset_offset(base_datatype,offset) < 0) goto error;
3076
3077 /* Copy to memory array datatype's base datatype before setting order */
3078 mem_base_datatype = H5Tcopy(base_datatype);
3079
3080 /* Set order of dataset array datatype's base datatype */
3081 if(H5Tset_order(base_datatype, H5T_ORDER_BE) < 0) goto error;
3082
3083 /* Create dataset array datatype */
3084 array_datatype = H5Tarray_create2(base_datatype, 2, adims);
3085
3086 /* Create memory array datatype */
3087 mem_array_datatype = H5Tarray_create2(mem_base_datatype, 2, adims);
3088
3089 /* Create the data space */
3090 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
3091
3092 /* Use nbit filter */
3093 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
3094 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
3095 if(H5Pset_nbit(dc) < 0) goto error;
3096
3097 /* Create the dataset */
3098 if((dataset = H5Dcreate2(file, DSET_NBIT_ARRAY_NAME, array_datatype,
3099 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
3100
3101 /* Initialize data, assuming size of long long >= size of unsigned int */
3102 for(i= 0;i< (size_t)size[0]; i++)
3103 for(j = 0; j < (size_t)size[1]; j++)
3104 for(m = 0; m < (size_t)adims[0]; m++)
3105 for(n = 0; n < (size_t)adims[1]; n++)
3106 orig_data[i][j][m][n] = (unsigned int)(((long long)HDrandom() %
3107 (long long)HDpow(2.0F, (double)precision)) << offset);
3108 PASSED();
3109
3110 /*----------------------------------------------------------------------
3111 * STEP 1: Test nbit by setting up a chunked dataset and writing
3112 * to it.
3113 *----------------------------------------------------------------------
3114 */
3115 TESTING(" nbit array (write)");
3116
3117 if(H5Dwrite(dataset, mem_array_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
3118 orig_data) < 0)
3119 goto error;
3120
3121 PASSED();
3122
3123 /*----------------------------------------------------------------------
3124 * STEP 2: Try to read the data we just wrote.
3125 *----------------------------------------------------------------------
3126 */
3127 TESTING(" nbit array (read)");
3128
3129 /* Read the dataset back */
3130 if(H5Dread(dataset, mem_array_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT,
3131 new_data) < 0)
3132 goto error;
3133
3134 /* Check that the values read are the same as the values written
3135 */
3136 for(i=0; i<(size_t)size[0]; i++)
3137 for(j=0; j<(size_t)size[1]; j++)
3138 for(m = 0; m < (size_t)adims[0]; m++)
3139 for(n = 0; n < (size_t)adims[1]; n++) {
3140 if(new_data[i][j][m][n]!= orig_data[i][j][m][n]) {
3141 H5_FAILED();
3142 printf(" Read different values than written.\n");
3143 printf(" At index %lu,%lu,%lu,%lu\n",
3144 (unsigned long)i, (unsigned long)j, (unsigned long)m, (unsigned long)n);
3145 goto error;
3146 }
3147 }
3148
3149 /*----------------------------------------------------------------------
3150 * Cleanup
3151 *----------------------------------------------------------------------
3152 */
3153 if(H5Tclose(array_datatype) < 0) goto error;
3154 if(H5Tclose(base_datatype) < 0) goto error;
3155 if(H5Tclose(mem_array_datatype) < 0) goto error;
3156 if(H5Tclose(mem_base_datatype) < 0) goto error;
3157 if(H5Pclose(dc) < 0) goto error;
3158 if(H5Sclose(space) < 0) goto error;
3159 if(H5Dclose(dataset) < 0) goto error;
3160
3161 PASSED();
3162
3163 return 0;
3164
3165 error:
3166 return -1;
3167 }
3168
3169
3170 /*-------------------------------------------------------------------------
3171 * Function: test_nbit_compound
3172 *
3173 * Purpose: Tests a simple version of compound datatype of nbit filter
3174 *
3175 * Return: Success: 0
3176 *
3177 * Failure: -1
3178 *
3179 * Programmer: Xiaowen Wu
3180 * Tuesday, Jan. 18th, 2005
3181 *
3182 *-------------------------------------------------------------------------
3183 */
static herr_t
test_nbit_compound(hid_t file)
{
    typedef struct { /* Struct with atomic fields */
        int i;
        char c;
        short s;
        float f;
    } atomic;
    hid_t i_tid, c_tid, s_tid, f_tid;    /* datatypes of the compound members */
    hid_t cmpd_tid;                      /* atomic compound datatype (dataset/file side) */
    hid_t mem_cmpd_tid;                  /* memory atomic compound datatype */
    size_t precision[3] = {15, 7, 10};   /* significant bits for the i, c, s members */
    size_t offset[3] = {9, 0, 3};        /* bit offsets for the i, c, s members */
    hid_t dataset, space, dc;
    const hsize_t size[2] = {2, 5};
    const hsize_t chunk_size[2] = {2, 5};
    /* Float values exactly representable by the user-defined float member
     * (no precision loss during datatype conversion) */
    const float float_val[2][5] = {{188384.0F, 19.103516F, -1.0831790e9F, -84.242188F, 5.2045898F},
                                   {-49140.0F, 2350.25F, -3.2110596e-1F, 6.4998865e-5F, -0.0F}};
    atomic orig_data[2][5];
    atomic new_data[2][5];
    unsigned int i_mask, s_mask, c_mask; /* masks selecting each member's significant bits */
    size_t i, j;


    TESTING(" nbit compound (setup)");

    /* Define datatypes of members of compound datatype */
    i_tid=H5Tcopy(H5T_NATIVE_INT);
    c_tid=H5Tcopy(H5T_NATIVE_CHAR);
    s_tid=H5Tcopy(H5T_NATIVE_SHORT);
    f_tid=H5Tcopy(H5T_IEEE_F32BE);

    /* Set precision and offset etc. */
    if(H5Tset_precision(i_tid,precision[0]) < 0) goto error;
    if(H5Tset_offset(i_tid,offset[0]) < 0) goto error;

    if(H5Tset_precision(c_tid,precision[1]) < 0) goto error;
    if(H5Tset_offset(c_tid,offset[1]) < 0) goto error;

    if(H5Tset_precision(s_tid,precision[2]) < 0) goto error;
    if(H5Tset_offset(s_tid,offset[2]) < 0) goto error;

    /* User-defined 32-bit float: sign at bit 26, 6-bit exponent at bit 20,
     * 13-bit mantissa at bit 7 (20 significant bits total) */
    if(H5Tset_fields(f_tid, (size_t)26, (size_t)20, (size_t)6, (size_t)7, (size_t)13) < 0) goto error;
    if(H5Tset_offset(f_tid, (size_t)7) < 0) goto error;
    if(H5Tset_precision(f_tid, (size_t)20) < 0) goto error;
    if(H5Tset_size(f_tid, (size_t)4) < 0) goto error;
    if(H5Tset_ebias(f_tid, (size_t)31) < 0) goto error;

    /* Create a memory compound datatype before setting the order */
    mem_cmpd_tid = H5Tcreate(H5T_COMPOUND, sizeof(atomic));
    if(H5Tinsert(mem_cmpd_tid, "i", HOFFSET(atomic, i), i_tid) < 0) goto error;
    if(H5Tinsert(mem_cmpd_tid, "c", HOFFSET(atomic, c), c_tid) < 0) goto error;
    if(H5Tinsert(mem_cmpd_tid, "s", HOFFSET(atomic, s), s_tid) < 0) goto error;
    if(H5Tinsert(mem_cmpd_tid, "f", HOFFSET(atomic, f), H5T_NATIVE_FLOAT) < 0) goto error;

    /* Create a dataset compound datatype and insert some atomic types */
    cmpd_tid = H5Tcreate(H5T_COMPOUND, sizeof(atomic));
    if(H5Tinsert(cmpd_tid, "i", HOFFSET(atomic, i), i_tid) < 0) goto error;
    if(H5Tinsert(cmpd_tid, "c", HOFFSET(atomic, c), c_tid) < 0) goto error;
    if(H5Tinsert(cmpd_tid, "s", HOFFSET(atomic, s), s_tid) < 0) goto error;
    if(H5Tinsert(cmpd_tid, "f", HOFFSET(atomic, f), f_tid) < 0) goto error;

    /* Set order of dataset compound datatype */
    if(H5Tset_order(cmpd_tid, H5T_ORDER_BE) < 0) goto error;

    /* Create the data space */
    if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;

    /* Use nbit filter */
    if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
    if(H5Pset_nbit(dc) < 0) goto error;

    /* Create the dataset */
    if((dataset = H5Dcreate2(file, DSET_NBIT_COMPOUND_NAME, cmpd_tid,
                             space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;

    /* Initialize data, assuming size of long long >= size of member datatypes;
     * each integer member only occupies its declared precision window */
    for(i= 0;i< (size_t)size[0]; i++)
        for(j = 0; j < (size_t)size[1]; j++) {
            orig_data[i][j].i = (int)(((long long)HDrandom() %
                                 (long long)HDpow(2.0F, (double)(precision[0]-1))) << offset[0]);
            orig_data[i][j].c = (char)(((long long)HDrandom() %
                                 (long long)HDpow(2.0F, (double)(precision[1]-1))) << offset[1]);
            orig_data[i][j].s = (short)(((long long)HDrandom() %
                                 (long long)HDpow(2.0F, (double)(precision[2]-1))) << offset[2]);
            orig_data[i][j].f = float_val[i][j];

            /* some even-numbered integer values are negative */
            if((i*size[1]+j+1)%2 == 0) {
                orig_data[i][j].i = -orig_data[i][j].i;
                orig_data[i][j].s = (short)-orig_data[i][j].s;
            }
        }

    PASSED();

    /*----------------------------------------------------------------------
     * STEP 1: Test nbit by setting up a chunked dataset and writing
     * to it.
     *----------------------------------------------------------------------
     */
    TESTING(" nbit compound (write)");

    if(H5Dwrite(dataset, mem_cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
                orig_data) < 0)
        goto error;
    PASSED();

    /*----------------------------------------------------------------------
     * STEP 2: Try to read the data we just wrote.
     *----------------------------------------------------------------------
     */
    TESTING(" nbit compound (read)");

    /* Read the dataset back */
    if(H5Dread(dataset, mem_cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
               new_data) < 0)
        goto error;

    /* Check that the values read are the same as the values written
     * Use mask for checking the significant bits, ignoring the padding bits
     */
    /* NOTE: precision + offset is at most 24 here, so these 32-bit shifts
     * are well defined (unlike the 31+1 case worked around in
     * test_nbit_compound_2) */
    i_mask = ~((unsigned)~0 << (precision[0] + offset[0])) & ((unsigned)~0 << offset[0]);
    c_mask = ~((unsigned)~0 << (precision[1] + offset[1])) & ((unsigned)~0 << offset[1]);
    s_mask = ~((unsigned)~0 << (precision[2] + offset[2])) & ((unsigned)~0 << offset[2]);
    for(i=0; i<size[0]; i++) {
        for(j=0; j<size[1]; j++) {
            /* Integer members are compared within their significant bits
             * only; the float member is compared exactly, except when the
             * original value is NaN (NaN != NaN, so it is skipped) */
            if((new_data[i][j].i & i_mask) != (orig_data[i][j].i & i_mask) ||
               (new_data[i][j].c & c_mask) != (orig_data[i][j].c & c_mask) ||
               (new_data[i][j].s & s_mask) != (orig_data[i][j].s & s_mask) ||
               (orig_data[i][j].f==orig_data[i][j].f && new_data[i][j].f != orig_data[i][j].f))
            {
                H5_FAILED();
                printf(" Read different values than written.\n");
                printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
                goto error;
            }
        }
    }

    /*----------------------------------------------------------------------
     * Cleanup
     *----------------------------------------------------------------------
     */
    if(H5Tclose(i_tid) < 0) goto error;
    if(H5Tclose(c_tid) < 0) goto error;
    if(H5Tclose(s_tid) < 0) goto error;
    if(H5Tclose(f_tid) < 0) goto error;
    if(H5Tclose(cmpd_tid) < 0) goto error;
    if(H5Tclose(mem_cmpd_tid) < 0) goto error;
    if(H5Pclose(dc) < 0) goto error;
    if(H5Sclose(space) < 0) goto error;
    if(H5Dclose(dataset) < 0) goto error;

    PASSED();

    return 0;

error:
    return -1;
}
3347
3348
3349 /*-------------------------------------------------------------------------
3350 * Function: test_nbit_compound_2
3351 *
3352 * Purpose: Tests a complex version of compound datatype of nbit filter
3353 *
3354 * Return: Success: 0
3355 *
3356 * Failure: -1
3357 *
3358 * Programmer: Xiaowen Wu
3359 * Tuesday, Jan. 18th, 2005
3360 *
3361 *-------------------------------------------------------------------------
3362 */
3363 static herr_t
test_nbit_compound_2(hid_t file)3364 test_nbit_compound_2(hid_t file)
3365 {
3366 typedef struct { /* Struct with atomic fields */
3367 int i;
3368 char c;
3369 short s;
3370 float f;
3371 } atomic;
3372
3373 typedef struct { /* Struct with complex fields */
3374 atomic a;
3375 unsigned int v;
3376 char b[2][2];
3377 atomic d[2][2];
3378 } complex;
3379
3380 hid_t i_tid, c_tid, s_tid, f_tid, v_tid;
3381 hid_t cmpd_tid1; /* atomic compound datatype */
3382 hid_t cmpd_tid2; /* complex compound datatype */
3383 hid_t mem_cmpd_tid1; /* memory atomic compound datatype */
3384 hid_t mem_cmpd_tid2; /* memory complex compound datatype */
3385 hid_t base_tid; /* simple array datatype's base datatype */
3386 hid_t array_tid; /* simple array datatype */
3387 hid_t array_cmplx_tid; /* complex array datatype */
3388 hid_t mem_array_cmplx_tid; /* memory complex array datatype */
3389 const hsize_t array_dims[2] = {2, 2};
3390 size_t precision[5] = {31, 8, 10, 23, 8};
3391 size_t offset[5] = {1, 0, 3, 5, 0};
3392 hid_t dataset, space, dc;
3393 const hsize_t size[2] = {2, 5};
3394 const hsize_t chunk_size[2] = {2, 5};
3395 const float float_val[2][5] = {{188384.0F, 19.103516F, -1.0831790e9F, -84.242188F, 5.2045898F},
3396 {-49140.0F, 2350.25F, -3.2110596e-1F, 6.4998865e-5F, -0.0F}};
3397 complex orig_data[2][5];
3398 complex new_data[2][5];
3399 unsigned int i_mask, s_mask, c_mask, b_mask;
3400 size_t i, j, m, n, b_failed, d_failed;
3401
3402
3403 TESTING(" nbit compound complex (setup)");
3404
3405 /* Define datatypes of members of compound datatype */
3406 i_tid=H5Tcopy(H5T_NATIVE_INT);
3407 c_tid=H5Tcopy(H5T_NATIVE_CHAR);
3408 s_tid=H5Tcopy(H5T_NATIVE_SHORT);
3409 v_tid=H5Tcopy(H5T_NATIVE_UINT);
3410 f_tid=H5Tcopy(H5T_IEEE_F32BE);
3411
3412 /* Set precision and offset etc. of atomic compound datatype members */
3413 if(H5Tset_precision(i_tid,precision[0]) < 0) goto error;
3414 if(H5Tset_offset(i_tid,offset[0]) < 0) goto error;
3415
3416 if(H5Tset_precision(c_tid,precision[1]) < 0) goto error;
3417 if(H5Tset_offset(c_tid,offset[1]) < 0) goto error;
3418
3419 if(H5Tset_precision(s_tid,precision[2]) < 0) goto error;
3420 if(H5Tset_offset(s_tid,offset[2]) < 0) goto error;
3421
3422 if(H5Tset_fields(f_tid, (size_t)26, (size_t)20, (size_t)6, (size_t)7, (size_t)13) < 0) goto error;
3423 if(H5Tset_offset(f_tid, (size_t)7) < 0) goto error;
3424 if(H5Tset_precision(f_tid, (size_t)20) < 0) goto error;
3425 if(H5Tset_size(f_tid, (size_t)4) < 0) goto error;
3426 if(H5Tset_ebias(f_tid, (size_t)31) < 0) goto error;
3427
3428 /* Create a memory atomic compound datatype before setting the order */
3429 mem_cmpd_tid1 = H5Tcreate(H5T_COMPOUND, sizeof(atomic));
3430 if(H5Tinsert(mem_cmpd_tid1, "i", HOFFSET(atomic, i), i_tid) < 0) goto error;
3431 if(H5Tinsert(mem_cmpd_tid1, "c", HOFFSET(atomic, c), c_tid) < 0) goto error;
3432 if(H5Tinsert(mem_cmpd_tid1, "s", HOFFSET(atomic, s), s_tid) < 0) goto error;
3433 if(H5Tinsert(mem_cmpd_tid1, "f", HOFFSET(atomic, f), H5T_NATIVE_FLOAT) < 0) goto error;
3434
3435 /* Create a dataset atomic compound datatype and insert some atomic types */
3436 cmpd_tid1 = H5Tcreate(H5T_COMPOUND, sizeof(atomic));
3437 if(H5Tinsert(cmpd_tid1, "i", HOFFSET(atomic, i), i_tid) < 0) goto error;
3438 if(H5Tinsert(cmpd_tid1, "c", HOFFSET(atomic, c), c_tid) < 0) goto error;
3439 if(H5Tinsert(cmpd_tid1, "s", HOFFSET(atomic, s), s_tid) < 0) goto error;
3440 if(H5Tinsert(cmpd_tid1, "f", HOFFSET(atomic, f), f_tid) < 0) goto error;
3441
3442 /* Set order of dataset compound datatype */
3443 if(H5Tset_order(cmpd_tid1, H5T_ORDER_BE) < 0) goto error;
3444
3445 /* Set precision and offset of the other data member */
3446 if(H5Tset_precision(v_tid,precision[3]) < 0) goto error;
3447 if(H5Tset_offset(v_tid,offset[3]) < 0) goto error;
3448
3449 /* Create the simple array datatype */
3450 base_tid = H5Tcopy(H5T_NATIVE_CHAR);
3451 if(H5Tset_precision(base_tid,precision[4]) < 0) goto error;
3452 if(H5Tset_offset(base_tid,offset[4]) < 0) goto error;
3453 array_tid = H5Tarray_create2(base_tid, 2, array_dims);
3454
3455 /* Create the complex memory and dataset array datatype */
3456 array_cmplx_tid = H5Tarray_create2(cmpd_tid1, 2, array_dims);
3457 mem_array_cmplx_tid = H5Tarray_create2(mem_cmpd_tid1, 2, array_dims);
3458
3459 /* Create a memory complex compound datatype before setting the order */
3460 mem_cmpd_tid2 = H5Tcreate(H5T_COMPOUND, sizeof(complex));
3461 if(H5Tinsert(mem_cmpd_tid2, "a", HOFFSET(complex, a), mem_cmpd_tid1) < 0) goto error;
3462 if(H5Tinsert(mem_cmpd_tid2, "v", HOFFSET(complex, v), v_tid) < 0) goto error;
3463 if(H5Tinsert(mem_cmpd_tid2, "b", HOFFSET(complex, b), array_tid) < 0) goto error;
3464 if(H5Tinsert(mem_cmpd_tid2, "d", HOFFSET(complex, d), mem_array_cmplx_tid) < 0) goto error;
3465
3466 /* Set order of dataset other complex compound member datatype */
3467 if(H5Tset_order(v_tid, H5T_ORDER_BE) < 0) goto error;
3468
3469 /* Create a dataset complex compound datatype and insert members */
3470 cmpd_tid2 = H5Tcreate(H5T_COMPOUND, sizeof(complex));
3471 if(H5Tinsert(cmpd_tid2, "a", HOFFSET(complex, a), cmpd_tid1) < 0) goto error;
3472 if(H5Tinsert(cmpd_tid2, "v", HOFFSET(complex, v), v_tid) < 0) goto error;
3473 if(H5Tinsert(cmpd_tid2, "b", HOFFSET(complex, b), array_tid) < 0) goto error;
3474 if(H5Tinsert(cmpd_tid2, "d", HOFFSET(complex, d), array_cmplx_tid) < 0) goto error;
3475
3476 /* Create the data space */
3477 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
3478
3479 /* Use nbit filter */
3480 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
3481 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
3482 if(H5Pset_nbit(dc) < 0) goto error;
3483
3484 /* Create the dataset */
3485 if((dataset = H5Dcreate2(file, DSET_NBIT_COMPOUND_NAME_2, cmpd_tid2,
3486 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
3487
3488 /* Initialize data, assuming size of long long >= size of member datatypes */
3489 for(i= 0;i< (size_t)size[0]; i++)
3490 for(j = 0; j < (size_t)size[1]; j++) {
3491 orig_data[i][j].a.i = (int)(((long long)HDrandom() %
3492 (long long)HDpow(2.0F, (double)(precision[0]-1))) << offset[0]);
3493 orig_data[i][j].a.c = (char)(((long long)HDrandom() %
3494 (long long)HDpow(2.0F, (double)(precision[1]-1))) << offset[1]);
3495 orig_data[i][j].a.s = (short)(-((long long)HDrandom() %
3496 (long long)HDpow(2.0F, (double)(precision[2]-1))) << offset[2]);
3497 orig_data[i][j].a.f = float_val[i][j];
3498
3499 orig_data[i][j].v = (unsigned int)(((long long)HDrandom() %
3500 (long long)HDpow(2.0F, (double)precision[3])) << offset[3]);
3501
3502 for(m = 0; m < (size_t)array_dims[0]; m++)
3503 for(n = 0; n < (size_t)array_dims[1]; n++)
3504 orig_data[i][j].b[m][n] = (char)(((long long)HDrandom() %
3505 (long long)HDpow(2.0F, (double)(precision[4]-1))) << offset[4]);
3506
3507 for(m = 0; m < (size_t)array_dims[0]; m++)
3508 for(n = 0; n < (size_t)array_dims[1]; n++) {
3509 orig_data[i][j].d[m][n].i = (int)(-((long long)HDrandom() %
3510 (long long)HDpow(2.0F, (double)(precision[0]-1))) << offset[0]);
3511 orig_data[i][j].d[m][n].c = (char)(((long long)HDrandom() %
3512 (long long)HDpow(2.0F, (double)(precision[1]-1))) << offset[1]);
3513 orig_data[i][j].d[m][n].s = (short)(((long long)HDrandom() %
3514 (long long)HDpow(2.0F, (double)(precision[2]-1))) << offset[2]);
3515 orig_data[i][j].d[m][n].f = float_val[i][j];
3516 }
3517 }
3518
3519 PASSED();
3520
3521 /*----------------------------------------------------------------------
3522 * STEP 1: Test nbit by setting up a chunked dataset and writing
3523 * to it.
3524 *----------------------------------------------------------------------
3525 */
3526 TESTING(" nbit compound complex (write)");
3527
3528 if(H5Dwrite(dataset, mem_cmpd_tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT,
3529 orig_data) < 0)
3530 goto error;
3531 PASSED();
3532
3533 /*----------------------------------------------------------------------
3534 * STEP 2: Try to read the data we just wrote.
3535 *----------------------------------------------------------------------
3536 */
3537 TESTING(" nbit compound complex (read)");
3538
3539 /* Read the dataset back */
3540 if(H5Dread(dataset, mem_cmpd_tid2, H5S_ALL, H5S_ALL, H5P_DEFAULT,
3541 new_data) < 0)
3542 goto error;
3543
3544 /* Check that the values read are the same as the values written
3545 * Use mask for checking the significant bits, ignoring the padding bits
3546 */
3547 /* The original code
3548 * i_mask = ~((unsigned)~0 << (precision[0] + offset[0])) & ((unsigned)~0 << offset[0]);
3549 * left shift a 32-bit integer for 32-bit. The result is undefined by C language. A user
3550 * discovered it using clang compiler with -fcatch-undefined-behavior option (see Issue 7674
3551 * in Jira). So I changed it in a funny way as below to avoid it. SLU - 2011/8/11
3552 */
3553 if(sizeof(unsigned) > 4)
3554 i_mask = ~((unsigned)~0 << (precision[0] + offset[0])) & ((unsigned)~0 << offset[0]);
3555 else {
3556 i_mask = 0xffffffff;
3557 i_mask = i_mask & ((unsigned)~0 << offset[0]);
3558 }
3559 c_mask = ~((unsigned)~0 << (precision[1] + offset[1])) & ((unsigned)~0 << offset[1]);
3560 s_mask = ~((unsigned)~0 << (precision[2] + offset[2])) & ((unsigned)~0 << offset[2]);
3561 b_mask = ~((unsigned)~0 << (precision[4] + offset[4])) & ((unsigned)~0 << offset[4]);
3562 for(i=0; i<(size_t)size[0]; i++) {
3563 for(j=0; j<(size_t)size[1]; j++) {
3564 b_failed = 0;
3565 d_failed = 0;
3566
3567 for(m = 0; m < (size_t)array_dims[0]; m++)
3568 for(n = 0; n < (size_t)array_dims[1]; n++)
3569 if((new_data[i][j].b[m][n]&b_mask)!=(orig_data[i][j].b[m][n]&b_mask)) {
3570 b_failed = 1;
3571 goto out;
3572 }
3573
3574 for(m = 0; m < (size_t)array_dims[0]; m++)
3575 for(n = 0; n < (size_t)array_dims[1]; n++)
3576 if((new_data[i][j].d[m][n].i & i_mask)!=(orig_data[i][j].d[m][n].i & i_mask)||
3577 (new_data[i][j].d[m][n].c & c_mask)!=(orig_data[i][j].d[m][n].c & c_mask)||
3578 (new_data[i][j].d[m][n].s & s_mask)!=(orig_data[i][j].d[m][n].s & s_mask)||
3579 (new_data[i][j].d[m][n].f==new_data[i][j].d[m][n].f &&
3580 new_data[i][j].d[m][n].f != new_data[i][j].d[m][n].f)) {
3581 d_failed = 1;
3582 goto out;
3583 }
3584
3585 out:
3586 if((new_data[i][j].a.i & i_mask)!=(orig_data[i][j].a.i & i_mask)||
3587 (new_data[i][j].a.c & c_mask)!=(orig_data[i][j].a.c & c_mask)||
3588 (new_data[i][j].a.s & s_mask)!=(orig_data[i][j].a.s & s_mask)||
3589 (new_data[i][j].a.f==new_data[i][j].a.f &&
3590 new_data[i][j].a.f != new_data[i][j].a.f)||
3591 new_data[i][j].v != orig_data[i][j].v || b_failed || d_failed) {
3592 H5_FAILED();
3593 printf(" Read different values than written.\n");
3594 printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
3595 goto error;
3596 }
3597 }
3598 }
3599
3600 /*----------------------------------------------------------------------
3601 * Cleanup
3602 *----------------------------------------------------------------------
3603 */
3604 if(H5Tclose(i_tid) < 0) goto error;
3605 if(H5Tclose(c_tid) < 0) goto error;
3606 if(H5Tclose(s_tid) < 0) goto error;
3607 if(H5Tclose(f_tid) < 0) goto error;
3608 if(H5Tclose(v_tid) < 0) goto error;
3609 if(H5Tclose(cmpd_tid2) < 0) goto error;
3610 if(H5Tclose(cmpd_tid1) < 0) goto error;
3611 if(H5Tclose(mem_cmpd_tid2) < 0) goto error;
3612 if(H5Tclose(mem_cmpd_tid1) < 0) goto error;
3613 if(H5Tclose(array_tid) < 0) goto error;
3614 if(H5Tclose(base_tid) < 0) goto error;
3615 if(H5Tclose(array_cmplx_tid) < 0) goto error;
3616 if(H5Tclose(mem_array_cmplx_tid) < 0) goto error;
3617 if(H5Pclose(dc) < 0) goto error;
3618 if(H5Sclose(space) < 0) goto error;
3619 if(H5Dclose(dataset) < 0) goto error;
3620
3621 PASSED();
3622
3623 return 0;
3624
3625 error:
3626 return -1;
3627 }
3628
3629
3630 /*-------------------------------------------------------------------------
3631 * Function: test_nbit_compound_3
3632 *
3633 * Purpose: Tests no-op datatypes in compound datatype for nbit filter
3634 *
3635 * Return: Success: 0
3636 *
3637 * Failure: -1
3638 *
3639 * Programmer: Xiaowen Wu
3640 * Thursday, Mar. 31th, 2005
3641 *
3642 *-------------------------------------------------------------------------
3643 */
3644 static herr_t
test_nbit_compound_3(hid_t file)3645 test_nbit_compound_3(hid_t file)
3646 {
3647 typedef struct { /* Struct with some no-op type fields */
3648 int i; /* integer field, NOT a no-op type */
3649 char str[30]; /* fixed-length string, no-op type */
3650 char *vl_str; /* varible-length string, no-op type */
3651 hvl_t v; /* VL datatype field, no-op type */
3652 hobj_ref_t r; /* Object reference field, no-op type */
3653 unsigned char o[5]; /* Opaque field, no-op type */
3654 } atomic;
3655 hid_t i_tid, str_tid, vl_str_tid, v_tid, o_tid;
3656 hid_t cmpd_tid; /* atomic compound datatype */
3657 hid_t dataset, space, dc, obj_ref_dataset = -1;
3658 const hsize_t size[1] = {5};
3659 const hsize_t chunk_size[1] = {5};
3660 atomic orig_data[5];
3661 atomic new_data[5];
3662 size_t i, k, j;
3663
3664
3665 TESTING(" nbit compound with no-op type (setup)");
3666
3667 /* Define datatypes of members of compound datatype */
3668 i_tid=H5Tcopy(H5T_NATIVE_INT);
3669 if(H5Tset_precision(i_tid, (size_t)17) < 0) goto error;
3670
3671 str_tid=H5Tcopy(H5T_C_S1);
3672 if(H5Tset_size(str_tid, (size_t)30) < 0) goto error;
3673
3674 vl_str_tid = H5Tcopy(H5T_C_S1);
3675 if(H5Tset_size(vl_str_tid,H5T_VARIABLE) < 0) goto error;
3676
3677 if((v_tid = H5Tvlen_create(H5T_NATIVE_UINT)) < 0) goto error;
3678
3679 if((o_tid = H5Tcreate(H5T_OPAQUE, (size_t)5)) < 0) goto error;
3680 if(H5Tset_tag(o_tid, "testing opaque field") < 0) goto error;
3681
3682 /* Create a dataset compound datatype and insert some atomic types */
3683 cmpd_tid = H5Tcreate(H5T_COMPOUND, sizeof(atomic));
3684 if(H5Tinsert(cmpd_tid, "i", HOFFSET(atomic, i), i_tid) < 0) goto error;
3685 if(H5Tinsert(cmpd_tid, "str", HOFFSET(atomic, str), str_tid) < 0) goto error;
3686 if(H5Tinsert(cmpd_tid, "vl_str", HOFFSET(atomic, vl_str), vl_str_tid) < 0) goto error;
3687 if(H5Tinsert(cmpd_tid, "v", HOFFSET(atomic, v), v_tid) < 0) goto error;
3688 if(H5Tinsert(cmpd_tid, "r", HOFFSET(atomic, r), H5T_STD_REF_OBJ) < 0) goto error;
3689 if(H5Tinsert(cmpd_tid, "o", HOFFSET(atomic, o), o_tid) < 0) goto error;
3690
3691 /* Create the data space */
3692 if((space = H5Screate_simple(1, size, NULL)) < 0) goto error;
3693
3694 /* Use nbit filter */
3695 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
3696 if(H5Pset_chunk(dc, 1, chunk_size) < 0) goto error;
3697 if(H5Pset_nbit(dc) < 0) goto error;
3698
3699 /* Create the dataset */
3700 if((dataset = H5Dcreate2(file, DSET_NBIT_COMPOUND_NAME_3, cmpd_tid,
3701 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
3702
3703 /* Create the dataset object reference points to */
3704 if((obj_ref_dataset = H5Dcreate2(file, "nbit_obj_ref", H5T_NATIVE_INT,
3705 space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
3706
3707 /* Initialize data */
3708 for(i = 0; i < (size_t)size[0]; i++) {
3709 HDmemset(&orig_data[i], 0, sizeof(orig_data[i]));
3710 orig_data[i].i = HDrandom() % (long)HDpow(2.0F, 17.0F - 1.0F);
3711 HDstrcpy(orig_data[i].str, "fixed-length C string");
3712 orig_data[i].vl_str = HDstrdup("variable-length C string");
3713
3714 orig_data[i].v.p = HDmalloc((size_t)(i+1)*sizeof(unsigned int));
3715 orig_data[i].v.len = (size_t)i+1;
3716 for(k = 0; k < (i+1); k++) ((unsigned int *)orig_data[i].v.p)[k] = (unsigned int)(i*100 + k);
3717
3718 /* Create reference to the dataset "nbit_obj_ref" */
3719 if(H5Rcreate(&orig_data[i].r, file, "nbit_obj_ref", H5R_OBJECT, (hid_t)-1) < 0) goto error;
3720
3721 for(j = 0; j < 5; j++) orig_data[i].o[j] = (unsigned char)(i + j);
3722 }
3723
3724 PASSED();
3725
3726 /*----------------------------------------------------------------------
3727 * STEP 1: Test nbit by setting up a chunked dataset and writing
3728 * to it.
3729 *----------------------------------------------------------------------
3730 */
3731 TESTING(" nbit compound with no-op type (write)");
3732
3733 if(H5Dwrite(dataset, cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
3734 orig_data) < 0)
3735 goto error;
3736 PASSED();
3737
3738 /*----------------------------------------------------------------------
3739 * STEP 2: Try to read the data we just wrote.
3740 *----------------------------------------------------------------------
3741 */
3742 TESTING(" nbit compound with no-op type (read)");
3743
3744 /* Read the dataset back */
3745 if(H5Dread(dataset, cmpd_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT,
3746 new_data) < 0)
3747 goto error;
3748
3749 /* Check that the values read are the same as the values written */
3750 for(i = 0; i < (size_t)size[0]; i++) {
3751 if(new_data[i].i != orig_data[i].i ||
3752 strcmp(new_data[i].str, orig_data[i].str) !=0 ||
3753 strcmp(new_data[i].vl_str, orig_data[i].vl_str) !=0 ||
3754 new_data[i].v.len != orig_data[i].v.len ||
3755 new_data[i].r != orig_data[i].r)
3756 {
3757 H5_FAILED();
3758 printf(" Read different values than written.\n");
3759 printf(" At index %lu\n", (unsigned long)i);
3760 goto error;
3761 }
3762
3763 for(k=0; k<i+1; k++)
3764 if(((unsigned int *)orig_data[i].v.p)[k] !=((unsigned int *)new_data[i].v.p)[k])
3765 {
3766 H5_FAILED();
3767 printf(" Read different values than written.\n");
3768 printf(" At index %lu\n", (unsigned long)i);
3769 goto error;
3770 }
3771
3772 for(j=0; j<5; j++)
3773 if(orig_data[i].o[j] != new_data[i].o[j])
3774 {
3775 H5_FAILED();
3776 printf(" Read different values than written.\n");
3777 printf(" At index %lu\n", (unsigned long)i);
3778 goto error;
3779 }
3780 }
3781
3782 /*----------------------------------------------------------------------
3783 * Cleanup
3784 *----------------------------------------------------------------------
3785 */
3786 if(H5Dvlen_reclaim(cmpd_tid, space, H5P_DEFAULT, new_data) < 0) goto error;
3787 if(H5Dvlen_reclaim(cmpd_tid, space, H5P_DEFAULT, orig_data) < 0) goto error;
3788 if(H5Tclose(i_tid) < 0) goto error;
3789 if(H5Tclose(str_tid) < 0) goto error;
3790 if(H5Tclose(vl_str_tid) < 0) goto error;
3791 if(H5Tclose(v_tid) < 0) goto error;
3792 if(H5Tclose(o_tid) < 0) goto error;
3793 if(H5Tclose(cmpd_tid) < 0) goto error;
3794 if(H5Pclose(dc) < 0) goto error;
3795 if(H5Sclose(space) < 0) goto error;
3796 if(H5Dclose(obj_ref_dataset) < 0) goto error;
3797 if(H5Dclose(dataset) < 0) goto error;
3798
3799 PASSED();
3800
3801 return 0;
3802
3803 error:
3804 return -1;
3805 }
3806
3807
3808 /*-------------------------------------------------------------------------
3809 * Function: test_nbit_int_size
3810 *
3811 * Purpose: Tests the correct size of the integer datatype for nbit filter
3812 *
3813 * Return: Success: 0
3814 *
3815 * Failure: -1
3816 *
3817 * Programmer: Raymond Lu
3818 * 19 November 2010
3819 *
3820 *-------------------------------------------------------------------------
3821 */
3822 static herr_t
test_nbit_int_size(hid_t file)3823 test_nbit_int_size(hid_t file)
3824 {
3825 hid_t dataspace, dataset, datatype, mem_datatype, dset_create_props;
3826 hsize_t dims[2], chunk_size[2];
3827 hsize_t dset_size = 0;
3828 int orig_data[DSET_DIM1][DSET_DIM2];
3829 int i, j;
3830 size_t precision, offset;
3831
3832 TESTING(" nbit integer dataset size");
3833
3834 /* Define dataset datatype (integer), and set precision, offset */
3835 if((datatype = H5Tcopy(H5T_NATIVE_INT)) < 0) {
3836 H5_FAILED();
3837 printf(" line %d: H5Tcopy failed\n",__LINE__);
3838 goto error;
3839 } /* end if */
3840
3841 precision = 16; /* precision includes sign bit */
3842 if(H5Tset_precision(datatype,precision)<0) {
3843 H5_FAILED();
3844 printf(" line %d: H5Pset_precision failed\n",__LINE__);
3845 goto error;
3846 } /* end if */
3847
3848 offset = 8;
3849 if(H5Tset_offset(datatype,offset)<0) {
3850 H5_FAILED();
3851 printf(" line %d: H5Tset_offset failed\n",__LINE__);
3852 goto error;
3853 } /* end if */
3854
3855 /* Copy to memory datatype */
3856 if((mem_datatype = H5Tcopy(datatype)) < 0) {
3857 H5_FAILED();
3858 printf(" line %d: H5Tcopy failed\n",__LINE__);
3859 goto error;
3860 } /* end if */
3861
3862 /* Set order of dataset datatype */
3863 if(H5Tset_order(datatype, H5T_ORDER_BE)<0) {
3864 H5_FAILED();
3865 printf(" line %d: H5Pset_order failed\n",__LINE__);
3866 goto error;
3867 } /* end if */
3868
3869 if(H5Tset_size(datatype, 4)<0) {
3870 H5_FAILED();
3871 printf(" line %d: H5Pset_size failed\n",__LINE__);
3872 goto error;
3873 } /* end if */
3874
3875 /* Initiliaze data buffer with random data within correct range
3876 * corresponding to the memory datatype's precision and offset.
3877 */
3878 for (i=0; i < DSET_DIM1; i++)
3879 for (j=0; j < DSET_DIM2; j++)
3880 orig_data[i][j] = rand() % (int)pow((double)2, (double)(precision-1)) << offset;
3881
3882
3883 /* Describe the dataspace. */
3884 dims[0] = DSET_DIM1;
3885 dims[1] = DSET_DIM2;
3886 if((dataspace = H5Screate_simple (2, dims, NULL))<0) {
3887 H5_FAILED();
3888 printf(" line %d: H5Pcreate failed\n",__LINE__);
3889 goto error;
3890 } /* end if */
3891
3892 /*
3893 * Set the dataset creation property list to specify the chunks
3894 */
3895 chunk_size[0] = DSET_DIM1/10;
3896 chunk_size[1] = DSET_DIM2/10;
3897 if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE))<0) {
3898 H5_FAILED();
3899 printf(" line %d: H5Pcreate failed\n",__LINE__);
3900 goto error;
3901 } /* end if */
3902
3903 if(H5Pset_chunk (dset_create_props, 2, chunk_size)<0) {
3904 H5_FAILED();
3905 printf(" line %d: H5Pset_chunk failed\n",__LINE__);
3906 goto error;
3907 } /* end if */
3908
3909 /*
3910 * Set for n-bit compression
3911 */
3912 if(H5Pset_nbit (dset_create_props)<0) {
3913 H5_FAILED();
3914 printf(" line %d: H5Pset_nbit failed\n",__LINE__);
3915 goto error;
3916 } /* end if */
3917
3918 /*
3919 * Create a new dataset within the file.
3920 */
3921 if((dataset = H5Dcreate2 (file, DSET_NBIT_INT_SIZE_NAME, datatype,
3922 dataspace, H5P_DEFAULT,
3923 dset_create_props, H5P_DEFAULT))<0) {
3924 H5_FAILED();
3925 printf(" line %d: H5dwrite failed\n",__LINE__);
3926 goto error;
3927 } /* end if */
3928
3929 /*
3930 * Write the array to the file.
3931 */
3932 if(H5Dwrite (dataset, mem_datatype, H5S_ALL, H5S_ALL,
3933 H5P_DEFAULT, orig_data)<0) {
3934 H5_FAILED();
3935 printf(" Line %d: H5Dwrite failed\n",__LINE__);
3936 goto error;
3937 } /* end if */
3938
3939 /*
3940 * Get the precision of the data type
3941 */
3942 if((precision = H5Tget_precision(datatype)) == 0) {
3943 H5_FAILED();
3944 printf(" Line %d: wrong precision size: %zu\n",__LINE__, precision);
3945 goto error;
3946 } /* end if */
3947
3948 /*
3949 * The size of the dataset after compression should around 2 * DSET_DIM1 * DSET_DIM2
3950 */
3951 if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
3952 dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
3953 H5_FAILED();
3954 HDfprintf(stdout, " Line %d: wrong dataset size: %Hu\n",__LINE__, dset_size);
3955 goto error;
3956 } /* end if */
3957
3958 H5Tclose (datatype);
3959 H5Tclose (mem_datatype);
3960 H5Dclose (dataset);
3961 H5Sclose (dataspace);
3962 H5Pclose (dset_create_props);
3963
3964 PASSED();
3965
3966 return 0;
3967 error:
3968 return -1;
3969 }
3970
3971
3972 /*-------------------------------------------------------------------------
3973 * Function: test_nbit_flt_size
3974 *
3975 * Purpose: Tests the correct size of the floating-number datatype for
3976 * nbit filter
3977 *
3978 * Return: Success: 0
3979 *
3980 * Failure: -1
3981 *
3982 * Programmer: Raymond Lu
3983 * 19 November 2010
3984 *
3985 *-------------------------------------------------------------------------
3986 */
3987 static herr_t
test_nbit_flt_size(hid_t file)3988 test_nbit_flt_size(hid_t file)
3989 {
3990 hid_t dataspace, dataset, datatype, dset_create_props;
3991 hsize_t dims[2], chunk_size[2];
3992 hsize_t dset_size = 0;
3993 float orig_data[DSET_DIM1][DSET_DIM2];
3994 int i, j;
3995 size_t precision, offset;
3996 size_t spos, epos, esize, mpos, msize;
3997
3998 TESTING(" nbit floating-number dataset size");
3999
4000 /* Define floating-point type for dataset
4001 *-------------------------------------------------------------------
4002 * size=4 byte, precision=16 bits, offset=8 bits,
4003 * mantissa size=9 bits, mantissa position=8,
4004 * exponent size=6 bits, exponent position=17,
4005 * exponent bias=31.
4006 * It can be illustrated in little-endian order as:
4007 * (S - sign bit, E - exponent bit, M - mantissa bit,
4008 * ? - padding bit)
4009 *
4010 * 3 2 1 0
4011 * ???????? SEEEEEEM MMMMMMMM ????????
4012 *
4013 * To create a new floating-point type, the following
4014 * properties must be set in the order of
4015 * set fields -> set offset -> set precision -> set size.
4016 * All these properties must be set before the type can function.
4017 * Other properties can be set anytime. Derived type size cannot
4018 * be expanded bigger than original size but can be decreased.
4019 * There should be no holes among the significant bits. Exponent
4020 * bias usually is set 2^(n-1)-1, where n is the exponent size.
4021 *-------------------------------------------------------------------*/
4022 if((datatype = H5Tcopy(H5T_IEEE_F32LE)) < 0) {
4023 H5_FAILED();
4024 printf(" line %d: H5Tcopy failed\n",__LINE__);
4025 goto error;
4026 } /* end if */
4027
4028 msize = 9;
4029 spos = 23;
4030 epos = 17;
4031 esize = 6;
4032 mpos = 8;
4033 offset = 8;
4034 precision = 16;
4035
4036 if(H5Tset_fields(datatype, spos, epos, esize, mpos, msize)<0) {
4037 H5_FAILED();
4038 printf(" line %d: H5Tset_fields failed\n",__LINE__);
4039 goto error;
4040 } /* end if */
4041
4042 if(H5Tset_offset(datatype,offset)<0) {
4043 H5_FAILED();
4044 printf(" line %d: H5Tset_offset failed\n",__LINE__);
4045 goto error;
4046 } /* end if */
4047
4048 if(H5Tset_precision(datatype,precision)<0) {
4049 H5_FAILED();
4050 printf(" line %d: H5Tset_precision failed\n",__LINE__);
4051 goto error;
4052 } /* end if */
4053
4054 if(H5Tset_size(datatype, 4)<0) {
4055 H5_FAILED();
4056 printf(" line %d: H5Pset_size failed\n",__LINE__);
4057 goto error;
4058 } /* end if */
4059
4060 /* Set order of dataset datatype */
4061 if(H5Tset_order(datatype, H5T_ORDER_BE)<0) {
4062 H5_FAILED();
4063 printf(" line %d: H5Pset_order failed\n",__LINE__);
4064 goto error;
4065 } /* end if */
4066
4067 if(H5Tset_ebias(datatype, 31)<0) {
4068 H5_FAILED();
4069 printf(" line %d: H5Pset_size failed\n",__LINE__);
4070 goto error;
4071 } /* end if */
4072
4073 /*
4074 * Initiliaze data buffer with random data
4075 */
4076 for (i=0; i < DSET_DIM1; i++)
4077 for (j=0; j < DSET_DIM2; j++)
4078 orig_data[i][j] = (rand() % 1234567) / 2;
4079
4080
4081 /* Describe the dataspace. */
4082 dims[0] = DSET_DIM1;
4083 dims[1] = DSET_DIM2;
4084 if((dataspace = H5Screate_simple (2, dims, NULL))<0) {
4085 H5_FAILED();
4086 printf(" line %d: H5Pcreate failed\n",__LINE__);
4087 goto error;
4088 } /* end if */
4089
4090 /*
4091 * Set the dataset creation property list to specify the chunks
4092 */
4093 chunk_size[0] = DSET_DIM1/10;
4094 chunk_size[1] = DSET_DIM2/10;
4095 if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE))<0) {
4096 H5_FAILED();
4097 printf(" line %d: H5Pcreate failed\n",__LINE__);
4098 goto error;
4099 } /* end if */
4100
4101 if(H5Pset_chunk (dset_create_props, 2, chunk_size)<0) {
4102 H5_FAILED();
4103 printf(" line %d: H5Pset_chunk failed\n",__LINE__);
4104 goto error;
4105 } /* end if */
4106
4107 /*
4108 * Set for n-bit compression
4109 */
4110 if(H5Pset_nbit (dset_create_props)<0) {
4111 H5_FAILED();
4112 printf(" line %d: H5Pset_nbit failed\n",__LINE__);
4113 goto error;
4114 } /* end if */
4115
4116 /*
4117 * Create a new dataset within the file.
4118 */
4119 if((dataset = H5Dcreate2 (file, DSET_NBIT_FLT_SIZE_NAME, datatype,
4120 dataspace, H5P_DEFAULT,
4121 dset_create_props, H5P_DEFAULT))<0) {
4122 H5_FAILED();
4123 printf(" line %d: H5dwrite failed\n",__LINE__);
4124 goto error;
4125 } /* end if */
4126
4127 /*
4128 * Write the array to the file.
4129 */
4130 if(H5Dwrite (dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL,
4131 H5P_DEFAULT, orig_data)<0) {
4132 H5_FAILED();
4133 printf(" Line %d: H5Dwrite failed\n",__LINE__);
4134 goto error;
4135 } /* end if */
4136
4137 /*
4138 * Get the precision of the data type
4139 */
4140 if((precision = H5Tget_precision(datatype)) == 0) {
4141 H5_FAILED();
4142 printf(" Line %d: wrong precision size: %zu\n",__LINE__, precision);
4143 goto error;
4144 } /* end if */
4145
4146 /*
4147 * The size of the dataset after compression should around 2 * DSET_DIM1 * DSET_DIM2
4148 */
4149 if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
4150 dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
4151 H5_FAILED();
4152 HDfprintf(stdout, " Line %d: wrong dataset size: %Hu\n",__LINE__, dset_size);
4153 goto error;
4154 } /* end if */
4155
4156 H5Tclose (datatype);
4157 H5Dclose (dataset);
4158 H5Sclose (dataspace);
4159 H5Pclose (dset_create_props);
4160
4161 PASSED();
4162
4163 return 0;
4164 error:
4165 return -1;
4166 }
4167
4168 /*-------------------------------------------------------------------------
4169 * Function: test_scaleoffset_int
4170 *
4171 * Purpose: Tests the integer datatype for scaleoffset filter
4172 * with fill value not defined
4173 *
4174 * Return: Success: 0
4175 *
4176 * Failure: -1
4177 *
4178 * Programmer: Xiaowen Wu
4179 * Monday, Feb. 14th, 2005
4180 *
4181 *-------------------------------------------------------------------------
4182 */
4183 static herr_t
test_scaleoffset_int(hid_t file)4184 test_scaleoffset_int(hid_t file)
4185 {
4186 hid_t dataset, datatype, space, dc;
4187 const hsize_t size[2] = {2, 5};
4188 const hsize_t chunk_size[2] = {2,5};
4189 int orig_data[2][5];
4190 int new_data[2][5];
4191 size_t i, j;
4192
4193 puts("Testing scaleoffset filter");
4194 TESTING(" scaleoffset int without fill value (setup)");
4195
4196 datatype = H5Tcopy(H5T_NATIVE_INT);
4197
4198 /* Set order of dataset datatype */
4199 if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) goto error;
4200
4201 /* Create the data space */
4202 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
4203
4204 /* Create the dataset property list */
4205 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
4206
4207 /* Fill value undefined */
4208 if(H5Pset_fill_value(dc, datatype, NULL) < 0) goto error;
4209
4210 /* Set up to use scaleoffset filter, let library calculate minbits */
4211 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
4212 if(H5Pset_scaleoffset(dc, H5Z_SO_INT,H5Z_SO_INT_MINBITS_DEFAULT) < 0) goto error;
4213
4214 /* Create the dataset */
4215 if((dataset = H5Dcreate2(file, DSET_SCALEOFFSET_INT_NAME, datatype,
4216 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
4217
4218 /* Initialize data */
4219 for(i= 0;i< (size_t)size[0]; i++)
4220 for(j = 0; j < (size_t)size[1]; j++) {
4221 orig_data[i][j] = HDrandom() % 10000;
4222
4223 /* even-numbered values are negtive */
4224 if((i*size[1]+j+1)%2 == 0)
4225 orig_data[i][j] = -orig_data[i][j];
4226 }
4227
4228 PASSED();
4229
4230 /*----------------------------------------------------------------------
4231 * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
4232 * to it.
4233 *----------------------------------------------------------------------
4234 */
4235 TESTING(" scaleoffset int without fill value (write)");
4236
4237 if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
4238 orig_data) < 0) goto error;
4239 PASSED();
4240
4241 /*----------------------------------------------------------------------
4242 * STEP 2: Try to read the data we just wrote.
4243 *----------------------------------------------------------------------
4244 */
4245 TESTING(" scaleoffset int without fill value (read)");
4246
4247 /* Read the dataset back */
4248 if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
4249 new_data) < 0) goto error;
4250
4251 /* Check that the values read are the same as the values written */
4252 for(i=0; i<(size_t)size[0]; i++) {
4253 for(j=0; j<(size_t)size[1]; j++) {
4254 if(new_data[i][j] != orig_data[i][j]) {
4255 H5_FAILED();
4256 printf(" Read different values than written.\n");
4257 printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
4258 goto error;
4259 }
4260 }
4261 }
4262
4263 /*----------------------------------------------------------------------
4264 * Cleanup
4265 *----------------------------------------------------------------------
4266 */
4267 if(H5Tclose(datatype) < 0) goto error;
4268 if(H5Pclose(dc) < 0) goto error;
4269 if(H5Sclose(space) < 0) goto error;
4270 if(H5Dclose(dataset) < 0) goto error;
4271
4272 PASSED();
4273
4274 return 0;
4275 error:
4276 return -1;
4277 }
4278
4279
4280 /*-------------------------------------------------------------------------
4281 * Function: test_scaleoffset_int_2
4282 *
4283 * Purpose: Tests the integer datatype for scaleoffset filter
4284 * with fill value set
4285 *
4286 * Return: Success: 0
4287 *
4288 * Failure: -1
4289 *
4290 * Programmer: Xiaowen Wu
4291 * Tuesday, March 15th, 2005
4292 *
4293 *-------------------------------------------------------------------------
4294 */
4295 static herr_t
test_scaleoffset_int_2(hid_t file)4296 test_scaleoffset_int_2(hid_t file)
4297 {
4298 hid_t dataset, datatype, space, mspace, dc;
4299 const hsize_t size[2] = {2, 5};
4300 const hsize_t chunk_size[2] = {2,5};
4301 int orig_data[2][5];
4302 int new_data[2][5];
4303 hsize_t start[2]; /* Start of hyperslab */
4304 hsize_t stride[2]; /* Stride of hyperslab */
4305 hsize_t count[2]; /* Block count */
4306 hsize_t block[2]; /* Block sizes */
4307 int fillval;
4308 size_t j;
4309
4310 TESTING(" scaleoffset int with fill value (setup)");
4311
4312 datatype = H5Tcopy(H5T_NATIVE_INT);
4313
4314 /* Set order of dataset datatype */
4315 if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) goto error;
4316
4317 /* Create the data space for the dataset */
4318 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
4319
4320 /* Create the dataset property list */
4321 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
4322
4323 /* Set fill value */
4324 fillval = 10000;
4325 if(H5Pset_fill_value(dc, H5T_NATIVE_INT, &fillval) < 0) goto error;
4326
4327 /* Set up to use scaleoffset filter, let library calculate minbits */
4328 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
4329 if(H5Pset_scaleoffset(dc, H5Z_SO_INT,H5Z_SO_INT_MINBITS_DEFAULT) < 0) goto error;
4330
4331 /* Create the dataset */
4332 if((dataset = H5Dcreate2(file, DSET_SCALEOFFSET_INT_NAME_2, datatype,
4333 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
4334
4335 /* Create the memory data space */
4336 if((mspace = H5Screate_simple(2, size, NULL)) < 0) goto error;
4337
4338 /* Select hyperslab for data to write, using 1x5 blocks,
4339 * (1,1) stride and (1,1) count starting at the position (0,0).
4340 */
4341 start[0] = 0; start[1] = 0;
4342 stride[0] = 1; stride[1] = 1;
4343 count[0] = 1; count[1] = 1;
4344 block[0] = 1; block[1] = 5;
4345 if(H5Sselect_hyperslab(mspace, H5S_SELECT_SET, start,
4346 stride, count, block) < 0) goto error;
4347
4348 /* Initialize data of hyperslab */
4349 for(j = 0; j < (size_t)size[1]; j++) {
4350 orig_data[0][j] = (int)HDrandom() % 10000;
4351
4352 /* even-numbered values are negtive */
4353 if((j+1)%2 == 0)
4354 orig_data[0][j] = -orig_data[0][j];
4355 }
4356
4357 PASSED();
4358
4359 /*----------------------------------------------------------------------
4360 * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
4361 * to it.
4362 *----------------------------------------------------------------------
4363 */
4364 TESTING(" scaleoffset int with fill value (write)");
4365
4366 /* only data in the hyperslab will be written, other value should be fill value */
4367 if(H5Dwrite(dataset, H5T_NATIVE_INT, mspace, mspace, H5P_DEFAULT,
4368 orig_data) < 0) goto error;
4369 PASSED();
4370
4371 /*----------------------------------------------------------------------
4372 * STEP 2: Try to read the data we just wrote.
4373 *----------------------------------------------------------------------
4374 */
4375 TESTING(" scaleoffset int with fill value (read)");
4376
4377 /* Read the dataset back */
4378 if(H5Dread(dataset, H5T_NATIVE_INT, mspace, mspace, H5P_DEFAULT,
4379 new_data) < 0) goto error;
4380
4381 /* Check that the values read are the same as the values written */
4382 for(j=0; j<(size_t)size[1]; j++) {
4383 if(new_data[0][j] != orig_data[0][j]) {
4384 H5_FAILED();
4385 printf(" Read different values than written.\n");
4386 printf(" At index %lu,%lu\n", (unsigned long)0, (unsigned long)j);
4387 goto error;
4388 }
4389 }
4390
4391 /*----------------------------------------------------------------------
4392 * Cleanup
4393 *----------------------------------------------------------------------
4394 */
4395 if(H5Tclose(datatype) < 0) goto error;
4396 if(H5Pclose(dc) < 0) goto error;
4397 if(H5Sclose(space) < 0) goto error;
4398 if(H5Dclose(dataset) < 0) goto error;
4399
4400 PASSED();
4401
4402 return 0;
4403 error:
4404 return -1;
4405 }
4406
4407
4408 /*-------------------------------------------------------------------------
4409 * Function: test_scaleoffset_float
4410 *
4411 * Purpose: Tests the float datatype for scaleoffset filter, with fill
4412 * value undefined, using variable-minimum-bits method
4413 *
4414 * Return: Success: 0
4415 *
4416 * Failure: -1
4417 *
4418 * Programmer: Xiaowen Wu
4419 * Wednesday, Apr. 20th, 2005
4420 *
4421 *-------------------------------------------------------------------------
4422 */
4423 static herr_t
test_scaleoffset_float(hid_t file)4424 test_scaleoffset_float(hid_t file)
4425 {
4426 hid_t dataset, datatype, space, dc;
4427 const hsize_t size[2] = {2, 5};
4428 const hsize_t chunk_size[2] = {2,5};
4429 float orig_data[2][5];
4430 float new_data[2][5];
4431 size_t i, j;
4432
4433 TESTING(" scaleoffset float without fill value, D-scaling (setup)");
4434
4435 datatype = H5Tcopy(H5T_NATIVE_FLOAT);
4436
4437 /* Set order of dataset datatype */
4438 if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) goto error;
4439
4440 /* Create the data space */
4441 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
4442
4443 /* Create the dataset property list */
4444 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
4445
4446 /* Fill value undefined */
4447 if(H5Pset_fill_value(dc, datatype, NULL) < 0) goto error;
4448
4449 /* Set up to use scaleoffset filter, decimal scale factor is 3,
4450 * use variable-minimum-bits method
4451 */
4452 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
4453 if(H5Pset_scaleoffset(dc, H5Z_SO_FLOAT_DSCALE,3) < 0) goto error;
4454
4455 /* Create the dataset */
4456 if((dataset = H5Dcreate2(file, DSET_SCALEOFFSET_FLOAT_NAME, datatype,
4457 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
4458
4459 /* Initialize data */
4460 for(i= 0;i< (size_t)size[0]; i++)
4461 for(j = 0; j < (size_t)size[1]; j++) {
4462 orig_data[i][j] = (float)((HDrandom() % 100000) / 1000.0F);
4463
4464 /* even-numbered values are negtive */
4465 if((i*size[1]+j+1)%2 == 0)
4466 orig_data[i][j] = -orig_data[i][j];
4467 }
4468
4469 PASSED();
4470
4471 /*----------------------------------------------------------------------
4472 * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
4473 * to it.
4474 *----------------------------------------------------------------------
4475 */
4476 TESTING(" scaleoffset float without fill value, D-scaling (write)");
4477
4478 if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
4479 orig_data) < 0) goto error;
4480 PASSED();
4481
4482 /*----------------------------------------------------------------------
4483 * STEP 2: Try to read the data we just wrote.
4484 *----------------------------------------------------------------------
4485 */
4486 TESTING(" scaleoffset float without fill value, D-scaling (read)");
4487
4488 /* Read the dataset back */
4489 if(H5Dread(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT,
4490 new_data) < 0) goto error;
4491
4492 /* Check that the values read are the same as the values written */
4493 for(i=0; i<(size_t)size[0]; i++) {
4494 for(j=0; j<(size_t)size[1]; j++) {
4495 if(HDfabs(new_data[i][j]-orig_data[i][j]) > HDpow(10.0F, -3.0F)) {
4496 H5_FAILED();
4497 printf(" Read different values than written.\n");
4498 printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
4499 goto error;
4500 }
4501 }
4502 }
4503
4504 /*----------------------------------------------------------------------
4505 * Cleanup
4506 *----------------------------------------------------------------------
4507 */
4508 if(H5Tclose(datatype) < 0) goto error;
4509 if(H5Pclose(dc) < 0) goto error;
4510 if(H5Sclose(space) < 0) goto error;
4511 if(H5Dclose(dataset) < 0) goto error;
4512
4513 PASSED();
4514
4515 return 0;
4516 error:
4517 return -1;
4518 }
4519
4520
4521 /*-------------------------------------------------------------------------
4522 * Function: test_scaleoffset_float_2
4523 *
4524 * Purpose: Tests the float datatype for scaleoffset filter, with fill
4525 * value set, using variable-minimum-bits method
4526 *
4527 * Return: Success: 0
4528 *
4529 * Failure: -1
4530 *
4531 * Programmer: Xiaowen Wu
4532 * Wednesday, Apr. 20th, 2005
4533 *
4534 *-------------------------------------------------------------------------
4535 */
4536 static herr_t
test_scaleoffset_float_2(hid_t file)4537 test_scaleoffset_float_2(hid_t file)
4538 {
4539 hid_t dataset, datatype, space, mspace, dc;
4540 const hsize_t size[2] = {2, 5};
4541 const hsize_t chunk_size[2] = {2,5};
4542 float orig_data[2][5];
4543 float new_data[2][5];
4544 float fillval;
4545 hsize_t start[2]; /* Start of hyperslab */
4546 hsize_t stride[2]; /* Stride of hyperslab */
4547 hsize_t count[2]; /* Block count */
4548 hsize_t block[2]; /* Block sizes */
4549 size_t j;
4550
4551 TESTING(" scaleoffset float with fill value, D-scaling (setup)");
4552
4553 datatype = H5Tcopy(H5T_NATIVE_FLOAT);
4554
4555 /* Set order of dataset datatype */
4556 if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) goto error;
4557
4558 /* Create the data space for the dataset */
4559 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
4560
4561 /* Create the dataset property list */
4562 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
4563
4564 /* Set fill value */
4565 fillval = 10000.0F;
4566 if(H5Pset_fill_value(dc, H5T_NATIVE_FLOAT, &fillval) < 0) goto error;
4567
4568 /* Set up to use scaleoffset filter, decimal scale factor is 3,
4569 * use variable-minimum-bits method
4570 */
4571 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
4572 if(H5Pset_scaleoffset(dc, H5Z_SO_FLOAT_DSCALE,3) < 0) goto error;
4573
4574 /* Create the dataset */
4575 if((dataset = H5Dcreate2(file, DSET_SCALEOFFSET_FLOAT_NAME_2, datatype,
4576 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
4577
4578 /* Create the memory data space */
4579 if((mspace = H5Screate_simple(2, size, NULL)) < 0) goto error;
4580
4581 /* Select hyperslab for data to write, using 1x5 blocks,
4582 * (1,1) stride and (1,1) count starting at the position (0,0).
4583 */
4584 start[0] = 0; start[1] = 0;
4585 stride[0] = 1; stride[1] = 1;
4586 count[0] = 1; count[1] = 1;
4587 block[0] = 1; block[1] = 5;
4588 if(H5Sselect_hyperslab(mspace, H5S_SELECT_SET, start,
4589 stride, count, block) < 0) goto error;
4590
4591 /* Initialize data of hyperslab */
4592 for(j = 0; j < (size_t)size[1]; j++) {
4593 orig_data[0][j] = (float)((HDrandom() % 100000) / 1000.0F);
4594
4595 /* even-numbered values are negtive */
4596 if((j+1)%2 == 0)
4597 orig_data[0][j] = -orig_data[0][j];
4598 }
4599
4600 PASSED();
4601
4602 /*----------------------------------------------------------------------
4603 * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
4604 * to it.
4605 *----------------------------------------------------------------------
4606 */
4607 TESTING(" scaleoffset float with fill value, D-scaling (write)");
4608
4609 /* only data in the hyperslab will be written, other value should be fill value */
4610 if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, mspace, mspace, H5P_DEFAULT,
4611 orig_data) < 0) goto error;
4612 PASSED();
4613
4614 /*----------------------------------------------------------------------
4615 * STEP 2: Try to read the data we just wrote.
4616 *----------------------------------------------------------------------
4617 */
4618 TESTING(" scaleoffset float with fill value, D-scaling (read)");
4619
4620 /* Read the dataset back */
4621 if(H5Dread(dataset, H5T_NATIVE_FLOAT, mspace, mspace, H5P_DEFAULT,
4622 new_data) < 0) goto error;
4623
4624 /* Check that the values read are the same as the values written */
4625 for(j=0; j<(size_t)size[1]; j++) {
4626 if(HDfabs(new_data[0][j]-orig_data[0][j]) > HDpow(10.0F, -3.0F)) {
4627 H5_FAILED();
4628 printf(" Read different values than written.\n");
4629 printf(" At index %lu,%lu\n", (unsigned long)0, (unsigned long)j);
4630 goto error;
4631 }
4632 }
4633 /*----------------------------------------------------------------------
4634 * Cleanup
4635 *----------------------------------------------------------------------
4636 */
4637 if(H5Tclose(datatype) < 0) goto error;
4638 if(H5Pclose(dc) < 0) goto error;
4639 if(H5Sclose(space) < 0) goto error;
4640 if(H5Dclose(dataset) < 0) goto error;
4641
4642 PASSED();
4643
4644 return 0;
4645 error:
4646 return -1;
4647 }
4648
4649
4650 /*-------------------------------------------------------------------------
4651 * Function: test_scaleoffset_double
4652 *
4653 * Purpose: Tests the double datatype for scaleoffset filter, with fill
4654 * value undefined, using variable-minimum-bits method
4655 *
4656 * Return: Success: 0
4657 *
4658 * Failure: -1
4659 *
4660 * Programmer: Xiaowen Wu
4661 * Monday, Apr. 25th, 2005
4662 *
4663 *-------------------------------------------------------------------------
4664 */
4665 static herr_t
test_scaleoffset_double(hid_t file)4666 test_scaleoffset_double(hid_t file)
4667 {
4668 hid_t dataset, datatype, space, dc;
4669 const hsize_t size[2] = {2, 5};
4670 const hsize_t chunk_size[2] = {2,5};
4671 double orig_data[2][5];
4672 double new_data[2][5];
4673 size_t i, j;
4674
4675 TESTING(" scaleoffset double without fill value, D-scaling (setup)");
4676
4677 datatype = H5Tcopy(H5T_NATIVE_DOUBLE);
4678
4679 /* Set order of dataset datatype */
4680 if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) goto error;
4681
4682 /* Create the data space */
4683 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
4684
4685 /* Create the dataset property list */
4686 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
4687
4688 /* Fill value undefined */
4689 if(H5Pset_fill_value(dc, datatype, NULL) < 0) goto error;
4690
4691 /* Set up to use scaleoffset filter, decimal scale factor is 7,
4692 * use variable-minimum-bits method
4693 */
4694 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
4695 if(H5Pset_scaleoffset(dc, H5Z_SO_FLOAT_DSCALE,7) < 0) goto error;
4696
4697 /* Create the dataset */
4698 if((dataset = H5Dcreate2(file, DSET_SCALEOFFSET_DOUBLE_NAME, datatype,
4699 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
4700
4701 /* Initialize data */
4702 for(i= 0;i< (size_t)size[0]; i++)
4703 for(j = 0; j < (size_t)size[1]; j++) {
4704 orig_data[i][j] = (HDrandom() % 10000000) / 10000000.0F;
4705
4706 /* even-numbered values are negtive */
4707 if((i*size[1]+j+1)%2 == 0)
4708 orig_data[i][j] = -orig_data[i][j];
4709 }
4710
4711 PASSED();
4712
4713 /*----------------------------------------------------------------------
4714 * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
4715 * to it.
4716 *----------------------------------------------------------------------
4717 */
4718 TESTING(" scaleoffset double without fill value, D-scaling (write)");
4719
4720 if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
4721 orig_data) < 0) goto error;
4722 PASSED();
4723
4724 /*----------------------------------------------------------------------
4725 * STEP 2: Try to read the data we just wrote.
4726 *----------------------------------------------------------------------
4727 */
4728 TESTING(" scaleoffset double without fill value, D-scaling (read)");
4729
4730 /* Read the dataset back */
4731 if(H5Dread(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT,
4732 new_data) < 0) goto error;
4733
4734 /* Check that the values read are the same as the values written */
4735 for(i=0; i<(size_t)size[0]; i++) {
4736 for(j=0; j<(size_t)size[1]; j++) {
4737 if(HDfabs(new_data[i][j]-orig_data[i][j]) > HDpow(10.0F, -7.0F)) {
4738 H5_FAILED();
4739 printf(" Read different values than written.\n");
4740 printf(" At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
4741 goto error;
4742 }
4743 }
4744 }
4745
4746 /*----------------------------------------------------------------------
4747 * Cleanup
4748 *----------------------------------------------------------------------
4749 */
4750 if(H5Tclose(datatype) < 0) goto error;
4751 if(H5Pclose(dc) < 0) goto error;
4752 if(H5Sclose(space) < 0) goto error;
4753 if(H5Dclose(dataset) < 0) goto error;
4754
4755 PASSED();
4756
4757 return 0;
4758 error:
4759 return -1;
4760 }
4761
4762
4763 /*-------------------------------------------------------------------------
4764 * Function: test_scaleoffset_double_2
4765 *
4766 * Purpose: Tests the double datatype for scaleoffset filter, with fill
4767 * value set, using variable-minimum-bits method
4768 *
4769 * Return: Success: 0
4770 *
4771 * Failure: -1
4772 *
4773 * Programmer: Xiaowen Wu
4774 * Monday, Apr. 25th, 2005
4775 *
4776 *-------------------------------------------------------------------------
4777 */
4778 static herr_t
test_scaleoffset_double_2(hid_t file)4779 test_scaleoffset_double_2(hid_t file)
4780 {
4781 hid_t dataset, datatype, space, mspace, dc;
4782 const hsize_t size[2] = {2, 5};
4783 const hsize_t chunk_size[2] = {2,5};
4784 double orig_data[2][5];
4785 double new_data[2][5];
4786 double fillval;
4787 hsize_t start[2]; /* Start of hyperslab */
4788 hsize_t stride[2]; /* Stride of hyperslab */
4789 hsize_t count[2]; /* Block count */
4790 hsize_t block[2]; /* Block sizes */
4791 size_t j;
4792
4793 TESTING(" scaleoffset double with fill value, D-scaling (setup)");
4794
4795 datatype = H5Tcopy(H5T_NATIVE_DOUBLE);
4796
4797 /* Set order of dataset datatype */
4798 if(H5Tset_order(datatype, H5T_ORDER_BE) < 0) goto error;
4799
4800 /* Create the data space for the dataset */
4801 if((space = H5Screate_simple(2, size, NULL)) < 0) goto error;
4802
4803 /* Create the dataset property list */
4804 if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
4805
4806 /* Set fill value */
4807 fillval = 10000.0F;
4808 if(H5Pset_fill_value(dc, H5T_NATIVE_DOUBLE, &fillval) < 0) goto error;
4809
4810 /* Set up to use scaleoffset filter, decimal scale factor is 7,
4811 * use variable-minimum-bits method
4812 */
4813 if(H5Pset_chunk(dc, 2, chunk_size) < 0) goto error;
4814 if(H5Pset_scaleoffset(dc, H5Z_SO_FLOAT_DSCALE,7) < 0) goto error;
4815
4816 /* Create the dataset */
4817 if((dataset = H5Dcreate2(file, DSET_SCALEOFFSET_DOUBLE_NAME_2, datatype,
4818 space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
4819
4820 /* Create the memory data space */
4821 if((mspace = H5Screate_simple(2, size, NULL)) < 0) goto error;
4822
4823 /* Select hyperslab for data to write, using 1x5 blocks,
4824 * (1,1) stride and (1,1) count starting at the position (0,0).
4825 */
4826 start[0] = 0; start[1] = 0;
4827 stride[0] = 1; stride[1] = 1;
4828 count[0] = 1; count[1] = 1;
4829 block[0] = 1; block[1] = 5;
4830 if(H5Sselect_hyperslab(mspace, H5S_SELECT_SET, start,
4831 stride, count, block) < 0) goto error;
4832
4833 /* Initialize data of hyperslab */
4834 for(j = 0; j < (size_t)size[1]; j++) {
4835 orig_data[0][j] = (HDrandom() % 10000000) / 10000000.0F;
4836
4837 /* even-numbered values are negtive */
4838 if((j+1)%2 == 0)
4839 orig_data[0][j] = -orig_data[0][j];
4840 }
4841
4842 PASSED();
4843
4844 /*----------------------------------------------------------------------
4845 * STEP 1: Test scaleoffset by setting up a chunked dataset and writing
4846 * to it.
4847 *----------------------------------------------------------------------
4848 */
4849 TESTING(" scaleoffset double with fill value, D-scaling (write)");
4850
4851 /* only data in the hyperslab will be written, other value should be fill value */
4852 if(H5Dwrite(dataset, H5T_NATIVE_DOUBLE, mspace, mspace, H5P_DEFAULT,
4853 orig_data) < 0) goto error;
4854 PASSED();
4855
4856 /*----------------------------------------------------------------------
4857 * STEP 2: Try to read the data we just wrote.
4858 *----------------------------------------------------------------------
4859 */
4860 TESTING(" scaleoffset double with fill value, D-scaling (read)");
4861
4862 /* Read the dataset back */
4863 if(H5Dread(dataset, H5T_NATIVE_DOUBLE, mspace, mspace, H5P_DEFAULT,
4864 new_data) < 0) goto error;
4865
4866 /* Check that the values read are the same as the values written */
4867 for(j=0; j<(size_t)size[1]; j++) {
4868 if(HDfabs(new_data[0][j]-orig_data[0][j]) > HDpow(10.0F, -7.0F)) {
4869 H5_FAILED();
4870 printf(" Read different values than written.\n");
4871 printf(" At index %lu,%lu\n", (unsigned long)0, (unsigned long)j);
4872 goto error;
4873 }
4874 }
4875
4876 /*----------------------------------------------------------------------
4877 * Cleanup
4878 *----------------------------------------------------------------------
4879 */
4880 if(H5Tclose(datatype) < 0) goto error;
4881 if(H5Pclose(dc) < 0) goto error;
4882 if(H5Sclose(space) < 0) goto error;
4883 if(H5Dclose(dataset) < 0) goto error;
4884
4885 PASSED();
4886
4887 return 0;
4888 error:
4889 return -1;
4890 }
4891
4892
4893 /*-------------------------------------------------------------------------
4894 * Function: test_multiopen
4895 *
4896 * Purpose: Tests that a bug no longer exists. If a dataset is opened
4897 * twice and one of the handles is used to extend the dataset,
4898 * then the other handle should return the new size when
4899 * queried.
4900 *
4901 * Return: Success: 0
4902 *
4903 * Failure: -1
4904 *
4905 * Programmer: Robb Matzke
4906 * Tuesday, June 9, 1998
4907 *
4908 *-------------------------------------------------------------------------
4909 */
4910 static herr_t
test_multiopen(hid_t file)4911 test_multiopen (hid_t file)
4912 {
4913 hid_t dcpl = -1, space = -1, dset1 = -1, dset2 = -1;
4914 hsize_t cur_size[1] = {10};
4915 static hsize_t max_size[1] = {H5S_UNLIMITED};
4916 hsize_t tmp_size[1];
4917
4918 TESTING("multi-open with extending");
4919
4920 /* Create the dataset and open it twice */
4921 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
4922 if(H5Pset_chunk(dcpl, 1, cur_size) < 0) goto error;
4923 if((space = H5Screate_simple(1, cur_size, max_size)) < 0) goto error;
4924 if((dset1 = H5Dcreate2(file, "multiopen", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;
4925 if((dset2 = H5Dopen2(dset1, ".", H5P_DEFAULT)) < 0) goto error;
4926 if(H5Sclose(space) < 0) goto error;
4927
4928 /* Extend with the first handle */
4929 cur_size[0] = 20;
4930 if(H5Dset_extent(dset1, cur_size) < 0) goto error;
4931
4932 /* Get the size from the second handle */
4933 if((space = H5Dget_space(dset2)) < 0) goto error;
4934 if(H5Sget_simple_extent_dims(space, tmp_size, NULL) < 0) goto error;
4935 if(cur_size[0] != tmp_size[0]) {
4936 H5_FAILED();
4937 printf(" Got %d instead of %d!\n", (int)tmp_size[0], (int)cur_size[0]);
4938 goto error;
4939 } /* end if */
4940
4941 if(H5Dclose(dset1) < 0) goto error;
4942 if(H5Dclose(dset2) < 0) goto error;
4943 if(H5Sclose(space) < 0) goto error;
4944 if(H5Pclose(dcpl) < 0) goto error;
4945
4946 PASSED();
4947 return 0;
4948
4949 error:
4950 H5E_BEGIN_TRY {
4951 H5Dclose(dset1);
4952 H5Dclose(dset2);
4953 H5Sclose(space);
4954 H5Pclose(dcpl);
4955 } H5E_END_TRY;
4956 return -1;
4957 }
4958
4959
4960 /*-------------------------------------------------------------------------
4961 * Function: test_types
4962 *
4963 * Purpose: Make some datasets with various types so we can test h5ls.
4964 *
4965 * Return: Success: 0
4966 *
4967 * Failure: -1
4968 *
4969 * Programmer: Robb Matzke
4970 * Monday, June 7, 1999
4971 *
4972 *-------------------------------------------------------------------------
4973 */
4974 static herr_t
test_types(hid_t file)4975 test_types(hid_t file)
4976 {
4977 hid_t grp=-1, type=-1, space=-1, dset=-1;
4978 size_t i;
4979 hsize_t nelmts;
4980 unsigned char buf[32];
4981
4982 TESTING("various datatypes");
4983 if((grp = H5Gcreate2(file, "typetests", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
4984
4985 /* bitfield_1 */
4986 nelmts = sizeof(buf);
4987 if((type=H5Tcopy(H5T_STD_B8LE)) < 0 ||
4988 (space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
4989 (dset=H5Dcreate2(grp, "bitfield_1", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
4990 goto error;
4991 for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i;
4992 if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
4993 goto error;
4994
4995 if(H5Sclose(space) < 0) goto error;
4996 if(H5Tclose(type) < 0) goto error;
4997 if(H5Dclose(dset) < 0) goto error;
4998
4999 /* bitfield_2 */
5000 nelmts = sizeof(buf)/2;
5001 if((type=H5Tcopy(H5T_STD_B16LE)) < 0 ||
5002 (space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
5003 (dset=H5Dcreate2(grp, "bitfield_2", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
5004 goto error;
5005 for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i;
5006 if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
5007 goto error;
5008 if(H5Sclose(space) < 0) goto error;
5009 if(H5Tclose(type) < 0) goto error;
5010 if(H5Dclose(dset) < 0) goto error;
5011
5012 /* opaque_1 */
5013 nelmts = sizeof(buf);
5014 if((type = H5Tcreate(H5T_OPAQUE, (size_t)1)) < 0 ||
5015 H5Tset_tag(type, "testing 1-byte opaque type") < 0 ||
5016 (space = H5Screate_simple(1, &nelmts, NULL)) < 0 ||
5017 (dset = H5Dcreate2(grp, "opaque_1", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
5018 goto error;
5019 for(i = 0; i < sizeof buf; i++)
5020 buf[i] = (unsigned char)0xff ^ (unsigned char)i;
5021 if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;
5022 if(H5Sclose(space) < 0) goto error;
5023 if(H5Tclose(type) < 0) goto error;
5024 if(H5Dclose(dset) < 0) goto error;
5025
5026 /* opaque_2 */
5027 nelmts = sizeof(buf)/4;
5028 if((type = H5Tcreate(H5T_OPAQUE, (size_t)4)) < 0 ||
5029 H5Tset_tag(type, "testing 4-byte opaque type") < 0 ||
5030 (space = H5Screate_simple(1, &nelmts, NULL)) < 0 ||
5031 (dset = H5Dcreate2(grp, "opaque_2", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
5032 goto error;
5033 for(i = 0; i < sizeof buf; i++)
5034 buf[i] = (unsigned char)0xff ^ (unsigned char)i;
5035 if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;
5036 if(H5Sclose(space) < 0) goto error;
5037 if(H5Tclose(type) < 0) goto error;
5038 if(H5Dclose(dset) < 0) goto error;
5039
5040 /* Cleanup */
5041 if(H5Gclose(grp) < 0) goto error;
5042 PASSED();
5043 return 0;
5044
5045 error:
5046 H5E_BEGIN_TRY {
5047 H5Gclose(grp);
5048 H5Tclose(type);
5049 H5Sclose(space);
5050 H5Dclose(dset);
5051 } H5E_END_TRY;
5052 return -1;
5053 }
5054
/* This message derives from H5Z */
/* Filter class used by test_can_apply(): its "can apply" callback
 * (can_apply_bogus, defined earlier in this file) decides whether the
 * bogus filter may be applied to a dataset being created. */
const H5Z_class2_t H5Z_CAN_APPLY_TEST[1] = {{
    H5Z_CLASS_T_VERS,
    H5Z_FILTER_CAN_APPLY_TEST, /* Filter id number */
    1, 1,                      /* Encoder/decoder presence flags (per H5Z_class2_t) */
    "can_apply_test",          /* Filter name for debugging */
    can_apply_bogus,           /* The "can apply" callback */
    NULL,                      /* The "set local" callback */
    filter_bogus,              /* The actual filter function */
}};
5065
5066
5067 /*-------------------------------------------------------------------------
5068 * Function: test_can_apply
5069 *
5070 * Purpose: Tests library behavior when filter indicates it can't
5071 * apply to certain combinations of creation parameters.
 *          The filter is mandatory. If the CAN_APPLY callback function
 *          indicates a wrong datatype, the dataset creation should fail.
5074 *
5075 * Return: Success: 0
5076 * Failure: -1
5077 *
5078 * Programmer: Quincey Koziol
5079 * Friday, April 5, 2003
5080 *
5081 *-------------------------------------------------------------------------
5082 */
5083 static herr_t
test_can_apply(hid_t file)5084 test_can_apply(hid_t file)
5085 {
5086 hid_t dsid; /* Dataset ID */
5087 hid_t sid; /* Dataspace ID */
5088 hid_t dcpl; /* Dataspace creation property list ID */
5089 const hsize_t dims[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */
5090 const hsize_t chunk_dims[2] = {2, 25}; /* Chunk dimensions */
5091 hsize_t dset_size; /* Dataset size */
5092 size_t i,j; /* Local index variables */
5093
5094 TESTING("dataset filter 'can apply' callback");
5095
5096 /* Create dcpl with special filter */
5097 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
5098 H5_FAILED();
5099 printf(" Line %d: Can't create dcpl\n",__LINE__);
5100 goto error;
5101 } /* end if */
5102 if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) {
5103 H5_FAILED();
5104 printf(" Line %d: Can't set chunk sizes\n",__LINE__);
5105 goto error;
5106 } /* end if */
5107 if(H5Zregister (H5Z_CAN_APPLY_TEST) < 0) {
5108 H5_FAILED();
5109 printf(" Line %d: Can't register 'can apply' filter\n",__LINE__);
5110 goto error;
5111 }
5112 /* The filter is mandate. */
5113 if(H5Pset_filter(dcpl, H5Z_FILTER_CAN_APPLY_TEST, 0, (size_t)0, NULL) < 0) {
5114 H5_FAILED();
5115 printf(" Line %d: Can't set bogus filter\n",__LINE__);
5116 goto error;
5117 }
5118
5119 /* Create the data space */
5120 if((sid = H5Screate_simple(2, dims, NULL)) < 0) {
5121 H5_FAILED();
5122 printf(" Line %d: Can't open dataspace\n",__LINE__);
5123 goto error;
5124 } /* end if */
5125
5126 /* Create new dataset */
5127 /* (Should fail because the 'can apply' function should indicate inappropriate
5128 * combination. And the filter is mandate.) */
5129 H5E_BEGIN_TRY {
5130 dsid = H5Dcreate2(file, DSET_CAN_APPLY_NAME, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
5131 } H5E_END_TRY;
5132 if(dsid >=0) {
5133 H5_FAILED();
5134 printf(" Line %d: Shouldn't have created dataset!\n",__LINE__);
5135 H5Dclose(dsid);
5136 goto error;
5137 } /* end if */
5138
5139 /* (Should fail because the 'can apply' function should fail) */
5140 H5E_BEGIN_TRY {
5141 dsid = H5Dcreate2(file, DSET_CAN_APPLY_NAME, H5T_NATIVE_FLOAT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
5142 } H5E_END_TRY;
5143 if(dsid >=0) {
5144 H5_FAILED();
5145 printf(" Line %d: Shouldn't have created dataset!\n",__LINE__);
5146 H5Dclose(dsid);
5147 goto error;
5148 } /* end if */
5149
5150 /* Create new dataset */
5151 if((dsid = H5Dcreate2(file, DSET_CAN_APPLY_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) {
5152 H5_FAILED();
5153 printf(" Line %d: Can't create dataset\n",__LINE__);
5154 goto error;
5155 } /* end if */
5156
5157 /* Write data */
5158 if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
5159 H5_FAILED();
5160 printf(" Line %d: Error writing dataset data\n",__LINE__);
5161 goto error;
5162 } /* end if */
5163
5164 /* Flush the file (to clear the cache) */
5165 if(H5Fflush(file, H5F_SCOPE_GLOBAL) < 0) {
5166 H5_FAILED();
5167 printf(" Line %d: Error flushing file\n",__LINE__);
5168 goto error;
5169 } /* end if */
5170
5171 /* Query the dataset's size on disk */
5172 if((dset_size=H5Dget_storage_size(dsid))==0) {
5173 H5_FAILED();
5174 printf(" Line %d: Error querying dataset size\n",__LINE__);
5175 goto error;
5176 } /* end if */
5177
5178 /* Verify that the size indicates data is uncompressed */
5179 if((H5Tget_size(H5T_NATIVE_INT)*dims[0]*dims[1])!=dset_size) {
5180 H5_FAILED();
5181 printf(" Line %d: Incorrect dataset size: %lu\n",__LINE__,(unsigned long)dset_size);
5182 goto error;
5183 } /* end if */
5184
5185 /* Read data */
5186 if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
5187 H5_FAILED();
5188 printf(" Line %d: Error reading dataset data\n",__LINE__);
5189 goto error;
5190 } /* end if */
5191
5192 /* Compare data */
5193 /* Check that the values read are the same as the values written */
5194 for(i=0; i<(size_t)dims[0]; i++) {
5195 for(j=0; j<(size_t)dims[1]; j++) {
5196 if(points[i][j] != check[i][j]) {
5197 H5_FAILED();
5198 printf(" Line %d: Read different values than written.\n",__LINE__);
5199 printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
5200 printf(" At original: %d\n",points[i][j]);
5201 printf(" At returned: %d\n",check[i][j]);
5202 goto error;
5203 } /* end if */
5204 } /* end for */
5205 } /* end for */
5206
5207 /* Close dataset */
5208 if(H5Dclose(dsid) < 0) {
5209 H5_FAILED();
5210 printf(" Line %d: Can't close dataset\n",__LINE__);
5211 goto error;
5212 } /* end if */
5213
5214 /* Close dataspace */
5215 if(H5Sclose(sid) < 0) {
5216 H5_FAILED();
5217 printf(" Line %d: Can't close dataspace\n",__LINE__);
5218 goto error;
5219 } /* end if */
5220
5221 /* Close dataset creation property list */
5222 if(H5Pclose(dcpl) < 0) {
5223 H5_FAILED();
5224 printf(" Line %d: Can't close dcpl\n",__LINE__);
5225 goto error;
5226 } /* end if */
5227
5228
5229 PASSED();
5230 return 0;
5231
5232 error:
5233 return -1;
5234 } /* end test_can_apply() */
5235
5236 /* This message derives from H5Z */
5237 const H5Z_class2_t H5Z_CAN_APPLY_TEST2[1] = {{
5238 H5Z_CLASS_T_VERS,
5239 H5Z_FILTER_CAN_APPLY_TEST2, /* Filter id number */
5240 1, 1,
5241 "can_apply_test", /* Filter name for debugging */
5242 can_apply_bogus, /* The "can apply" callback */
5243 NULL, /* The "set local" callback */
5244 filter_bogus3, /* The actual filter function */
5245 }};
5246
5247
5248 /*-------------------------------------------------------------------------
5249 * Function: test_can_apply2
5250 *
5251 * Purpose: Tests library behavior when an optional filter indicates
5252 * it can't apply to certain combinations of creation
 *          parameters. The filter function FILTER_BOGUS3 does nothing
 *          other than return failure. Because the filter is optional,
5255 * the library skips the filter even though the CAN_APPLY_BOGUS
5256 * indicates the datatype DOUBLE can't apply to the dataset.
5257 *
5258 * Return: Success: 0
5259 * Failure: -1
5260 *
5261 * Programmer: Raymond Lu
5262 * 4 August 2010
5263 *
5264 *-------------------------------------------------------------------------
5265 */
5266 static herr_t
test_can_apply2(hid_t file)5267 test_can_apply2(hid_t file)
5268 {
5269 hid_t dsid; /* Dataset ID */
5270 hid_t sid; /* Dataspace ID */
5271 hid_t dcpl; /* Dataspace creation property list ID */
5272 const hsize_t dims[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */
5273 const hsize_t chunk_dims[2] = {2, 25}; /* Chunk dimensions */
5274 hsize_t dset_size; /* Dataset size */
5275 size_t i,j; /* Local index variables */
5276
5277 TESTING("dataset filter 'can apply' callback second");
5278
5279 /* Create dcpl with special filter */
5280 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
5281 H5_FAILED();
5282 printf(" Line %d: Can't create dcpl\n",__LINE__);
5283 goto error;
5284 } /* end if */
5285 if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) {
5286 H5_FAILED();
5287 printf(" Line %d: Can't set chunk sizes\n",__LINE__);
5288 goto error;
5289 } /* end if */
5290 if(H5Zregister (H5Z_CAN_APPLY_TEST2) < 0) {
5291 H5_FAILED();
5292 printf(" Line %d: Can't register 'can apply' filter\n",__LINE__);
5293 goto error;
5294 }
5295 /* The filter is optional. */
5296 if(H5Pset_filter(dcpl, H5Z_FILTER_CAN_APPLY_TEST2, H5Z_FLAG_OPTIONAL, (size_t)0, NULL) < 0) {
5297 H5_FAILED();
5298 printf(" Line %d: Can't set bogus filter\n",__LINE__);
5299 goto error;
5300 }
5301
5302 /* Create the data space */
5303 if((sid = H5Screate_simple(2, dims, NULL)) < 0) {
5304 H5_FAILED();
5305 printf(" Line %d: Can't open dataspace\n",__LINE__);
5306 goto error;
5307 } /* end if */
5308
5309 /* Create new dataset */
5310 if((dsid = H5Dcreate2(file, DSET_CAN_APPLY_NAME2, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) {
5311 H5_FAILED();
5312 printf(" Line %d: Can't create dataset\n",__LINE__);
5313 goto error;
5314 } /* end if */
5315
5316 /* Write data */
5317 if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
5318 H5_FAILED();
5319 printf(" Line %d: Error writing dataset data\n",__LINE__);
5320 goto error;
5321 } /* end if */
5322
5323 /* Flush the file (to clear the cache) */
5324 if(H5Fflush(file, H5F_SCOPE_GLOBAL) < 0) {
5325 H5_FAILED();
5326 printf(" Line %d: Error flushing file\n",__LINE__);
5327 goto error;
5328 } /* end if */
5329
5330 /* Query the dataset's size on disk */
5331 if((dset_size=H5Dget_storage_size(dsid))==0) {
5332 H5_FAILED();
5333 printf(" Line %d: Error querying dataset size\n",__LINE__);
5334 goto error;
5335 } /* end if */
5336
5337 /* Verify that the size indicates data is uncompressed */
5338 if((H5Tget_size(H5T_NATIVE_DOUBLE)*dims[0]*dims[1])!=dset_size) {
5339 H5_FAILED();
5340 printf(" Line %d: Incorrect dataset size: %lu\n",__LINE__,(unsigned long)dset_size);
5341 goto error;
5342 } /* end if */
5343
5344 /* Read data */
5345 if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
5346 H5_FAILED();
5347 printf(" Line %d: Error reading dataset data\n",__LINE__);
5348 goto error;
5349 } /* end if */
5350
5351 /* Compare data */
5352 /* Check that the values read are the same as the values written */
5353 for(i=0; i<(size_t)dims[0]; i++) {
5354 for(j=0; j<(size_t)dims[1]; j++) {
5355 if(points[i][j] != check[i][j]) {
5356 H5_FAILED();
5357 printf(" Line %d: Read different values than written.\n",__LINE__);
5358 printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
5359 printf(" At original: %d\n",points[i][j]);
5360 printf(" At returned: %d\n",check[i][j]);
5361 goto error;
5362 } /* end if */
5363 } /* end for */
5364 } /* end for */
5365
5366 /* Close dataset */
5367 if(H5Dclose(dsid) < 0) {
5368 H5_FAILED();
5369 printf(" Line %d: Can't close dataset\n",__LINE__);
5370 goto error;
5371 } /* end if */
5372
5373 /* Close dataspace */
5374 if(H5Sclose(sid) < 0) {
5375 H5_FAILED();
5376 printf(" Line %d: Can't close dataspace\n",__LINE__);
5377 goto error;
5378 } /* end if */
5379
5380 /* Close dataset creation property list */
5381 if(H5Pclose(dcpl) < 0) {
5382 H5_FAILED();
5383 printf(" Line %d: Can't close dcpl\n",__LINE__);
5384 goto error;
5385 } /* end if */
5386
5387
5388 PASSED();
5389 return 0;
5390
5391 error:
5392 return -1;
5393 } /* end test_can_apply2() */
5394
5395
5396
5397 /*-------------------------------------------------------------------------
5398 * Function: test_can_apply_szip
5399 *
5400 * Purpose: Tests library behavior when szip filter indicates it can't
5401 * apply to certain combinations of creation parameters
5402 *
5403 * Return: Success: 0
5404 * Failure: -1
5405 *
5406 * Programmer: Quincey Koziol
5407 * Monday, April 7, 2003
5408 *
5409 *-------------------------------------------------------------------------
5410 */
5411 static herr_t
test_can_apply_szip(hid_t H5_ATTR_UNUSED file)5412 test_can_apply_szip(hid_t
5413 #ifndef H5_HAVE_FILTER_SZIP
5414 H5_ATTR_UNUSED
5415 #endif /* H5_HAVE_FILTER_SZIP */
5416 file)
5417 {
5418 #ifdef H5_HAVE_FILTER_SZIP
5419 hid_t dsid; /* Dataset ID */
5420 hid_t sid; /* Dataspace ID */
5421 hid_t dcpl; /* Dataspace creation property list ID */
5422 unsigned szip_options_mask=H5_SZIP_NN_OPTION_MASK;
5423 unsigned szip_pixels_per_block;
5424 const hsize_t dims[2] = {500, 4096}; /* Dataspace dimensions */
5425 const hsize_t dims2[2] = {4, 2}; /* Dataspace dimensions */
5426 const hsize_t chunk_dims[2] = {250, 2048}; /* Chunk dimensions */
5427 const hsize_t chunk_dims2[2] = {2, 1}; /* Chunk dimensions */
5428 herr_t ret; /* Status value */
5429 #endif /* H5_HAVE_FILTER_SZIP */
5430
5431 TESTING("dataset szip filter 'can apply' callback");
5432
5433 #ifdef H5_HAVE_FILTER_SZIP
5434
5435 if(h5_szip_can_encode() == 1) {
5436 /* Create the data space */
5437 if((sid = H5Screate_simple(2, dims, NULL)) < 0) {
5438 H5_FAILED();
5439 printf(" Line %d: Can't open dataspace\n",__LINE__);
5440 goto error;
5441 } /* end if */
5442
5443 /* Create dcpl with special filter */
5444 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
5445 H5_FAILED();
5446 printf(" Line %d: Can't create dcpl\n",__LINE__);
5447 goto error;
5448 } /* end if */
5449 if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) {
5450 H5_FAILED();
5451 printf(" Line %d: Can't set chunk sizes\n",__LINE__);
5452 goto error;
5453 } /* end if */
5454
5455 /* Set (invalid at property set time) szip parameters */
5456 szip_pixels_per_block=3;
5457 H5E_BEGIN_TRY {
5458 ret=H5Pset_szip (dcpl, szip_options_mask, szip_pixels_per_block);
5459 } H5E_END_TRY;
5460 if(ret>=0) {
5461 H5_FAILED();
5462 printf(" Line %d: Shouldn't be able to set szip filter\n",__LINE__);
5463 goto error;
5464 }
5465
5466 /* Set (invalid at property set time) szip parameters */
5467 szip_pixels_per_block=512;
5468 H5E_BEGIN_TRY {
5469 ret=H5Pset_szip (dcpl, szip_options_mask, szip_pixels_per_block);
5470 } H5E_END_TRY;
5471 if(ret>=0) {
5472 H5_FAILED();
5473 printf(" Line %d: Shouldn't be able to set szip filter\n",__LINE__);
5474 goto error;
5475 }
5476
5477 /* Set (invalid at dataset creation time) szip parameters */
5478 szip_pixels_per_block=2;
5479 if(H5Pset_szip (dcpl, szip_options_mask, szip_pixels_per_block) < 0) {
5480 H5_FAILED();
5481 printf(" Line %d: Can't set szip filter\n",__LINE__);
5482 goto error;
5483 }
5484
5485 /* Create new dataset */
5486 /* (Should succeed; according to the new algorithm, scanline should be reset
5487 to 2*128 satisfying 'maximum blocks per scanline' condition) */
5488 H5E_BEGIN_TRY {
5489 dsid = H5Dcreate2(file, DSET_CAN_APPLY_SZIP_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
5490 } H5E_END_TRY;
5491 if(dsid <=0) {
5492 H5_FAILED();
5493 printf(" Line %d: Should have created dataset!\n",__LINE__);
5494 goto error;
5495 } /* end if */
5496
5497 /* Close dataset */
5498 if(H5Dclose(dsid) < 0) {
5499 H5_FAILED();
5500 printf(" Line %d: Can't close dataset\n",__LINE__);
5501 goto error;
5502 } /* end if */
5503
5504 /* Close dataspace */
5505 if(H5Sclose(sid) < 0) {
5506 H5_FAILED();
5507 printf(" Line %d: Can't close dataspace\n",__LINE__);
5508 goto error;
5509 } /* end if */
5510
5511 /* Close dataset creation property list */
5512 if(H5Pclose(dcpl) < 0) {
5513 H5_FAILED();
5514 printf(" Line %d: Can't close dcpl\n",__LINE__);
5515 goto error;
5516 } /* end if */
5517
5518 /* Create another data space */
5519 if((sid = H5Screate_simple(2, dims2, NULL)) < 0) {
5520 H5_FAILED();
5521 printf(" Line %d: Can't open dataspace\n",__LINE__);
5522 goto error;
5523 } /* end if */
5524
5525 /* Create dcpl with special filter */
5526 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
5527 H5_FAILED();
5528 printf(" Line %d: Can't create dcpl\n",__LINE__);
5529 goto error;
5530 } /* end if */
5531 if(H5Pset_chunk(dcpl, 2, chunk_dims2) < 0) {
5532 H5_FAILED();
5533 printf(" Line %d: Can't set chunk sizes\n",__LINE__);
5534 goto error;
5535 } /* end if */
5536
5537 /* Set (invalid at dataset creation time) szip parameters */
5538 szip_pixels_per_block=32;
5539 if(H5Pset_szip (dcpl, szip_options_mask, szip_pixels_per_block) < 0) {
5540 H5_FAILED();
5541 printf(" Line %d: Can't set szip filter\n",__LINE__);
5542 goto error;
5543 }
5544
5545 /* Create new dataset */
5546 /* (Should fail because the 'can apply' filter should indicate inappropriate combination) */
5547 H5E_BEGIN_TRY {
5548 dsid = H5Dcreate2(file, DSET_CAN_APPLY_SZIP_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
5549 } H5E_END_TRY;
5550 if(dsid >=0) {
5551 H5_FAILED();
5552 printf(" Line %d: Shouldn't have created dataset!\n",__LINE__);
5553 H5Dclose(dsid);
5554 goto error;
5555 } /* end if */
5556
5557 /* Close dataspace */
5558 if(H5Sclose(sid) < 0) {
5559 H5_FAILED();
5560 printf(" Line %d: Can't close dataspace\n",__LINE__);
5561 goto error;
5562 } /* end if */
5563
5564 /* Close dataset creation property list */
5565 if(H5Pclose(dcpl) < 0) {
5566 H5_FAILED();
5567 printf(" Line %d: Can't close dcpl\n",__LINE__);
5568 goto error;
5569 } /* end if */
5570
5571
5572 PASSED();
5573 } else {
5574 SKIPPED();
5575 puts(" Szip encoding is not enabled.");
5576 }
5577 #else /* H5_HAVE_FILTER_SZIP */
5578 SKIPPED();
5579 puts(" Szip filter is not enabled.");
5580 #endif /* H5_HAVE_FILTER_SZIP */
5581 return 0;
5582
5583 #ifdef H5_HAVE_FILTER_SZIP
5584 error:
5585 return -1;
5586 #endif /* H5_HAVE_FILTER_SZIP */
5587 } /* end test_can_apply_szip() */
5588
5589
/* This message derives from H5Z */
/* Filter class used by test_set_local(): exercises the "set local"
 * callback (set_local_bogus2) together with the bogus2 filter function. */
const H5Z_class2_t H5Z_SET_LOCAL_TEST[1] = {{
    H5Z_CLASS_T_VERS,
    H5Z_FILTER_SET_LOCAL_TEST, /* Filter id number */
    1, 1,                      /* Encoder/decoder presence flags (per H5Z_class2_t) */
    "set_local_test",          /* Filter name for debugging */
    NULL,                      /* The "can apply" callback */
    set_local_bogus2,          /* The "set local" callback */
    filter_bogus2,             /* The actual filter function */
}};
5600
5601
/*-------------------------------------------------------------------------
 * Function:	test_set_local
 *
 * Purpose:	Tests library behavior for "set local" filter callback.
 *              Creates an integer and a double dataset with the bogus2
 *              test filter, writes them, re-opens the file and verifies:
 *              the int dataset comes back with each element modified by
 *              the filter (original value + sizeof(int)), while the
 *              double dataset comes back (approximately) unmodified.
 *
 * Return:	Success:	0
 *		Failure:	-1
 *
 * Programmer:	Quincey Koziol
 *              Monday, April 7, 2003
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_set_local(hid_t fapl)
{
    char        filename[FILENAME_BUF_SIZE];
    hid_t       file;           /* File ID */
    hid_t       dsid;           /* Dataset ID */
    hid_t       sid;            /* Dataspace ID */
    hid_t       dcpl;           /* Dataspace creation property list ID */
    const hsize_t dims[2] = {DSET_DIM1, DSET_DIM2};     /* Dataspace dimensions */
    const hsize_t chunk_dims[2] = {2, 25};              /* Chunk dimensions */
    hsize_t     dset_size;      /* Dataset size */
    unsigned    cd_values[2]={BOGUS2_PARAM_1, BOGUS2_PARAM_2};  /* Parameters for Bogus2 filter */
    size_t      i,j;            /* Local index variables */
    double      n;              /* Running value used to fill both datasets (not an index) */

    TESTING("dataset filter 'set local' callback");

    h5_fixname(FILENAME[5], fapl, filename, sizeof filename);

    /* Initialize the integer & floating-point dataset */
    /* (n is bumped twice per element, so the two datasets hold distinct values) */
    n=1.0F;
    for(i = 0; i < DSET_DIM1; i++)
        for(j = 0; j < DSET_DIM2; j++) {
            points[i][j] = (int)n++;
            points_dbl[i][j] = (double)1.5F*n++;
        }

    /* Open file */
    if((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't open file\n",__LINE__);
        goto error;
    }

    /* Create dcpl with special filter */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't create dcpl\n",__LINE__);
        goto error;
    } /* end if */
    if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't set chunk sizes\n",__LINE__);
        goto error;
    } /* end if */
    if(H5Zregister (H5Z_SET_LOCAL_TEST) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't register 'set local' filter\n",__LINE__);
        goto error;
    }
    if(H5Pset_filter(dcpl, H5Z_FILTER_SET_LOCAL_TEST, 0, (size_t)BOGUS2_PERM_NPARMS, cd_values) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't set bogus2 filter\n",__LINE__);
        goto error;
    }

    /* Create the data space */
    if((sid = H5Screate_simple(2, dims, NULL)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't open dataspace\n",__LINE__);
        goto error;
    } /* end if */

    /* Create new dataset */
    if((dsid = H5Dcreate2(file, DSET_SET_LOCAL_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't create dataset\n",__LINE__);
        goto error;
    } /* end if */

    /* Write data */
    if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
        H5_FAILED();
        printf("    Line %d: Error writing dataset data\n",__LINE__);
        goto error;
    } /* end if */

    /* Close dataset */
    if(H5Dclose(dsid) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close dataset\n",__LINE__);
        goto error;
    } /* end if */

    /* Create new dataset */
    /* (Shouldn't get modified by output filter) */
    if((dsid = H5Dcreate2(file, DSET_SET_LOCAL_NAME_2, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't create dataset\n",__LINE__);
        goto error;
    } /* end if */

    /* Write data */
    if(H5Dwrite(dsid, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_dbl) < 0) {
        H5_FAILED();
        printf("    Line %d: Error writing dataset data\n",__LINE__);
        goto error;
    } /* end if */

    /* Close dataset */
    if(H5Dclose(dsid) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close dataset\n",__LINE__);
        goto error;
    } /* end if */

    /* Close dataspace */
    if(H5Sclose(sid) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close dataspace\n", __LINE__);
        goto error;
    } /* end if */

    /* Close dataset creation property list */
    if(H5Pclose(dcpl) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close dcpl\n", __LINE__);
        goto error;
    } /* end if */

    /* Close file (flushes & empties cache) */
    if(H5Fclose(file) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close file\n", __LINE__);
        goto error;
    } /* end if */

    /* Open file */
    if((file = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't open file\n", __LINE__);
        goto error;
    }

    /* Re-open dataset */
    if((dsid = H5Dopen2(file, DSET_SET_LOCAL_NAME, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't open dataset\n", __LINE__);
        goto error;
    } /* end if */

    /* Query the dataset's size on disk */
    if((dset_size = H5Dget_storage_size(dsid)) == 0) {
        H5_FAILED();
        printf("    Line %d: Error querying dataset size\n", __LINE__);
        goto error;
    } /* end if */

    /* Verify that the size indicates data is uncompressed */
    /* (the test filter is expected not to change the stored size) */
    if((H5Tget_size(H5T_NATIVE_INT) * dims[0] * dims[1]) != dset_size) {
        H5_FAILED();
        printf("    Line %d: Incorrect dataset size: %lu\n", __LINE__, (unsigned long)dset_size);
        goto error;
    } /* end if */

    /* Read data */
    if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
        H5_FAILED();
        printf("    Line %d: Error reading dataset data\n", __LINE__);
        goto error;
    } /* end if */

    /* Compare data */
    /* Check that the values read are the modified version of what was written */
    /* (the filter is expected to have added sizeof(int) to each int element) */
    for(i=0; i<dims[0]; i++) {
        for(j=0; j<dims[1]; j++) {
            if((points[i][j]+(int)sizeof(int)) != check[i][j]) {
                H5_FAILED();
                printf("    Line %d: Read different values than written.\n",__LINE__);
                printf("    At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
                printf("    At original: %d\n",points[i][j]);
                printf("    At returned: %d\n",check[i][j]);
                goto error;
            } /* end if */
        } /* end for */
    } /* end for */

    /* Close dataset */
    if(H5Dclose(dsid) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close dataset\n", __LINE__);
        goto error;
    } /* end if */

    /* Re-open second dataset */
    if((dsid = H5Dopen2(file, DSET_SET_LOCAL_NAME_2, H5P_DEFAULT)) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't open dataset\n", __LINE__);
        goto error;
    } /* end if */

    /* Query the dataset's size on disk */
    if((dset_size = H5Dget_storage_size(dsid)) == 0) {
        H5_FAILED();
        printf("    Line %d: Error querying dataset size\n", __LINE__);
        goto error;
    } /* end if */

    /* Verify that the size indicates data is uncompressed */
    if((H5Tget_size(H5T_NATIVE_DOUBLE) * dims[0] * dims[1]) != dset_size) {
        H5_FAILED();
        printf("    Line %d: Incorrect dataset size: %lu\n", __LINE__, (unsigned long)dset_size);
        goto error;
    } /* end if */

    /* Read data */
    if(H5Dread(dsid, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_dbl) < 0) {
        H5_FAILED();
        printf("    Line %d: Error reading dataset data\n", __LINE__);
        goto error;
    } /* end if */

    /* Compare data */
    /* Check that the values read are the modified version of what was written */
    /* (the double dataset should come back unmodified, up to rounding) */
    for(i=0; i<dims[0]; i++) {
        for(j=0; j<dims[1]; j++) {
            /* If the difference between two values is greater than 0.001%, they're
             * considered not equal. */
            if(!DBL_REL_EQUAL(points_dbl[i][j],check_dbl[i][j],0.00001F)) {
                H5_FAILED();
                printf("    Line %d: Read different values than written.\n",__LINE__);
                printf("    At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
                printf("    At original: %f\n",points_dbl[i][j]);
                printf("    At returned: %f\n",check_dbl[i][j]);
                goto error;
            } /* end if */
        } /* end for */
    } /* end for */

    /* Close dataset */
    if(H5Dclose(dsid) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close dataset\n",__LINE__);
        goto error;
    } /* end if */

    /* Close file */
    if(H5Fclose(file) < 0) {
        H5_FAILED();
        printf("    Line %d: Can't close file\n",__LINE__);
        goto error;
    } /* end if */


    PASSED();
    return 0;

error:
    return -1;
} /* end test_set_local() */
5865
5866
5867 /*-------------------------------------------------------------------------
5868 * Function: test_compare_dcpl
5869 *
5870 * Purpose: Verifies that if the same DCPL was used to create two
5871 * datasets, the DCPLs retrieved from each dataset should
5872 * compare equal.
5873 *
5874 * Return: Success: 0
5875 * Failure: -1
5876 *
5877 * Programmer: Quincey Koziol
5878 * Wednesday, January 7, 2004
5879 *
5880 *-------------------------------------------------------------------------
5881 */
5882 static herr_t
test_compare_dcpl(hid_t file)5883 test_compare_dcpl(hid_t file)
5884 {
5885 hid_t dsid=(-1); /* Dataset ID */
5886 hid_t sid=(-1); /* Dataspace ID */
5887 hid_t dcpl=(-1); /* Dataspace creation property list ID */
5888 hid_t dcpl1=(-1),dcpl2=(-1); /* Dataspace creation property list IDs from datasets */
5889 const hsize_t dims[2] = {500, 4096}; /* Dataspace dimensions */
5890 const hsize_t chunk_dims[2] = {250, 2048}; /* Chunk dimensions */
5891
5892 TESTING("comparing dataset creation property lists");
5893
5894 /* Create the data space */
5895 if((sid = H5Screate_simple(2, dims, NULL)) < 0) TEST_ERROR
5896
5897 /* Create dcpl with special filter */
5898 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR
5899 if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) TEST_ERROR
5900
5901 /* Set gzip parameter (if available) */
5902 #ifdef H5_HAVE_FILTER_DEFLATE
5903 if(H5Pset_deflate (dcpl, 9) < 0) TEST_ERROR
5904 #endif /* H5_HAVE_FILTER_DEFLATE */
5905
5906 /* Create first dataset */
5907 if((dsid = H5Dcreate2(file, DSET_COMPARE_DCPL_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR
5908
5909 /* Get copy of dataset's dataset creation property list */
5910 if((dcpl1=H5Dget_create_plist(dsid)) < 0) TEST_ERROR
5911
5912 /* Close dataset */
5913 if(H5Dclose (dsid) < 0) TEST_ERROR
5914
5915 /* Create second dataset */
5916 if((dsid = H5Dcreate2(file, DSET_COMPARE_DCPL_NAME_2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR
5917
5918 /* Get copy of dataset's dataset creation property list */
5919 if((dcpl2=H5Dget_create_plist(dsid)) < 0) TEST_ERROR
5920
5921 /* Close dataset */
5922 if(H5Dclose (dsid) < 0) TEST_ERROR
5923
5924 /* Close dataspace */
5925 if(H5Sclose(sid) < 0) TEST_ERROR
5926
5927 /* Compare dataset creation property lists */
5928 if(H5Pequal(dcpl1,dcpl2)<=0) TEST_ERROR
5929
5930 /* Close dataset creation property lists */
5931 if(H5Pclose(dcpl) < 0) TEST_ERROR
5932 if(H5Pclose(dcpl1) < 0) TEST_ERROR
5933 if(H5Pclose(dcpl2) < 0) TEST_ERROR
5934
5935
5936 PASSED();
5937
5938 return 0;
5939
5940 error:
5941 H5E_BEGIN_TRY {
5942 H5Dclose(dsid);
5943 H5Sclose(sid);
5944 H5Pclose(dcpl);
5945 H5Pclose(dcpl1);
5946 H5Pclose(dcpl2);
5947 } H5E_END_TRY;
5948 return -1;
5949 } /* end test_compare_dcpl() */
5950
5951
5952 /*-------------------------------------------------------------------------
5953 * Function: test_copy_dcpl
5954 *
5955 * Purpose: Verifies whether the copy of dataset creation property
5956 * list works. It tests the DCPL for chunked layout with
5957 * filter and for contiguous layout with external storage.
5958 * (Please see #1608 in Bugzilla)
5959 *
5960 * Return: Success: 0
5961 * Failure: -1
5962 *
5963 * Programmer: Raymond Lu
5964 * 28 January 2010
5965 *
5966 *-------------------------------------------------------------------------
5967 */
5968 static herr_t
test_copy_dcpl(hid_t file,hid_t fapl)5969 test_copy_dcpl(hid_t file, hid_t fapl)
5970 {
5971 hid_t dsid1=(-1), dsid2=(-1); /* Dataset ID */
5972 hid_t new_dsid1=(-1), new_dsid2=(-1); /* Dataset ID */
5973 hid_t sid=(-1); /* Dataspace ID */
5974 hid_t dcpl=(-1); /* Dataset creation property list ID */
5975 hid_t dcpl1=(-1),dcpl2=(-1); /* Copies of creation property list IDs */
5976 hid_t dcpl1_copy=(-1),dcpl2_copy=(-1);/* Copies of creation property list IDs */
5977 const hsize_t dims[2] = {500, 4096}; /* Dataspace dimensions */
5978 const hsize_t chunk_dims[2] = {250, 2048}; /* Chunk dimensions */
5979 char filename[FILENAME_BUF_SIZE];
5980 hid_t new_file=(-1);
5981
5982 TESTING("copying dataset creation property lists");
5983
5984 /* Create the data space */
5985 if((sid = H5Screate_simple(2, dims, NULL)) < 0) TEST_ERROR
5986
5987 /* Create dcpl with special filter */
5988 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR
5989 if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) TEST_ERROR
5990 if(H5Pset_fletcher32(dcpl) < 0) TEST_ERROR
5991
5992 /* Create first dataset of chunking with filter */
5993 if((dsid1 = H5Dcreate2(file, DSET_COPY_DCPL_NAME_1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl,
5994 H5P_DEFAULT)) < 0) TEST_ERROR
5995
5996 /* Close dataset */
5997 if(H5Dclose (dsid1) < 0) TEST_ERROR
5998
5999 /* Reopen the first dataset */
6000 if((dsid1 = H5Dopen2(file, DSET_COPY_DCPL_NAME_1, H5P_DEFAULT)) < 0) TEST_ERROR
6001
6002 /* Get the copy of dataset's creation property list */
6003 if((dcpl1=H5Dget_create_plist(dsid1)) < 0) TEST_ERROR
6004 if((dcpl1_copy = H5Pcopy(dcpl1)) < 0) TEST_ERROR
6005
6006 /* Close dataset */
6007 if(H5Dclose (dsid1) < 0) TEST_ERROR
6008
6009 /* Change the DCPL for contiguous layout with external storage. The size of the reserved
6010 * space in the external file is the size of the dataset - 500*4096*sizeof(int).
6011 * There's no need to clean up the external file since the library doesn't create it
6012 * until the data is written to it. */
6013 if(H5Pset_layout(dcpl, H5D_CONTIGUOUS) < 0) TEST_ERROR
6014 if(H5Premove_filter(dcpl, H5Z_FILTER_FLETCHER32) < 0) TEST_ERROR
6015 if(H5Pset_external(dcpl, COPY_DCPL_EXTFILE_NAME, (off_t)0, (hsize_t)(500 * 4096 * sizeof(int))) < 0) TEST_ERROR
6016
6017 /* Create second dataset of contiguous layout with external storage */
6018 if((dsid2 = H5Dcreate2(file, DSET_COPY_DCPL_NAME_2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl,
6019 H5P_DEFAULT)) < 0) TEST_ERROR
6020
6021 /* Close dataset */
6022 if(H5Dclose (dsid2) < 0) TEST_ERROR
6023
6024 /* Reopen the second dataset */
6025 if((dsid2 = H5Dopen2(file, DSET_COPY_DCPL_NAME_2, H5P_DEFAULT)) < 0) TEST_ERROR
6026
6027 /* Get copy of dataset's dataset creation property list */
6028 if((dcpl2=H5Dget_create_plist(dsid2)) < 0) TEST_ERROR
6029 if((dcpl2_copy = H5Pcopy(dcpl2)) < 0) TEST_ERROR
6030
6031 /* Close dataset */
6032 if(H5Dclose (dsid2) < 0) TEST_ERROR
6033
6034 /* Create a second file and create 2 datasets with the copies of the DCPLs in the first
6035 * file. Test whether the copies of DCPLs work. */
6036 h5_fixname(FILENAME[11], fapl, filename, sizeof filename);
6037 if((new_file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
6038 TEST_ERROR
6039
6040 if((new_dsid1 = H5Dcreate2(new_file, DSET_COPY_DCPL_NAME_1, H5T_NATIVE_INT, sid,
6041 H5P_DEFAULT, dcpl1_copy, H5P_DEFAULT)) < 0) TEST_ERROR
6042
6043 if((new_dsid2 = H5Dcreate2(new_file, DSET_COPY_DCPL_NAME_2, H5T_NATIVE_INT, sid,
6044 H5P_DEFAULT, dcpl2_copy, H5P_DEFAULT)) < 0) TEST_ERROR
6045
6046 /* Close dataspace */
6047 if(H5Sclose(sid) < 0) TEST_ERROR
6048
6049 /* Close datasets */
6050 if(H5Dclose (new_dsid1) < 0) TEST_ERROR
6051 if(H5Dclose (new_dsid2) < 0) TEST_ERROR
6052
6053 /* Close the second file */
6054 if(H5Fclose (new_file) < 0) TEST_ERROR
6055
6056 /* Close dataset creation property lists */
6057 if(H5Pclose(dcpl) < 0) TEST_ERROR
6058 if(H5Pclose(dcpl1) < 0) TEST_ERROR
6059 if(H5Pclose(dcpl2) < 0) TEST_ERROR
6060 if(H5Pclose(dcpl1_copy) < 0) TEST_ERROR
6061 if(H5Pclose(dcpl2_copy) < 0) TEST_ERROR
6062
6063 PASSED();
6064
6065 return 0;
6066
6067 error:
6068 H5E_BEGIN_TRY {
6069 H5Dclose(dsid1);
6070 H5Dclose(dsid2);
6071 H5Dclose(new_dsid1);
6072 H5Dclose(new_dsid2);
6073 H5Sclose(sid);
6074 H5Pclose(dcpl);
6075 H5Pclose(dcpl1);
6076 H5Pclose(dcpl2);
6077 H5Pclose(dcpl1_copy);
6078 H5Pclose(dcpl2_copy);
6079 } H5E_END_TRY;
6080 return -1;
6081 } /* end test_copy_dcpl() */
6082
6083
6084 /*-------------------------------------------------------------------------
6085 * Function: test_filter_delete
6086 *
6087 * Purpose: Tests deletion of filters from a dataset creation property list
6088 *
6089 * Return: Success: 0
6090 * Failure: -1
6091 *
6092 * Programmer: Pedro Vicente
6093 * Monday, January 26, 2004
6094 *
6095 *-------------------------------------------------------------------------
6096 */
6097 static herr_t
test_filter_delete(hid_t file)6098 test_filter_delete(hid_t file)
6099 {
6100 H5Z_filter_t filtn; /* filter identification number */
6101 hid_t dsid=-1; /* dataset ID */
6102 hid_t sid=-1; /* dataspace ID */
6103 hid_t dcpl=-1; /* dataset creation property list ID */
6104 hid_t dcpl1=-1; /* dataset creation property list ID */
6105 hsize_t dims[2]={20,20}; /* dataspace dimensions */
6106 hsize_t chunk_dims[2]={10,10}; /* chunk dimensions */
6107 int nfilters; /* number of filters in DCPL */
6108 unsigned flags; /* flags for filter */
6109 herr_t ret; /* generic return value */
6110 int i;
6111
6112 TESTING("filter deletion");
6113
6114 #ifdef H5_HAVE_FILTER_DEFLATE
6115 /* create the data space */
6116 if((sid = H5Screate_simple(2, dims, NULL)) < 0) goto error;
6117
6118 /* create dcpl */
6119 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
6120 if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0) goto error;
6121
6122 if(H5Pset_fletcher32 (dcpl) < 0) goto error;
6123 if(H5Pset_deflate (dcpl, 6) < 0) goto error;
6124 if(H5Pset_shuffle (dcpl) < 0) goto error;
6125
6126 /* create a dataset */
6127 if((dsid = H5Dcreate2(file,"dsetdel", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;
6128
6129 /* get copy of dataset's dataset creation property list */
6130 if((dcpl1=H5Dget_create_plist(dsid)) < 0) goto error;
6131
6132 /*----------------------------------------------------------------------
6133 * delete the deflate filter
6134 *----------------------------------------------------------------------
6135 */
6136 /* delete the deflate filter */
6137 if(H5Premove_filter(dcpl1,H5Z_FILTER_DEFLATE) < 0) goto error;
6138
6139 /* get information about filters */
6140 if((nfilters = H5Pget_nfilters(dcpl1)) < 0) goto error;
6141
6142 /* check if filter was deleted */
6143 for(i=0; i<nfilters; i++) {
6144 filtn = H5Pget_filter2(dcpl1, (unsigned)i, NULL, NULL, NULL, (size_t)0, NULL, NULL);
6145 if(H5Z_FILTER_DEFLATE==filtn)
6146 goto error;
6147 }
6148
6149 /* try to get the info for the deflate filter */
6150 H5E_BEGIN_TRY {
6151 ret = H5Pget_filter_by_id2(dcpl1, H5Z_FILTER_DEFLATE, &flags, NULL, NULL, (size_t)0, NULL, NULL);
6152 } H5E_END_TRY;
6153 if(ret >=0) {
6154 H5_FAILED();
6155 printf(" Line %d: Shouldn't have deleted filter!\n",__LINE__);
6156 goto error;
6157 } /* end if */
6158
6159 /* try to delete the deflate filter again */
6160 H5E_BEGIN_TRY {
6161 ret=H5Premove_filter(dcpl1,H5Z_FILTER_DEFLATE);
6162 } H5E_END_TRY;
6163 if(ret >=0) {
6164 H5_FAILED();
6165 printf(" Line %d: Shouldn't have deleted filter!\n",__LINE__);
6166 goto error;
6167 } /* end if */
6168
6169 /*----------------------------------------------------------------------
6170 * delete all filters
6171 *----------------------------------------------------------------------
6172 */
6173 /* delete all filters */
6174 if(H5Premove_filter(dcpl1,H5Z_FILTER_ALL) < 0) goto error;
6175
6176 /* get information about filters */
6177 if((nfilters = H5Pget_nfilters(dcpl1)) < 0) goto error;
6178
6179 /* check if filters were deleted */
6180 if(nfilters)goto error;
6181
6182 /*----------------------------------------------------------------------
6183 * close
6184 *----------------------------------------------------------------------
6185 */
6186
6187 /* clean up objects used for this test */
6188 if(H5Pclose (dcpl) < 0) goto error;
6189 if(H5Pclose (dcpl1) < 0) goto error;
6190 if(H5Dclose (dsid) < 0) goto error;
6191 if(H5Sclose (sid) < 0) goto error;
6192
6193 PASSED();
6194 #else
6195 SKIPPED();
6196 #endif
6197 return 0;
6198
6199 error:
6200 H5E_BEGIN_TRY {
6201 H5Pclose(dcpl);
6202 H5Pclose(dcpl1);
6203 H5Dclose(dsid);
6204 H5Sclose(sid);
6205 } H5E_END_TRY;
6206 return -1;
6207 } /* end test_filter_delete() */
6208
6209
6210
6211 /*-------------------------------------------------------------------------
6212 * Function: auxread_fdata
6213 *
6214 * Purpose: reads a dataset "NAME" from FID
6215 *
6216 * Return: Success: 0
6217 * Failure: -1
6218 *
6219 * Programmer: Pedro Vicente
6220 * Monday, March 8, 2004
6221 *
6222 *-------------------------------------------------------------------------
6223 */
6224 static herr_t
auxread_fdata(hid_t fid,const char * name)6225 auxread_fdata(hid_t fid, const char *name)
6226 {
6227 hid_t dset_id=-1; /* dataset ID */
6228 hid_t dcpl_id=-1; /* dataset creation property list ID */
6229 hid_t space_id=-1; /* space ID */
6230 hid_t ftype_id=-1; /* file data type ID */
6231 hid_t mtype_id=-1; /* memory data type ID */
6232 size_t msize; /* memory size of memory type */
6233 void *buf=NULL; /* data buffer */
6234 hsize_t nelmts; /* number of elements in dataset */
6235 int rank; /* rank of dataset */
6236 hsize_t dims[H5S_MAX_RANK];/* dimensions of dataset */
6237 int i;
6238
6239 if((dset_id = H5Dopen2(fid, name, H5P_DEFAULT)) < 0)
6240 goto error;
6241 if((space_id = H5Dget_space(dset_id)) < 0)
6242 goto error;
6243 if((ftype_id = H5Dget_type(dset_id)) < 0)
6244 goto error;
6245 if((dcpl_id = H5Dget_create_plist(dset_id)) < 0)
6246 goto error;
6247 if((rank = H5Sget_simple_extent_ndims(space_id)) < 0)
6248 goto error;
6249 HDmemset(dims, 0, sizeof dims);
6250 if(H5Sget_simple_extent_dims(space_id, dims, NULL) < 0)
6251 goto error;
6252 nelmts = 1;
6253 for(i = 0; i < rank; i++)
6254 nelmts *= dims[i];
6255 if((mtype_id = H5Tget_native_type(ftype_id, H5T_DIR_DEFAULT)) < 0)
6256 goto error;
6257 if((msize = H5Tget_size(mtype_id)) == 0)
6258 goto error;
6259
6260 if(nelmts) {
6261 buf = (void *)HDmalloc((size_t)(nelmts * msize));
6262 if(buf == NULL) {
6263 printf( "cannot read into memory\n" );
6264 goto error;
6265 }
6266 if(H5Dread(dset_id, mtype_id, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
6267 goto error;
6268 }
6269
6270 if(H5Pclose(dcpl_id) < 0)
6271 goto error;
6272 if(H5Sclose(space_id) < 0)
6273 goto error;
6274 if(H5Dclose(dset_id) < 0)
6275 goto error;
6276 if(buf)
6277 HDfree(buf);
6278
6279 return 0;
6280
6281 error:
6282 H5E_BEGIN_TRY {
6283 H5Pclose(dcpl_id);
6284 H5Sclose(space_id);
6285 H5Dclose(dset_id);
6286 H5Tclose(ftype_id);
6287 H5Tclose(mtype_id);
6288 if(buf)
6289 HDfree(buf);
6290 } H5E_END_TRY;
6291 return -1;
6292 }
6293
6294
6295 /*-------------------------------------------------------------------------
6296 * Function: test_filters_endianess
6297 *
6298 * Purpose: Reads/writes data with filters (big-endian/little-endian data)
6299 *
6300 * Return: Success: 0
6301 * Failure: -1
6302 *
6303 * Programmer: Pedro Vicente
6304 * Monday, March 8, 2004
6305 *
6306 *-------------------------------------------------------------------------
6307 */
6308 static herr_t
test_filters_endianess(void)6309 test_filters_endianess(void)
6310 {
6311 hid_t fid=-1; /* file ID */
6312 hid_t dsid=-1; /* dataset ID */
6313 hid_t sid=-1; /* dataspace ID */
6314 hid_t dcpl=-1; /* dataset creation property list ID */
6315 const char *data_file = H5_get_srcdir_filename("test_filters_le.h5"); /* Corrected test file name */
6316
6317 TESTING("filters with big-endian/little-endian data");
6318
6319 /*-------------------------------------------------------------------------
6320 * step 1: open a file written on a little-endian machine
6321 *-------------------------------------------------------------------------
6322 */
6323
6324 /* open */
6325 if((fid = H5Fopen(data_file, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
6326
6327 /* read */
6328 if(auxread_fdata(fid,"dset") < 0) TEST_ERROR
6329
6330 /* close */
6331 if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
6332
6333 /*-------------------------------------------------------------------------
6334 * step 2: open a file written on a big-endian machine
6335 *-------------------------------------------------------------------------
6336 */
6337
6338 /* compose the name of the file to open, using the srcdir, if appropriate */
6339 data_file = H5_get_srcdir_filename("test_filters_be.h5"); /* Corrected test file name */
6340
6341 /* open */
6342 if((fid = H5Fopen(data_file, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
6343
6344 /* read */
6345 if(auxread_fdata(fid,"dset") < 0) TEST_ERROR
6346
6347 /* close */
6348 if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
6349
6350 PASSED();
6351
6352 return 0;
6353
6354 error:
6355 H5E_BEGIN_TRY {
6356 H5Pclose(dcpl);
6357 H5Dclose(dsid);
6358 H5Sclose(sid);
6359 H5Fclose(fid);
6360 } H5E_END_TRY;
6361 return -1;
6362 } /* end test_filters_endianess() */
6363
6364
6365 /*-------------------------------------------------------------------------
6366 * Function: test_zero_dims
6367 *
6368 * Purpose: Tests read/writes to zero-sized extendible datasets
6369 *
6370 * Return: Success: 0
6371 * Failure: -1
6372 *
6373 * Programmer: Quincey Koziol
6374 * Tuesday, July 27, 2004
6375 *
6376 *-------------------------------------------------------------------------
6377 */
6378 static herr_t
test_zero_dims(hid_t file)6379 test_zero_dims(hid_t file)
6380 {
6381 hid_t s = -1, d = -1, dcpl = -1;
6382 hsize_t dsize = 0, dmax = H5S_UNLIMITED, csize = 5;
6383 herr_t ret;
6384
6385 TESTING("I/O on datasets with zero-sized dims");
6386
6387 /*
6388 * One-dimensional dataset
6389 */
6390 if((s = H5Screate_simple(1, &dsize, &dmax)) < 0) FAIL_STACK_ERROR
6391
6392 /* Try creating chunked dataset with undefined chunk dimensions */
6393 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
6394 if(H5Pset_layout(dcpl, H5D_CHUNKED) < 0) FAIL_STACK_ERROR
6395
6396 H5E_BEGIN_TRY {
6397 d = H5Dcreate2(file, ZERODIM_DATASET, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT);
6398 } H5E_END_TRY;
6399 if(d > 0) {
6400 H5Dclose(d);
6401 FAIL_PUTS_ERROR("created dataset with undefined chunk dimensions")
6402 } /* end if */
6403
6404 /* Try creating chunked dataset with zero-sized chunk dimensions */
6405 H5E_BEGIN_TRY {
6406 ret = H5Pset_chunk(dcpl, 1, &dsize);
6407 } H5E_END_TRY;
6408 if(ret > 0)
6409 FAIL_PUTS_ERROR("set zero-sized chunk dimensions")
6410
6411 if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
6412
6413 /* Create the zero-sized extendible dataset */
6414 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
6415 if(H5Pset_chunk(dcpl, 1, &csize) < 0) FAIL_STACK_ERROR
6416 if((d = H5Dcreate2(file, ZERODIM_DATASET, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
6417
6418 /* Various no-op writes */
6419 if(H5Dwrite(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, (void*)911) < 0) FAIL_STACK_ERROR
6420 if(H5Dwrite(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, NULL) < 0) FAIL_STACK_ERROR
6421 if(H5Dwrite(d, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, (void*)911) < 0) FAIL_STACK_ERROR
6422 if(H5Dwrite(d, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, NULL) < 0) FAIL_STACK_ERROR
6423
6424 /* Various no-op reads */
6425 if(H5Dread(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, (void*)911) < 0) FAIL_STACK_ERROR
6426 if(H5Dread(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, NULL) < 0) FAIL_STACK_ERROR
6427 if(H5Dread(d, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, (void*)911) < 0) FAIL_STACK_ERROR
6428 if(H5Dread(d, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, NULL) < 0) FAIL_STACK_ERROR
6429
6430 if(H5Dclose(d) < 0) FAIL_STACK_ERROR
6431 if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
6432 if(H5Sclose(s) < 0) FAIL_STACK_ERROR
6433
6434 PASSED();
6435 return 0;
6436
6437 error:
6438 H5E_BEGIN_TRY {
6439 H5Pclose(dcpl);
6440 H5Dclose(d);
6441 H5Sclose(s);
6442 } H5E_END_TRY;
6443 return -1;
6444 } /* end test_zero_dims() */
6445
6446
/*-------------------------------------------------------------------------
 * Function:	test_missing_chunk
 *
 * Purpose:	Tests that reads from chunked dataset with undefined fill value and
 *              not all chunks written don't overwrite data in user's buffer
 *              for missing chunks.
 *
 * Return:	Success:	0
 *		Failure:	-1
 *
 * Programmer:	Quincey Koziol
 *              Tuesday, August 25, 2004
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_missing_chunk(hid_t file)
{
    hid_t       s = -1, d = -1, dcpl = -1;
    hsize_t	hs_start[1];            /* Hyperslab start */
    hsize_t	hs_stride[1],           /* Hyperslab stride */
                hs_count[1],            /* Hyperslab block count */
                hs_block[1];            /* Hyperslab block size */
    int         wdata[MISSING_CHUNK_DIM],       /* Data to write (only every other chunk is written) */
                rdata[MISSING_CHUNK_DIM];       /* Read buffer, pre-filled with the sentinel 911 */
    hsize_t     dsize=100, dmax=H5S_UNLIMITED;  /* Dataset size & max size */
    hsize_t     csize=5;                /* Chunk size */
    size_t      u;                      /* Local index variable */

    TESTING("Read dataset with unwritten chunk & undefined fill value");

    /* Initialize data for 1-D dataset */
    /* (rdata is pre-set to 911 so untouched elements can be detected) */
    for(u = 0; u < MISSING_CHUNK_DIM; u++) {
        wdata[u] = (int)u;
        rdata[u] = 911;
    } /* end for */

    /* Create dataspace */
    if((s = H5Screate_simple(1, &dsize, &dmax)) < 0) TEST_ERROR;

    /* Create dataset creation property list */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;

    /* Set to chunked */
    if(H5Pset_chunk(dcpl, 1, &csize) < 0) TEST_ERROR;

    /* Undefine fill value */
    if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, NULL) < 0) TEST_ERROR;

    /* Create dataset */
    if((d = H5Dcreate2(file, MISSING_CHUNK_DATASET, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Select elements in every other chunk for 1-D dataset */
    /* (blocks of 5 elements at stride 10 with chunk size 5, i.e. elements
     * with u%10 < 5 — so every odd-numbered chunk is left unwritten) */
    hs_start[0]=0;
    hs_stride[0]=10;
    hs_count[0]=10;
    hs_block[0]=5;
    if(H5Sselect_hyperslab(s, H5S_SELECT_SET, hs_start, hs_stride, hs_count,
			    hs_block) < 0) TEST_ERROR;

    /* Write selected data */
    if(H5Dwrite(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, wdata) < 0) TEST_ERROR;

    /* Read all data */
    if(H5Dread(d, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata) < 0) TEST_ERROR;

    /* Validate values read */
    /* (elements in unwritten chunks must still hold the 911 sentinel;
     * written elements must match what was written) */
    for(u=0; u<MISSING_CHUNK_DIM; u++) {
        if((u%10)>=5) {
            if(rdata[u]!=911) {
                printf("    Line %d: Incorrect value, rdata[%u]=%d\n",__LINE__,(unsigned)u,rdata[u]);
                TEST_ERROR;
            } /* end if */
        } /* end if */
        else {
            if(rdata[u]!=wdata[u]) {
                printf("    Line %d: Incorrect value, wdata[%u]=%d, rdata[%u]=%d\n",__LINE__,(unsigned)u,wdata[u],(unsigned)u,rdata[u]);
                TEST_ERROR;
            } /* end if */
        } /* end else */
    } /* end for */

    /* Close everything */
    if(H5Pclose(dcpl) < 0) TEST_ERROR;
    if(H5Sclose(s) < 0) TEST_ERROR;
    if(H5Dclose(d) < 0) TEST_ERROR;

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(d);
        H5Sclose(s);
    } H5E_END_TRY;
    return -1;
} /* end test_missing_chunk() */
6545
6546
6547 /*-------------------------------------------------------------------------
6548 * Function: test_random_chunks
6549 *
6550 * Purpose: Tests that write/read on randomly selected chunks in 2 datasets.
6551 * One dataset has fixed dimensions, and the other has unlimited
6552 * dimensions which are extended before write/read operations.
6553 *
6554 *
6555 * Return: Success: 0
6556 * Failure: -1
6557 *
6558 * Programmer: Christian Chilan
6559 * Monday, March 26, 2007
6560 *
6561 *-------------------------------------------------------------------------
6562 */
static herr_t
test_random_chunks(hid_t fapl)
{
    char filename[FILENAME_BUF_SIZE];
    hid_t s=-1, m=-1, d=-1, dcpl=-1, file=-1;   /* File dataspace, memory dataspace, dataset, DCPL & file IDs */
    int wbuf[NPOINTS],          /* Values written (one per selected point) */
        rbuf[NPOINTS],          /* Values read back */
        check2[20][20];         /* Per-chunk flag, nonzero once that chunk has been selected */
    hsize_t coord[NPOINTS][2];  /* Element coordinates of the selected points */
    hsize_t dsize[2]={100,100}, dmax[2]={H5S_UNLIMITED, H5S_UNLIMITED}, csize[2]={10,10}, nsize[2]={200,200};
    hsize_t msize[1]={NPOINTS}; /* Extent of the 1-D memory dataspace */
    const char dname[]="dataset";
    int chunk_row, chunk_col;   /* Chunk indices of the current random point */
    size_t i, j;


    TESTING("Write/read on randomly selected chunks");

    /* Sanity check: there must be fewer points than chunks (10x10 = 100 in the
     * first test) so that each point can land in a distinct chunk */
    assert(NPOINTS < 100);

    h5_fixname(FILENAME[6], fapl, filename, sizeof filename);

    /* Create file for first test */
    if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;

    /* Create dataspace */
    if((s = H5Screate_simple(2, dsize, NULL)) < 0) TEST_ERROR;

    /* Create dataset creation property list */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;

    /* Set chunked layout */
    if(H5Pset_chunk(dcpl, 2, csize) < 0) TEST_ERROR;

    /* Set early allocation time so all chunks exist on disk before the writes */
    if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;

    /* Create dataset */
    if((d = H5Dcreate2(file, dname, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Reset check array, which guards against selecting the same chunk twice */
    for(i=0; i<dsize[0]/csize[0]; i++)
        for(j=0; j<dsize[1]/csize[1]; j++)
            check2[i][j] = 0;

    /* Generate random point coordinates. Only one point is selected per chunk */
    for(i=0; i<NPOINTS; i++){
        do {
            chunk_row = (int)HDrandom () % (int)(dsize[0]/csize[0]);
            chunk_col = (int)HDrandom () % (int)(dsize[1]/csize[1]);
        } while (check2[chunk_row][chunk_col]);

        /* Value written is derived from the chunk position; the point itself
         * is placed at the first element of the chosen chunk */
        wbuf[i] = check2[chunk_row][chunk_col] = chunk_row+chunk_col+1;
        coord[i][0] = (hsize_t)chunk_row * csize[0];
        coord[i][1] = (hsize_t)chunk_col * csize[1];
    }

    /* Create dataspace for write buffer */
    if((m = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;

    /* Select the random points for writing */
    if(H5Sselect_elements(s, H5S_SELECT_SET, (size_t)NPOINTS, (const hsize_t *)coord) < 0) TEST_ERROR;

    /* Write into dataset */
    if(H5Dwrite(d, H5T_NATIVE_INT, m, s, H5P_DEFAULT, wbuf) < 0) TEST_ERROR;

    /* Close resources*/
    if(H5Sclose(s) < 0) TEST_ERROR;
    if(H5Sclose(m) < 0) TEST_ERROR;
    if(H5Pclose(dcpl) < 0) TEST_ERROR;
    if(H5Dclose(d) < 0) TEST_ERROR;
    if(H5Fclose(file) < 0) TEST_ERROR;

    /* Open file again */
    if((file = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) TEST_ERROR;

    /* Open dataset */
    if((d = H5Dopen2(file, dname, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Get dataset dataspace */
    if((s = H5Dget_space(d)) < 0) TEST_ERROR;

    /* Create dataspace for read buffer */
    if((m = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;

    /* Select the same random points for reading */
    if(H5Sselect_elements (s, H5S_SELECT_SET, (size_t)NPOINTS, (const hsize_t *)coord) < 0) TEST_ERROR;

    /* Read from dataset */
    if(H5Dread(d, H5T_NATIVE_INT, m, s, H5P_DEFAULT, rbuf) < 0) TEST_ERROR;

    /* Verify that written and read data are the same */
    for(i = 0; i < NPOINTS; i++)
        if(rbuf[i] != wbuf[i]){
            printf("    Line %d: Incorrect value, wbuf[%u]=%d, rbuf[%u]=%d\n",__LINE__,(unsigned)i,wbuf[i],(unsigned)i,rbuf[i]);
            printf("    coord[%u] = {%lu, %lu}\n", (unsigned)i, (unsigned long)coord[i][0], (unsigned long)coord[i][1]);
            TEST_ERROR;
        } /* end if */

    /* Close resources */
    if(H5Sclose(s) < 0) TEST_ERROR;
    if(H5Sclose(m) < 0) TEST_ERROR;
    if(H5Dclose(d) < 0) TEST_ERROR;
    if(H5Fclose(file) < 0) TEST_ERROR;


    /* Create file for second test: same idea, but the dataset starts at
     * 100x100 with unlimited maximum dimensions and is extended to 200x200
     * before the random chunks are written and read */
    if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;

    /* Create dataspace with unlimited maximum dimensions */
    if((s = H5Screate_simple(2, dsize, dmax)) < 0) TEST_ERROR;

    /* Create dataset creation property list */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;

    /* Set chunked layout */
    if(H5Pset_chunk(dcpl, 2, csize) < 0) TEST_ERROR;

    /* Set allocation time to early */
    if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;

    /* Create dataset */
    if((d = H5Dcreate2(file, dname, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Extend both dimensions of the dataset */
    if(H5Dset_extent(d, nsize) < 0) TEST_ERROR;

    /* Reset the dataset dataspace to new dimensions */
    if(H5Sset_extent_simple(s, 2, nsize, dmax) < 0) TEST_ERROR;

    /* Reset check buffer; the extended dataset has 20x20 chunks, which is
     * exactly the capacity of check2 */
    for(i = 0; i < nsize[0]/csize[0]; i++)
        for(j = 0; j < nsize[1] / csize[1]; j++)
            check2[i][j] = 0;

    /* Generate random point coordinates. Only one point is selected per chunk */
    for(i = 0; i < NPOINTS; i++){
        do {
            chunk_row = (int)HDrandom() % (int)(nsize[0] / csize[0]);
            chunk_col = (int)HDrandom() % (int)(nsize[1] / csize[1]);
        } while (check2[chunk_row][chunk_col]);

        wbuf[i] = check2[chunk_row][chunk_col] = chunk_row + chunk_col + 1;
        coord[i][0] = (hsize_t)chunk_row * csize[0];
        coord[i][1] = (hsize_t)chunk_col * csize[1];
    }

    /* Create dataspace for write buffer */
    if((m = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;

    /* Select the random points for writing */
    if(H5Sselect_elements(s, H5S_SELECT_SET, (size_t)NPOINTS, (const hsize_t *)coord) < 0) TEST_ERROR;

    /* Write into dataset */
    if(H5Dwrite(d, H5T_NATIVE_INT, m, s, H5P_DEFAULT, wbuf) < 0) TEST_ERROR;

    /* Close resources */
    if(H5Sclose(s) < 0) TEST_ERROR;
    if(H5Sclose(m) < 0) TEST_ERROR;
    if(H5Pclose(dcpl) < 0) TEST_ERROR;
    if(H5Dclose(d) < 0) TEST_ERROR;
    if(H5Fclose(file) < 0) TEST_ERROR;

    /* Open file again */
    if((file = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) TEST_ERROR;

    /* Open dataset */
    if((d = H5Dopen2(file, dname, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Get dataset dataspace */
    if((s = H5Dget_space(d)) < 0) TEST_ERROR;

    /* Create dataspace for read buffer */
    if((m = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;

    /* Select the same random points for reading */
    if(H5Sselect_elements (s, H5S_SELECT_SET, (size_t)NPOINTS, (const hsize_t *)coord) < 0) TEST_ERROR;

    /* Read from dataset */
    if(H5Dread(d, H5T_NATIVE_INT, m, s, H5P_DEFAULT, rbuf) < 0) TEST_ERROR;

    /* Verify that written and read data are the same */
    for(i = 0; i < NPOINTS; i++)
        if(rbuf[i] != wbuf[i]){
            printf("    Line %d: Incorrect value, wbuf[%u]=%d, rbuf[%u]=%d\n",__LINE__,(unsigned)i,wbuf[i],(unsigned)i,rbuf[i]);
            TEST_ERROR;
        } /* end if */

    /* Close resources */
    if(H5Sclose(s) < 0) TEST_ERROR;
    if(H5Sclose(m) < 0) TEST_ERROR;
    if(H5Dclose(d) < 0) TEST_ERROR;
    if(H5Fclose(file) < 0) TEST_ERROR;

    PASSED();
    return 0;

error:
    /* Suppress errors while releasing whatever IDs are still open */
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Sclose(s);
        H5Sclose(m);
        H5Dclose(d);
        H5Fclose(file);
    } H5E_END_TRY;
    return -1;
} /* end test_random_chunks() */
6770
6771 #ifndef H5_NO_DEPRECATED_SYMBOLS
6772 /* Empty can_apply and set_local callbacks */
6773 static htri_t
can_apply_deprec(hid_t H5_ATTR_UNUSED dcpl_id,hid_t H5_ATTR_UNUSED type_id,hid_t H5_ATTR_UNUSED space_id)6774 can_apply_deprec(hid_t H5_ATTR_UNUSED dcpl_id, hid_t H5_ATTR_UNUSED type_id, hid_t H5_ATTR_UNUSED space_id)
6775 {
6776 return 1;
6777 }
6778
6779 static herr_t
set_local_deprec(hid_t H5_ATTR_UNUSED dcpl_id,hid_t H5_ATTR_UNUSED type_id,hid_t H5_ATTR_UNUSED space_id)6780 set_local_deprec(hid_t H5_ATTR_UNUSED dcpl_id, hid_t H5_ATTR_UNUSED type_id, hid_t H5_ATTR_UNUSED space_id)
6781 {
6782 return(SUCCEED);
6783 }
6784
/* Old style H5Z_class_t (H5Z_class1_t), essentially a copy of the "bogus"
 * filter; used by test_deprec() to exercise H5Zregister() with the
 * deprecated filter-class structure */
const H5Z_class1_t H5Z_DEPREC[1] = {{
    H5Z_FILTER_DEPREC,          /* Filter id number */
    "deprec",                   /* Filter name for debugging */
    can_apply_deprec,           /* The "can apply" callback */
    set_local_deprec,           /* The "set local" callback */
    filter_bogus,               /* The actual filter function (pass-through; defined earlier in this file) */
}};
6793
6794
6795 /*-------------------------------------------------------------------------
6796 * Function: test_deprec
6797 *
6798 * Purpose: Tests deprecated API symbols
6799 *
6800 * Return: Success: 0
6801 * Failure: -1
6802 *
6803 * Programmer: Quincey Koziol
6804 * Monday, October 8, 2007
6805 *
6806 *-------------------------------------------------------------------------
6807 */
6808 static herr_t
test_deprec(hid_t file)6809 test_deprec(hid_t file)
6810 {
6811 hid_t dataset, space, small_space, create_parms, dcpl;
6812 hsize_t dims[2], small_dims[2];
6813 hsize_t deprec_size;
6814 herr_t status;
6815 hsize_t csize[2];
6816
6817 TESTING("deprecated API routines");
6818
6819 /* Create the data space */
6820 dims[0] = 256;
6821 dims[1] = 512;
6822 space = H5Screate_simple(2, dims, NULL);
6823 assert(space>=0);
6824
6825 /* Create a small data space for compact dataset */
6826 small_dims[0] = 16;
6827 small_dims[1] = 8;
6828 small_space = H5Screate_simple(2, small_dims, NULL);
6829 assert(space>=0);
6830
6831 /*
6832 * Create a dataset using the default dataset creation properties. We're
6833 * not sure what they are, so we won't check.
6834 */
6835 if((dataset = H5Dcreate1(file, DSET_DEPREC_NAME, H5T_NATIVE_DOUBLE, space, H5P_DEFAULT)) < 0) goto error;
6836
6837 /* Close the dataset */
6838 if(H5Dclose(dataset) < 0) goto error;
6839
6840 /*
6841 * Try creating a dataset that already exists. This should fail since a
6842 * dataset can only be created once. Temporarily turn off error
6843 * reporting.
6844 */
6845 H5E_BEGIN_TRY {
6846 dataset = H5Dcreate1(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
6847 H5P_DEFAULT);
6848 } H5E_END_TRY;
6849 if(dataset >= 0) {
6850 H5_FAILED();
6851 puts(" Library allowed overwrite of existing dataset.");
6852 goto error;
6853 }
6854
6855 /*
6856 * Open the dataset we created above and then close it. This is how
6857 * existing datasets are accessed.
6858 */
6859 if((dataset = H5Dopen1(file, DSET_DEPREC_NAME)) < 0) goto error;
6860 if(H5Dclose(dataset) < 0) goto error;
6861
6862 /*
6863 * Try opening a non-existent dataset. This should fail since new datasets
6864 * cannot be created with this function. Temporarily turn off error
6865 * reporting.
6866 */
6867 H5E_BEGIN_TRY {
6868 dataset = H5Dopen1(file, "does_not_exist");
6869 } H5E_END_TRY;
6870 if(dataset >= 0) {
6871 H5_FAILED();
6872 puts(" Opened a non-existent dataset.");
6873 goto error;
6874 }
6875
6876 /*
6877 * Create a new dataset that uses chunked storage instead of the default
6878 * layout.
6879 */
6880 create_parms = H5Pcreate(H5P_DATASET_CREATE);
6881 assert(create_parms >= 0);
6882
6883 /* Add the deflate filter, if available */
6884 #if defined H5_HAVE_FILTER_DEFLATE
6885 {
6886 H5Z_filter_t filtn; /* filter identification number */
6887 size_t cd_nelmts = 1; /* Number of filter parameters */
6888 unsigned cd_value; /* Filter parameter */
6889
6890 if(H5Pset_deflate(create_parms, 6) < 0) goto error;
6891
6892 /* Check for the deflate filter */
6893 filtn = H5Pget_filter1(create_parms, (unsigned)0, NULL, &cd_nelmts, &cd_value, (size_t)0, NULL);
6894 if(H5Z_FILTER_DEFLATE != filtn)
6895 goto error;
6896 if(1 != cd_nelmts)
6897 goto error;
6898 if(6 != cd_value)
6899 goto error;
6900
6901 /* Check for the deflate filter */
6902 if(H5Pget_filter_by_id1(create_parms, H5Z_FILTER_DEFLATE, NULL, &cd_nelmts, &cd_value, (size_t)0, NULL) < 0) goto error;
6903 if(1 != cd_nelmts)
6904 goto error;
6905 if(6 != cd_value)
6906 goto error;
6907 }
6908 #endif /* H5_HAVE_FILTER_DEFLATE */
6909
6910 /* Attempt to create a dataset with invalid chunk sizes */
6911 csize[0] = dims[0]*2;
6912 csize[1] = dims[1]*2;
6913 status = H5Pset_chunk(create_parms, 2, csize);
6914 assert(status >= 0);
6915 H5E_BEGIN_TRY {
6916 dataset = H5Dcreate1(file, DSET_DEPREC_NAME_CHUNKED, H5T_NATIVE_DOUBLE, space,
6917 create_parms);
6918 } H5E_END_TRY;
6919 if(dataset >= 0) {
6920 H5_FAILED();
6921 puts(" Opened a dataset with incorrect chunking parameters.");
6922 goto error;
6923 }
6924
6925 csize[0] = 5;
6926 csize[1] = 100;
6927 status = H5Pset_chunk(create_parms, 2, csize);
6928 assert(status >= 0);
6929
6930 if((dataset = H5Dcreate1(file, DSET_DEPREC_NAME_CHUNKED, H5T_NATIVE_DOUBLE, space, create_parms)) < 0) goto error;
6931 H5Pclose(create_parms);
6932
6933 /*
6934 * Close the chunked dataset.
6935 */
6936 if(H5Dclose(dataset) < 0) goto error;
6937
6938
6939 /*
6940 * Open the dataset we created above and then close it. This is how
6941 * existing datasets are accessed.
6942 */
6943 if((dataset = H5Dopen1(file, DSET_DEPREC_NAME_CHUNKED)) < 0) goto error;
6944 if(H5Dclose(dataset) < 0) goto error;
6945
6946 /*
6947 * Create a compact dataset, then close it.
6948 */
6949 create_parms = H5Pcreate(H5P_DATASET_CREATE);
6950 assert(create_parms >= 0);
6951 status = H5Pset_layout(create_parms, H5D_COMPACT);
6952 assert(status >= 0);
6953 status = H5Pset_alloc_time(create_parms, H5D_ALLOC_TIME_EARLY);
6954 assert(status >= 0);
6955
6956 if((dataset = H5Dcreate1(file, DSET_DEPREC_NAME_COMPACT, H5T_NATIVE_DOUBLE, small_space, create_parms)) < 0) goto error;
6957 H5Pclose(create_parms);
6958 if(H5Dclose(dataset) < 0) goto error;
6959
6960 /*
6961 * Open the dataset we created above and then close it. This is how
6962 * existing datasets are accessed.
6963 */
6964 if((dataset = H5Dopen1(file, DSET_DEPREC_NAME_COMPACT)) < 0) goto error;
6965 if(H5Dclose(dataset) < 0) goto error;
6966
6967 /* Test H5Zregister with deprecated H5Z_class1_t */
6968 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
6969 if(H5Pset_chunk(dcpl, 2, csize) < 0) goto error;
6970 if(H5Zregister(H5Z_DEPREC) < 0) goto error;
6971 if(H5Pset_filter(dcpl, H5Z_FILTER_DEPREC, 0, (size_t)0, NULL) < 0) goto error;
6972
6973 puts("");
6974 if(test_filter_internal(file,DSET_DEPREC_NAME_FILTER,dcpl,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&deprec_size) < 0) goto error;
6975
6976 if(H5Pclose(dcpl) < 0) goto error;
6977
6978 return 0;
6979
6980 error:
6981 return -1;
6982 } /* end test_deprec() */
6983 #endif /* H5_NO_DEPRECATED_SYMBOLS */
6984
6985
6986 /*-------------------------------------------------------------------------
6987 * Function: test_huge_chunks
6988 *
6989 * Purpose: Tests that datasets with chunks >4GB can't be created.
6990 *
6991 * Return: Success: 0
6992 * Failure: -1
6993 *
6994 * Programmer: Quincey Koziol
6995 * Thursday, May 1, 2008
6996 *
6997 *-------------------------------------------------------------------------
6998 */
6999 static herr_t
test_huge_chunks(hid_t fapl)7000 test_huge_chunks(hid_t fapl)
7001 {
7002 char filename[FILENAME_BUF_SIZE];
7003 hid_t fid = -1; /* File ID */
7004 hid_t dcpl = -1; /* Dataset creation property list ID */
7005 hid_t sid = -1; /* Dataspace ID */
7006 hid_t dsid = -1; /* Dataset ID */
7007 hsize_t dim, chunk_dim; /* Dataset and chunk dimensions */
7008 hsize_t dim2[3], chunk_dim2[3]; /* Dataset and chunk dimensions */
7009 herr_t ret; /* Generic return value */
7010
7011 TESTING("creating dataset with >4GB chunks");
7012
7013 h5_fixname(FILENAME[7], fapl, filename, sizeof filename);
7014
7015 /* Create file */
7016 if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
7017
7018 /* Create dataset creation property list */
7019 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
7020
7021 /* Try to set too large of a chunk for 1-D dataset (# of elements) */
7022 chunk_dim = TOO_HUGE_CHUNK_DIM;
7023 H5E_BEGIN_TRY {
7024 ret = H5Pset_chunk(dcpl, 1, &chunk_dim);
7025 } H5E_END_TRY;
7026 if(ret >= 0)
7027 FAIL_PUTS_ERROR(" Set chunk size with too large of chunk dimensions.")
7028
7029 /* Try to set too large of a chunk for n-D dataset (# of elements) */
7030 chunk_dim2[0] = TOO_HUGE_CHUNK_DIM2_0;
7031 chunk_dim2[1] = TOO_HUGE_CHUNK_DIM2_1;
7032 chunk_dim2[2] = TOO_HUGE_CHUNK_DIM2_2;
7033 H5E_BEGIN_TRY {
7034 ret = H5Pset_chunk(dcpl, 3, chunk_dim2);
7035 } H5E_END_TRY;
7036 if(ret >= 0)
7037 FAIL_PUTS_ERROR(" Set chunk size with too large of chunk dimensions.")
7038
7039 /* Set 1-D chunk size */
7040 chunk_dim = HUGE_CHUNK_DIM;
7041 if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
7042
7043 /* Create 1-D dataspace */
7044 dim = HUGE_DIM;
7045 if((sid = H5Screate_simple(1, &dim, NULL)) < 0) FAIL_STACK_ERROR
7046
7047 /* Try to create dataset */
7048 H5E_BEGIN_TRY {
7049 dsid = H5Dcreate2(fid, HUGE_DATASET, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
7050 } H5E_END_TRY;
7051 if(dsid >= 0)
7052 FAIL_PUTS_ERROR(" 1-D Dataset with too large of chunk dimensions created.")
7053
7054 /* Close 1-D dataspace */
7055 if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
7056
7057
7058 /* Set n-D chunk size */
7059 chunk_dim2[0] = HUGE_CHUNK_DIM2_0;
7060 chunk_dim2[1] = HUGE_CHUNK_DIM2_1;
7061 chunk_dim2[2] = HUGE_CHUNK_DIM2_2;
7062 if(H5Pset_chunk(dcpl, 3, chunk_dim2) < 0) FAIL_STACK_ERROR
7063
7064 /* Create n-D dataspace */
7065 dim2[0] = HUGE_DIM2_0;
7066 dim2[1] = HUGE_DIM2_1;
7067 dim2[2] = HUGE_DIM2_2;
7068 if((sid = H5Screate_simple(3, dim2, NULL)) < 0) FAIL_STACK_ERROR
7069
7070 /* Try to create dataset */
7071 H5E_BEGIN_TRY {
7072 dsid = H5Dcreate2(fid, HUGE_DATASET2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
7073 } H5E_END_TRY;
7074 if(dsid >= 0)
7075 FAIL_PUTS_ERROR(" n-D Dataset with too large of chunk dimensions created.")
7076
7077 /* Close n-D dataspace */
7078 if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
7079
7080 /* Close everything else */
7081 if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
7082 if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
7083
7084 PASSED();
7085 return 0;
7086
7087 error:
7088 H5E_BEGIN_TRY {
7089 H5Pclose(dcpl);
7090 H5Dclose(dsid);
7091 H5Sclose(sid);
7092 H5Fclose(fid);
7093 } H5E_END_TRY;
7094 return -1;
7095 } /* end test_huge_chunks() */
7096
7097
7098 /*-------------------------------------------------------------------------
7099 * Function: test_chunk_cache
7100 *
7101 * Purpose: Tests API for setting rdcc info on a DAPL, and interaction
7102 * with the corresponding properties in the file structure.
7103 *
7104 * Return: Success: 0
7105 * Failure: -1
7106 *
7107 * Programmer: Neil Fortner
7108 * Wednesday, October 29, 2008
7109 *
7110 *-------------------------------------------------------------------------
7111 */
static herr_t
test_chunk_cache(hid_t fapl)
{
    char filename[FILENAME_BUF_SIZE];
    hid_t fid = -1; /* File ID */
    hid_t fapl_local = -1; /* Local fapl */
    hid_t fapl_def = -1; /* Default fapl */
    hid_t dcpl = -1; /* Dataset creation property list ID */
    hid_t dapl1 = -1; /* Dataset access property list ID */
    hid_t dapl2 = -1; /* Dataset access property list ID */
    hid_t sid = -1; /* Dataspace ID */
    hid_t dsid = -1; /* Dataset ID */
    hsize_t dim, chunk_dim; /* Dataset and chunk dimensions */
    size_t nslots_1, nslots_2, nslots_3, nslots_4; /* rdcc number of elements */
    size_t nbytes_1, nbytes_2, nbytes_3, nbytes_4; /* rdcc number of bytes */
    size_t nlinks; /* Number of link traversals */
    double w0_1, w0_2, w0_3, w0_4; /* rdcc preemption policy */

    TESTING("dataset chunk cache configuration");

    /* Create a default fapl and dapl */
    if ((fapl_def = H5Pcreate(H5P_FILE_ACCESS)) < 0) FAIL_STACK_ERROR
    if ((dapl1 = H5Pcreate(H5P_DATASET_ACCESS)) < 0) FAIL_STACK_ERROR

    /* Verify that H5Pget_chunk_cache(dapl) returns the same values as are in
     * the default fapl.
     */
    if (H5Pget_cache(fapl_def, NULL, &nslots_1, &nbytes_1, &w0_1) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl1, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_1 != nslots_4) || (nbytes_1 != nbytes_4) || !DBL_ABS_EQUAL(w0_1, w0_4))
        FAIL_PUTS_ERROR(" Cache values from default dapl do not match those from fapl.")

    /* Set a lapl property on dapl1 (to verify that non-chunk-cache lapl
     * settings are inherited through H5Pcopy later on) */
    if (H5Pset_nlinks(dapl1, (size_t)134) < 0) FAIL_STACK_ERROR
    if (H5Pget_nlinks(dapl1, &nlinks) < 0) FAIL_STACK_ERROR
    if (nlinks != 134)
        FAIL_PUTS_ERROR(" nlinks parameter not set properly on dapl.")

    /* Copy fapl passed to this function (as we will be modifying it) */
    if ((fapl_local = H5Pcopy(fapl)) < 0) FAIL_STACK_ERROR

    /* Set new rdcc settings on fapl (distinct from the defaults, so the two
     * sources of cache settings can be told apart below) */
    nslots_2 = nslots_1 * 2;
    nbytes_2 = nbytes_1 * 2;
    w0_2 = w0_1 / 2.0F;
    if (H5Pset_cache(fapl_local, 0, nslots_2, nbytes_2, w0_2) < 0) FAIL_STACK_ERROR

    h5_fixname(FILENAME[8], fapl, filename, sizeof filename);

    /* Create file */
    if ((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_local)) < 0) FAIL_STACK_ERROR

    /* Create dataset creation property list */
    if ((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR

    /* Set chunking */
    chunk_dim = 10;
    if (H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR

    /* Create 1-D dataspace */
    dim = 100;
    if ((sid = H5Screate_simple(1, &dim, NULL)) < 0) FAIL_STACK_ERROR

    /* Create dataset with default dapl */
    if ((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl1)) < 0)
        FAIL_STACK_ERROR

    /* Retrieve dapl from dataset, verify cache values are the same as on fapl_local */
    if ((dapl2 = H5Dget_access_plist(dsid)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_2 != nslots_4) || (nbytes_2 != nbytes_4) || !DBL_ABS_EQUAL(w0_2, w0_4))
        FAIL_PUTS_ERROR(" Cache values from retrieved dapl do not match those from fapl.")
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR

    /* Set new values on dapl1. nbytes will be set to default, so the file
     * property will override this setting */
    nslots_3 = nslots_2 * 2;
    nbytes_3 = H5D_CHUNK_CACHE_NBYTES_DEFAULT;
    w0_3 = w0_2 / 2;
    if (H5Pset_chunk_cache(dapl1, nslots_3, nbytes_3, w0_3) < 0) FAIL_STACK_ERROR

    /* Close dataset, reopen with dapl1. Note the use of a dapl with H5Oopen */
    if (H5Dclose(dsid) < 0) FAIL_STACK_ERROR
    if ((dsid = H5Oopen(fid, "dset", dapl1)) < 0) FAIL_STACK_ERROR

    /* Retrieve dapl from dataset, verify cache values are the same as on dapl1
     * (except nbytes, which should have been overridden by the file setting) */
    /* Note we rely on the knowledge that H5Pget_chunk_cache retrieves these
     * values directly from the dataset structure, and not from a copy of the
     * dapl used to open the dataset (which is not preserved).
     */
    if ((dapl2 = H5Dget_access_plist(dsid)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_3 != nslots_4) || (nbytes_2 != nbytes_4) || !DBL_ABS_EQUAL(w0_3, w0_4))
        FAIL_PUTS_ERROR(" Cache values from retrieved dapl do not match those from dapl1.")
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR

    /* Close dataset, reopen with H5P_DEFAULT as dapl */
    if (H5Dclose(dsid) < 0) FAIL_STACK_ERROR
    if ((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR

    /* Retrieve dapl from dataset, verify cache values are the same as on fapl_local */
    if ((dapl2 = H5Dget_access_plist(dsid)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_2 != nslots_4) || (nbytes_2 != nbytes_4) || !DBL_ABS_EQUAL(w0_2, w0_4))
        FAIL_PUTS_ERROR(" Cache values from retrieved dapl do not match those from fapl.")
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR

    /* Similarly, test use of H5Dcreate2 with H5P_DEFAULT */
    if (H5Dclose(dsid) < 0) FAIL_STACK_ERROR
    if ((dsid = H5Dcreate2(fid, "dset2", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR
    if ((dapl2 = H5Dget_access_plist(dsid)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_2 != nslots_4) || (nbytes_2 != nbytes_4) || !DBL_ABS_EQUAL(w0_2, w0_4))
        FAIL_PUTS_ERROR(" Cache values from retrieved dapl do not match those from fapl.")
    /* Don't close dapl2, we will use it in the next section */

    /* Modify cache values on fapl_local */
    nbytes_3 = nbytes_2 * 2;
    if (H5Pset_cache(fapl_local, 0, nslots_3, nbytes_3, w0_3) < 0) FAIL_STACK_ERROR

    /* Close and reopen file with new fapl_local */
    if (H5Dclose(dsid) < 0) FAIL_STACK_ERROR
    if (H5Fclose(fid) < 0) FAIL_STACK_ERROR
    if ((fid = H5Fopen(filename, H5F_ACC_RDWR, fapl_local)) < 0) FAIL_STACK_ERROR

    /* Verify that dapl2 retrieved earlier (using values from the old fapl)
     * sets its values in the new file (test use of H5Dopen2 with a dapl)
     */
    if ((dsid = H5Dopen2(fid, "dset", dapl2)) < 0) FAIL_STACK_ERROR
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR /* Close dapl2, to avoid id leak */
    if ((dapl2 = H5Dget_access_plist(dsid)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_2 != nslots_4) || (nbytes_2 != nbytes_4) || !DBL_ABS_EQUAL(w0_2, w0_4))
        FAIL_PUTS_ERROR(" Cache values from retrieved dapl do not match those from dapl2.")

    /* Test H5D_CHUNK_CACHE_NSLOTS_DEFAULT and H5D_CHUNK_CACHE_W0_DEFAULT:
     * "default" sentinels on the dapl should fall back to the file (fapl) values */
    nslots_2 = H5D_CHUNK_CACHE_NSLOTS_DEFAULT;
    w0_2 = H5D_CHUNK_CACHE_W0_DEFAULT;
    if (H5Pset_chunk_cache(dapl2, nslots_2, nbytes_2, w0_2) < 0) FAIL_STACK_ERROR

    if (H5Dclose(dsid) < 0) FAIL_STACK_ERROR
    if ((dsid = H5Dopen2(fid, "dset", dapl2)) < 0) FAIL_STACK_ERROR
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR /* Close dapl2, to avoid id leak */
    if ((dapl2 = H5Dget_access_plist(dsid)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_3 != nslots_4) || (nbytes_2 != nbytes_4) || !DBL_ABS_EQUAL(w0_3, w0_4))
        FAIL_PUTS_ERROR(" Cache values from retrieved dapl do not match those expected.")
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR

    /* Verify that the file has indeed started using the new cache values (test
     * use of H5Oopen with H5P_DEFAULT) */
    if (H5Dclose(dsid) < 0) FAIL_STACK_ERROR
    if ((dsid = H5Oopen(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
    if ((dapl2 = H5Dget_access_plist(dsid)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_3 != nslots_4) || (nbytes_3 != nbytes_4) || !DBL_ABS_EQUAL(w0_3, w0_4))
        FAIL_PUTS_ERROR(" Cache values from retrieved dapl do not match those from fapl.")
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR

    /* Verify functionality of H5Pcopy with a dapl */
    if ((dapl2 = H5Pcopy(dapl1)) < 0) FAIL_STACK_ERROR
    if (H5Pget_chunk_cache(dapl2, &nslots_4, &nbytes_4, &w0_4) < 0) FAIL_STACK_ERROR
    if ((nslots_3 != nslots_4) || (nbytes_1 != nbytes_4) || !DBL_ABS_EQUAL(w0_3, w0_4))
        FAIL_PUTS_ERROR(" Cache values from dapl2 do not match those from dapl1.")

    /* Close */
    if (H5Dclose(dsid) < 0) FAIL_STACK_ERROR
    if (H5Sclose(sid) < 0) FAIL_STACK_ERROR
    if (H5Pclose(fapl_local) < 0) FAIL_STACK_ERROR
    if (H5Pclose(fapl_def) < 0) FAIL_STACK_ERROR
    if (H5Pclose(dapl1) < 0) FAIL_STACK_ERROR
    if (H5Pclose(dapl2) < 0) FAIL_STACK_ERROR
    if (H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
    if (H5Fclose(fid) < 0) FAIL_STACK_ERROR

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(fapl_local);
        H5Pclose(fapl_def);
        H5Pclose(dapl1);
        H5Pclose(dapl2);
        H5Pclose(dcpl);
        H5Dclose(dsid);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
} /* end test_chunk_cache() */
7304
7305
7306 /*-------------------------------------------------------------------------
7307 * Function: test_big_chunks_bypass_cache
7308 *
7309 * Purpose: When the chunk size is bigger than the cache size and the
7310 * chunk isn't on disk, this test verifies that the library
7311 * bypasses the cache.
7312 *
7313 * Note: This test is not very conclusive - it doesn't actually check
7314 * if the chunks bypass the cache... :-( -QAK
7315 *
7316 * Return: Success: 0
7317 * Failure: -1
7318 *
7319 * Programmer: Raymond Lu
7320 * 11 Feb 2009
7321 *
7322 *-------------------------------------------------------------------------
7323 */
7324 static herr_t
test_big_chunks_bypass_cache(hid_t fapl)7325 test_big_chunks_bypass_cache(hid_t fapl)
7326 {
7327 char filename[FILENAME_BUF_SIZE];
7328 hid_t fid = -1; /* File ID */
7329 hid_t fapl_local = -1; /* File access property list ID */
7330 hid_t dcpl = -1; /* Dataset creation property list ID */
7331 hid_t sid = -1; /* Dataspace ID */
7332 hid_t dsid = -1; /* Dataset ID */
7333 hsize_t dim, chunk_dim; /* Dataset and chunk dimensions */
7334 size_t rdcc_nelmts, rdcc_nbytes;
7335 int fvalue = BYPASS_FILL_VALUE;
7336 hsize_t count, stride, offset, block;
7337 static int wdata[BYPASS_CHUNK_DIM/2], rdata1[BYPASS_DIM],
7338 rdata2[BYPASS_CHUNK_DIM/2];
7339 int i, j;
7340
7341 TESTING("big chunks bypassing the cache");
7342
7343 h5_fixname(FILENAME[9], fapl, filename, sizeof filename);
7344
7345 /* Copy fapl passed to this function (as we will be modifying it) */
7346 if((fapl_local = H5Pcopy(fapl)) < 0) FAIL_STACK_ERROR
7347
7348 /* Define cache size to be smaller than chunk size */
7349 rdcc_nelmts = BYPASS_CHUNK_DIM/5;
7350 rdcc_nbytes = sizeof(int)*BYPASS_CHUNK_DIM/5;
7351 if(H5Pset_cache(fapl_local, 0, rdcc_nelmts, rdcc_nbytes, 0.0F) < 0) FAIL_STACK_ERROR
7352
7353 /* Create file */
7354 if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_local)) < 0) FAIL_STACK_ERROR
7355
7356 /* Create 1-D dataspace */
7357 dim = BYPASS_DIM;
7358 if((sid = H5Screate_simple(1, &dim, NULL)) < 0) FAIL_STACK_ERROR
7359
7360 /* Create dataset creation property list */
7361 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
7362
7363 /* Define chunk size. There will be only 2 chunks in the dataset. */
7364 chunk_dim = BYPASS_CHUNK_DIM;
7365 if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
7366
7367 /* Define fill value, fill time, and chunk allocation time */
7368 if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fvalue) < 0) FAIL_STACK_ERROR
7369 if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_IFSET) < 0) FAIL_STACK_ERROR
7370 if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_INCR) < 0) FAIL_STACK_ERROR
7371
7372 /* Create the first 1-D dataset */
7373 if((dsid = H5Dcreate2(fid, BYPASS_DATASET1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
7374 FAIL_STACK_ERROR
7375
7376 /* Select first chunk to write the data */
7377 offset = 0;
7378 count = 1;
7379 stride = 1;
7380 block = BYPASS_CHUNK_DIM / 2;
7381 if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &offset, &stride, &count, &block) < 0)
7382 FAIL_STACK_ERROR
7383
7384 /* Initialize data to write */
7385 for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
7386 wdata[i] = i;
7387
7388 /* This write should go through the cache because fill value is used. */
7389 if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, wdata) < 0)
7390 FAIL_STACK_ERROR
7391
7392 if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
7393
7394 /* Reopen the dataset */
7395 if((dsid = H5Dopen2(fid, BYPASS_DATASET1, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
7396
7397 /* Reads both 2 chunks. Reading the second chunk should bypass the cache because the
7398 * chunk is bigger than the cache size and it isn't allocated on disk. */
7399 if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata1) < 0)
7400 FAIL_STACK_ERROR
7401
7402 for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
7403 if(rdata1[i] != i) {
7404 printf(" Read different values than written in the 1st chunk.\n");
7405 printf(" At line %d and index %d, rdata1 = %d. It should be %d.\n", __LINE__, i, rdata1[i], i);
7406 TEST_ERROR
7407 } /* end if */
7408
7409 for(j = BYPASS_CHUNK_DIM / 2; j < BYPASS_DIM; j++)
7410 if(rdata1[j] != fvalue) {
7411 printf(" Read different values than written in the 2nd chunk.\n");
7412 printf(" At line %d and index %d, rdata1 = %d. It should be %d.\n", __LINE__, i, rdata1[i], fvalue);
7413 TEST_ERROR
7414 } /* end if */
7415
7416 /* Close the first dataset */
7417 if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
7418
7419 /* Create a second dataset without fill value. This time, both write
7420 * and read should bypass the cache because the chunk is bigger than the
7421 * cache size and it's not allocated on disk. */
7422 if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_NEVER) < 0) FAIL_STACK_ERROR
7423
7424 if((dsid = H5Dcreate2(fid, BYPASS_DATASET2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
7425 FAIL_STACK_ERROR
7426
7427 if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, wdata) < 0)
7428 FAIL_STACK_ERROR
7429
7430 if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
7431
7432 /* Reopen the dataset */
7433 if((dsid = H5Dopen2(fid, BYPASS_DATASET2, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
7434
7435 /* Read back only the part that was written to the file. Reading the
7436 * half chunk should bypass the cache because the chunk is bigger than
7437 * the cache size. */
7438 if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, rdata2) < 0)
7439
7440 for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
7441 if(rdata2[i] != i) {
7442 printf(" Read different values than written in the chunk.\n");
7443 printf(" At line %d and index %d, rdata2 = %d. It should be %d.\n", __LINE__, i, rdata2[i], i);
7444 TEST_ERROR
7445 } /* end if */
7446
7447 /* Close IDs */
7448 if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
7449 if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
7450 if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
7451 if(H5Pclose(fapl_local) < 0) FAIL_STACK_ERROR
7452 if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
7453
7454 PASSED();
7455 return 0;
7456
7457 error:
7458 H5E_BEGIN_TRY {
7459 H5Pclose(dcpl);
7460 H5Pclose(fapl_local);
7461 H5Dclose(dsid);
7462 H5Sclose(sid);
7463 H5Fclose(fid);
7464 } H5E_END_TRY;
7465 return -1;
7466 } /* end test_big_chunks_bypass_cache() */
7467
/* This message derives from H5Z */
/* Custom "expand" filter class, registered with H5Zregister() by
 * test_chunk_expand() below.  It has no "can apply"/"set local" callbacks;
 * its single filter function is filter_expand(), defined later in this file. */
const H5Z_class2_t H5Z_EXPAND[1] = {{
    H5Z_CLASS_T_VERS,           /* H5Z_class_t version */
    H5Z_FILTER_EXPAND,          /* Filter id number */
    1, 1,                       /* Encoding and decoding enabled */
    "expand",                   /* Filter name for debugging */
    NULL,                       /* The "can apply" callback */
    NULL,                       /* The "set local" callback */
    filter_expand,              /* The actual filter function */
}};

/* Global "expansion factor" for filter_expand() routine.
 * 0 makes the filter pass data through unchanged; any value > 0 makes it
 * *report* an output size of nbytes * 2^24 * factor (without reallocating),
 * which is intended to be too large for the chunk encoding to represent. */
static size_t filter_expand_factor_g = 0;
7481
7482
7483 /*-------------------------------------------------------------------------
7484 * Function: filter_expand
7485 *
7486 * Purpose: For testing library's behavior when a filter expands a chunk
7487 * too much.
7488 *
7489 * Note: This filter doesn't actually re-allocate the buffer to be
7490 * larger, it just changes the buffer size to a value that's too
7491 * large. The library should throw an error before using the
7492 * incorrect buffer information.
7493 *
7494 * Return: Success: Data chunk size
7495 * Failure: 0
7496 *
7497 * Programmer: Quincey Koziol
7498 * Mar 31, 2009
7499 *
7500 *-------------------------------------------------------------------------
7501 */
7502 static size_t
filter_expand(unsigned int flags,size_t H5_ATTR_UNUSED cd_nelmts,const unsigned int H5_ATTR_UNUSED * cd_values,size_t nbytes,size_t * buf_size,void H5_ATTR_UNUSED ** buf)7503 filter_expand(unsigned int flags, size_t H5_ATTR_UNUSED cd_nelmts,
7504 const unsigned int H5_ATTR_UNUSED *cd_values, size_t nbytes,
7505 size_t *buf_size, void H5_ATTR_UNUSED **buf)
7506 {
7507 size_t ret_value = 0;
7508
7509 if(flags & H5Z_FLAG_REVERSE) {
7510 /* Don't do anything when filter is applied in reverse */
7511 *buf_size = nbytes;
7512 ret_value = nbytes;
7513 } /* end if */
7514 else {
7515 /* Check for expanding the chunk */
7516 if(filter_expand_factor_g > 0) {
7517 /* Expand the buffer size beyond what can be encoded */
7518 *buf_size = nbytes * 256 * 256 * 256 * filter_expand_factor_g;
7519 ret_value = *buf_size;
7520 } /* end if */
7521 else {
7522 /* Don't expand the chunk's size */
7523 *buf_size = nbytes;
7524 ret_value = nbytes;
7525 } /* end else */
7526 } /* end else */
7527
7528 return ret_value;
7529 } /* end filter_expand() */
7530
7531
7532 /*-------------------------------------------------------------------------
7533 * Function: test_chunk_expand
7534 *
7535 * Purpose: Tests support for proper error handling when a chunk expands
7536 * too much after a filter is applied
7537 *
7538 * Return: Success: 0
7539 * Failure: -1
7540 *
7541 * Programmer: Quincey Koziol
7542 * Tuesday, March 31, 2009
7543 *
7544 *-------------------------------------------------------------------------
7545 */
static herr_t
test_chunk_expand(hid_t fapl)
{
    char        filename[FILENAME_BUF_SIZE];
    hid_t       fid = -1;                   /* File ID */
    hid_t       dcpl = -1;                  /* Dataset creation property list ID */
    hid_t       sid = -1;                   /* Dataspace ID */
    hid_t       scalar_sid = -1;            /* Scalar dataspace ID (memory space for single-element I/O) */
    hid_t       dsid = -1;                  /* Dataset ID */
    hsize_t     dim, max_dim, chunk_dim;    /* Dataset and chunk dimensions */
    hsize_t     hs_offset;                  /* Hyperslab offset */
    hsize_t     hs_size;                    /* Hyperslab size */
    H5D_alloc_time_t alloc_time;            /* Storage allocation time */
    unsigned    write_elem, read_elem;      /* Element written/read */
    unsigned    u;                          /* Local index variable */
    size_t      size;                       /* Size of type */
    herr_t      status;                     /* Generic return value */

    TESTING("filter expanding chunks too much");

    h5_fixname(FILENAME[10], fapl, filename, sizeof filename);

    /* The "expanded" chunk size (nbytes * 2^24 * 8) only overflows the
     * encodable range when size_t is wider than 32 bits, so the error
     * cannot be provoked on 32-bit machines */
    size = sizeof(size_t);
    if(size <= 4) {
        SKIPPED();
        puts("    Current machine can't test for error");
    } /* end if */
    else {
        /* Register "expansion" filter */
        if(H5Zregister(H5Z_EXPAND) < 0) FAIL_STACK_ERROR

        /* Check that the filter was registered */
        if(TRUE != H5Zfilter_avail(H5Z_FILTER_EXPAND)) FAIL_STACK_ERROR

        /* Loop over storage allocation time (early, late, incremental) */
        for(alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; alloc_time++) {
            /* Create file */
            if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR

            /* Create dataset creation property list */
            if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR

            /* Set chunking */
            chunk_dim = 10;
            if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR

            /* Set fill time */
            if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) FAIL_STACK_ERROR

            /* Set allocation time */
            if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR

            /* Set "expand" filter */
            if(H5Pset_filter(dcpl, H5Z_FILTER_EXPAND, 0, (size_t)0, NULL) < 0) FAIL_STACK_ERROR

            /* Create scalar dataspace */
            if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR

            /* Create 1-D dataspace (extendible, so H5Dset_extent works below) */
            dim = 100;
            max_dim = H5S_UNLIMITED;
            if((sid = H5Screate_simple(1, &dim, &max_dim)) < 0) FAIL_STACK_ERROR

            /* Create chunked dataset */
            if(H5D_ALLOC_TIME_EARLY == alloc_time) {
                /* Make the expansion factor large enough to cause failure right away:
                 * early allocation writes chunks at create time, so H5Dcreate2 itself
                 * must fail */
                filter_expand_factor_g = 8;

                H5E_BEGIN_TRY {
                    dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
                } H5E_END_TRY;
                if(dsid >= 0) FAIL_PUTS_ERROR("should fail to create dataset when allocation time is early");
            } /* end if */
            else {
                if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
                    FAIL_STACK_ERROR

                /* Fill elements */
                hs_size = 1;
                for(u = 0; u < 100; u++) {
                    /* Select a single element in the dataset */
                    hs_offset = u;
                    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR

                    /* Read (unwritten) element from dataset */
                    read_elem = 1;
                    if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR

                    /* Verify unwritten element is fill value (0) */
                    if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");

                    /* Don't expand chunks yet */
                    filter_expand_factor_g = 0;

                    /* Write element to dataset */
                    write_elem = u;
                    if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR

                    /* Read element from dataset */
                    read_elem = write_elem + 1;
                    if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR

                    /* Verify written element is read in */
                    if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");

                    /* Expand chunks now */
                    filter_expand_factor_g = 8;

                    /* Write element to dataset (must fail: the filter now
                     * reports an unencodable chunk size) */
                    write_elem = u;
                    H5E_BEGIN_TRY {
                        status = H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem);
                    } H5E_END_TRY;
                    if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
                } /* end for */

                /* Incrementally extend dataset and verify write/reads */
                while(dim < 1000) {
                    /* Extend dataset */
                    dim += 100;
                    if(H5Dset_extent(dsid, &dim) < 0) FAIL_STACK_ERROR

                    /* Close old dataspace */
                    if(H5Sclose(sid) < 0) FAIL_STACK_ERROR

                    /* Get dataspace for dataset now (reflects the new extent) */
                    if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR

                    /* Fill new elements */
                    hs_size = 1;
                    for(u = 0; u < 100; u++) {
                        /* Select a single element in the newly-added section */
                        hs_offset = (dim + u) - 100;
                        if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR

                        /* Read (unwritten) element from dataset */
                        read_elem = 1;
                        if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR

                        /* Verify unwritten element is fill value (0) */
                        if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");

                        /* Don't expand chunks yet */
                        filter_expand_factor_g = 0;

                        /* Write element to dataset */
                        write_elem = u;
                        if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR

                        /* Read element from dataset */
                        read_elem = write_elem + 1;
                        if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR

                        /* Verify written element is read in */
                        if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");

                        /* Expand chunks now */
                        filter_expand_factor_g = 8;

                        /* Write element to dataset (must fail, as above) */
                        write_elem = u;
                        H5E_BEGIN_TRY {
                            status = H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem);
                        } H5E_END_TRY;
                        if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
                    } /* end for */
                } /* end while */

                /* Close dataset */
                if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
            } /* end else */

            /* Close everything */
            if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
            if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
            if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
            if(H5Fclose(fid) < 0) FAIL_STACK_ERROR

            /* If the dataset was created, do some extra testing */
            if(H5D_ALLOC_TIME_EARLY != alloc_time) {
                /* Re-open file & dataset */
                if((fid = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) FAIL_STACK_ERROR

                /* Open dataset */
                if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR

                /* Create scalar dataspace */
                if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR

                /* Get dataspace for dataset now */
                if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR

                /* Read elements */
                hs_size = 1;
                for(u = 0; u < 1000; u++) {
                    /* Select a single element in the dataset */
                    hs_offset = u;
                    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR

                    /* Read element from dataset */
                    read_elem = u + 1;
                    if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR

                    /* Verify previously-written element is the proper value
                     * (each 100-element section was filled with 0..99 above) */
                    if(read_elem != (u % 100)) FAIL_PUTS_ERROR("invalid element read");

                    /* Don't expand chunks yet */
                    filter_expand_factor_g = 0;

                    /* Write element to dataset */
                    write_elem = u % 100;
                    if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR

                    /* Read element from dataset */
                    read_elem = write_elem + 1;
                    if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR

                    /* Verify written element is read in */
                    if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");

                    /* Expand chunks now */
                    filter_expand_factor_g = 8;

                    /* Write element to dataset (must fail, as above) */
                    write_elem = u % 100;
                    H5E_BEGIN_TRY {
                        status = H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem);
                    } H5E_END_TRY;
                    if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
                } /* end for */

                /* Close everything */
                if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
                if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
                if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
                if(H5Fclose(fid) < 0) FAIL_STACK_ERROR

                /* Re-open file */
                if((fid = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) FAIL_STACK_ERROR

                /* Delete dataset */
                if(H5Ldelete(fid, "dset", H5P_DEFAULT) < 0) FAIL_STACK_ERROR

                /* Close everything */
                if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
            } /* end if */
        } /* end for */

        /* Unregister "expansion" filter */
        if(H5Zunregister(H5Z_FILTER_EXPAND) < 0) FAIL_STACK_ERROR

        /* Check that the filter was unregistered */
        if(FALSE != H5Zfilter_avail(H5Z_FILTER_EXPAND)) FAIL_STACK_ERROR

        PASSED();
    } /* end else */

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(dcpl);
        H5Dclose(dsid);
        H5Sclose(sid);
        H5Sclose(scalar_sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
} /* end test_chunk_expand() */
7815
7816
7817 /*-------------------------------------------------------------------------
7818 * Function: test_large_chunk_shrink
7819 *
7820 * Purpose: Tests support for shrinking a chunk larger than 1 MB by a
7821 * size greater than 1 MB.
7822 *
7823 * Return: Success: 0
7824 * Failure: -1
7825 *
7826 * Programmer: Neil Fortner
7827 * Monday, November 31, 2011
7828 *
7829 *-------------------------------------------------------------------------
7830 */
7831 static herr_t
test_large_chunk_shrink(hid_t fapl)7832 test_large_chunk_shrink(hid_t fapl)
7833 {
7834 char filename[FILENAME_BUF_SIZE];
7835 hid_t fid = -1; /* File ID */
7836 hid_t dcpl = -1; /* Dataset creation property list ID */
7837 hid_t sid = -1; /* Dataspace ID */
7838 hid_t scalar_sid = -1;/* Scalar dataspace ID */
7839 hid_t dsid = -1; /* Dataset ID */
7840 hsize_t dim, max_dim, chunk_dim; /* Dataset and chunk dimensions */
7841 hsize_t hs_offset; /* Hyperslab offset */
7842 hsize_t hs_size; /* Hyperslab size */
7843 unsigned write_elem, read_elem; /* Element written/read */
7844
7845 TESTING("shrinking large chunk");
7846
7847 h5_fixname(FILENAME[10], fapl, filename, sizeof filename);
7848
7849 /* Create file */
7850 if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
7851
7852 /* Create dataset creation property list */
7853 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
7854
7855 /* Set 2 MB chunk size */
7856 chunk_dim = 2 * 1024 * 1024 / sizeof(unsigned);
7857 if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
7858
7859 /* Create scalar dataspace */
7860 if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
7861
7862 /* Create 1-D dataspace */
7863 dim = 2 * 1024 * 1024 / sizeof(unsigned);
7864 max_dim = H5S_UNLIMITED;
7865 if((sid = H5Screate_simple(1, &dim, &max_dim)) < 0) FAIL_STACK_ERROR
7866
7867 /* Create 2 MB chunked dataset */
7868 if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
7869 FAIL_STACK_ERROR
7870
7871 /* Select last element in the dataset */
7872 hs_offset = dim - 1;
7873 hs_size = 1;
7874 if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR
7875
7876 /* Read (unwritten) element from dataset */
7877 read_elem = 1;
7878 if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
7879
7880 /* Verify unwritten element is fill value (0) */
7881 if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
7882
7883 /* Write element to dataset */
7884 write_elem = 2;
7885 if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
7886
7887 /* Read element from dataset */
7888 read_elem = write_elem + 1;
7889 if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
7890
7891 /* Verify written element is read in */
7892 if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
7893
7894 /* Shrink dataset to 512 KB */
7895 dim = 512 * 1024 / sizeof(unsigned);
7896 if(H5Dset_extent(dsid, &dim) < 0) FAIL_STACK_ERROR
7897
7898 /* Expand dataset back to 2MB */
7899 dim = 2 * 1024 * 1024 / sizeof(unsigned);
7900 if(H5Dset_extent(dsid, &dim) < 0) FAIL_STACK_ERROR
7901
7902 /* Read element from dataset */
7903 read_elem = 1;
7904 if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
7905
7906 /* Verify element is now 0 */
7907 if(read_elem != 0) FAIL_PUTS_ERROR("invalid element read");
7908
7909 /* Close everything */
7910 if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
7911 if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
7912 if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
7913 if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
7914 if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
7915
7916 PASSED();
7917
7918 return 0;
7919
7920 error:
7921 H5E_BEGIN_TRY {
7922 H5Pclose(dcpl);
7923 H5Dclose(dsid);
7924 H5Sclose(sid);
7925 H5Sclose(scalar_sid);
7926 H5Fclose(fid);
7927 } H5E_END_TRY;
7928 return -1;
7929 } /* end test_large_chunk_shrink() */
7930
7931
7932 /*-------------------------------------------------------------------------
7933 *
7934 * test_idx_compatible():
7935 * Verify that the 1.8 branch cannot read datasets that use
7936 * Fixed Array indexing method.
7937 *
7938 *-------------------------------------------------------------------------
7939 */
7940 static herr_t
test_idx_compatible(void)7941 test_idx_compatible(void)
7942 {
7943 hid_t fid = -1; /* File id */
7944 hid_t did = -1; /* Dataset id */
7945 char *srcdir = HDgetenv("srcdir"); /* where the src code is located */
7946 char filename[FILENAME_BUF_SIZE] = ""; /* old test file name */
7947
7948 /* Output message about test being performed */
7949 TESTING("Compatibility for datasets that use Fixed Array indexing\n");
7950
7951 /* Generate correct name for test file by prepending the source path */
7952 if(srcdir && ((HDstrlen(srcdir) + HDstrlen(FIXED_IDX_FILE) + 1) < sizeof(filename))) {
7953 HDstrcpy(filename, srcdir);
7954 HDstrcat(filename, "/");
7955 }
7956 HDstrcat(filename, FIXED_IDX_FILE);
7957
7958 /* Open the file */
7959 if((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
7960 FAIL_STACK_ERROR
7961
7962 /* Should not able to read the dataset w/o filter that use Fixed Array indexing */
7963 H5E_BEGIN_TRY {
7964 if((did = H5Dopen2(fid, DSET, H5P_DEFAULT)) != FAIL)
7965 TEST_ERROR
7966 } H5E_END_TRY;
7967
7968 /* Should not able to read the dataset w/ filter that use Fixed Array indexing */
7969 H5E_BEGIN_TRY {
7970 if((did = H5Dopen2(fid, DSET_FILTER, H5P_DEFAULT)) != FAIL)
7971 TEST_ERROR
7972 } H5E_END_TRY;
7973
7974 if(H5Fclose(fid) < 0)
7975 FAIL_STACK_ERROR
7976
7977 PASSED();
7978 return 0;
7979
7980 error:
7981 H5E_BEGIN_TRY {
7982 H5Dclose(did);
7983 H5Fclose(fid);
7984 } H5E_END_TRY;
7985 return -1;
7986 } /* test_idx_compatible */
7987
7988
7989 /*-------------------------------------------------------------------------
7990 * Function: test_zero_dim_dset
7991 *
 * Purpose:     Tests support for writing and reading a 1-D chunked dataset
 *              with dimension size = 0.
7994 *
7995 * Return: Success: 0
7996 * Failure: -1
7997 *
7998 * Programmer: Mohamad Chaarawi
 *              Wednesday, July 9, 2014
8000 *
8001 *-------------------------------------------------------------------------
8002 */
8003 static herr_t
test_zero_dim_dset(hid_t fapl)8004 test_zero_dim_dset(hid_t fapl)
8005 {
8006 char filename[FILENAME_BUF_SIZE];
8007 hid_t fid = -1; /* File ID */
8008 hid_t dcpl = -1; /* Dataset creation property list ID */
8009 hid_t sid = -1; /* Dataspace ID */
8010 hid_t dsid = -1; /* Dataset ID */
8011 hsize_t dim, chunk_dim; /* Dataset and chunk dimensions */
8012 int data[1];
8013
8014 TESTING("shrinking large chunk");
8015
8016 h5_fixname(FILENAME[13], fapl, filename, sizeof filename);
8017
8018 /* Create file */
8019 if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
8020
8021 /* Create dataset creation property list */
8022 if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
8023
8024 /* Set 1 chunk size */
8025 chunk_dim = 1;
8026 if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
8027
8028 /* Create 1D dataspace with 0 dim size */
8029 dim = 0;
8030 if((sid = H5Screate_simple(1, &dim, NULL)) < 0) FAIL_STACK_ERROR
8031
8032 /* Create chunked dataset */
8033 if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
8034 FAIL_STACK_ERROR
8035
8036 /* write 0 elements from dataset */
8037 if(H5Dwrite(dsid, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, data) < 0) FAIL_STACK_ERROR
8038
8039 /* Read 0 elements from dataset */
8040 if(H5Dread(dsid, H5T_NATIVE_INT, sid, sid, H5P_DEFAULT, data) < 0) FAIL_STACK_ERROR
8041
8042 /* Close everything */
8043 if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
8044 if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
8045 if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
8046 if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
8047
8048 PASSED();
8049
8050 return 0;
8051
8052 error:
8053 H5E_BEGIN_TRY {
8054 H5Pclose(dcpl);
8055 H5Dclose(dsid);
8056 H5Sclose(sid);
8057 H5Fclose(fid);
8058 } H5E_END_TRY;
8059 return -1;
8060 } /* end test_zero_dim_dset() */
8061
8062
8063 /*-------------------------------------------------------------------------
8064 * Function: test_scatter
8065 *
8066 * Purpose: Tests H5Dscatter with a variety of different selections
8067 * and source buffer sizes.
8068 *
8069 * Return: Success: 0
8070 * Failure: -1
8071 *
8072 * Programmer: Neil Fortner
8073 * Wednesday, January 16, 2013
8074 *
8075 *-------------------------------------------------------------------------
8076 */
/* Callback state for scatter_cb(): hands out elements of src_buf in runs
 * of at most 'block' elements per invocation until 'size' is exhausted. */
typedef struct scatter_info_t {
    int *src_buf;   /* Source data buffer */
    size_t block;   /* Maximum number of elements to return to H5Dscatter() */
    size_t size;    /* Remaining number of elements to return */
} scatter_info_t;

/* Compare two 3-D int arrays element-by-element; on the first mismatch,
 * report the failing coordinates and jump to the enclosing 'error' label.
 * ARR/EXP must be true arrays (sizeof-based bounds), not pointers. */
#define TEST_SCATTER_CHECK_ARR(ARR, EXP) \
    for(i=0; i<(int)(sizeof(ARR)/sizeof(ARR[0])); i++) \
        for(j=0; j<(int)(sizeof(ARR[0])/sizeof(ARR[0][0])); j++) \
            for(k=0; k<(int)(sizeof(ARR[0][0])/sizeof(ARR[0][0][0])); k++) \
                if(ARR[i][j][k] != EXP[i][j][k]) { \
                    H5_FAILED(); AT(); \
                    printf("    " #ARR "[%d][%d][%d] == %d, " #EXP "[%d][%d][%d] == %d\n", i, j, k, ARR[i][j][k], i, j, k, EXP[i][j][k]); \
                    goto error; \
                }
8092
8093 static herr_t
scatter_cb(void ** src_buf,size_t * src_buf_bytes_used,void * _scatter_info)8094 scatter_cb(void **src_buf/*out*/, size_t *src_buf_bytes_used/*out*/,
8095 void *_scatter_info)
8096 {
8097 scatter_info_t *scatter_info = (scatter_info_t *)_scatter_info;
8098 size_t nelmts; /* Number of elements to return in src_buf */
8099
8100 /* Calculate number of elements */
8101 nelmts = MIN(scatter_info->block, scatter_info->size);
8102 HDassert(nelmts > 0);
8103
8104 /* Set output variables */
8105 *src_buf = (void *)scatter_info->src_buf;
8106 *src_buf_bytes_used = nelmts * sizeof(scatter_info->src_buf[0]);
8107
8108 /* Update scatter_info */
8109 scatter_info->src_buf += nelmts;
8110 scatter_info->size -= nelmts;
8111
8112 return SUCCEED;
8113 }
8114
static herr_t
test_scatter(void)
{
    hid_t sid = -1;                 /* Dataspace ID */
    hsize_t dim[3] = {8, 5, 8};     /* Dataspace dimensions */
    hsize_t start[3] = {0, 0, 0};
    hsize_t stride[3] = {0, 0, 0};
    hsize_t count[3] = {0, 0, 0};
    hsize_t block[3] = {0, 0, 0};
    hsize_t start2[3] = {0, 0, 0};
    hsize_t count2[3] = {0, 0, 0};
    hsize_t point[4][3] = {{2, 3, 2}, {3, 0, 2}, {7, 2, 0}, {0, 1, 5}};
    size_t src_buf_size;            /* Elements handed to the callback per call */
    int src_buf[36];                /* Source data buffer */
    int dst_buf[8][5][8];           /* Destination data buffer */
    int expect_dst_buf[8][5][8];    /* Expected destination data buffer */
    scatter_info_t scatter_info;    /* Operator data for callback */
    int i, j, k, src_i;             /* Local index variables */

    TESTING("H5Dscatter()");

    /* Create dataspace */
    if((sid = H5Screate_simple(3, dim, NULL)) < 0) TEST_ERROR

    /* Initialize src_buf with the sequence 1..36 */
    for(i=0; i<(int)(sizeof(src_buf)/sizeof(src_buf[0])); i++)
        src_buf[i] = i + 1;


    /*
     * Test 1: Simple case - one contiguous row of 8 elements
     */
    /* Select hyperslab */
    count[0] = 1;
    count[1] = 1;
    count[2] = 8;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
        TEST_ERROR

    /* Initialize dst_buf and expect_dst_buf */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    for(i=0; i<8; i++)
        expect_dst_buf[0][0][i] = src_buf[i];

    /* Loop over buffer sizes (exercises both partial and whole-selection
     * deliveries from the callback) */
    for(src_buf_size=1; src_buf_size<=9; src_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Set up scatter info */
        scatter_info.src_buf = src_buf;
        scatter_info.block = src_buf_size;
        scatter_info.size = 8;

        /* Scatter data */
        if(H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, dst_buf) < 0)
            TEST_ERROR

        /* Verify data */
        TEST_SCATTER_CHECK_ARR(dst_buf, expect_dst_buf)
    } /* end for */


    /*
     * Test 2: Single block in dataset
     */
    /* Select hyperslab */
    start[0] = 3;
    start[1] = 2;
    start[2] = 4;
    count[0] = 2;
    count[1] = 3;
    count[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf: src elements land in C (row-major) order
     * within the selected block */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    src_i = 0;
    for(i=3; i<5; i++)
        for(j=2; j<5; j++)
            for(k=4; k<6; k++)
                expect_dst_buf[i][j][k] = src_buf[src_i++];

    /* Loop over buffer sizes */
    for(src_buf_size=1; src_buf_size<=13; src_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Set up scatter info */
        scatter_info.src_buf = src_buf;
        scatter_info.block = src_buf_size;
        scatter_info.size = 12;

        /* Scatter data */
        if(H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, dst_buf) < 0)
            TEST_ERROR

        /* Verify data */
        TEST_SCATTER_CHECK_ARR(dst_buf, expect_dst_buf)
    } /* end for */


    /*
     * Test 3: Multiple blocks
     */
    /* Select hyperslab */
    start[0] = 1;
    start[1] = 1;
    start[2] = 1;
    stride[0] = 3;
    stride[1] = 4;
    stride[2] = 5;
    count[0] = 3;
    count[1] = 1;
    count[2] = 2;
    block[0] = 1;
    block[1] = 3;
    block[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride, count, block) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    /* Iterate over block containing selection, checking if each element is in
     * selection.  Note that the algorithm used here (if statement) would not
     * work for overlapping hyperslabs. */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    src_i = 0;
    for(i=1; i<8; i++)
        for(j=1; j<4; j++)
            for(k=1; k<8; k++)
                if((hsize_t)i >= start[0]
                        && ((hsize_t)i - start[0]) % stride[0] < block[0]
                        && ((hsize_t)i - start[0]) / stride[0] < count[0]
                        && (hsize_t)j >= start[1]
                        && ((hsize_t)j - start[1]) % stride[1] < block[1]
                        && ((hsize_t)j - start[1]) / stride[1] < count[1]
                        && (hsize_t)k >= start[2]
                        && ((hsize_t)k - start[2]) % stride[2] < block[2]
                        && ((hsize_t)k - start[2]) / stride[2] < count[2])
                    expect_dst_buf[i][j][k] = src_buf[src_i++];

    /* Loop over buffer sizes */
    for(src_buf_size=1; src_buf_size<=37; src_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Set up scatter info */
        scatter_info.src_buf = src_buf;
        scatter_info.block = src_buf_size;
        scatter_info.size = 36;

        /* Scatter data */
        if(H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, dst_buf) < 0)
            TEST_ERROR

        /* Verify data */
        TEST_SCATTER_CHECK_ARR(dst_buf, expect_dst_buf)
    } /* end for */


    /*
     * Test 4: Compound selection (XOR of two overlapping hyperslabs)
     */
    /* Select hyperslabs */
    start[0] = 2;
    start[1] = 1;
    start[2] = 1;
    count[0] = 2;
    count[1] = 3;
    count[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
        TEST_ERROR
    start2[0] = 1;
    start2[1] = 2;
    start2[2] = 2;
    count2[0] = 3;
    count2[1] = 2;
    count2[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_XOR, start2, NULL, count2, NULL) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    /* Iterate over block containing selection, checking if each element is in
     * selection (the '!=' of the two '!(...)' terms computes the logical XOR
     * of membership in the two hyperslabs). */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    src_i = 0;
    for(i=1; i<4; i++)
        for(j=1; j<4; j++)
            for(k=1; k<4; k++)
                if(!(((hsize_t)i >= start[0] && (hsize_t)i < start[0] + count[0])
                        && ((hsize_t)j >= start[1] && (hsize_t)j < start[1] + count[1])
                        && ((hsize_t)k >= start[2] && (hsize_t)k < start[2] + count[2]))
                        != !(((hsize_t)i >= start2[0] && (hsize_t)i < start2[0] + count2[0])
                        && ((hsize_t)j >= start2[1] && (hsize_t)j < start2[1] + count2[1])
                        && ((hsize_t)k >= start2[2] && (hsize_t)k < start2[2] + count2[2])))
                    expect_dst_buf[i][j][k] = src_buf[src_i++];

    /* Loop over buffer sizes */
    for(src_buf_size=1; src_buf_size<=17; src_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Set up scatter info */
        scatter_info.src_buf = src_buf;
        scatter_info.block = src_buf_size;
        scatter_info.size = 16;

        /* Scatter data */
        if(H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, dst_buf) < 0)
            TEST_ERROR

        /* Verify data */
        TEST_SCATTER_CHECK_ARR(dst_buf, expect_dst_buf)
    } /* end for */


    /*
     * Test 5: Point selection
     */
    /* Select points (element selection) */
    if(H5Sselect_elements(sid, H5S_SELECT_SET, sizeof(point) / sizeof(point[0]), (hsize_t *)point) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf: elements land at the selected points in
     * the order the points were listed */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    for(i=0; i<(int)(sizeof(point) / sizeof(point[0])); i++)
        expect_dst_buf[point[i][0]][point[i][1]][point[i][2]]
                = src_buf[i];

    /* Loop over buffer sizes */
    for(src_buf_size=1; src_buf_size<=5; src_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Set up scatter info */
        scatter_info.src_buf = src_buf;
        scatter_info.block = src_buf_size;
        scatter_info.size = 4;

        /* Scatter data */
        if(H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, dst_buf) < 0)
            TEST_ERROR

        /* Verify data */
        TEST_SCATTER_CHECK_ARR(dst_buf, expect_dst_buf)
    } /* end for */


    /* Close everything */
    if(H5Sclose(sid) < 0) TEST_ERROR

    PASSED();

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Sclose(sid);
    } H5E_END_TRY;
    return -1;
} /* end test_scatter() */
8379
8380
8381 /*-------------------------------------------------------------------------
8382 * Function: test_gather
8383 *
8384 * Purpose: Tests H5Dgather with a variety of different selections and
8385 * destination buffer sizes.
8386 *
8387 * Return: Success: 0
8388 * Failure: -1
8389 *
8390 * Programmer: Neil Fortner
8391 * Wednesday, January 16, 2013
8392 *
8393 *-------------------------------------------------------------------------
8394 */
/* Operator data for gather_cb(): carries the expected data and the
 * constraints the callback verifies on each invocation of H5Dgather(). */
typedef struct gather_info_t {
    int *expect_dst_buf;  /* Expected destination data buffer; cursor advances as elements are verified */
    size_t max_nelmts;    /* Maximum number of elements passed to callback per invocation */
    hbool_t last_call;    /* Whether this should be the last time the callback is called */
} gather_info_t;
8400
8401 static herr_t
gather_cb(const void * dst_buf,size_t dst_buf_bytes_used,void * _gather_info)8402 gather_cb(const void *dst_buf, size_t dst_buf_bytes_used,
8403 void *_gather_info)
8404 {
8405 gather_info_t *gather_info = (gather_info_t *)_gather_info;
8406 size_t nelmts; /* Number of elements in src_buf */
8407 int i; /* Local index variable */
8408
8409 HDassert(dst_buf_bytes_used > 0);
8410
8411 /* Calculate number of elements */
8412 nelmts = dst_buf_bytes_used / sizeof(gather_info->expect_dst_buf[0]);
8413
8414 /* Make sure the number of bytes is a multiple of the number of elements */
8415 if(nelmts * sizeof(gather_info->expect_dst_buf[0]) != dst_buf_bytes_used)
8416 TEST_ERROR
8417
8418 /* Make sure we weren't passed more data than we requested to be passed at
8419 * once */
8420 if(nelmts > gather_info->max_nelmts)
8421 TEST_ERROR
8422
8423 /* If we were passed less data than requested, make sure this is the last
8424 * time the callback was called */
8425 if(gather_info->last_call)
8426 TEST_ERROR
8427 if(nelmts < gather_info->max_nelmts)
8428 gather_info->last_call = TRUE;
8429
8430 /* Compare data and expected data */
8431 for(i=0; i<(int)nelmts; i++)
8432 if(((const int *)dst_buf)[i] != *((gather_info->expect_dst_buf)++))
8433 TEST_ERROR
8434
8435 return SUCCEED;
8436
8437 error:
8438 return FAIL;
8439 }
8440
static herr_t
test_gather(void)
{
    hid_t sid = -1;             /* Dataspace ID */
    hsize_t dim[3] = {8, 5, 8}; /* Dataspace dimensions */
    hsize_t start[3] = {0, 0, 0};
    hsize_t stride[3] = {0, 0, 0};
    hsize_t count[3] = {0, 0, 0};
    hsize_t block[3] = {0, 0, 0};
    hsize_t start2[3] = {0, 0, 0};
    hsize_t count2[3] = {0, 0, 0};
    hsize_t point[4][3] = {{2, 3, 2}, {3, 0, 2}, {7, 2, 0}, {0, 1, 5}};
    size_t dst_buf_size;        /* Destination buffer size, in elements */
    int src_buf[8][5][8];       /* Source data buffer */
    int dst_buf[36];            /* Destination data buffer */
    int expect_dst_buf[36];     /* Expected destination data buffer */
    gather_info_t gather_info;  /* Operator data for callback */
    int i, j, k, dst_i;         /* Local index variables */

    TESTING("H5Dgather()");

    /* Create dataspace */
    if((sid = H5Screate_simple(3, dim, NULL)) < 0) TEST_ERROR

    /* Initialize src_buf.  Each element gets a distinct value (its 1-based
     * linear index) so misplaced elements are detectable. */
    for(i=0; i<(int)(sizeof(src_buf)/sizeof(src_buf[0])); i++)
        for(j=0; j<(int)(sizeof(src_buf[0])/sizeof(src_buf[0][0])); j++)
            for(k=0; k<(int)(sizeof(src_buf[0][0])/sizeof(src_buf[0][0][0])); k++)
                src_buf[i][j][k] = 1 + k
                        + (int)(sizeof(src_buf[0][0]) / sizeof(src_buf[0][0][0])) * j
                        + (int)(sizeof(src_buf[0]) / sizeof(src_buf[0][0][0])) * i;


    /*
     * Test 1: Simple case
     */
    /* Select hyperslab (a single contiguous row of 8 elements) */
    count[0] = 1;
    count[1] = 1;
    count[2] = 8;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL ,count, NULL) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    for(i=0; i<8; i++)
        expect_dst_buf[i] = src_buf[0][0][i];

    /* Loop over buffer sizes (1..9 elements, spanning smaller-than,
     * equal-to and larger-than the 8-element selection) */
    for(dst_buf_size=1; dst_buf_size<=9; dst_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Initialize gather_info */
        gather_info.expect_dst_buf = expect_dst_buf;
        gather_info.max_nelmts = dst_buf_size;
        gather_info.last_call = FALSE;

        /* Gather data */
        if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, dst_buf_size * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info) < 0)
            TEST_ERROR

        /* Verify that all data has been gathered (and verified) */
        if(gather_info.expect_dst_buf - expect_dst_buf != 8) TEST_ERROR
    } /* end for */

    /* Test without a callback (the destination buffer must then be large
     * enough to hold the whole selection) */
    /* Loop over buffer sizes */
    for(dst_buf_size=8; dst_buf_size<=9; dst_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Gather data */
        if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, dst_buf_size * sizeof(dst_buf[0]), dst_buf, NULL, NULL) < 0)
            TEST_ERROR

        /* Verify data */
        for(i=0; i<(int)(sizeof(dst_buf)/sizeof(dst_buf[0])); i++)
            if(dst_buf[i] != expect_dst_buf[i])
                TEST_ERROR
    } /* end for */

    /* Test with a dst_buf_size that is not a multiple of the datatype size */
    /* Reset dst_buf */
    dst_buf_size = 7;
    (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

    /* Initialize gather_info.  Only dst_buf_size - 1 whole elements fit in
     * the odd-sized buffer, so that is the per-call element limit. */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.max_nelmts = dst_buf_size - 1;
    gather_info.last_call = FALSE;

    /* Gather data */
    if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, dst_buf_size * sizeof(dst_buf[0]) - 1, dst_buf, gather_cb, &gather_info) < 0)
        TEST_ERROR

    /* Verify that all data has been gathered (and verified) */
    if(gather_info.expect_dst_buf - expect_dst_buf != 8) TEST_ERROR


    /*
     * Test 2: Single block in dataset
     */
    /* Select hyperslab (a 2x3x2 block, 12 elements) */
    start[0] = 3;
    start[1] = 2;
    start[2] = 4;
    count[0] = 2;
    count[1] = 3;
    count[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL ,count, NULL) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    dst_i = 0;
    for(i=3; i<5; i++)
        for(j=2; j<5; j++)
            for(k=4; k<6; k++)
                expect_dst_buf[dst_i++] = src_buf[i][j][k];

    /* Loop over buffer sizes */
    for(dst_buf_size=1; dst_buf_size<=13; dst_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Initialize gather_info */
        gather_info.expect_dst_buf = expect_dst_buf;
        gather_info.max_nelmts = dst_buf_size;
        gather_info.last_call = FALSE;

        /* Gather data */
        if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, dst_buf_size * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info) < 0)
            TEST_ERROR

        /* Verify that all data has been gathered (and verified) */
        if(gather_info.expect_dst_buf - expect_dst_buf != 12) TEST_ERROR
    } /* end for */


    /*
     * Test 3: Multiple blocks
     */
    /* Select hyperslab (strided blocks, 3*1*2 blocks of 1x3x2 = 36 elements) */
    start[0] = 1;
    start[1] = 1;
    start[2] = 1;
    stride[0] = 3;
    stride[1] = 4;
    stride[2] = 5;
    count[0] = 3;
    count[1] = 1;
    count[2] = 2;
    block[0] = 1;
    block[1] = 3;
    block[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, stride ,count, block) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    /* Iterate over block containing selection, checking if each element is in
     * selection.  Note that the algorithm used here (if statement) would not
     * work for overlapping hyperslabs. */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    dst_i = 0;
    for(i=1; i<8; i++)
        for(j=1; j<4; j++)
            for(k=1; k<8; k++)
                if((hsize_t)i >= start[0]
                        && ((hsize_t)i - start[0]) % stride[0] < block[0]
                        && ((hsize_t)i - start[0]) / stride[0] < count[0]
                        && (hsize_t)j >= start[1]
                        && ((hsize_t)j - start[1]) % stride[1] < block[1]
                        && ((hsize_t)j - start[1]) / stride[1] < count[1]
                        && (hsize_t)k >= start[2]
                        && ((hsize_t)k - start[2]) % stride[2] < block[2]
                        && ((hsize_t)k - start[2]) / stride[2] < count[2])
                    expect_dst_buf[dst_i++] = src_buf[i][j][k];

    /* Loop over buffer sizes */
    for(dst_buf_size=1; dst_buf_size<=37; dst_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Initialize gather_info */
        gather_info.expect_dst_buf = expect_dst_buf;
        gather_info.max_nelmts = dst_buf_size;
        gather_info.last_call = FALSE;

        /* Gather data */
        if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, dst_buf_size * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info) < 0)
            TEST_ERROR

        /* Verify that all data has been gathered (and verified) */
        if(gather_info.expect_dst_buf - expect_dst_buf != 36) TEST_ERROR
    } /* end for */


    /*
     * Test 4: Compound selection
     */
    /* Select hyperslabs (XOR of two overlapping blocks -> 16 elements) */
    start[0] = 2;
    start[1] = 1;
    start[2] = 1;
    count[0] = 2;
    count[1] = 3;
    count[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL ,count, NULL) < 0)
        TEST_ERROR
    start2[0] = 1;
    start2[1] = 2;
    start2[2] = 2;
    count2[0] = 3;
    count2[1] = 2;
    count2[2] = 2;
    if(H5Sselect_hyperslab(sid, H5S_SELECT_XOR, start2, NULL ,count2, NULL) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    /* Iterate over block containing selection, checking if each element is in
     * selection (in exactly one of the two hyperslabs, per the XOR). */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    dst_i = 0;
    for(i=1; i<4; i++)
        for(j=1; j<4; j++)
            for(k=1; k<4; k++)
                if(!(((hsize_t)i >= start[0] && (hsize_t)i < start[0] + count[0])
                        && ((hsize_t)j >= start[1] && (hsize_t)j < start[1] + count[1])
                        && ((hsize_t)k >= start[2] && (hsize_t)k < start[2] + count[2]))
                        != !(((hsize_t)i >= start2[0] && (hsize_t)i < start2[0] + count2[0])
                        && ((hsize_t)j >= start2[1] && (hsize_t)j < start2[1] + count2[1])
                        && ((hsize_t)k >= start2[2] && (hsize_t)k < start2[2] + count2[2])))
                    expect_dst_buf[dst_i++] = src_buf[i][j][k];

    /* Loop over buffer sizes */
    for(dst_buf_size=1; dst_buf_size<=17; dst_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Initialize gather_info */
        gather_info.expect_dst_buf = expect_dst_buf;
        gather_info.max_nelmts = dst_buf_size;
        gather_info.last_call = FALSE;

        /* Gather data */
        if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, dst_buf_size * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info) < 0)
            TEST_ERROR

        /* Verify that all data has been gathered (and verified) */
        if(gather_info.expect_dst_buf - expect_dst_buf != 16) TEST_ERROR
    } /* end for */


    /*
     * Test 5: Point selection
     */
    /* Select points (element selection, not a hyperslab) */
    if(H5Sselect_elements(sid, H5S_SELECT_SET, sizeof(point) / sizeof(point[0]), (hsize_t *)point) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    /* Gathered data arrives in point order. */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    for(i=0; i<(int)(sizeof(point) / sizeof(point[0])); i++)
        expect_dst_buf[i] = src_buf[point[i][0]][point[i][1]][point[i][2]];

    /* Loop over buffer sizes */
    for(dst_buf_size=1; dst_buf_size<=5; dst_buf_size++) {
        /* Reset dst_buf */
        (void)HDmemset(dst_buf, 0, sizeof(dst_buf));

        /* Initialize gather_info */
        gather_info.expect_dst_buf = expect_dst_buf;
        gather_info.max_nelmts = dst_buf_size;
        gather_info.last_call = FALSE;

        /* Gather data */
        if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, dst_buf_size * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info) < 0)
            TEST_ERROR

        /* Verify that all data has been gathered (and verified) */
        if(gather_info.expect_dst_buf - expect_dst_buf != 4) TEST_ERROR
    } /* end for */


    /* Close everything */
    if(H5Sclose(sid) < 0) TEST_ERROR

    PASSED();

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Sclose(sid);
    } H5E_END_TRY;
    return -1;
} /* end test_gather() */
8741
8742
8743 /*-------------------------------------------------------------------------
8744 * Function: test_scatter_error
8745 *
8746 * Purpose: Tests H5Dscatter with a variety of different conditions
8747 * that should cause errors.
8748 *
8749 * Return: Success: 0
8750 * Failure: -1
8751 *
8752 * Programmer: Neil Fortner
8753 * Monday, February 4, 2013
8754 *
8755 *-------------------------------------------------------------------------
8756 */
8757 static herr_t
scatter_error_cb_fail(void ** src_buf,size_t * src_buf_bytes_used,void * _scatter_info)8758 scatter_error_cb_fail(void **src_buf/*out*/, size_t *src_buf_bytes_used/*out*/,
8759 void *_scatter_info)
8760 {
8761 scatter_info_t *scatter_info = (scatter_info_t *)_scatter_info;
8762 size_t nelmts; /* Number of elements to return in src_buf */
8763
8764 /* Calculate number of elements */
8765 nelmts = MIN(scatter_info->block, scatter_info->size);
8766 HDassert(nelmts > 0);
8767
8768 /* Set output variables */
8769 *src_buf = (void *)scatter_info->src_buf;
8770 *src_buf_bytes_used = nelmts * sizeof(scatter_info->src_buf[0]);
8771
8772 return FAIL;
8773 }
8774
8775 static herr_t
scatter_error_cb_null(void ** src_buf,size_t * src_buf_bytes_used,void * _scatter_info)8776 scatter_error_cb_null(void **src_buf/*out*/, size_t *src_buf_bytes_used/*out*/,
8777 void *_scatter_info)
8778 {
8779 scatter_info_t *scatter_info = (scatter_info_t *)_scatter_info;
8780 size_t nelmts; /* Number of elements to return in src_buf */
8781
8782 /* Calculate number of elements */
8783 nelmts = MIN(scatter_info->block, scatter_info->size);
8784 HDassert(nelmts > 0);
8785
8786 /* Set output variables */
8787 *src_buf = NULL;
8788 *src_buf_bytes_used = nelmts * sizeof(scatter_info->src_buf[0]);
8789
8790 return SUCCEED;
8791 }
8792
8793 static herr_t
scatter_error_cb_unalign(void ** src_buf,size_t * src_buf_bytes_used,void * _src_buf_bytes_used)8794 scatter_error_cb_unalign(void **src_buf/*out*/, size_t *src_buf_bytes_used/*out*/,
8795 void *_src_buf_bytes_used)
8796 {
8797 /* Set output variables */
8798 *src_buf = _src_buf_bytes_used;
8799 *src_buf_bytes_used = *(size_t *)_src_buf_bytes_used;
8800
8801 return SUCCEED;
8802 }
8803
static herr_t
test_scatter_error(void)
{
    hid_t sid = -1;             /* Dataspace ID */
    hsize_t dim[1] = {10};      /* Dataspace dimensions */
    hsize_t start[3] = {2};     /* Only element [0] used: dataspace is 1-D */
    hsize_t count[3] = {6};     /* Only element [0] used: dataspace is 1-D */
    int src_buf[7];             /* Source data buffer */
    int dst_buf[10];            /* Destination data buffer */
    scatter_info_t scatter_info; /* Operator data for callback */
    size_t cb_unalign_nbytes;   /* Number of bytes to return for unaligned test */
    herr_t ret;                 /* Return value */
    int i;                      /* Local index variable */

    TESTING("H5Dscatter() error conditions");

    /* Create dataspace */
    if((sid = H5Screate_simple(1, dim, NULL)) < 0) TEST_ERROR

    /* Initialize src_buf */
    for(i=0; i<(int)(sizeof(src_buf)/sizeof(src_buf[0])); i++)
        src_buf[i] = i + 1;

    /* Select hyperslab */
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL ,count, NULL) < 0)
        TEST_ERROR

    /* Verify that base configuration passes (so later failures are due to
     * the injected error conditions, not the setup) */
    scatter_info.src_buf = src_buf;
    scatter_info.block = sizeof(src_buf)/sizeof(src_buf[0]);
    scatter_info.size = 6;
    if(H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, dst_buf) < 0)
        TEST_ERROR


    /*
     * Test invalid parameters
     */
    /* NULL callback */
    scatter_info.src_buf = src_buf;
    scatter_info.size = 6;
    H5E_BEGIN_TRY {
        ret = H5Dscatter(NULL, NULL, H5T_NATIVE_INT, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* Dataspace ID passed as the datatype */
    scatter_info.src_buf = src_buf;
    scatter_info.size = 6;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, sid, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* Datatype ID passed as the dataspace */
    scatter_info.src_buf = src_buf;
    scatter_info.size = 6;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, H5T_NATIVE_INT, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* NULL destination buffer */
    scatter_info.src_buf = src_buf;
    scatter_info.size = 6;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, NULL);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /*
     * Test returning too many elements in callback (7 supplied for a
     * 6-element selection)
     */
    scatter_info.src_buf = src_buf;
    scatter_info.size = 7;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_cb, &scatter_info, H5T_NATIVE_INT, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /*
     * Test callback returns failure
     */
    scatter_info.src_buf = src_buf;
    scatter_info.size = 6;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_error_cb_fail, &scatter_info, H5T_NATIVE_INT, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /*
     * Test callback returns NULL buffer
     */
    scatter_info.src_buf = src_buf;
    scatter_info.size = 6;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_error_cb_null, &scatter_info, H5T_NATIVE_INT, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /*
     * Test callback returns 0 for src_buf_bytes_used
     */
    cb_unalign_nbytes = 0;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_error_cb_unalign, &cb_unalign_nbytes, H5T_NATIVE_INT, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /*
     * Test callback returns src_buf_bytes_used that is not a multiple of
     * datatype size (one byte short, then one byte over)
     */
    cb_unalign_nbytes = sizeof(src_buf[0]) - 1;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_error_cb_unalign, &cb_unalign_nbytes, H5T_NATIVE_INT, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    cb_unalign_nbytes = sizeof(src_buf[0]) + 1;
    H5E_BEGIN_TRY {
        ret = H5Dscatter((H5D_scatter_func_t)scatter_error_cb_unalign, &cb_unalign_nbytes, H5T_NATIVE_INT, sid, dst_buf);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /* Close everything */
    if(H5Sclose(sid) < 0) TEST_ERROR

    PASSED();

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Sclose(sid);
    } H5E_END_TRY;
    return -1;
} /* end test_scatter_error() */
8944
8945
8946 /*-------------------------------------------------------------------------
8947 * Function: test_gather_error
8948 *
8949 * Purpose: Tests H5Dgather with a variety of different conditions
8950 * that should cause errors.
8951 *
8952 * Return: Success: 0
8953 * Failure: -1
8954 *
8955 * Programmer: Neil Fortner
8956 * Monday, February 4, 2013
8957 *
8958 *-------------------------------------------------------------------------
8959 */
/* Gather callback that always fails; used to verify that H5Dgather()
 * propagates a callback failure to its own return value. */
static herr_t
gather_error_cb_fail(const void H5_ATTR_UNUSED *dst_buf,
    size_t H5_ATTR_UNUSED dst_buf_bytes_used, void H5_ATTR_UNUSED *op_data)
{
    return FAIL;
}
8966
static herr_t
test_gather_error(void)
{
    hid_t sid = -1;             /* Dataspace ID */
    hsize_t dim[1] = {10};      /* Dataspace dimensions */
    hsize_t start[1] = {2};
    hsize_t count[1] = {6};
    int src_buf[10];            /* Source data buffer */
    int dst_buf[6];             /* Destination data buffer */
    int expect_dst_buf[6];      /* Expected destination data buffer */
    gather_info_t gather_info;  /* Operator data for callback */
    herr_t ret;                 /* Return value */
    int i;                      /* Local index variable */

    TESTING("H5Dgather() error conditions");

    /* Create dataspace */
    if((sid = H5Screate_simple(1, dim, NULL)) < 0) TEST_ERROR

    /* Initialize src_buf */
    for(i=0; i<(int)(sizeof(src_buf)/sizeof(src_buf[0])); i++)
        src_buf[i] = 1 + i;

    /* Select hyperslab (elements 2..7) */
    if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL ,count, NULL) < 0)
        TEST_ERROR

    /* Initialize expect_dst_buf */
    (void)HDmemset(expect_dst_buf, 0, sizeof(expect_dst_buf));
    for(i=0; i<6; i++)
        expect_dst_buf[i] = src_buf[i + 2];

    /* Verify that base configuration passes (so later failures are due to
     * the injected error conditions, not the setup) */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.max_nelmts = 6;
    gather_info.last_call = FALSE;
    if(H5Dgather(sid, src_buf, H5T_NATIVE_INT, 6 * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info) < 0)
        TEST_ERROR

    /*
     * Test invalid parameters
     */
    /* Datatype ID passed as the dataspace */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(H5T_NATIVE_INT, src_buf, H5T_NATIVE_INT, 6 * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* NULL source buffer */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(sid, NULL, H5T_NATIVE_INT, 6 * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* Dataspace ID passed as the datatype */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(sid, src_buf, sid, 6 * sizeof(dst_buf[0]), dst_buf, gather_cb, &gather_info);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* Zero-byte destination buffer */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(sid, src_buf, H5T_NATIVE_INT, 0, dst_buf, gather_cb, &gather_info);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* Destination buffer smaller than one element */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(sid, src_buf, H5T_NATIVE_INT, 1, dst_buf, gather_cb, &gather_info);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* NULL destination buffer */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(sid, src_buf, H5T_NATIVE_INT, 6 * sizeof(dst_buf[0]), NULL, gather_cb, &gather_info);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR

    /* No callback with a buffer too small for the whole selection */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(sid, src_buf, H5T_NATIVE_INT, 5 * sizeof(dst_buf[0]), dst_buf, NULL, &gather_info);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /*
     * Test callback returns failure
     */
    gather_info.expect_dst_buf = expect_dst_buf;
    gather_info.last_call = FALSE;
    H5E_BEGIN_TRY {
        ret = H5Dgather(sid, src_buf, H5T_NATIVE_INT, 6 * sizeof(dst_buf[0]), dst_buf, gather_error_cb_fail, NULL);
    } H5E_END_TRY
    if(ret >= 0) TEST_ERROR


    /* Close everything */
    if(H5Sclose(sid) < 0) TEST_ERROR

    PASSED();

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Sclose(sid);
    } H5E_END_TRY;
    return -1;
} /* end test_gather_error() */
9083
9084 /*-------------------------------------------------------------------------
9085 * Function: test_compact_dirty
9086 *
9087 * Purpose: Verify that issue #2 reported in HDFFV-10051 is fixed:
9088 * --the layout "dirty" flag for a compact dataset is not reset
9089 * properly after flushing the data at dataset close.
9090 *
9091 * Return: Success: 0
9092 * Failure: -1
9093 *
9094 * Programmer: Vailin Choi; April 2017
9095 *
9096 *-------------------------------------------------------------------------
9097 */
static herr_t
test_compact_dirty(hid_t fapl)
{
    hid_t fid = -1;             /* File ID */
    hid_t did = -1;             /* Dataset ID */
    hid_t sid = -1;             /* Dataspace ID */
    hid_t dcpl = -1;            /* Dataset creation property list */
    hsize_t dims[1] = {10};     /* Dimension */
    int wbuf[10];               /* Data buffer */
    char filename[FILENAME_BUF_SIZE];   /* Filename */
    int i;                      /* Local index variable */
    hbool_t dirty;              /* The dirty flag */

    TESTING("compact dataset repeated open/close and dirty flag");

    /* Create a file */
    h5_fixname(FILENAME[1], fapl, filename, sizeof filename);
    if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
        TEST_ERROR

    /* Initialize data */
    for(i = 0; i < 10; i++)
        wbuf[i] = i;

    /* Create dataspace */
    if((sid = H5Screate_simple(1, dims, NULL)) < 0)
        TEST_ERROR

    /* Set compact layout with early allocation, so the data is written
     * into the object header at dataset close */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR
    if(H5Pset_layout(dcpl, H5D_COMPACT) < 0)
        TEST_ERROR
    if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0)
        TEST_ERROR

    /* Create a compact dataset */
    if((did = H5Dcreate2(fid, DSET_COMPACT_MAX_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /* Write to the dataset (marks the compact layout "dirty") */
    if(H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0)
        TEST_ERROR

    /* Close the dataset (flushes the data; must also reset the dirty flag,
     * which is the bug being tested) */
    if(H5Dclose(did) < 0)
        TEST_ERROR

    /* Open the dataset */
    if((did = H5Dopen2(fid, DSET_COMPACT_MAX_NAME, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /* Retrieve the "dirty" flag from the compact dataset layout */
    if(H5D__layout_compact_dirty_test(did, &dirty) < 0)
        TEST_ERROR

    /* Verify that the "dirty" flag is false */
    if(dirty)
        TEST_ERROR

    /* Close the dataset */
    if(H5Dclose(did) < 0)
        TEST_ERROR

    /* Close the dataspace */
    if(H5Sclose(sid) < 0)
        TEST_ERROR

    /* Close the dataset creation property list */
    if(H5Pclose(dcpl) < 0)
        TEST_ERROR

    /* Close the file */
    if(H5Fclose(fid) < 0)
        TEST_ERROR

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Sclose(sid);
        H5Pclose(dcpl);
        H5Dclose(did);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
} /* test_compact_dirty() */
9186
9187
9188 /*-------------------------------------------------------------------------
9189 * Function: main
9190 *
9191 * Purpose: Tests the dataset interface (H5D)
9192 *
9193 * Return: Success: exit(0)
9194 *
9195 * Failure: exit(1)
9196 *
9197 * Programmer: Robb Matzke
9198 * Tuesday, December 9, 1997
9199 *
9200 *-------------------------------------------------------------------------
9201 */
int
main(void)
{
    char filename[FILENAME_BUF_SIZE];
    hid_t file, grp, fapl, fapl2;
    hbool_t new_format;         /* Loop variable: old vs. latest file format */
    int mdc_nelmts;             /* Metadata cache: number of elements */
    size_t rdcc_nelmts;         /* Raw data chunk cache: number of elements */
    size_t rdcc_nbytes;         /* Raw data chunk cache: total size in bytes */
    double rdcc_w0;             /* Raw data chunk cache: preemption policy */
    int nerrors = 0;            /* Cumulative error count across all tests */
    const char *envval;

    /* Don't run this test using certain file drivers */
    envval = HDgetenv("HDF5_DRIVER");
    if(envval == NULL)
        envval = "nomatch";

    /* Set the random # seed */
    HDsrandom((unsigned)HDtime(NULL));

    /* Testing setup */
    h5_reset();
    fapl = h5_fileaccess();

    /* Turn off the chunk cache, so all the chunks are immediately written to disk */
    if(H5Pget_cache(fapl, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0) < 0)
        goto error;
    rdcc_nbytes = 0;
    if(H5Pset_cache(fapl, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0) < 0)
        goto error;

    /* Copy the file access property list */
    if((fapl2 = H5Pcopy(fapl)) < 0) TEST_ERROR

    /* Set the "use the latest version of the format" bounds for creating objects in the file */
    if(H5Pset_libver_bounds(fapl2, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) TEST_ERROR

    h5_fixname(FILENAME[0], fapl, filename, sizeof filename);

    /* Test with old & new format groups */
    for(new_format = FALSE; new_format <= TRUE; new_format++) {
        hid_t my_fapl;

        /* Set the FAPL for the type of format */
        if(new_format) {
            puts("\nTesting with new file format:");
            my_fapl = fapl2;
        } /* end if */
        else {
            puts("Testing with old file format:");
            my_fapl = fapl;
        } /* end else */

        /* Create the file for this test */
        if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, my_fapl)) < 0)
            goto error;

        /* Cause the library to emit initial messages */
        if((grp = H5Gcreate2(file, "emit diagnostics", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;
        if(H5Oset_comment(grp, "Causes diagnostic messages to be emitted") < 0)
            goto error;
        if(H5Gclose(grp) < 0)
            goto error;

        /* Run every dataset test, accumulating failures rather than
         * stopping at the first one */
        nerrors += (test_create(file) < 0             ? 1 : 0);
        nerrors += (test_simple_io(envval, my_fapl) < 0        ? 1 : 0);
        nerrors += (test_compact_io(my_fapl) < 0      ? 1 : 0);
        nerrors += (test_max_compact(my_fapl) < 0     ? 1 : 0);
        nerrors += (test_compact_dirty(my_fapl) < 0   ? 1 : 0);
        nerrors += (test_conv_buffer(file) < 0        ? 1 : 0);
        nerrors += (test_tconv(file) < 0              ? 1 : 0);
        nerrors += (test_filters(file, my_fapl) < 0   ? 1 : 0);
        nerrors += (test_onebyte_shuffle(file) < 0    ? 1 : 0);
        nerrors += (test_nbit_int(file) < 0           ? 1 : 0);
        nerrors += (test_nbit_float(file) < 0         ? 1 : 0);
        nerrors += (test_nbit_double(file) < 0        ? 1 : 0);
        nerrors += (test_nbit_array(file) < 0         ? 1 : 0);
        nerrors += (test_nbit_compound(file) < 0      ? 1 : 0);
        nerrors += (test_nbit_compound_2(file) < 0    ? 1 : 0);
        nerrors += (test_nbit_compound_3(file) < 0    ? 1 : 0);
        nerrors += (test_nbit_int_size(file) < 0      ? 1 : 0);
        nerrors += (test_nbit_flt_size(file) < 0      ? 1 : 0);
        nerrors += (test_scaleoffset_int(file) < 0    ? 1 : 0);
        nerrors += (test_scaleoffset_int_2(file) < 0  ? 1 : 0);
        nerrors += (test_scaleoffset_float(file) < 0  ? 1 : 0);
        nerrors += (test_scaleoffset_float_2(file) < 0  ? 1 : 0);
        nerrors += (test_scaleoffset_double(file) < 0  ? 1 : 0);
        nerrors += (test_scaleoffset_double_2(file) < 0  ? 1 : 0);
        nerrors += (test_multiopen (file) < 0         ? 1 : 0);
        nerrors += (test_types(file) < 0              ? 1 : 0);
        nerrors += (test_userblock_offset(envval, my_fapl) < 0     ? 1 : 0);
        nerrors += (test_missing_filter(file) < 0     ? 1 : 0);
        nerrors += (test_can_apply(file) < 0          ? 1 : 0);
        nerrors += (test_can_apply2(file) < 0         ? 1 : 0);
        nerrors += (test_set_local(my_fapl) < 0       ? 1 : 0);
        nerrors += (test_can_apply_szip(file) < 0     ? 1 : 0);
        nerrors += (test_compare_dcpl(file) < 0       ? 1 : 0);
        nerrors += (test_copy_dcpl(file, my_fapl) < 0 ? 1 : 0);
        nerrors += (test_filter_delete(file) < 0      ? 1 : 0);
        nerrors += (test_filters_endianess() < 0      ? 1 : 0);
        nerrors += (test_zero_dims(file) < 0          ? 1 : 0);
        nerrors += (test_missing_chunk(file) < 0      ? 1 : 0);
        nerrors += (test_random_chunks(my_fapl) < 0   ? 1 : 0);
#ifndef H5_NO_DEPRECATED_SYMBOLS
        nerrors += (test_deprec(file) < 0             ? 1 : 0);
#endif /* H5_NO_DEPRECATED_SYMBOLS */
        nerrors += (test_huge_chunks(my_fapl) < 0     ? 1 : 0);
        nerrors += (test_chunk_cache(my_fapl) < 0     ? 1 : 0);
        nerrors += (test_big_chunks_bypass_cache(my_fapl) < 0   ? 1 : 0);
        nerrors += (test_chunk_expand(my_fapl) < 0    ? 1 : 0);
        nerrors += (test_idx_compatible() < 0         ? 1 : 0);
        nerrors += (test_layout_extend(my_fapl) < 0   ? 1 : 0);
        nerrors += (test_large_chunk_shrink(my_fapl) < 0 ? 1 : 0);
        nerrors += (test_zero_dim_dset(my_fapl) < 0   ? 1 : 0);

        if(H5Fclose(file) < 0)
            goto error;
    } /* end for */

    /* Close 2nd FAPL */
    if(H5Pclose(fapl2) < 0) TEST_ERROR

    /* Tests that do not use files */
    nerrors += (test_scatter() < 0                    ? 1 : 0);
    nerrors += (test_gather() < 0                     ? 1 : 0);
    nerrors += (test_scatter_error() < 0              ? 1 : 0);
    nerrors += (test_gather_error() < 0               ? 1 : 0);

    /* Verify symbol table messages are cached */
    nerrors += (h5_verify_cached_stabs(FILENAME, fapl) < 0 ? 1 : 0);

    if(nerrors)
        goto error;
    printf("All dataset tests passed.\n");
#ifdef H5_HAVE_FILTER_SZIP
    if (GetTestCleanup())
        HDremove(NOENCODER_COPY_FILENAME);
#endif /* H5_HAVE_FILTER_SZIP */
    h5_cleanup(FILENAME, fapl);

    return 0;

error:
    nerrors = MAX(1, nerrors);
    printf("***** %d DATASET TEST%s FAILED! *****\n",
            nerrors, 1 == nerrors ? "" : "S");
    return 1;
}
9352
9353