/dports/databases/timescaledb/timescaledb-2.5.1/sql/
policy_internal.sql
    32  chunk_rec RECORD;
    46  FOR chunk_rec IN
    58  IF chunk_rec.status = 0 THEN
    59  PERFORM compress_chunk( chunk_rec.oid );
    60  ELSIF chunk_rec.status = 3 AND recompress_enabled IS TRUE THEN
    61  PERFORM decompress_chunk(chunk_rec.oid, if_compressed => true);
    63  PERFORM compress_chunk(chunk_rec.oid);
    67  …RAISE LOG 'job % completed processing chunk %.%', job_id, chunk_rec.schema_name, chunk_rec.table_n…
    87  chunk_rec RECORD;
/dports/science/InsightToolkit/ITK-5.0.1/Modules/ThirdParty/HDF5/src/itkhdf5/src/ |
H5Dnone.c
    240  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__none_idx_iterate() local
    261  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__none_idx_iterate()
    262  chunk_rec.nbytes = idx_info->layout->size;   in H5D__none_idx_iterate()
    263  chunk_rec.filter_mask = 0;   in H5D__none_idx_iterate()
    271  idx = H5VM_array_offset_pre(ndims, idx_info->layout->max_down_chunks, chunk_rec.scaled);   in H5D__none_idx_iterate()
    274  chunk_rec.chunk_addr = idx_info->storage->idx_addr + idx * idx_info->layout->size;   in H5D__none_idx_iterate()
    277  if((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__none_idx_iterate()
    284  chunk_rec.scaled[curr_dim]++;   in H5D__none_idx_iterate()
    287  if(chunk_rec.scaled[curr_dim] >= idx_info->layout->chunks[curr_dim]) {   in H5D__none_idx_iterate()
    289  chunk_rec.scaled[curr_dim] = 0;   in H5D__none_idx_iterate()
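The H5Dnone.c matches above are the core of the "implicit" (index-less) chunk iterator: it zeroes a generic chunk record, fills in the fixed chunk size and an empty filter mask, derives each chunk's file address from the base address plus a linearised offset of the scaled (chunk-grid) coordinates, hands the record to the caller's callback, and then advances the coordinates odometer-style. Below is a minimal, self-contained C sketch of that pattern; the type and function names (chunk_rec_t, iterate_implicit, and so on) are simplified stand-ins invented for illustration, not the actual HDF5 internals.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define MAX_RANK 8                      /* illustration only; HDF5 has its own limit */

/* Simplified stand-in for the generic chunk record seen above. */
typedef struct {
    uint64_t chunk_addr;                /* file address of the chunk          */
    uint32_t nbytes;                    /* on-disk size of the chunk          */
    uint32_t filter_mask;               /* filters that were skipped          */
    uint64_t scaled[MAX_RANK];          /* chunk-grid ("scaled") coordinates  */
} chunk_rec_t;

typedef int (*chunk_cb_t)(const chunk_rec_t *rec, void *udata);

/* Iterate over every chunk of an implicitly indexed (unfiltered, contiguously
 * allocated) dataset: address = base_addr + linear_index * chunk_size.       */
static int iterate_implicit(unsigned ndims, const uint64_t *nchunks,
                            uint64_t base_addr, uint32_t chunk_size,
                            chunk_cb_t cb, void *udata)
{
    chunk_rec_t rec;

    if (ndims == 0 || ndims > MAX_RANK)
        return -1;

    memset(&rec, 0, sizeof(rec));       /* start at coordinate (0, ..., 0)    */
    rec.nbytes      = chunk_size;       /* every chunk has the nominal size   */
    rec.filter_mask = 0;                /* implicit index => no filters       */

    for (;;) {
        /* Row-major linear index of the current scaled coordinates. */
        uint64_t idx = 0;
        for (unsigned d = 0; d < ndims; d++)
            idx = idx * nchunks[d] + rec.scaled[d];
        rec.chunk_addr = base_addr + idx * chunk_size;

        int ret = cb(&rec, udata);      /* hand the record to the caller      */
        if (ret < 0)
            return ret;                 /* negative return aborts iteration   */

        /* Advance the coordinates like an odometer, last dimension fastest. */
        unsigned d = ndims;
        while (d-- > 0) {
            if (++rec.scaled[d] < nchunks[d])
                break;                  /* no carry needed                    */
            rec.scaled[d] = 0;          /* carry into the next slower dim     */
            if (d == 0)
                return 0;               /* wrapped past the slowest dim: done */
        }
    }
}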
H5Dsingle.c
    315  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__single_idx_iterate() local
    331  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__single_idx_iterate()
    332  chunk_rec.chunk_addr = idx_info->storage->idx_addr;   in H5D__single_idx_iterate()
    335  chunk_rec.nbytes = idx_info->storage->u.single.nbytes;   in H5D__single_idx_iterate()
    336  chunk_rec.filter_mask = idx_info->storage->u.single.filter_mask;   in H5D__single_idx_iterate()
    339  chunk_rec.nbytes = idx_info->layout->size;   in H5D__single_idx_iterate()
    340  chunk_rec.filter_mask = 0;   in H5D__single_idx_iterate()
    344  if((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__single_idx_iterate()
H5Dfarray.c
    1129  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__farray_idx_iterate_cb()
    1130  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__farray_idx_iterate_cb()
    1137  if(H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__farray_idx_iterate_cb()
    1147  udata->chunk_rec.scaled[curr_dim]++;   in H5D__farray_idx_iterate_cb()
    1152  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__farray_idx_iterate_cb()
    1219  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__farray_idx_iterate()
    1223  udata.chunk_rec.filter_mask = 0;   in H5D__farray_idx_iterate()
    1354  HDassert(chunk_rec);   in H5D__farray_idx_delete_cb()
    1355  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__farray_idx_delete_cb()
    1356  HDassert(chunk_rec->nbytes > 0);   in H5D__farray_idx_delete_cb()
    [all …]
H5Dearray.c
    1250  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__earray_idx_iterate_cb()
    1251  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__earray_idx_iterate_cb()
    1258  if(H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__earray_idx_iterate_cb()
    1268  udata->chunk_rec.scaled[curr_dim]++;   in H5D__earray_idx_iterate_cb()
    1273  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__earray_idx_iterate_cb()
    1343  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__earray_idx_iterate()
    1347  udata.chunk_rec.filter_mask = 0;   in H5D__earray_idx_iterate()
    1495  HDassert(chunk_rec);   in H5D__earray_idx_delete_cb()
    1496  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__earray_idx_delete_cb()
    1497  HDassert(chunk_rec->nbytes > 0);   in H5D__earray_idx_delete_cb()
    [all …]
H5Dchunk.c
    3891  *(hsize_t *)nbytes += chunk_rec->nbytes;   in H5D__chunk_allocated_cb()
    5740  udata_dst.common.scaled = chunk_rec->scaled;   in H5D__chunk_copy_cb()
    5743  udata_dst.filter_mask = chunk_rec->filter_mask;   in H5D__chunk_copy_cb()
    6025  H5D_chunk_rec_t chunk_rec;   in H5D__chunk_copy() local
    6028  chunk_rec.nbytes = layout_src->size;   in H5D__chunk_copy()
    6029  chunk_rec.filter_mask = 0;   in H5D__chunk_copy()
    6030  chunk_rec.chunk_addr = HADDR_UNDEF;   in H5D__chunk_copy()
    6034  HDmemcpy(chunk_rec.scaled, ent->scaled, sizeof(chunk_rec.scaled));   in H5D__chunk_copy()
    6188  …(udata->stream, " 0x%08x %8Zu %10a [", chunk_rec->filter_mask, chunk_rec->nbytes, chunk…   in H5D__chunk_dump_index_cb()
    6625  chunk_addr = chunk_rec->chunk_addr;   in H5D__chunk_format_convert_cb()
    [all …]
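Most of the H5Dchunk.c hits are callbacks on the other side of that interface: they consume the generic record that whichever index is in use produces (summing allocated bytes, copying chunks, dumping or converting the index). A hedged sketch of such a consumer, reusing the hypothetical chunk_rec_t / chunk_cb_t / iterate_implicit names from the sketch above:

/* Consumer in the spirit of the "allocated bytes" callback listed above:
 * accumulate each chunk's on-disk size into a running total.               */
static int sum_allocated_cb(const chunk_rec_t *rec, void *udata)
{
    *(uint64_t *)udata += rec->nbytes;  /* add this chunk's size             */
    return 0;                           /* >= 0 keeps the iteration going    */
}

/* Usage: total bytes allocated for a 4 x 6 chunk grid of 8 KiB chunks.      */
static uint64_t total_allocated(void)
{
    uint64_t nchunks[2] = {4, 6};
    uint64_t total = 0;

    iterate_implicit(2, nchunks, /*base_addr=*/4096, /*chunk_size=*/8192,
                     sum_allocated_cb, &total);
    return total;                       /* 4 * 6 * 8192 = 196608 bytes       */
}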
H5Dbtree.c
    1077  H5D_chunk_rec_t chunk_rec; /* Generic chunk record for callback */   in H5D__btree_idx_iterate_cb() local
    1084  HDcompile_assert(sizeof(chunk_rec.nbytes) == sizeof(lt_key->nbytes));   in H5D__btree_idx_iterate_cb()
    1086  HDcompile_assert(sizeof(chunk_rec.scaled) == sizeof(lt_key->scaled));   in H5D__btree_idx_iterate_cb()
    1088  HDcompile_assert(sizeof(chunk_rec.filter_mask) == sizeof(lt_key->filter_mask));   in H5D__btree_idx_iterate_cb()
    1091  HDmemcpy(&chunk_rec, lt_key, sizeof(*lt_key));   in H5D__btree_idx_iterate_cb()
    1092  chunk_rec.chunk_addr = addr;   in H5D__btree_idx_iterate_cb()
    1095  if((ret_value = (udata->cb)(&chunk_rec, udata->udata)) < 0)   in H5D__btree_idx_iterate_cb()
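The H5Dbtree.c hits show the same hand-off from the v1 B-tree index: the iterate callback copies the size, filter mask and coordinates out of the left B-tree key into a generic chunk record, adds the node's file address, and forwards the record to the user callback. A sketch of that shape, again using the made-up types from the first block (the btree_key_t layout here is hypothetical, not the real HDF5 key type):

/* Hypothetical stand-in for a v1 B-tree leaf key: it already carries the
 * chunk's size, filter mask and scaled coordinates; only the file address
 * comes from the B-tree node itself.                                        */
typedef struct {
    uint32_t nbytes;
    uint32_t filter_mask;
    uint64_t scaled[MAX_RANK];
} btree_key_t;

typedef struct {
    chunk_cb_t cb;                      /* user callback to run per chunk    */
    void      *udata;                   /* opaque state for that callback    */
} btree_iter_udata_t;

/* Per-record callback in the style of the B-tree iterate callback above:
 * build a generic chunk record from key + address, then forward it.         */
static int btree_iterate_cb(const btree_key_t *lt_key, uint64_t addr,
                            btree_iter_udata_t *iter)
{
    chunk_rec_t rec;

    memset(&rec, 0, sizeof(rec));
    rec.nbytes      = lt_key->nbytes;   /* copy the key's fields across      */
    rec.filter_mask = lt_key->filter_mask;
    memcpy(rec.scaled, lt_key->scaled, sizeof(rec.scaled));
    rec.chunk_addr  = addr;             /* address comes from the node       */

    return iter->cb(&rec, iter->udata); /* hand off the generic record       */
}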
/dports/science/hdf5/hdf5-1.10.6/src/ |
H5Dnone.c
    240  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__none_idx_iterate() local
    261  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__none_idx_iterate()
    262  chunk_rec.nbytes = idx_info->layout->size;   in H5D__none_idx_iterate()
    263  chunk_rec.filter_mask = 0;   in H5D__none_idx_iterate()
    271  idx = H5VM_array_offset_pre(ndims, idx_info->layout->max_down_chunks, chunk_rec.scaled);   in H5D__none_idx_iterate()
    274  chunk_rec.chunk_addr = idx_info->storage->idx_addr + idx * idx_info->layout->size;   in H5D__none_idx_iterate()
    277  if((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__none_idx_iterate()
    284  chunk_rec.scaled[curr_dim]++;   in H5D__none_idx_iterate()
    287  if(chunk_rec.scaled[curr_dim] >= idx_info->layout->chunks[curr_dim]) {   in H5D__none_idx_iterate()
    289  chunk_rec.scaled[curr_dim] = 0;   in H5D__none_idx_iterate()
H5Dsingle.c
    315  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__single_idx_iterate() local
    331  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__single_idx_iterate()
    332  chunk_rec.chunk_addr = idx_info->storage->idx_addr;   in H5D__single_idx_iterate()
    335  chunk_rec.nbytes = idx_info->storage->u.single.nbytes;   in H5D__single_idx_iterate()
    336  chunk_rec.filter_mask = idx_info->storage->u.single.filter_mask;   in H5D__single_idx_iterate()
    339  chunk_rec.nbytes = idx_info->layout->size;   in H5D__single_idx_iterate()
    340  chunk_rec.filter_mask = 0;   in H5D__single_idx_iterate()
    344  if((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__single_idx_iterate()
H5Dfarray.c
    1129  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__farray_idx_iterate_cb()
    1130  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__farray_idx_iterate_cb()
    1137  if(H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__farray_idx_iterate_cb()
    1147  udata->chunk_rec.scaled[curr_dim]++;   in H5D__farray_idx_iterate_cb()
    1152  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__farray_idx_iterate_cb()
    1219  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__farray_idx_iterate()
    1223  udata.chunk_rec.filter_mask = 0;   in H5D__farray_idx_iterate()
    1354  HDassert(chunk_rec);   in H5D__farray_idx_delete_cb()
    1355  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__farray_idx_delete_cb()
    1356  HDassert(chunk_rec->nbytes > 0);   in H5D__farray_idx_delete_cb()
    [all …]
H5Dearray.c
    1250  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__earray_idx_iterate_cb()
    1251  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__earray_idx_iterate_cb()
    1258  if(H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__earray_idx_iterate_cb()
    1268  udata->chunk_rec.scaled[curr_dim]++;   in H5D__earray_idx_iterate_cb()
    1273  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__earray_idx_iterate_cb()
    1343  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__earray_idx_iterate()
    1347  udata.chunk_rec.filter_mask = 0;   in H5D__earray_idx_iterate()
    1495  HDassert(chunk_rec);   in H5D__earray_idx_delete_cb()
    1496  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__earray_idx_delete_cb()
    1497  HDassert(chunk_rec->nbytes > 0);   in H5D__earray_idx_delete_cb()
    [all …]
H5Dchunk.c
    3940  *(hsize_t *)nbytes += chunk_rec->nbytes;   in H5D__chunk_allocated_cb()
    6090  H5D_chunk_rec_t chunk_rec;   in H5D__chunk_copy() local
    6093  chunk_rec.nbytes = layout_src->size;   in H5D__chunk_copy()
    6094  chunk_rec.filter_mask = 0;   in H5D__chunk_copy()
    6095  chunk_rec.chunk_addr = HADDR_UNDEF;   in H5D__chunk_copy()
    6099  HDmemcpy(chunk_rec.scaled, ent->scaled, sizeof(chunk_rec.scaled));   in H5D__chunk_copy()
    6253  …(udata->stream, " 0x%08x %8Zu %10a [", chunk_rec->filter_mask, chunk_rec->nbytes, chunk…   in H5D__chunk_dump_index_cb()
    6690  chunk_addr = chunk_rec->chunk_addr;   in H5D__chunk_format_convert_cb()
    6915  HDassert(chunk_rec);   in H5D__get_chunk_info_cb()
    6922  chunk_info->nbytes = chunk_rec->nbytes;   in H5D__get_chunk_info_cb()
    [all …]
H5Dbtree.c
    1077  H5D_chunk_rec_t chunk_rec; /* Generic chunk record for callback */   in H5D__btree_idx_iterate_cb() local
    1084  HDcompile_assert(sizeof(chunk_rec.nbytes) == sizeof(lt_key->nbytes));   in H5D__btree_idx_iterate_cb()
    1086  HDcompile_assert(sizeof(chunk_rec.scaled) == sizeof(lt_key->scaled));   in H5D__btree_idx_iterate_cb()
    1088  HDcompile_assert(sizeof(chunk_rec.filter_mask) == sizeof(lt_key->filter_mask));   in H5D__btree_idx_iterate_cb()
    1091  HDmemcpy(&chunk_rec, lt_key, sizeof(*lt_key));   in H5D__btree_idx_iterate_cb()
    1092  chunk_rec.chunk_addr = addr;   in H5D__btree_idx_iterate_cb()
    1095  if((ret_value = (udata->cb)(&chunk_rec, udata->udata)) < 0)   in H5D__btree_idx_iterate_cb()
/dports/math/vtk9/VTK-9.1.0/ThirdParty/hdf5/vtkhdf5/src/ |
H5Dnone.c
    225  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__none_idx_iterate() local
    246  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__none_idx_iterate()
    247  chunk_rec.nbytes = idx_info->layout->size;   in H5D__none_idx_iterate()
    248  chunk_rec.filter_mask = 0;   in H5D__none_idx_iterate()
    256  idx = H5VM_array_offset_pre(ndims, idx_info->layout->max_down_chunks, chunk_rec.scaled);   in H5D__none_idx_iterate()
    259  chunk_rec.chunk_addr = idx_info->storage->idx_addr + idx * idx_info->layout->size;   in H5D__none_idx_iterate()
    262  if ((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__none_idx_iterate()
    270  chunk_rec.scaled[curr_dim]++;   in H5D__none_idx_iterate()
    273  if (chunk_rec.scaled[curr_dim] >= idx_info->layout->chunks[curr_dim]) {   in H5D__none_idx_iterate()
    275  chunk_rec.scaled[curr_dim] = 0;   in H5D__none_idx_iterate()
H5Dsingle.c
    296  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__single_idx_iterate() local
    312  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__single_idx_iterate()
    313  chunk_rec.chunk_addr = idx_info->storage->idx_addr;   in H5D__single_idx_iterate()
    316  chunk_rec.nbytes = idx_info->storage->u.single.nbytes;   in H5D__single_idx_iterate()
    317  chunk_rec.filter_mask = idx_info->storage->u.single.filter_mask;   in H5D__single_idx_iterate()
    320  chunk_rec.nbytes = idx_info->layout->size;   in H5D__single_idx_iterate()
    321  chunk_rec.filter_mask = 0;   in H5D__single_idx_iterate()
    325  if ((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__single_idx_iterate()
H5Dfarray.c
    1105  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__farray_idx_iterate_cb()
    1106  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__farray_idx_iterate_cb()
    1113  if (H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__farray_idx_iterate_cb()
    1123  udata->chunk_rec.scaled[curr_dim]++;   in H5D__farray_idx_iterate_cb()
    1128  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__farray_idx_iterate_cb()
    1194  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__farray_idx_iterate()
    1198  udata.chunk_rec.filter_mask = 0;   in H5D__farray_idx_iterate()
    1329  HDassert(chunk_rec);   in H5D__farray_idx_delete_cb()
    1330  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__farray_idx_delete_cb()
    1331  HDassert(chunk_rec->nbytes > 0);   in H5D__farray_idx_delete_cb()
    [all …]
H5Dearray.c
    1222  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__earray_idx_iterate_cb()
    1223  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__earray_idx_iterate_cb()
    1230  if (H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__earray_idx_iterate_cb()
    1240  udata->chunk_rec.scaled[curr_dim]++;   in H5D__earray_idx_iterate_cb()
    1245  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__earray_idx_iterate_cb()
    1314  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__earray_idx_iterate()
    1318  udata.chunk_rec.filter_mask = 0;   in H5D__earray_idx_iterate()
    1467  HDassert(chunk_rec);   in H5D__earray_idx_delete_cb()
    1468  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__earray_idx_delete_cb()
    1469  HDassert(chunk_rec->nbytes > 0);   in H5D__earray_idx_delete_cb()
    [all …]
H5Dchunk.c
    4185  *(hsize_t *)nbytes += chunk_rec->nbytes;   in H5D__chunk_allocated_cb()
    6381  H5D_chunk_rec_t chunk_rec;   in H5D__chunk_copy() local
    6384  chunk_rec.nbytes = layout_src->size;   in H5D__chunk_copy()
    6385  chunk_rec.filter_mask = 0;   in H5D__chunk_copy()
    6386  chunk_rec.chunk_addr = HADDR_UNDEF;   in H5D__chunk_copy()
    6390  H5MM_memcpy(chunk_rec.scaled, ent->scaled, sizeof(chunk_rec.scaled));   in H5D__chunk_copy()
    6543  chunk_rec->nbytes, chunk_rec->chunk_addr);   in H5D__chunk_dump_index_cb()
    6977  chunk_addr = chunk_rec->chunk_addr;   in H5D__chunk_format_convert_cb()
    7201  HDassert(chunk_rec);   in H5D__get_chunk_info_cb()
    7211  chunk_info->nbytes = chunk_rec->nbytes;   in H5D__get_chunk_info_cb()
    [all …]
H5Dbtree.c
    1035  …H5D_chunk_rec_t chunk_rec; /* Generic chunk record for call…   in H5D__btree_idx_iterate_cb() local
    1042  HDcompile_assert(sizeof(chunk_rec.nbytes) == sizeof(lt_key->nbytes));   in H5D__btree_idx_iterate_cb()
    1044  HDcompile_assert(sizeof(chunk_rec.scaled) == sizeof(lt_key->scaled));   in H5D__btree_idx_iterate_cb()
    1046  HDcompile_assert(sizeof(chunk_rec.filter_mask) == sizeof(lt_key->filter_mask));   in H5D__btree_idx_iterate_cb()
    1049  H5MM_memcpy(&chunk_rec, lt_key, sizeof(*lt_key));   in H5D__btree_idx_iterate_cb()
    1050  chunk_rec.chunk_addr = addr;   in H5D__btree_idx_iterate_cb()
    1053  if ((ret_value = (udata->cb)(&chunk_rec, udata->udata)) < 0)   in H5D__btree_idx_iterate_cb()
/dports/math/vtk8/VTK-8.2.0/ThirdParty/hdf5/vtkhdf5/src/ |
H5Dnone.c
    240  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__none_idx_iterate() local
    261  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__none_idx_iterate()
    262  chunk_rec.nbytes = idx_info->layout->size;   in H5D__none_idx_iterate()
    263  chunk_rec.filter_mask = 0;   in H5D__none_idx_iterate()
    271  idx = H5VM_array_offset_pre(ndims, idx_info->layout->max_down_chunks, chunk_rec.scaled);   in H5D__none_idx_iterate()
    274  chunk_rec.chunk_addr = idx_info->storage->idx_addr + idx * idx_info->layout->size;   in H5D__none_idx_iterate()
    277  if((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__none_idx_iterate()
    284  chunk_rec.scaled[curr_dim]++;   in H5D__none_idx_iterate()
    287  if(chunk_rec.scaled[curr_dim] >= idx_info->layout->chunks[curr_dim]) {   in H5D__none_idx_iterate()
    289  chunk_rec.scaled[curr_dim] = 0;   in H5D__none_idx_iterate()
H5Dsingle.c
    317  H5D_chunk_rec_t chunk_rec; /* generic chunk record */   in H5D__single_idx_iterate() local
    333  HDmemset(&chunk_rec, 0, sizeof(chunk_rec));   in H5D__single_idx_iterate()
    334  chunk_rec.chunk_addr = idx_info->storage->idx_addr;   in H5D__single_idx_iterate()
    337  chunk_rec.nbytes = idx_info->storage->u.single.nbytes;   in H5D__single_idx_iterate()
    338  chunk_rec.filter_mask = idx_info->storage->u.single.filter_mask;   in H5D__single_idx_iterate()
    341  chunk_rec.nbytes = idx_info->layout->size;   in H5D__single_idx_iterate()
    342  chunk_rec.filter_mask = 0;   in H5D__single_idx_iterate()
    346  if((ret_value = (*chunk_cb)(&chunk_rec, chunk_udata)) < 0)   in H5D__single_idx_iterate()
H5Dfarray.c
    1137  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__farray_idx_iterate_cb()
    1138  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__farray_idx_iterate_cb()
    1145  if(H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__farray_idx_iterate_cb()
    1155  udata->chunk_rec.scaled[curr_dim]++;   in H5D__farray_idx_iterate_cb()
    1160  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__farray_idx_iterate_cb()
    1227  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__farray_idx_iterate()
    1231  udata.chunk_rec.filter_mask = 0;   in H5D__farray_idx_iterate()
    1362  HDassert(chunk_rec);   in H5D__farray_idx_delete_cb()
    1363  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__farray_idx_delete_cb()
    1364  HDassert(chunk_rec->nbytes > 0);   in H5D__farray_idx_delete_cb()
    [all …]
H5Dearray.c
    1257  udata->chunk_rec.chunk_addr = filt_elmt->addr;   in H5D__earray_idx_iterate_cb()
    1258  udata->chunk_rec.nbytes = filt_elmt->nbytes;   in H5D__earray_idx_iterate_cb()
    1265  if(H5F_addr_defined(udata->chunk_rec.chunk_addr))   in H5D__earray_idx_iterate_cb()
    1275  udata->chunk_rec.scaled[curr_dim]++;   in H5D__earray_idx_iterate_cb()
    1280  udata->chunk_rec.scaled[curr_dim] = 0;   in H5D__earray_idx_iterate_cb()
    1350  HDmemset(&udata.chunk_rec, 0, sizeof(udata.chunk_rec));   in H5D__earray_idx_iterate()
    1354  udata.chunk_rec.filter_mask = 0;   in H5D__earray_idx_iterate()
    1502  HDassert(chunk_rec);   in H5D__earray_idx_delete_cb()
    1503  HDassert(H5F_addr_defined(chunk_rec->chunk_addr));   in H5D__earray_idx_delete_cb()
    1504  HDassert(chunk_rec->nbytes > 0);   in H5D__earray_idx_delete_cb()
    [all …]
H5Dbtree.c
    1076  H5D_chunk_rec_t chunk_rec; /* Generic chunk record for callback */   in H5D__btree_idx_iterate_cb() local
    1083  HDcompile_assert(sizeof(chunk_rec.nbytes) == sizeof(lt_key->nbytes));   in H5D__btree_idx_iterate_cb()
    1085  HDcompile_assert(sizeof(chunk_rec.scaled) == sizeof(lt_key->scaled));   in H5D__btree_idx_iterate_cb()
    1087  HDcompile_assert(sizeof(chunk_rec.filter_mask) == sizeof(lt_key->filter_mask));   in H5D__btree_idx_iterate_cb()
    1090  HDmemcpy(&chunk_rec, lt_key, sizeof(*lt_key));   in H5D__btree_idx_iterate_cb()
    1091  chunk_rec.chunk_addr = addr;   in H5D__btree_idx_iterate_cb()
    1094  if((ret_value = (udata->cb)(&chunk_rec, udata->udata)) < 0)   in H5D__btree_idx_iterate_cb()
/dports/science/hdf5-18/hdf5-1.8.21/src/ |
H5Dbtree.c
    1026  H5D_chunk_rec_t chunk_rec; /* Generic chunk record for callback */   in H5D__btree_idx_iterate_cb() local
    1033  HDcompile_assert(sizeof(chunk_rec.nbytes) == sizeof(lt_key->nbytes));   in H5D__btree_idx_iterate_cb()
    1035  HDcompile_assert(sizeof(chunk_rec.offset) == sizeof(lt_key->offset));   in H5D__btree_idx_iterate_cb()
    1037  HDcompile_assert(sizeof(chunk_rec.filter_mask) == sizeof(lt_key->filter_mask));   in H5D__btree_idx_iterate_cb()
    1040  HDmemcpy(&chunk_rec, lt_key, sizeof(*lt_key));   in H5D__btree_idx_iterate_cb()
    1041  chunk_rec.chunk_addr = addr;   in H5D__btree_idx_iterate_cb()
    1044  if((ret_value = (udata->cb)(&chunk_rec, udata->udata)) < 0)   in H5D__btree_idx_iterate_cb()