
Searched refs:dxpl_cache (Results 1 – 24 of 24) sorted by relevance

/dports/science/hdf5-18/hdf5-1.8.21/src/
H5Dscatgath.c
52 const H5D_dxpl_cache_t *dxpl_cache, void *_tgath_buf/*out*/);
54 H5S_sel_iter_t *iter, const H5D_dxpl_cache_t *dxpl_cache,
125 if(tmp_io_info.dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__scatter_file()
226 if(tmp_io_info.dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__gather_file()
313 if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__scatter_mem()
400 if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__gather_mem()
459 …const H5D_dxpl_cache_t *dxpl_cache = io_info->dxpl_cache; /* Local pointer to dataset transfer… in H5D__scatgath_read() local
591 …const H5D_dxpl_cache_t *dxpl_cache = io_info->dxpl_cache; /* Local pointer to dataset transfer… in H5D__scatgath_write() local
641 dxpl_cache, type_info->tconv_buf/*out*/); in H5D__scatgath_write()
758 HDassert(dxpl_cache); in H5D__compound_opt_read()
[all …]
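
The H5Dscatgath.c hits above show the two recurring uses of the cache in this file: each routine keeps a local const H5D_dxpl_cache_t * taken from io_info->dxpl_cache, and it compares dxpl_cache->vec_size against the built-in H5D_IO_VECTOR_SIZE before reaching for larger sequence buffers. Below is a minimal sketch of that shape, not taken from the HDF5 sources: the function name is made up, it assumes the private H5Dpkg.h declarations (H5D_io_info_t, H5D_dxpl_cache_t, H5D_IO_VECTOR_SIZE), and error handling is reduced to SUCCEED/FAIL.

    static herr_t
    sketch_scatgath(const H5D_io_info_t *io_info)
    {
        /* Local pointer to the cached dataset transfer properties, as in
         * H5D__scatgath_read() / H5D__scatgath_write() */
        const H5D_dxpl_cache_t *dxpl_cache = io_info->dxpl_cache;

        /* The stack default H5D_IO_VECTOR_SIZE covers the common case; larger
         * sequence buffers are only needed when the cached vector size exceeds it */
        if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) {
            /* ... allocate offset/length sequence arrays of vec_size entries ... */
        } /* end if */

        return SUCCEED;
    }
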
H5Dio.c
64 H5D_dxpl_cache_t *dxpl_cache,
471 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__read()
484 if(dxpl_cache->xfer_mode == H5FD_MPIO_COLLECTIVE && in H5D__read()
694 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__write()
731 if(dxpl_cache->xfer_mode == H5FD_MPIO_COLLECTIVE) in H5D__write()
906 H5D_dxpl_cache_t *dxpl_cache, hid_t dxpl_id, in H5D__ioinfo_init()
920 io_info->dxpl_cache = dxpl_cache; in H5D__ioinfo_init()
1052 target_size = dxpl_cache->max_temp_buf; in H5D__typeinfo_init()
1060 && (NULL == dxpl_cache->tconv_buf) && (NULL == dxpl_cache->bkgr_buf)); in H5D__typeinfo_init()
1097 if(bkg_size < dxpl_cache->max_temp_buf) in H5D__typeinfo_init()
[all …]
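
The H5Dio.c hits show the entry-point pattern: H5D__read() and H5D__write() resolve the incoming DXPL ID into a cached H5D_dxpl_cache_t once via H5D__get_dxpl_cache(), then branch on cached fields such as xfer_mode instead of re-querying the property list. A hedged sketch of that lookup-then-branch shape follows; the wrapper name is hypothetical, bare returns stand in for the real error macros, and the MPI transfer-mode branch applies to parallel-enabled builds.

    static herr_t
    sketch_io_prepare(hid_t dxpl_id)
    {
        H5D_dxpl_cache_t  _dxpl_cache;                /* Stack storage for the cache   */
        H5D_dxpl_cache_t *dxpl_cache = &_dxpl_cache;  /* Filled in by the lookup below */

        /* Fill the data transfer property cache from the DXPL ID, once, up front */
        if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0)
            return FAIL;

        /* Later decisions read the cached values directly */
        if(dxpl_cache->xfer_mode == H5FD_MPIO_COLLECTIVE) {
            /* ... collective-I/O-specific checks and setup ... */
        } /* end if */

        return SUCCEED;
    }
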
H5Dselect.c
116 if(io_info->dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__select_io()
117 if(NULL == (mem_len = H5FL_SEQ_MALLOC(size_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
119 if(NULL == (mem_off = H5FL_SEQ_MALLOC(hsize_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
121 if(NULL == (file_len = H5FL_SEQ_MALLOC(size_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
123 if(NULL == (file_off = H5FL_SEQ_MALLOC(hsize_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
189 …GET_SEQ_LIST(file_space, H5S_GET_SEQ_LIST_SORTED, &file_iter, io_info->dxpl_cache->vec_size, nelmt… in H5D__select_io()
199 …if(H5S_SELECT_GET_SEQ_LIST(mem_space, 0, &mem_iter, io_info->dxpl_cache->vec_size, nelmts, &mem_ns… in H5D__select_io()
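
In H5D__select_io() the cached vec_size sizes the offset/length sequence arrays; when it exceeds the stack default, the arrays come from HDF5's free-list allocator (H5FL_SEQ_MALLOC). The sketch below mirrors those lines only: the function name is invented, it assumes the H5FL free-list declarations that H5Dselect.c already makes, and the sequence-list loop is elided.

    static herr_t
    sketch_select_io_alloc(const H5D_io_info_t *io_info)
    {
        size_t  *mem_len   = NULL;      /* Lengths of memory sequences */
        hsize_t *mem_off   = NULL;      /* Offsets of memory sequences */
        herr_t   ret_value = SUCCEED;

        if(io_info->dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) {
            /* Cached vector size exceeds the stack default: draw sequence
             * arrays of exactly vec_size entries from the free lists */
            if(NULL == (mem_len = H5FL_SEQ_MALLOC(size_t, io_info->dxpl_cache->vec_size))) {
                ret_value = FAIL;
                goto done;
            } /* end if */
            if(NULL == (mem_off = H5FL_SEQ_MALLOC(hsize_t, io_info->dxpl_cache->vec_size))) {
                ret_value = FAIL;
                goto done;
            } /* end if */
        } /* end if */
        /* else: stack arrays of H5D_IO_VECTOR_SIZE entries are used instead */

        /* ... H5S_SELECT_GET_SEQ_LIST() loops would run here ... */

    done:
        /* Return any allocated sequence arrays to their free lists */
        if(mem_len)
            mem_len = H5FL_SEQ_FREE(size_t, mem_len);
        if(mem_off)
            mem_off = H5FL_SEQ_FREE(hsize_t, mem_off);

        return ret_value;
    }
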
H5Dchunk.c
240 const H5D_dxpl_cache_t *dxpl_cache, size_t size);
383 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_direct_write()
2320 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_flush()
2698 HDassert(dxpl_cache); in H5D__chunk_flush_entry()
2851 HDassert(dxpl_cache); in H5D__chunk_cache_evict()
2907 const H5D_dxpl_cache_t *dxpl_cache, size_t size) in H5D__chunk_cache_prune() argument
3046 HDassert(io_info->dxpl_cache); in H5D__chunk_lock()
3422 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_allocated()
3535 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_allocate()
4247 HDassert(dxpl_cache); in H5D__chunk_prune_by_extent()
[all …]
H5Dpkg.h
48 (io_info)->dxpl_cache = dxpl_c; \
55 (io_info)->dxpl_cache = dxpl_c; \
192 H5D_dxpl_cache_t *dxpl_cache; /* Pointer to cached DXPL info */ member
567 const H5D_dxpl_cache_t *dxpl_cache, void *_buf);
H5Dfill.c
245 H5D_dxpl_cache_t *dxpl_cache = &_dxpl_cache; /* Data transfer property cache */ in H5D__fill() local
270 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__fill()
278 … if(H5D__scatter_mem(tmp_buf, space, &mem_iter, (size_t)nelmts, dxpl_cache, buf/*out*/) < 0) { in H5D__fill()
H5Dmpio.c
172 if(io_info->dxpl_cache->xfer_mode == H5FD_MPIO_INDEPENDENT) in H5D__mpio_opt_possible()
323 io_info->dxpl_cache->xfer_mode = xfer_mode; in H5D__ioinfo_xfer_mode()
324 if(H5P_set(dx_plist, H5D_XFER_IO_XFER_MODE_NAME, &io_info->dxpl_cache->xfer_mode) < 0) in H5D__ioinfo_xfer_mode()
368 io_info->dxpl_cache->coll_opt_mode = coll_opt_mode; in H5D__ioinfo_coll_opt_mode()
369 … if(H5P_set(dx_plist, H5D_XFER_MPIO_COLLECTIVE_OPT_NAME, &io_info->dxpl_cache->coll_opt_mode) < 0) in H5D__ioinfo_coll_opt_mode()
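
The H5Dmpio.c hits show the cache being written as well as read: when the parallel layer changes the transfer mode, it updates both the cached copy and the underlying property list via H5P_set so the two cannot disagree. A sketch of that two-step update follows; the function name is hypothetical, dx_plist is assumed to be the already-opened DXPL, and the real code lives in H5D__ioinfo_xfer_mode() with its own error macros.

    static herr_t
    sketch_set_xfer_mode(H5D_io_info_t *io_info, H5P_genplist_t *dx_plist,
                         H5FD_mpio_xfer_t xfer_mode)
    {
        /* 1) Update the cached copy that later I/O decisions will read */
        io_info->dxpl_cache->xfer_mode = xfer_mode;

        /* 2) Push the same value back into the property list so the DXPL
         *    stays consistent with the cache */
        if(H5P_set(dx_plist, H5D_XFER_IO_XFER_MODE_NAME, &io_info->dxpl_cache->xfer_mode) < 0)
            return FAIL;

        return SUCCEED;
    }
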
H5Dcontig.c
198 H5D_dxpl_cache_t *dxpl_cache = &_dxpl_cache; /* Data transfer property cache */ in H5D__contig_fill() local
250 if(H5D__get_dxpl_cache(my_dxpl_id, &dxpl_cache) < 0) in H5D__contig_fill()
266 dxpl_cache->max_temp_buf, my_dxpl_id) < 0) in H5D__contig_fill()
274 H5D_BUILD_IO_INFO_WRT(&ioinfo, dset, dxpl_cache, my_dxpl_id, &store, fb_info.fill_buf); in H5D__contig_fill()
/dports/math/vtk6/VTK-6.2.0/ThirdParty/hdf5/vtkhdf5/src/
H5Dscatgath.c
54 const H5D_dxpl_cache_t *dxpl_cache, void *_tgath_buf/*out*/);
56 H5S_sel_iter_t *iter, const H5D_dxpl_cache_t *dxpl_cache,
127 if(tmp_io_info.dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__scatter_file()
228 if(tmp_io_info.dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__gather_file()
315 if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__scatter_mem()
402 if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__gather_mem()
461 …const H5D_dxpl_cache_t *dxpl_cache = io_info->dxpl_cache; /* Local pointer to dataset transfer… in H5D__scatgath_read() local
593 …const H5D_dxpl_cache_t *dxpl_cache = io_info->dxpl_cache; /* Local pointer to dataset transfer… in H5D__scatgath_write() local
643 dxpl_cache, type_info->tconv_buf/*out*/); in H5D__scatgath_write()
760 HDassert(dxpl_cache); in H5D__compound_opt_read()
[all …]
H5Dio.c
66 H5D_dxpl_cache_t *dxpl_cache,
415 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__read()
634 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__write()
668 if(dxpl_cache->xfer_mode == H5FD_MPIO_COLLECTIVE) in H5D__write()
843 H5D_dxpl_cache_t *dxpl_cache, hid_t dxpl_id, in H5D__ioinfo_init()
857 io_info->dxpl_cache = dxpl_cache; in H5D__ioinfo_init()
978 type_info->need_bkg = dxpl_cache->bkgr_buf_type; in H5D__typeinfo_init()
989 target_size = dxpl_cache->max_temp_buf; in H5D__typeinfo_init()
997 && (NULL == dxpl_cache->tconv_buf) && (NULL == dxpl_cache->bkgr_buf)); in H5D__typeinfo_init()
1034 if(bkg_size < dxpl_cache->max_temp_buf) in H5D__typeinfo_init()
[all …]
H5Dselect.c
118 if(io_info->dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) { in H5D__select_io()
119 if(NULL == (mem_len = H5FL_SEQ_MALLOC(size_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
121 if(NULL == (mem_off = H5FL_SEQ_MALLOC(hsize_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
123 if(NULL == (file_len = H5FL_SEQ_MALLOC(size_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
125 if(NULL == (file_off = H5FL_SEQ_MALLOC(hsize_t,io_info->dxpl_cache->vec_size))) in H5D__select_io()
191 …GET_SEQ_LIST(file_space, H5S_GET_SEQ_LIST_SORTED, &file_iter, io_info->dxpl_cache->vec_size, nelmt… in H5D__select_io()
201 …if(H5S_SELECT_GET_SEQ_LIST(mem_space, 0, &mem_iter, io_info->dxpl_cache->vec_size, nelmts, &mem_ns… in H5D__select_io()
H5Dchunk.c
231 const H5D_dxpl_cache_t *dxpl_cache, size_t size);
373 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_direct_write()
2088 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_flush()
2469 HDassert(dxpl_cache); in H5D__chunk_flush_entry()
2622 HDassert(dxpl_cache); in H5D__chunk_cache_evict()
2678 const H5D_dxpl_cache_t *dxpl_cache, size_t size) in H5D__chunk_cache_prune() argument
2818 HDassert(io_info->dxpl_cache); in H5D__chunk_lock()
3196 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_allocated()
3850 HDassert(dxpl_cache); in H5D__chunk_prune_by_extent()
3853 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_prune_by_extent()
[all …]
H5Dpkg.h
50 (io_info)->dxpl_cache = dxpl_c; \
57 (io_info)->dxpl_cache = dxpl_c; \
194 H5D_dxpl_cache_t *dxpl_cache; /* Pointer to cached DXPL info */ member
568 const H5D_dxpl_cache_t *dxpl_cache, void *_buf);
H5Dfill.c
247 H5D_dxpl_cache_t *dxpl_cache = &_dxpl_cache; /* Data transfer property cache */ in H5D__fill() local
272 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__fill()
280 … if(H5D__scatter_mem(tmp_buf, space, &mem_iter, (size_t)nelmts, dxpl_cache, buf/*out*/) < 0) { in H5D__fill()
H5Dmpio.c
174 if(io_info->dxpl_cache->xfer_mode == H5FD_MPIO_INDEPENDENT) in H5D__mpio_opt_possible()
325 io_info->dxpl_cache->xfer_mode = xfer_mode; in H5D__ioinfo_xfer_mode()
326 if(H5P_set(dx_plist, H5D_XFER_IO_XFER_MODE_NAME, &io_info->dxpl_cache->xfer_mode) < 0) in H5D__ioinfo_xfer_mode()
370 io_info->dxpl_cache->coll_opt_mode = coll_opt_mode; in H5D__ioinfo_coll_opt_mode()
371 … if(H5P_set(dx_plist, H5D_XFER_MPIO_COLLECTIVE_OPT_NAME, &io_info->dxpl_cache->coll_opt_mode) < 0) in H5D__ioinfo_coll_opt_mode()
H5Dcontig.c
200 H5D_dxpl_cache_t *dxpl_cache = &_dxpl_cache; /* Data transfer property cache */ in H5D__contig_fill() local
252 if(H5D__get_dxpl_cache(my_dxpl_id, &dxpl_cache) < 0) in H5D__contig_fill()
268 dxpl_cache->max_temp_buf, my_dxpl_id) < 0) in H5D__contig_fill()
276 H5D_BUILD_IO_INFO_WRT(&ioinfo, dset, dxpl_cache, my_dxpl_id, &store, fb_info.fill_buf); in H5D__contig_fill()
/dports/math/vtk8/VTK-8.2.0/ThirdParty/hdf5/vtkhdf5/src/
H5Dscatgath.c
128 vec_size = tmp_io_info.dxpl_cache->vec_size; in H5D__scatter_file()
226 vec_size = tmp_io_info.dxpl_cache->vec_size; in H5D__gather_file()
309 if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) in H5D__scatter_mem()
310 vec_size = dxpl_cache->vec_size; in H5D__scatter_mem()
393 if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) in H5D__gather_mem()
394 vec_size = dxpl_cache->vec_size; in H5D__gather_mem()
450 …const H5D_dxpl_cache_t *dxpl_cache = io_info->dxpl_cache; /* Local pointer to dataset transfer… in H5D__scatgath_read() local
586 …const H5D_dxpl_cache_t *dxpl_cache = io_info->dxpl_cache; /* Local pointer to dataset transfer… in H5D__scatgath_write() local
757 HDassert(dxpl_cache); in H5D__compound_opt_read()
765 if(dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) in H5D__compound_opt_read()
[all …]
H5Dio.c
61 H5D_dxpl_cache_t *dxpl_cache,
411 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__read()
421 if(dxpl_cache->xfer_mode == H5FD_MPIO_COLLECTIVE && in H5D__read()
641 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__write()
675 if(dxpl_cache->xfer_mode == H5FD_MPIO_COLLECTIVE) in H5D__write()
862 H5D_dxpl_cache_t *dxpl_cache, hid_t dxpl_id, in H5D__ioinfo_init()
888 io_info->dxpl_cache = dxpl_cache; in H5D__ioinfo_init()
1024 target_size = dxpl_cache->max_temp_buf; in H5D__typeinfo_init()
1032 && (NULL == dxpl_cache->tconv_buf) && (NULL == dxpl_cache->bkgr_buf)); in H5D__typeinfo_init()
1069 if(bkg_size < dxpl_cache->max_temp_buf) in H5D__typeinfo_init()
[all …]
H5Dchunk.c
296 const H5D_dxpl_cache_t *dxpl_cache, size_t size);
2281 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_flush()
2381 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_dest()
2814 HDassert(dxpl_cache); in H5D__chunk_flush_entry()
2994 HDassert(dxpl_cache); in H5D__chunk_cache_evict()
3067 const H5D_dxpl_cache_t *dxpl_cache, size_t size) in H5D__chunk_cache_prune() argument
3207 HDassert(io_info->dxpl_cache); in H5D__chunk_lock()
3726 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_allocated()
4276 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__chunk_update_old_edge_chunks()
4806 HDassert(dxpl_cache); in H5D__chunk_prune_by_extent()
[all …]
H5Dselect.c
116 if(io_info->dxpl_cache->vec_size > H5D_IO_VECTOR_SIZE) in H5D__select_io()
117 vec_size = io_info->dxpl_cache->vec_size; in H5D__select_io()
H5Dfill.c
251 H5D_dxpl_cache_t *dxpl_cache = &_dxpl_cache; /* Data transfer property cache */ in H5D__fill() local
275 if(H5D__get_dxpl_cache(dxpl_id, &dxpl_cache) < 0) in H5D__fill()
288 … if(H5D__scatter_mem(tmp_buf, space, mem_iter, (size_t)nelmts, dxpl_cache, buf/*out*/) < 0) in H5D__fill()
H5Dpkg.h
50 (io_info)->dxpl_cache = dxpl_c; \
58 (io_info)->dxpl_cache = dxpl_c; \
216 H5D_dxpl_cache_t *dxpl_cache; /* Pointer to cached DXPL info */ member
618 const H5D_dxpl_cache_t *dxpl_cache, void *_buf);
H5Dmpio.c
172 if(io_info->dxpl_cache->xfer_mode == H5FD_MPIO_INDEPENDENT) in H5D__mpio_opt_possible()
323 io_info->dxpl_cache->xfer_mode = xfer_mode; in H5D__ioinfo_xfer_mode()
324 if(H5P_set(dx_plist, H5D_XFER_IO_XFER_MODE_NAME, &io_info->dxpl_cache->xfer_mode) < 0) in H5D__ioinfo_xfer_mode()
368 io_info->dxpl_cache->coll_opt_mode = coll_opt_mode; in H5D__ioinfo_coll_opt_mode()
369 … if(H5P_set(dx_plist, H5D_XFER_MPIO_COLLECTIVE_OPT_NAME, &io_info->dxpl_cache->coll_opt_mode) < 0) in H5D__ioinfo_coll_opt_mode()
H5Dcontig.c
202 H5D_dxpl_cache_t *dxpl_cache = &_dxpl_cache; /* Data transfer property cache */ in H5D__contig_fill() local
247 if(H5D__get_dxpl_cache(raw_dxpl_id, &dxpl_cache) < 0) in H5D__contig_fill()
263 dxpl_cache->max_temp_buf, md_dxpl_id) < 0) in H5D__contig_fill()
271 …H5D_BUILD_IO_INFO_WRT(&ioinfo, dset, dxpl_cache, H5AC_ind_read_dxpl_id, raw_dxpl_id, &store, fb_in… in H5D__contig_fill()