/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/*-------------------------------------------------------------------------
 *
 * Created:     H5B2int.c
 *              Feb 27 2006
 *              Quincey Koziol <koziol@ncsa.uiuc.edu>
 *
 * Purpose:     Internal routines for managing v2 B-trees.
 *
 *-------------------------------------------------------------------------
 */
24
25 /****************/
26 /* Module Setup */
27 /****************/
28
29 #include "H5B2module.h" /* This source code file is part of the H5B2 module */
30
31
32 /***********/
33 /* Headers */
34 /***********/
35 #include "H5private.h" /* Generic Functions */
36 #include "H5B2pkg.h" /* v2 B-trees */
37 #include "H5Eprivate.h" /* Error handling */
38 #include "H5VMprivate.h" /* Vectors and arrays */
39
40
41 /****************/
42 /* Local Macros */
43 /****************/
44
45
46 /******************/
47 /* Local Typedefs */
48 /******************/
49
50
51 /********************/
52 /* Package Typedefs */
53 /********************/
54
55
56 /********************/
57 /* Local Prototypes */
58 /********************/
59 static herr_t H5B2__update_child_flush_depends(H5B2_hdr_t *hdr,
60 unsigned depth, const H5B2_node_ptr_t *node_ptrs, unsigned start_idx,
61 unsigned end_idx, void *old_parent, void *new_parent);
62
63
64 /*********************/
65 /* Package Variables */
66 /*********************/
67
68
69 /*****************************/
70 /* Library Private Variables */
71 /*****************************/
72
73
74 /*******************/
75 /* Local Variables */
76 /*******************/
77
78 /* Declare a free list to manage the 'H5B2_node_info_t' sequence information */
79 H5FL_SEQ_EXTERN(H5B2_node_info_t);
80
81
82
83 /*-------------------------------------------------------------------------
84 * Function: H5B2__locate_record
85 *
86 * Purpose: Performs a binary search to locate a record in a sorted
87 * array of records.
88 *
89 * Sets *IDX to location of record greater than or equal to
90 * record to locate.
91 *
92 * Return: Comparison value for insertion location. Negative for record
93 * to locate being less than value in *IDX. Zero for record to
94 * locate equal to value in *IDX. Positive for record to locate
95 * being greater than value in *IDX (which should only happen when
96 * record to locate is greater than all records to search).
97 *
98 * Programmer: Quincey Koziol
99 * koziol@ncsa.uiuc.edu
100 * Feb 3 2005
101 *
102 *-------------------------------------------------------------------------
103 */
104 herr_t
H5B2__locate_record(const H5B2_class_t * type,unsigned nrec,size_t * rec_off,const uint8_t * native,const void * udata,unsigned * idx,int * cmp)105 H5B2__locate_record(const H5B2_class_t *type, unsigned nrec, size_t *rec_off,
106 const uint8_t *native, const void *udata, unsigned *idx, int *cmp)
107 {
108 unsigned lo = 0, hi; /* Low & high index values */
109 unsigned my_idx = 0; /* Final index value */
110 herr_t ret_value = SUCCEED;
111
112 FUNC_ENTER_PACKAGE
113
114 *cmp = -1;
115
116 hi = nrec;
117 while(lo < hi && *cmp) {
118 my_idx = (lo + hi) / 2;
119 if((type->compare)(udata, native + rec_off[my_idx], cmp) < 0)
120 HGOTO_ERROR(H5E_BTREE, H5E_CANTCOMPARE, FAIL, "can't compare btree2 records")
121 if(*cmp < 0)
122 hi = my_idx;
123 else
124 lo = my_idx + 1;
125 } /* end while */
126
127 *idx = my_idx;
128
129 done:
130 FUNC_LEAVE_NOAPI(ret_value)
131 } /* end H5B2__locate_record */
132
133
134 /*-------------------------------------------------------------------------
135 * Function: H5B2__split1
136 *
137 * Purpose: Perform a 1->2 node split
138 *
139 * Return: Success: Non-negative
140 * Failure: Negative
141 *
142 * Programmer: Quincey Koziol
143 * koziol@hdfgroup.org
144 * Aug 28 2006
145 *
146 *-------------------------------------------------------------------------
147 */
148 herr_t
H5B2__split1(H5B2_hdr_t * hdr,uint16_t depth,H5B2_node_ptr_t * curr_node_ptr,unsigned * parent_cache_info_flags_ptr,H5B2_internal_t * internal,unsigned * internal_flags_ptr,unsigned idx)149 H5B2__split1(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node_ptr,
150 unsigned *parent_cache_info_flags_ptr, H5B2_internal_t *internal,
151 unsigned *internal_flags_ptr, unsigned idx)
152 {
153 const H5AC_class_t *child_class; /* Pointer to child node's class info */
154 haddr_t left_addr, right_addr; /* Addresses of left & right child nodes */
155 void *left_child = NULL, *right_child = NULL; /* Pointers to child nodes */
156 uint16_t *left_nrec, *right_nrec; /* Pointers to child # of records */
157 uint8_t *left_native, *right_native;/* Pointers to childs' native records */
158 H5B2_node_ptr_t *left_node_ptrs = NULL, *right_node_ptrs = NULL;/* Pointers to childs' node pointer info */
159 uint16_t mid_record; /* Index of "middle" record in current node */
160 uint16_t old_node_nrec; /* Number of records in internal node split */
161 unsigned left_child_flags = H5AC__NO_FLAGS_SET, right_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
162 herr_t ret_value = SUCCEED; /* Return value */
163
164 FUNC_ENTER_PACKAGE
165
166 /* Check arguments. */
167 HDassert(hdr);
168 HDassert(internal);
169 HDassert(internal_flags_ptr);
170
171 /* Slide records in parent node up one space, to make room for promoted record */
172 if(idx < internal->nrec) {
173 HDmemmove(H5B2_INT_NREC(internal, hdr, idx + 1), H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size * (internal->nrec - idx));
174 HDmemmove(&(internal->node_ptrs[idx + 2]), &(internal->node_ptrs[idx + 1]), sizeof(H5B2_node_ptr_t) * (internal->nrec - idx));
175 } /* end if */
176
177 /* Check for the kind of B-tree node to split */
178 if(depth > 1) {
179 H5B2_internal_t *left_int = NULL, *right_int = NULL; /* Pointers to old & new internal nodes */
180
181 /* Create new internal node */
182 internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec = 0;
183 if(H5B2__create_internal(hdr, internal, &(internal->node_ptrs[idx + 1]), (uint16_t)(depth - 1)) < 0)
184 HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create new internal node")
185
186 /* Setup information for unlocking child nodes */
187 child_class = H5AC_BT2_INT;
188
189 /* Protect both leaves */
190 /* (Shadow left node if doing SWMR writes) */
191 if(NULL == (left_int = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
192 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
193 left_addr = internal->node_ptrs[idx].addr;
194 if(NULL == (right_int = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1], (uint16_t)(depth - 1), FALSE, H5AC__NO_FLAGS_SET)))
195 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
196 right_addr = internal->node_ptrs[idx + 1].addr;
197
198 /* More setup for child nodes */
199 left_child = left_int;
200 right_child = right_int;
201 left_nrec = &(left_int->nrec);
202 right_nrec = &(right_int->nrec);
203 left_native = left_int->int_native;
204 right_native = right_int->int_native;
205 left_node_ptrs = left_int->node_ptrs;
206 right_node_ptrs = right_int->node_ptrs;
207 } /* end if */
208 else {
209 H5B2_leaf_t *left_leaf = NULL, *right_leaf = NULL; /* Pointers to old & new leaf nodes */
210
211 /* Create new leaf node */
212 internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec = 0;
213 if(H5B2__create_leaf(hdr, internal, &(internal->node_ptrs[idx + 1])) < 0)
214 HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create new leaf node")
215
216 /* Setup information for unlocking child nodes */
217 child_class = H5AC_BT2_LEAF;
218
219 /* Protect both leaves */
220 /* (Shadow the left node if doing SWMR writes) */
221 if(NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
222 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
223 left_addr = internal->node_ptrs[idx].addr;
224 if(NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], FALSE, H5AC__NO_FLAGS_SET)))
225 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
226 right_addr = internal->node_ptrs[idx + 1].addr;
227
228 /* More setup for child nodes */
229 left_child = left_leaf;
230 right_child = right_leaf;
231 left_nrec = &(left_leaf->nrec);
232 right_nrec = &(right_leaf->nrec);
233 left_native = left_leaf->leaf_native;
234 right_native = right_leaf->leaf_native;
235 } /* end if */
236
237 /* Get the number of records in node to split */
238 old_node_nrec = internal->node_ptrs[idx].node_nrec;
239
240 /* Determine "middle" record to promote to internal node */
241 mid_record = old_node_nrec / 2;
242
243 /* Copy "upper half" of records to new child */
244 HDmemcpy(H5B2_NAT_NREC(right_native, hdr, 0),
245 H5B2_NAT_NREC(left_native, hdr, mid_record + (unsigned)1),
246 hdr->cls->nrec_size * (old_node_nrec - (mid_record + (unsigned)1)));
247
248 /* Copy "upper half" of node pointers, if the node is an internal node */
249 if(depth > 1)
250 HDmemcpy(&(right_node_ptrs[0]), &(left_node_ptrs[mid_record + (unsigned)1]),
251 sizeof(H5B2_node_ptr_t) * (size_t)(old_node_nrec - mid_record));
252
253 /* Copy "middle" record to internal node */
254 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx), H5B2_NAT_NREC(left_native, hdr, mid_record), hdr->cls->nrec_size);
255
256 /* Mark nodes as dirty */
257 left_child_flags |= H5AC__DIRTIED_FLAG;
258 right_child_flags |= H5AC__DIRTIED_FLAG;
259
260 /* Update record counts in child nodes */
261 internal->node_ptrs[idx].node_nrec = *left_nrec = mid_record;
262 internal->node_ptrs[idx + 1].node_nrec = *right_nrec = (uint16_t)(old_node_nrec - (mid_record + 1));
263
264 /* Determine total number of records in new child nodes */
265 if(depth > 1) {
266 unsigned u; /* Local index variable */
267 hsize_t new_left_all_nrec; /* New total number of records in left child */
268 hsize_t new_right_all_nrec; /* New total number of records in right child */
269
270 /* Compute total of all records in each child node */
271 new_left_all_nrec = internal->node_ptrs[idx].node_nrec;
272 for(u = 0; u < (*left_nrec + (unsigned)1); u++)
273 new_left_all_nrec += left_node_ptrs[u].all_nrec;
274
275 new_right_all_nrec = internal->node_ptrs[idx + 1].node_nrec;
276 for(u = 0; u < (*right_nrec + (unsigned)1); u++)
277 new_right_all_nrec += right_node_ptrs[u].all_nrec;
278
279 internal->node_ptrs[idx].all_nrec = new_left_all_nrec;
280 internal->node_ptrs[idx + 1].all_nrec = new_right_all_nrec;
281 } /* end if */
282 else {
283 internal->node_ptrs[idx].all_nrec = internal->node_ptrs[idx].node_nrec;
284 internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec;
285 } /* end else */
286
287 /* Update # of records in parent node */
288 internal->nrec++;
289
290 /* Mark parent as dirty */
291 *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
292
293 /* Update grandparent info */
294 curr_node_ptr->node_nrec++;
295
296 /* Mark grandparent as dirty, if given */
297 if(parent_cache_info_flags_ptr)
298 *parent_cache_info_flags_ptr |= H5AC__DIRTIED_FLAG;
299
300 /* Update flush dependencies for grandchildren, if using SWMR */
301 if(hdr->swmr_write && depth > 1)
302 if(H5B2__update_child_flush_depends(hdr, depth, right_node_ptrs,
303 0, (unsigned)(*right_nrec + 1), left_child, right_child) < 0)
304 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
305
306 #ifdef H5B2_DEBUG
307 H5B2__assert_internal((hsize_t)0, hdr, internal);
308 if(depth > 1) {
309 H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child, (H5B2_internal_t *)right_child);
310 H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child, (H5B2_internal_t *)left_child);
311 } /* end if */
312 else {
313 H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)right_child);
314 H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
315 } /* end else */
316 #endif /* H5B2_DEBUG */
317
318 done:
319 /* Release child nodes (marked as dirty) */
320 if(left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
321 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree leaf node")
322 if(right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
323 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree leaf node")
324
325 FUNC_LEAVE_NOAPI(ret_value)
326 } /* end H5B2__split1() */
327
328
329 /*-------------------------------------------------------------------------
330 * Function: H5B2__split_root
331 *
332 * Purpose: Split the root node
333 *
334 * Return: Success: Non-negative
335 * Failure: Negative
336 *
337 * Programmer: Quincey Koziol
338 * koziol@ncsa.uiuc.edu
339 * Feb 3 2005
340 *
341 *-------------------------------------------------------------------------
342 */
343 herr_t
H5B2__split_root(H5B2_hdr_t * hdr)344 H5B2__split_root(H5B2_hdr_t *hdr)
345 {
346 H5B2_internal_t *new_root = NULL; /* Pointer to new root node */
347 unsigned new_root_flags = H5AC__NO_FLAGS_SET; /* Cache flags for new root node */
348 H5B2_node_ptr_t old_root_ptr; /* Old node pointer to root node in B-tree */
349 size_t sz_max_nrec; /* Temporary variable for range checking */
350 unsigned u_max_nrec_size; /* Temporary variable for range checking */
351 herr_t ret_value = SUCCEED; /* Return value */
352
353 FUNC_ENTER_PACKAGE
354
355 /* Check arguments. */
356 HDassert(hdr);
357
358 /* Update depth of B-tree */
359 hdr->depth++;
360
361 /* Re-allocate array of node info structs */
362 if(NULL == (hdr->node_info = H5FL_SEQ_REALLOC(H5B2_node_info_t, hdr->node_info, (size_t)(hdr->depth + 1))))
363 HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed")
364
365 /* Update node info for new depth of tree */
366 sz_max_nrec = H5B2_NUM_INT_REC(hdr, hdr->depth);
367 H5_CHECKED_ASSIGN(hdr->node_info[hdr->depth].max_nrec, unsigned, sz_max_nrec, size_t)
368 hdr->node_info[hdr->depth].split_nrec = (hdr->node_info[hdr->depth].max_nrec * hdr->split_percent) / 100;
369 hdr->node_info[hdr->depth].merge_nrec = (hdr->node_info[hdr->depth].max_nrec * hdr->merge_percent) / 100;
370 hdr->node_info[hdr->depth].cum_max_nrec = ((hdr->node_info[hdr->depth].max_nrec + 1) *
371 hdr->node_info[hdr->depth - 1].cum_max_nrec) + hdr->node_info[hdr->depth].max_nrec;
372 u_max_nrec_size = H5VM_limit_enc_size((uint64_t)hdr->node_info[hdr->depth].cum_max_nrec);
373 H5_CHECKED_ASSIGN(hdr->node_info[hdr->depth].cum_max_nrec_size, uint8_t, u_max_nrec_size, unsigned)
374 if(NULL == (hdr->node_info[hdr->depth].nat_rec_fac = H5FL_fac_init(hdr->cls->nrec_size * hdr->node_info[hdr->depth].max_nrec)))
375 HGOTO_ERROR(H5E_RESOURCE, H5E_CANTINIT, FAIL, "can't create node native key block factory")
376 if(NULL == (hdr->node_info[hdr->depth].node_ptr_fac = H5FL_fac_init(sizeof(H5B2_node_ptr_t) * (hdr->node_info[hdr->depth].max_nrec + 1))))
377 HGOTO_ERROR(H5E_RESOURCE, H5E_CANTINIT, FAIL, "can't create internal 'branch' node node pointer block factory")
378
379 /* Keep old root node pointer info */
380 old_root_ptr = hdr->root;
381
382 /* Create new internal node to use as root */
383 hdr->root.node_nrec = 0;
384 if(H5B2__create_internal(hdr, hdr, &(hdr->root), hdr->depth) < 0)
385 HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create new internal node")
386
387 /* Protect new root node */
388 if(NULL == (new_root = H5B2__protect_internal(hdr, hdr, &hdr->root, hdr->depth, FALSE, H5AC__NO_FLAGS_SET)))
389 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
390
391 /* Set first node pointer in root node to old root node pointer info */
392 new_root->node_ptrs[0] = old_root_ptr;
393
394 /* Split original root node */
395 if(H5B2__split1(hdr, hdr->depth, &(hdr->root), NULL, new_root, &new_root_flags, 0) < 0)
396 HGOTO_ERROR(H5E_BTREE, H5E_CANTSPLIT, FAIL, "unable to split old root node")
397
398 done:
399 /* Release new root node (marked as dirty) */
400 if(new_root && H5AC_unprotect(hdr->f, H5AC_BT2_INT, hdr->root.addr, new_root, new_root_flags) < 0)
401 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree internal node")
402
403 FUNC_LEAVE_NOAPI(ret_value)
404 } /* end H5B2__split_root() */
405
406
407 /*-------------------------------------------------------------------------
408 * Function: H5B2__redistribute2
409 *
410 * Purpose: Redistribute records between two nodes
411 *
412 * Return: Success: Non-negative
413 * Failure: Negative
414 *
415 * Programmer: Quincey Koziol
416 * koziol@ncsa.uiuc.edu
417 * Feb 9 2005
418 *
419 *-------------------------------------------------------------------------
420 */
421 herr_t
H5B2__redistribute2(H5B2_hdr_t * hdr,uint16_t depth,H5B2_internal_t * internal,unsigned idx)422 H5B2__redistribute2(H5B2_hdr_t *hdr, uint16_t depth, H5B2_internal_t *internal,
423 unsigned idx)
424 {
425 const H5AC_class_t *child_class; /* Pointer to child node's class info */
426 haddr_t left_addr, right_addr; /* Addresses of left & right child nodes */
427 void *left_child = NULL, *right_child = NULL; /* Pointers to child nodes */
428 uint16_t *left_nrec, *right_nrec; /* Pointers to child # of records */
429 uint8_t *left_native, *right_native; /* Pointers to childs' native records */
430 H5B2_node_ptr_t *left_node_ptrs = NULL, *right_node_ptrs = NULL;/* Pointers to childs' node pointer info */
431 hssize_t left_moved_nrec = 0, right_moved_nrec = 0; /* Number of records moved, for internal redistrib */
432 unsigned left_child_flags = H5AC__NO_FLAGS_SET, right_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
433 herr_t ret_value = SUCCEED; /* Return value */
434
435 FUNC_ENTER_PACKAGE
436
437 /* Check arguments. */
438 HDassert(hdr);
439 HDassert(internal);
440
441 /* Check for the kind of B-tree node to redistribute */
442 if(depth > 1) {
443 H5B2_internal_t *left_internal; /* Pointer to left internal node */
444 H5B2_internal_t *right_internal; /* Pointer to right internal node */
445
446 /* Setup information for unlocking child nodes */
447 child_class = H5AC_BT2_INT;
448
449 /* Lock left & right B-tree child nodes */
450 /* (Shadow both nodes if doing SWMR writes) */
451 if(NULL == (left_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
452 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
453 left_addr = internal->node_ptrs[idx].addr;
454 if(NULL == (right_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
455 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
456 right_addr = internal->node_ptrs[idx + 1].addr;
457
458 /* More setup for child nodes */
459 left_child = left_internal;
460 right_child = right_internal;
461 left_nrec = &(left_internal->nrec);
462 right_nrec = &(right_internal->nrec);
463 left_native = left_internal->int_native;
464 right_native = right_internal->int_native;
465 left_node_ptrs = left_internal->node_ptrs;
466 right_node_ptrs = right_internal->node_ptrs;
467 } /* end if */
468 else {
469 H5B2_leaf_t *left_leaf; /* Pointer to left leaf node */
470 H5B2_leaf_t *right_leaf; /* Pointer to right leaf node */
471
472 /* Setup information for unlocking child nodes */
473 child_class = H5AC_BT2_LEAF;
474
475 /* Lock left & right B-tree child nodes */
476 /* (Shadow both nodes if doing SWMR writes) */
477 if(NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
478 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
479 left_addr = internal->node_ptrs[idx].addr;
480 if(NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
481 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
482 right_addr = internal->node_ptrs[idx + 1].addr;
483
484 /* More setup for child nodes */
485 left_child = left_leaf;
486 right_child = right_leaf;
487 left_nrec = &(left_leaf->nrec);
488 right_nrec = &(right_leaf->nrec);
489 left_native = left_leaf->leaf_native;
490 right_native = right_leaf->leaf_native;
491 } /* end else */
492
493 #ifdef H5B2_DEBUG
494 H5B2__assert_internal((hsize_t)0, hdr, internal);
495 if(depth > 1) {
496 H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child, (H5B2_internal_t *)right_child);
497 H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child, (H5B2_internal_t *)left_child);
498 } /* end if */
499 else {
500 H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)right_child);
501 H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
502 } /* end else */
503 #endif /* H5B2_DEBUG */
504
505 /* Determine whether to shuffle records left or right */
506 if(*left_nrec < *right_nrec) {
507 /* Moving record from right node to left */
508
509 uint16_t new_right_nrec = (uint16_t)(*left_nrec + *right_nrec) / 2; /* New number of records for right child */
510 uint16_t move_nrec = (uint16_t)(*right_nrec - new_right_nrec); /* Number of records to move from right node to left */
511
512 /* Copy record from parent node down into left child */
513 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
514
515 /* See if we need to move records from right node */
516 if(move_nrec > 1)
517 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, (*left_nrec + 1)), H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (size_t)(move_nrec - 1));
518
519 /* Move record from right node into parent node */
520 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx), H5B2_NAT_NREC(right_native, hdr, (move_nrec - 1)), hdr->cls->nrec_size);
521
522 /* Slide records in right node down */
523 HDmemmove(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(right_native, hdr, move_nrec), hdr->cls->nrec_size * new_right_nrec);
524
525 /* Handle node pointers, if we have an internal node */
526 if(depth > 1) {
527 hsize_t moved_nrec = move_nrec; /* Total number of records moved, for internal redistrib */
528 unsigned u; /* Local index variable */
529
530 /* Count the number of records being moved */
531 for(u = 0; u < move_nrec; u++)
532 moved_nrec += right_node_ptrs[u].all_nrec;
533 H5_CHECKED_ASSIGN(left_moved_nrec, hssize_t, moved_nrec, hsize_t)
534 right_moved_nrec -= (hssize_t)moved_nrec;
535
536 /* Copy node pointers from right node to left */
537 HDmemcpy(&(left_node_ptrs[*left_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * move_nrec);
538
539 /* Slide node pointers in right node down */
540 HDmemmove(&(right_node_ptrs[0]), &(right_node_ptrs[move_nrec]), sizeof(H5B2_node_ptr_t) * (new_right_nrec + (unsigned)1));
541 } /* end if */
542
543 /* Update flush dependencies for grandchildren, if using SWMR */
544 if(hdr->swmr_write && depth > 1)
545 if(H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs,
546 (unsigned)(*left_nrec + 1), (unsigned)(*left_nrec + move_nrec + 1), right_child, left_child) < 0)
547 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
548
549 /* Update number of records in child nodes */
550 *left_nrec = (uint16_t)(*left_nrec + move_nrec);
551 *right_nrec = new_right_nrec;
552
553 /* Mark nodes as dirty */
554 left_child_flags |= H5AC__DIRTIED_FLAG;
555 right_child_flags |= H5AC__DIRTIED_FLAG;
556 } /* end if */
557 else {
558 /* Moving record from left node to right */
559
560 uint16_t new_left_nrec = (uint16_t)(*left_nrec + *right_nrec) / 2; /* New number of records for left child */
561 uint16_t move_nrec = (uint16_t)(*left_nrec - new_left_nrec); /* Number of records to move from left node to right */
562
563 /* Sanity check */
564 HDassert(*left_nrec > *right_nrec);
565
566 /* Slide records in right node up */
567 HDmemmove(H5B2_NAT_NREC(right_native, hdr, move_nrec),
568 H5B2_NAT_NREC(right_native, hdr, 0),
569 hdr->cls->nrec_size * (*right_nrec));
570
571 /* Copy record from parent node down into right child */
572 HDmemcpy(H5B2_NAT_NREC(right_native, hdr, (move_nrec - 1)), H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
573
574 /* See if we need to move records from left node */
575 if(move_nrec > 1)
576 HDmemcpy(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(left_native, hdr, ((*left_nrec - move_nrec) + 1)), hdr->cls->nrec_size * (size_t)(move_nrec - 1));
577
578 /* Move record from left node into parent node */
579 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx), H5B2_NAT_NREC(left_native, hdr, (*left_nrec - move_nrec)), hdr->cls->nrec_size);
580
581 /* Handle node pointers, if we have an internal node */
582 if(depth > 1) {
583 hsize_t moved_nrec = move_nrec; /* Total number of records moved, for internal redistrib */
584 unsigned u; /* Local index variable */
585
586 /* Slide node pointers in right node up */
587 HDmemmove(&(right_node_ptrs[move_nrec]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
588
589 /* Copy node pointers from left node to right */
590 HDmemcpy(&(right_node_ptrs[0]), &(left_node_ptrs[new_left_nrec + 1]), sizeof(H5B2_node_ptr_t) * move_nrec);
591
592 /* Count the number of records being moved */
593 for(u = 0; u < move_nrec; u++)
594 moved_nrec += right_node_ptrs[u].all_nrec;
595 left_moved_nrec -= (hssize_t)moved_nrec;
596 H5_CHECKED_ASSIGN(right_moved_nrec, hssize_t, moved_nrec, hsize_t)
597 } /* end if */
598
599 /* Update flush dependencies for grandchildren, if using SWMR */
600 if(hdr->swmr_write && depth > 1)
601 if(H5B2__update_child_flush_depends(hdr, depth, right_node_ptrs,
602 0, (unsigned)move_nrec, left_child, right_child) < 0)
603 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
604
605 /* Update number of records in child nodes */
606 *left_nrec = new_left_nrec;
607 *right_nrec = (uint16_t)(*right_nrec + move_nrec);
608
609 /* Mark nodes as dirty */
610 left_child_flags |= H5AC__DIRTIED_FLAG;
611 right_child_flags |= H5AC__DIRTIED_FLAG;
612 } /* end else */
613
614 /* Update # of records in child nodes */
615 internal->node_ptrs[idx].node_nrec = *left_nrec;
616 internal->node_ptrs[idx + 1].node_nrec = *right_nrec;
617
618 /* Update total # of records in child B-trees */
619 if(depth > 1) {
620 internal->node_ptrs[idx].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx].all_nrec + left_moved_nrec);
621 internal->node_ptrs[idx + 1].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx + 1].all_nrec + right_moved_nrec);
622 } /* end if */
623 else {
624 internal->node_ptrs[idx].all_nrec = internal->node_ptrs[idx].node_nrec;
625 internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec;
626 } /* end else */
627
628 #ifdef H5B2_DEBUG
629 H5B2__assert_internal((hsize_t)0, hdr, internal);
630 if(depth > 1) {
631 H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child, (H5B2_internal_t *)right_child);
632 H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child, (H5B2_internal_t *)left_child);
633 } /* end if */
634 else {
635 H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)right_child);
636 H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
637 } /* end else */
638 #endif /* H5B2_DEBUG */
639
640 done:
641 /* Release child nodes (marked as dirty) */
642 if(left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
643 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
644 if(right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
645 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
646
647 FUNC_LEAVE_NOAPI(ret_value)
648 } /* end H5B2__redistribute2() */
649
650
651 /*-------------------------------------------------------------------------
652 * Function: H5B2__redistribute3
653 *
654 * Purpose: Redistribute records between three nodes
655 *
656 * Return: Success: Non-negative
657 * Failure: Negative
658 *
659 * Programmer: Quincey Koziol
660 * koziol@ncsa.uiuc.edu
661 * Feb 9 2005
662 *
663 *-------------------------------------------------------------------------
664 */
665 herr_t
H5B2__redistribute3(H5B2_hdr_t * hdr,uint16_t depth,H5B2_internal_t * internal,unsigned * internal_flags_ptr,unsigned idx)666 H5B2__redistribute3(H5B2_hdr_t *hdr, uint16_t depth, H5B2_internal_t *internal,
667 unsigned *internal_flags_ptr, unsigned idx)
668 {
669 H5B2_node_ptr_t *left_node_ptrs = NULL, *right_node_ptrs = NULL; /* Pointers to childs' node pointer info */
670 H5B2_node_ptr_t *middle_node_ptrs = NULL; /* Pointers to childs' node pointer info */
671 const H5AC_class_t *child_class; /* Pointer to child node's class info */
672 haddr_t left_addr, right_addr; /* Addresses of left & right child nodes */
673 haddr_t middle_addr; /* Address of middle child node */
674 void *left_child = NULL, *right_child = NULL; /* Pointers to child nodes */
675 void *middle_child = NULL; /* Pointers to middle child node */
676 uint16_t *left_nrec, *right_nrec; /* Pointers to child # of records */
677 uint16_t *middle_nrec; /* Pointers to middle child # of records */
678 uint8_t *left_native, *right_native; /* Pointers to childs' native records */
679 uint8_t *middle_native; /* Pointers to middle child's native records */
680 hssize_t left_moved_nrec = 0, right_moved_nrec = 0; /* Number of records moved, for internal split */
681 hssize_t middle_moved_nrec = 0; /* Number of records moved, for internal split */
682 unsigned left_child_flags = H5AC__NO_FLAGS_SET, right_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
683 unsigned middle_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
684 herr_t ret_value = SUCCEED; /* Return value */
685
686 FUNC_ENTER_PACKAGE
687
688 /* Check arguments. */
689 HDassert(hdr);
690 HDassert(internal);
691 HDassert(internal_flags_ptr);
692
693 /* Check for the kind of B-tree node to redistribute */
694 if(depth > 1) {
695 H5B2_internal_t *left_internal; /* Pointer to left internal node */
696 H5B2_internal_t *middle_internal; /* Pointer to middle internal node */
697 H5B2_internal_t *right_internal; /* Pointer to right internal node */
698
699 /* Setup information for unlocking child nodes */
700 child_class = H5AC_BT2_INT;
701
702 /* Lock B-tree child nodes */
703 /* (Shadow all nodes if doing SWMR writes) */
704 if(NULL == (left_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx - 1], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
705 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
706 left_addr = internal->node_ptrs[idx - 1].addr;
707 if(NULL == (middle_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
708 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
709 middle_addr = internal->node_ptrs[idx].addr;
710 if(NULL == (right_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
711 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
712 right_addr = internal->node_ptrs[idx + 1].addr;
713
714 /* More setup for child nodes */
715 left_child = left_internal;
716 middle_child = middle_internal;
717 right_child = right_internal;
718 left_nrec = &(left_internal->nrec);
719 middle_nrec = &(middle_internal->nrec);
720 right_nrec = &(right_internal->nrec);
721 left_native = left_internal->int_native;
722 middle_native = middle_internal->int_native;
723 right_native = right_internal->int_native;
724 left_node_ptrs = left_internal->node_ptrs;
725 middle_node_ptrs = middle_internal->node_ptrs;
726 right_node_ptrs = right_internal->node_ptrs;
727 } /* end if */
728 else {
729 H5B2_leaf_t *left_leaf; /* Pointer to left leaf node */
730 H5B2_leaf_t *middle_leaf; /* Pointer to middle leaf node */
731 H5B2_leaf_t *right_leaf; /* Pointer to right leaf node */
732
733 /* Setup information for unlocking child nodes */
734 child_class = H5AC_BT2_LEAF;
735
736 /* Lock B-tree child nodes */
737 /* (Shadow all nodes if doing SWMR writes) */
738 if(NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx - 1], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
739 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
740 left_addr = internal->node_ptrs[idx - 1].addr;
741 if(NULL == (middle_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
742 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
743 middle_addr = internal->node_ptrs[idx].addr;
744 if(NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
745 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
746 right_addr = internal->node_ptrs[idx + 1].addr;
747
748 /* More setup for child nodes */
749 left_child = left_leaf;
750 middle_child = middle_leaf;
751 right_child = right_leaf;
752 left_nrec = &(left_leaf->nrec);
753 middle_nrec = &(middle_leaf->nrec);
754 right_nrec = &(right_leaf->nrec);
755 left_native = left_leaf->leaf_native;
756 middle_native = middle_leaf->leaf_native;
757 right_native = right_leaf->leaf_native;
758 } /* end else */
759
760 /* Redistribute records */
761 {
762 /* Compute new # of records in each node */
763 unsigned total_nrec = (unsigned)(*left_nrec + *middle_nrec + *right_nrec + 2);
764 uint16_t new_middle_nrec = (uint16_t)(total_nrec - 2) / 3;
765 uint16_t new_left_nrec = (uint16_t)((total_nrec - 2) - new_middle_nrec) / 2;
766 uint16_t new_right_nrec = (uint16_t)((total_nrec - 2) - (unsigned)(new_left_nrec + new_middle_nrec));
767 uint16_t curr_middle_nrec = *middle_nrec;
768
769 /* Sanity check rounding */
770 HDassert(new_middle_nrec <= new_left_nrec);
771 HDassert(new_middle_nrec <= new_right_nrec);
772
773 /* Move records into left node */
774 if(new_left_nrec > *left_nrec) {
775 uint16_t moved_middle_nrec = 0; /* Number of records moved into left node */
776
777 /* Move left parent record down to left node */
778 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx - 1), hdr->cls->nrec_size);
779
780 /* Move records from middle node into left node */
781 if((new_left_nrec - 1) > *left_nrec) {
782 moved_middle_nrec = (uint16_t)(new_left_nrec - (*left_nrec + 1));
783 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec + 1), H5B2_NAT_NREC(middle_native, hdr, 0), hdr->cls->nrec_size * moved_middle_nrec);
784 } /* end if */
785
786 /* Move record from middle node up to parent node */
787 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx - 1), H5B2_NAT_NREC(middle_native, hdr, moved_middle_nrec), hdr->cls->nrec_size);
788 moved_middle_nrec++;
789
790 /* Slide records in middle node down */
791 HDmemmove(H5B2_NAT_NREC(middle_native, hdr, 0), H5B2_NAT_NREC(middle_native, hdr, moved_middle_nrec), hdr->cls->nrec_size * (size_t)(*middle_nrec - moved_middle_nrec));
792
793 /* Move node pointers also if this is an internal node */
794 if(depth > 1) {
795 hsize_t moved_nrec; /* Total number of records moved, for internal redistrib */
796 unsigned move_nptrs; /* Number of node pointers to move */
797 unsigned u; /* Local index variable */
798
799 /* Move middle node pointers into left node */
800 move_nptrs = (unsigned)(new_left_nrec - *left_nrec);
801 HDmemcpy(&(left_node_ptrs[*left_nrec + 1]), &(middle_node_ptrs[0]), sizeof(H5B2_node_ptr_t)*move_nptrs);
802
803 /* Count the number of records being moved into the left node */
804 for(u = 0, moved_nrec = 0; u < move_nptrs; u++)
805 moved_nrec += middle_node_ptrs[u].all_nrec;
806 left_moved_nrec = (hssize_t)(moved_nrec + move_nptrs);
807 middle_moved_nrec -= (hssize_t)(moved_nrec + move_nptrs);
808
809 /* Slide the node pointers in middle node down */
810 HDmemmove(&(middle_node_ptrs[0]), &(middle_node_ptrs[move_nptrs]), sizeof(H5B2_node_ptr_t) * ((*middle_nrec - move_nptrs) + 1));
811 } /* end if */
812
813 /* Update flush dependencies for grandchildren, if using SWMR */
814 if(hdr->swmr_write && depth > 1)
815 if(H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs,
816 (unsigned)(*left_nrec + 1), (unsigned)(*left_nrec + moved_middle_nrec + 1), middle_child, left_child) < 0)
817 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
818
819 /* Update the current number of records in middle node */
820 curr_middle_nrec = (uint16_t)(curr_middle_nrec - moved_middle_nrec);
821
822 /* Mark nodes as dirty */
823 left_child_flags |= H5AC__DIRTIED_FLAG;
824 middle_child_flags |= H5AC__DIRTIED_FLAG;
825 } /* end if */
826
827 /* Move records into right node */
828 if(new_right_nrec > *right_nrec) {
829 unsigned right_nrec_move = (unsigned)(new_right_nrec - *right_nrec); /* Number of records to move out of right node */
830
831 /* Slide records in right node up */
832 HDmemmove(H5B2_NAT_NREC(right_native, hdr, right_nrec_move), H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (*right_nrec));
833
834 /* Move right parent record down to right node */
835 HDmemcpy(H5B2_NAT_NREC(right_native, hdr, right_nrec_move - 1), H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
836
837 /* Move records from middle node into right node */
838 if(right_nrec_move > 1)
839 HDmemcpy(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(middle_native, hdr, ((curr_middle_nrec - right_nrec_move) + 1)), hdr->cls->nrec_size * (right_nrec_move - 1));
840
841 /* Move record from middle node up to parent node */
842 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx), H5B2_NAT_NREC(middle_native, hdr, (curr_middle_nrec - right_nrec_move)), hdr->cls->nrec_size);
843
844 /* Move node pointers also if this is an internal node */
845 if(depth > 1) {
846 hsize_t moved_nrec; /* Total number of records moved, for internal redistrib */
847 unsigned u; /* Local index variable */
848
849 /* Slide the node pointers in right node up */
850 HDmemmove(&(right_node_ptrs[right_nrec_move]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
851
852 /* Move middle node pointers into right node */
853 HDmemcpy(&(right_node_ptrs[0]), &(middle_node_ptrs[(curr_middle_nrec - right_nrec_move) + 1]), sizeof(H5B2_node_ptr_t) * right_nrec_move);
854
855 /* Count the number of records being moved into the right node */
856 for(u = 0, moved_nrec = 0; u < right_nrec_move; u++)
857 moved_nrec += right_node_ptrs[u].all_nrec;
858 right_moved_nrec = (hssize_t)(moved_nrec + right_nrec_move);
859 middle_moved_nrec -= (hssize_t)(moved_nrec + right_nrec_move);
860 } /* end if */
861
862 /* Update flush dependencies for grandchildren, if using SWMR */
863 if(hdr->swmr_write && depth > 1)
864 if(H5B2__update_child_flush_depends(hdr, depth, right_node_ptrs,
865 0, (unsigned)right_nrec_move, middle_child, right_child) < 0)
866 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
867
868 /* Update the current number of records in middle node */
869 curr_middle_nrec = (uint16_t)(curr_middle_nrec - right_nrec_move);
870
871 /* Mark nodes as dirty */
872 middle_child_flags |= H5AC__DIRTIED_FLAG;
873 right_child_flags |= H5AC__DIRTIED_FLAG;
874 } /* end if */
875
876 /* Move records out of left node */
877 if(new_left_nrec < *left_nrec) {
878 unsigned left_nrec_move = (unsigned)(*left_nrec - new_left_nrec); /* Number of records to move out of left node */
879
880 /* Slide middle records up */
881 HDmemmove(H5B2_NAT_NREC(middle_native, hdr, left_nrec_move), H5B2_NAT_NREC(middle_native, hdr, 0), hdr->cls->nrec_size * curr_middle_nrec);
882
883 /* Move left parent record down to middle node */
884 HDmemcpy(H5B2_NAT_NREC(middle_native, hdr, left_nrec_move - 1), H5B2_INT_NREC(internal, hdr, idx - 1), hdr->cls->nrec_size);
885
886 /* Move left records to middle node */
887 if(left_nrec_move > 1)
888 HDmemmove(H5B2_NAT_NREC(middle_native, hdr, 0), H5B2_NAT_NREC(left_native, hdr, new_left_nrec + 1), hdr->cls->nrec_size * (left_nrec_move - 1));
889
890 /* Move left parent record up from left node */
891 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx - 1), H5B2_NAT_NREC(left_native, hdr, new_left_nrec), hdr->cls->nrec_size);
892
893 /* Move node pointers also if this is an internal node */
894 if(depth > 1) {
895 hsize_t moved_nrec; /* Total number of records moved, for internal redistrib */
896 unsigned u; /* Local index variable */
897
898 /* Slide the node pointers in middle node up */
899 HDmemmove(&(middle_node_ptrs[left_nrec_move]), &(middle_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(curr_middle_nrec + 1));
900
901 /* Move left node pointers into middle node */
902 HDmemcpy(&(middle_node_ptrs[0]), &(left_node_ptrs[new_left_nrec + 1]), sizeof(H5B2_node_ptr_t) * left_nrec_move);
903
904 /* Count the number of records being moved into the left node */
905 for(u = 0, moved_nrec = 0; u < left_nrec_move; u++)
906 moved_nrec += middle_node_ptrs[u].all_nrec;
907 left_moved_nrec -= (hssize_t)(moved_nrec + left_nrec_move);
908 middle_moved_nrec += (hssize_t)(moved_nrec + left_nrec_move);
909 } /* end if */
910
911 /* Update flush dependencies for grandchildren, if using SWMR */
912 if(hdr->swmr_write && depth > 1)
913 if(H5B2__update_child_flush_depends(hdr, depth, middle_node_ptrs,
914 0, (unsigned)left_nrec_move, left_child, middle_child) < 0)
915 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
916
917 /* Update the current number of records in middle node */
918 curr_middle_nrec = (uint16_t)(curr_middle_nrec + left_nrec_move);
919
920 /* Mark nodes as dirty */
921 left_child_flags |= H5AC__DIRTIED_FLAG;
922 middle_child_flags |= H5AC__DIRTIED_FLAG;
923 } /* end if */
924
925 /* Move records out of right node */
926 if(new_right_nrec < *right_nrec) {
927 unsigned right_nrec_move = (unsigned)(*right_nrec - new_right_nrec); /* Number of records to move out of right node */
928
929 /* Move right parent record down to middle node */
930 HDmemcpy(H5B2_NAT_NREC(middle_native, hdr, curr_middle_nrec), H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
931
932 /* Move right records to middle node */
933 HDmemmove(H5B2_NAT_NREC(middle_native, hdr, (curr_middle_nrec + 1)), H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (right_nrec_move - 1));
934
935 /* Move right parent record up from right node */
936 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx), H5B2_NAT_NREC(right_native, hdr, right_nrec_move - 1), hdr->cls->nrec_size);
937
938 /* Slide right records down */
939 HDmemmove(H5B2_NAT_NREC(right_native, hdr, 0), H5B2_NAT_NREC(right_native, hdr, right_nrec_move), hdr->cls->nrec_size * new_right_nrec);
940
941 /* Move node pointers also if this is an internal node */
942 if(depth > 1) {
943 hsize_t moved_nrec; /* Total number of records moved, for internal redistrib */
944 unsigned u; /* Local index variable */
945
946 /* Move right node pointers into middle node */
947 HDmemcpy(&(middle_node_ptrs[curr_middle_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * right_nrec_move);
948
949 /* Count the number of records being moved into the right node */
950 for(u = 0, moved_nrec = 0; u < right_nrec_move; u++)
951 moved_nrec += right_node_ptrs[u].all_nrec;
952 right_moved_nrec -= (hssize_t)(moved_nrec + right_nrec_move);
953 middle_moved_nrec += (hssize_t)(moved_nrec + right_nrec_move);
954
955 /* Slide the node pointers in right node down */
956 HDmemmove(&(right_node_ptrs[0]), &(right_node_ptrs[right_nrec_move]), sizeof(H5B2_node_ptr_t) * (size_t)(new_right_nrec + 1));
957 } /* end if */
958
959 /* Update flush dependencies for grandchildren, if using SWMR */
960 if(hdr->swmr_write && depth > 1)
961 if(H5B2__update_child_flush_depends(hdr, depth, middle_node_ptrs,
962 (unsigned)(curr_middle_nrec + 1), (unsigned)(curr_middle_nrec + right_nrec_move + 1), right_child, middle_child) < 0)
963 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
964
965 /* Mark nodes as dirty */
966 middle_child_flags |= H5AC__DIRTIED_FLAG;
967 right_child_flags |= H5AC__DIRTIED_FLAG;
968 } /* end if */
969
970 /* Update # of records in nodes */
971 *left_nrec = new_left_nrec;
972 *middle_nrec = new_middle_nrec;
973 *right_nrec = new_right_nrec;
974 } /* end block */
975
976 /* Update # of records in child nodes */
977 internal->node_ptrs[idx - 1].node_nrec = *left_nrec;
978 internal->node_ptrs[idx].node_nrec = *middle_nrec;
979 internal->node_ptrs[idx + 1].node_nrec = *right_nrec;
980
981 /* Update total # of records in child B-trees */
982 if(depth > 1) {
983 internal->node_ptrs[idx - 1].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx - 1].all_nrec + left_moved_nrec);
984 internal->node_ptrs[idx].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx].all_nrec + middle_moved_nrec);
985 internal->node_ptrs[idx + 1].all_nrec = (hsize_t)((hssize_t)internal->node_ptrs[idx + 1].all_nrec + right_moved_nrec);
986 } /* end if */
987 else {
988 internal->node_ptrs[idx - 1].all_nrec = internal->node_ptrs[idx - 1].node_nrec;
989 internal->node_ptrs[idx].all_nrec = internal->node_ptrs[idx].node_nrec;
990 internal->node_ptrs[idx + 1].all_nrec = internal->node_ptrs[idx + 1].node_nrec;
991 } /* end else */
992
993 /* Mark parent as dirty */
994 *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
995
996 #ifdef H5B2_DEBUG
997 H5B2__assert_internal((hsize_t)0, hdr, internal);
998 if(depth > 1) {
999 H5B2__assert_internal2(internal->node_ptrs[idx - 1].all_nrec, hdr, (H5B2_internal_t *)left_child, (H5B2_internal_t *)middle_child);
1000 H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)middle_child, (H5B2_internal_t *)left_child);
1001 H5B2__assert_internal2(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)middle_child, (H5B2_internal_t *)right_child);
1002 H5B2__assert_internal2(internal->node_ptrs[idx + 1].all_nrec, hdr, (H5B2_internal_t *)right_child, (H5B2_internal_t *)middle_child);
1003 } /* end if */
1004 else {
1005 H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)middle_child);
1006 H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)middle_child, (H5B2_leaf_t *)right_child);
1007 H5B2__assert_leaf(hdr, (H5B2_leaf_t *)right_child);
1008 } /* end else */
1009 #endif /* H5B2_DEBUG */
1010
1011 done:
1012 /* Unlock child nodes (marked as dirty) */
1013 if(left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
1014 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1015 if(middle_child && H5AC_unprotect(hdr->f, child_class, middle_addr, middle_child, middle_child_flags) < 0)
1016 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1017 if(right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
1018 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1019
1020 FUNC_LEAVE_NOAPI(ret_value)
1021 } /* end H5B2__redistribute3() */
1022
1023
1024 /*-------------------------------------------------------------------------
1025 * Function: H5B2__merge2
1026 *
1027 * Purpose: Perform a 2->1 node merge
1028 *
1029 * Return: Success: Non-negative
1030 *
1031 * Failure: Negative
1032 *
1033 * Programmer: Quincey Koziol
1034 * koziol@ncsa.uiuc.edu
1035 * Mar 4 2005
1036 *
1037 *-------------------------------------------------------------------------
1038 */
1039 herr_t
H5B2__merge2(H5B2_hdr_t * hdr,uint16_t depth,H5B2_node_ptr_t * curr_node_ptr,unsigned * parent_cache_info_flags_ptr,H5B2_internal_t * internal,unsigned * internal_flags_ptr,unsigned idx)1040 H5B2__merge2(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node_ptr,
1041 unsigned *parent_cache_info_flags_ptr, H5B2_internal_t *internal,
1042 unsigned *internal_flags_ptr, unsigned idx)
1043 {
1044 const H5AC_class_t *child_class; /* Pointer to child node's class info */
1045 haddr_t left_addr, right_addr; /* Addresses of left & right child nodes */
1046 void *left_child = NULL, *right_child = NULL; /* Pointers to left & right child nodes */
1047 uint16_t *left_nrec, *right_nrec; /* Pointers to left & right child # of records */
1048 uint8_t *left_native, *right_native; /* Pointers to left & right children's native records */
1049 H5B2_node_ptr_t *left_node_ptrs = NULL, *right_node_ptrs = NULL;/* Pointers to childs' node pointer info */
1050 unsigned left_child_flags = H5AC__NO_FLAGS_SET, right_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
1051 herr_t ret_value = SUCCEED; /* Return value */
1052
1053 FUNC_ENTER_PACKAGE
1054
1055 /* Check arguments. */
1056 HDassert(hdr);
1057 HDassert(curr_node_ptr);
1058 HDassert(internal);
1059 HDassert(internal_flags_ptr);
1060
1061 /* Check for the kind of B-tree node to split */
1062 if(depth > 1) {
1063 H5B2_internal_t *left_internal; /* Pointer to left internal node */
1064 H5B2_internal_t *right_internal; /* Pointer to right internal node */
1065
1066 /* Setup information for unlocking child nodes */
1067 child_class = H5AC_BT2_INT;
1068
1069 /* Lock left & right B-tree child nodes */
1070 /* (Shadow the left node if doing SWMR writes) */
1071 if(NULL == (left_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1072 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1073 left_addr = internal->node_ptrs[idx].addr;
1074 if(NULL == (right_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1], (uint16_t)(depth - 1), FALSE, H5AC__NO_FLAGS_SET)))
1075 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1076 right_addr = internal->node_ptrs[idx + 1].addr;
1077
1078 /* More setup for accessing child node information */
1079 left_child = left_internal;
1080 right_child = right_internal;
1081 left_nrec = &(left_internal->nrec);
1082 right_nrec = &(right_internal->nrec);
1083 left_native = left_internal->int_native;
1084 right_native = right_internal->int_native;
1085 left_node_ptrs = left_internal->node_ptrs;
1086 right_node_ptrs = right_internal->node_ptrs;
1087 } /* end if */
1088 else {
1089 H5B2_leaf_t *left_leaf; /* Pointer to left leaf node */
1090 H5B2_leaf_t *right_leaf; /* Pointer to right leaf node */
1091
1092 /* Setup information for unlocking child nodes */
1093 child_class = H5AC_BT2_LEAF;
1094
1095 /* Lock left & right B-tree child nodes */
1096 /* (Shadow the left node if doing SWMR writes) */
1097 if(NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1098 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1099 left_addr = internal->node_ptrs[idx].addr;
1100 if(NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], FALSE, H5AC__NO_FLAGS_SET)))
1101 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1102 right_addr = internal->node_ptrs[idx + 1].addr;
1103
1104 /* More setup for accessing child node information */
1105 left_child = left_leaf;
1106 right_child = right_leaf;
1107 left_nrec = &(left_leaf->nrec);
1108 right_nrec = &(right_leaf->nrec);
1109 left_native = left_leaf->leaf_native;
1110 right_native = right_leaf->leaf_native;
1111 } /* end else */
1112
1113 /* Redistribute records into left node */
1114 {
1115 /* Copy record from parent node to proper location */
1116 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
1117
1118 /* Copy records from right node to left node */
1119 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec + 1), H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (*right_nrec));
1120
1121 /* Copy node pointers from right node into left node */
1122 if(depth > 1)
1123 HDmemcpy(&(left_node_ptrs[*left_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
1124
1125 /* Update flush dependencies for grandchildren, if using SWMR */
1126 if(hdr->swmr_write && depth > 1)
1127 if(H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs,
1128 (unsigned)(*left_nrec + 1), (unsigned)(*left_nrec + *right_nrec + 2), right_child, left_child) < 0)
1129 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
1130
1131 /* Update # of records in left node */
1132 *left_nrec = (uint16_t)(*left_nrec + *right_nrec + 1);
1133
1134 /* Mark nodes as dirty */
1135 left_child_flags |= H5AC__DIRTIED_FLAG;
1136 right_child_flags |= H5AC__DELETED_FLAG;
1137 if(!(hdr->swmr_write))
1138 right_child_flags |= H5AC__DIRTIED_FLAG | H5AC__FREE_FILE_SPACE_FLAG;
1139 } /* end block */
1140
1141 /* Update # of records in child nodes */
1142 internal->node_ptrs[idx].node_nrec = *left_nrec;
1143
1144 /* Update total # of records in child B-trees */
1145 internal->node_ptrs[idx].all_nrec += internal->node_ptrs[idx + 1].all_nrec + 1;
1146
1147 /* Slide records in parent node down, to eliminate demoted record */
1148 if((idx + 1) < internal->nrec) {
1149 HDmemmove(H5B2_INT_NREC(internal, hdr, idx), H5B2_INT_NREC(internal, hdr, idx + 1), hdr->cls->nrec_size * (internal->nrec - (idx + 1)));
1150 HDmemmove(&(internal->node_ptrs[idx + 1]), &(internal->node_ptrs[idx + 2]), sizeof(H5B2_node_ptr_t) * (internal->nrec - (idx + 1)));
1151 } /* end if */
1152
1153 /* Update # of records in parent node */
1154 internal->nrec--;
1155
1156 /* Mark parent as dirty */
1157 *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
1158
1159 /* Update grandparent info */
1160 curr_node_ptr->node_nrec--;
1161
1162 /* Mark grandparent as dirty, if given */
1163 if(parent_cache_info_flags_ptr)
1164 *parent_cache_info_flags_ptr |= H5AC__DIRTIED_FLAG;
1165
1166 #ifdef H5B2_DEBUG
1167 H5B2__assert_internal((hsize_t)0, hdr, internal);
1168 if(depth > 1)
1169 H5B2__assert_internal(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)left_child);
1170 else
1171 H5B2__assert_leaf(hdr, (H5B2_leaf_t *)left_child);
1172 #endif /* H5B2_DEBUG */
1173
1174 done:
1175 /* Unlock left node (marked as dirty) */
1176 if(left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
1177 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1178
1179 /* Delete right node & remove from cache (marked as dirty) */
1180 if(right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
1181 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1182
1183 FUNC_LEAVE_NOAPI(ret_value)
1184 } /* end H5B2__merge2() */
1185
1186
1187 /*-------------------------------------------------------------------------
1188 * Function: H5B2__merge3
1189 *
1190 * Purpose: Perform a 3->2 node merge
1191 *
1192 * Return: Success: Non-negative
1193 *
1194 * Failure: Negative
1195 *
1196 * Programmer: Quincey Koziol
1197 * koziol@ncsa.uiuc.edu
1198 * Mar 4 2005
1199 *
1200 *-------------------------------------------------------------------------
1201 */
1202 herr_t
H5B2__merge3(H5B2_hdr_t * hdr,uint16_t depth,H5B2_node_ptr_t * curr_node_ptr,unsigned * parent_cache_info_flags_ptr,H5B2_internal_t * internal,unsigned * internal_flags_ptr,unsigned idx)1203 H5B2__merge3(H5B2_hdr_t *hdr, uint16_t depth, H5B2_node_ptr_t *curr_node_ptr,
1204 unsigned *parent_cache_info_flags_ptr, H5B2_internal_t *internal,
1205 unsigned *internal_flags_ptr, unsigned idx)
1206 {
1207 const H5AC_class_t *child_class; /* Pointer to child node's class info */
1208 haddr_t left_addr, right_addr; /* Addresses of left & right child nodes */
1209 haddr_t middle_addr; /* Address of middle child node */
1210 void *left_child = NULL, *right_child = NULL; /* Pointers to left & right child nodes */
1211 void *middle_child = NULL; /* Pointer to middle child node */
1212 uint16_t *left_nrec, *right_nrec; /* Pointers to left & right child # of records */
1213 uint16_t *middle_nrec; /* Pointer to middle child # of records */
1214 uint8_t *left_native, *right_native; /* Pointers to left & right children's native records */
1215 uint8_t *middle_native; /* Pointer to middle child's native records */
1216 H5B2_node_ptr_t *left_node_ptrs = NULL, *right_node_ptrs = NULL;/* Pointers to childs' node pointer info */
1217 H5B2_node_ptr_t *middle_node_ptrs = NULL;/* Pointer to child's node pointer info */
1218 hsize_t middle_moved_nrec; /* Number of records moved, for internal split */
1219 unsigned left_child_flags = H5AC__NO_FLAGS_SET, right_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
1220 unsigned middle_child_flags = H5AC__NO_FLAGS_SET; /* Flags for unprotecting child nodes */
1221 herr_t ret_value = SUCCEED; /* Return value */
1222
1223 FUNC_ENTER_PACKAGE
1224
1225 /* Check arguments. */
1226 HDassert(hdr);
1227 HDassert(curr_node_ptr);
1228 HDassert(internal);
1229 HDassert(internal_flags_ptr);
1230
1231 /* Check for the kind of B-tree node to split */
1232 if(depth > 1) {
1233 H5B2_internal_t *left_internal; /* Pointer to left internal node */
1234 H5B2_internal_t *middle_internal; /* Pointer to middle internal node */
1235 H5B2_internal_t *right_internal; /* Pointer to right internal node */
1236
1237 /* Setup information for unlocking child nodes */
1238 child_class = H5AC_BT2_INT;
1239
1240 /* Lock B-tree child nodes */
1241 /* (Shadow left and middle nodes if doing SWMR writes) */
1242 if(NULL == (left_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx - 1], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1243 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1244 left_addr = internal->node_ptrs[idx - 1].addr;
1245 if(NULL == (middle_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx], (uint16_t)(depth - 1), hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1246 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1247 middle_addr = internal->node_ptrs[idx].addr;
1248 if(NULL == (right_internal = H5B2__protect_internal(hdr, internal, &internal->node_ptrs[idx + 1], (uint16_t)(depth - 1), FALSE, H5AC__NO_FLAGS_SET)))
1249 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1250 right_addr = internal->node_ptrs[idx + 1].addr;
1251
1252 /* More setup for accessing child node information */
1253 left_child = left_internal;
1254 middle_child = middle_internal;
1255 right_child = right_internal;
1256 left_nrec = &(left_internal->nrec);
1257 middle_nrec = &(middle_internal->nrec);
1258 right_nrec = &(right_internal->nrec);
1259 left_native = left_internal->int_native;
1260 middle_native = middle_internal->int_native;
1261 right_native = right_internal->int_native;
1262 left_node_ptrs = left_internal->node_ptrs;
1263 middle_node_ptrs = middle_internal->node_ptrs;
1264 right_node_ptrs = right_internal->node_ptrs;
1265 } /* end if */
1266 else {
1267 H5B2_leaf_t *left_leaf; /* Pointer to left leaf node */
1268 H5B2_leaf_t *middle_leaf; /* Pointer to middle leaf node */
1269 H5B2_leaf_t *right_leaf; /* Pointer to right leaf node */
1270
1271 /* Setup information for unlocking child nodes */
1272 child_class = H5AC_BT2_LEAF;
1273
1274 /* Lock B-tree child nodes */
1275 /* (Shadow left and middle nodes if doing SWMR writes) */
1276 if(NULL == (left_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx - 1], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1277 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1278 left_addr = internal->node_ptrs[idx - 1].addr;
1279 if(NULL == (middle_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx], hdr->swmr_write, H5AC__NO_FLAGS_SET)))
1280 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1281 middle_addr = internal->node_ptrs[idx].addr;
1282 if(NULL == (right_leaf = H5B2__protect_leaf(hdr, internal, &internal->node_ptrs[idx + 1], FALSE, H5AC__NO_FLAGS_SET)))
1283 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1284 right_addr = internal->node_ptrs[idx + 1].addr;
1285
1286 /* More setup for accessing child node information */
1287 left_child = left_leaf;
1288 middle_child = middle_leaf;
1289 right_child = right_leaf;
1290 left_nrec = &(left_leaf->nrec);
1291 middle_nrec = &(middle_leaf->nrec);
1292 right_nrec = &(right_leaf->nrec);
1293 left_native = left_leaf->leaf_native;
1294 middle_native = middle_leaf->leaf_native;
1295 right_native = right_leaf->leaf_native;
1296 } /* end else */
1297
1298 /* Redistribute records into left node */
1299 {
1300 unsigned total_nrec = (unsigned)(*left_nrec + *middle_nrec + *right_nrec + 2);
1301 unsigned middle_nrec_move = ((total_nrec - 1) / 2) - *left_nrec;
1302
1303 /* Set the base number of records moved from middle node */
1304 middle_moved_nrec = middle_nrec_move;
1305
1306 /* Copy record from parent node to proper location in left node */
1307 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec), H5B2_INT_NREC(internal, hdr, idx - 1), hdr->cls->nrec_size);
1308
1309 /* Copy records from middle node to left node */
1310 HDmemcpy(H5B2_NAT_NREC(left_native, hdr, *left_nrec + 1), H5B2_NAT_NREC(middle_native, hdr, 0), hdr->cls->nrec_size * (middle_nrec_move - 1));
1311
1312 /* Copy record from middle node to proper location in parent node */
1313 HDmemcpy(H5B2_INT_NREC(internal, hdr, idx - 1), H5B2_NAT_NREC(middle_native, hdr, (middle_nrec_move - 1)), hdr->cls->nrec_size);
1314
1315 /* Slide records in middle node down */
1316 HDmemmove(H5B2_NAT_NREC(middle_native, hdr, 0), H5B2_NAT_NREC(middle_native, hdr, middle_nrec_move), hdr->cls->nrec_size * (*middle_nrec - middle_nrec_move));
1317
1318 /* Move node pointers also if this is an internal node */
1319 if(depth > 1) {
1320 unsigned u; /* Local index variable */
1321
1322 /* Copy node pointers from middle node into left node */
1323 HDmemcpy(&(left_node_ptrs[*left_nrec + 1]), &(middle_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * middle_nrec_move);
1324
1325 /* Count the number of records being moved into the left node */
1326 for(u = 0; u < middle_nrec_move; u++)
1327 middle_moved_nrec += middle_node_ptrs[u].all_nrec;
1328
1329 /* Slide the node pointers in middle node down */
1330 HDmemmove(&(middle_node_ptrs[0]), &(middle_node_ptrs[middle_nrec_move]), sizeof(H5B2_node_ptr_t) * (size_t)((unsigned)(*middle_nrec + 1) - middle_nrec_move));
1331 } /* end if */
1332
1333 /* Update flush dependencies for grandchildren, if using SWMR */
1334 if(hdr->swmr_write && depth > 1)
1335 if(H5B2__update_child_flush_depends(hdr, depth, left_node_ptrs,
1336 (unsigned)(*left_nrec + 1), (unsigned)(*left_nrec + middle_nrec_move + 1), middle_child, left_child) < 0)
1337 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
1338
1339 /* Update # of records in left & middle nodes */
1340 *left_nrec = (uint16_t)(*left_nrec + middle_nrec_move);
1341 *middle_nrec = (uint16_t)(*middle_nrec - middle_nrec_move);
1342
1343 /* Mark nodes as dirty */
1344 left_child_flags |= H5AC__DIRTIED_FLAG;
1345 middle_child_flags |= H5AC__DIRTIED_FLAG;
1346 } /* end block */
1347
1348 /* Redistribute records into middle node */
1349 {
1350 /* Copy record from parent node to proper location in middle node */
1351 HDmemcpy(H5B2_NAT_NREC(middle_native, hdr, *middle_nrec), H5B2_INT_NREC(internal, hdr, idx), hdr->cls->nrec_size);
1352
1353 /* Copy records from right node to middle node */
1354 HDmemcpy(H5B2_NAT_NREC(middle_native, hdr, *middle_nrec + 1), H5B2_NAT_NREC(right_native, hdr, 0), hdr->cls->nrec_size * (*right_nrec));
1355
1356 /* Move node pointers also if this is an internal node */
1357 if(depth > 1)
1358 /* Copy node pointers from right node into middle node */
1359 HDmemcpy(&(middle_node_ptrs[*middle_nrec + 1]), &(right_node_ptrs[0]), sizeof(H5B2_node_ptr_t) * (size_t)(*right_nrec + 1));
1360
1361 /* Update flush dependencies for grandchildren, if using SWMR */
1362 if(hdr->swmr_write && depth > 1)
1363 if(H5B2__update_child_flush_depends(hdr, depth, middle_node_ptrs,
1364 (unsigned)(*middle_nrec + 1), (unsigned)(*middle_nrec + *right_nrec + 2), right_child, middle_child) < 0)
1365 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child nodes to new parent")
1366
1367 /* Update # of records in middle node */
1368 *middle_nrec = (uint16_t)(*middle_nrec + (*right_nrec + 1));
1369
1370 /* Mark nodes as dirty */
1371 middle_child_flags |= H5AC__DIRTIED_FLAG;
1372 right_child_flags |= H5AC__DELETED_FLAG;
1373 if(!(hdr->swmr_write))
1374 right_child_flags |= H5AC__DIRTIED_FLAG | H5AC__FREE_FILE_SPACE_FLAG;
1375 } /* end block */
1376
1377 /* Update # of records in child nodes */
1378 internal->node_ptrs[idx - 1].node_nrec = *left_nrec;
1379 internal->node_ptrs[idx].node_nrec = *middle_nrec;
1380
1381 /* Update total # of records in child B-trees */
1382 internal->node_ptrs[idx - 1].all_nrec += middle_moved_nrec;
1383 internal->node_ptrs[idx].all_nrec += (internal->node_ptrs[idx + 1].all_nrec + 1) - middle_moved_nrec;
1384
1385 /* Slide records in parent node down, to eliminate demoted record */
1386 if((idx + 1) < internal->nrec) {
1387 HDmemmove(H5B2_INT_NREC(internal, hdr, idx), H5B2_INT_NREC(internal, hdr, idx + 1), hdr->cls->nrec_size * (internal->nrec - (idx + 1)));
1388 HDmemmove(&(internal->node_ptrs[idx + 1]), &(internal->node_ptrs[idx + 2]), sizeof(H5B2_node_ptr_t) * (internal->nrec - (idx + 1)));
1389 } /* end if */
1390
1391 /* Update # of records in parent node */
1392 internal->nrec--;
1393
1394 /* Mark parent as dirty */
1395 *internal_flags_ptr |= H5AC__DIRTIED_FLAG;
1396
1397 /* Update grandparent info */
1398 curr_node_ptr->node_nrec--;
1399
1400 /* Mark grandparent as dirty, if given */
1401 if(parent_cache_info_flags_ptr)
1402 *parent_cache_info_flags_ptr |= H5AC__DIRTIED_FLAG;
1403
1404 #ifdef H5B2_DEBUG
1405 H5B2__assert_internal((hsize_t)0, hdr, internal);
1406 if(depth > 1) {
1407 H5B2__assert_internal2(internal->node_ptrs[idx - 1].all_nrec, hdr, (H5B2_internal_t *)left_child, (H5B2_internal_t *)middle_child);
1408 H5B2__assert_internal(internal->node_ptrs[idx].all_nrec, hdr, (H5B2_internal_t *)middle_child);
1409 } /* end if */
1410 else {
1411 H5B2__assert_leaf2(hdr, (H5B2_leaf_t *)left_child, (H5B2_leaf_t *)middle_child);
1412 H5B2__assert_leaf(hdr, (H5B2_leaf_t *)middle_child);
1413 } /* end else */
1414 #endif /* H5B2_DEBUG */
1415
1416 done:
1417 /* Unlock left & middle nodes (marked as dirty) */
1418 if(left_child && H5AC_unprotect(hdr->f, child_class, left_addr, left_child, left_child_flags) < 0)
1419 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1420 if(middle_child && H5AC_unprotect(hdr->f, child_class, middle_addr, middle_child, middle_child_flags) < 0)
1421 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1422
1423 /* Delete right node & remove from cache (marked as dirty) */
1424 if(right_child && H5AC_unprotect(hdr->f, child_class, right_addr, right_child, right_child_flags) < 0)
1425 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree child node")
1426
1427 FUNC_LEAVE_NOAPI(ret_value)
1428 } /* end H5B2__merge3() */
1429
1430
1431 /*-------------------------------------------------------------------------
1432 * Function: H5B2__insert
1433 *
1434 * Purpose: Adds a new record to the B-tree.
1435 *
1436 * Return: Non-negative on success/Negative on failure
1437 *
1438 * Programmer: Quincey Koziol
1439 * koziol@hdfgroup.org
1440 * Dec 23 2015
1441 *
1442 *-------------------------------------------------------------------------
1443 */
1444 herr_t
H5B2__insert(H5B2_hdr_t * hdr,void * udata)1445 H5B2__insert(H5B2_hdr_t *hdr, void *udata)
1446 {
1447 herr_t ret_value = SUCCEED; /* Return value */
1448
1449 FUNC_ENTER_PACKAGE
1450
1451 /* Check arguments. */
1452 HDassert(hdr);
1453 HDassert(udata);
1454
1455 /* Check if the root node is allocated yet */
1456 if(!H5F_addr_defined(hdr->root.addr)) {
1457 /* Create root node as leaf node in B-tree */
1458 if(H5B2__create_leaf(hdr, hdr, &(hdr->root)) < 0)
1459 HGOTO_ERROR(H5E_BTREE, H5E_CANTINIT, FAIL, "unable to create root node")
1460 } /* end if */
1461 /* Check if we need to split the root node (equiv. to a 1->2 node split) */
1462 else if(hdr->root.node_nrec == hdr->node_info[hdr->depth].split_nrec) {
1463 /* Split root node */
1464 if(H5B2__split_root(hdr) < 0)
1465 HGOTO_ERROR(H5E_BTREE, H5E_CANTSPLIT, FAIL, "unable to split root node")
1466 } /* end if */
1467
1468 /* Attempt to insert record into B-tree */
1469 if(hdr->depth > 0) {
1470 if(H5B2__insert_internal(hdr, hdr->depth, NULL, &hdr->root, H5B2_POS_ROOT, hdr, udata) < 0)
1471 HGOTO_ERROR(H5E_BTREE, H5E_CANTINSERT, FAIL, "unable to insert record into B-tree internal node")
1472 } /* end if */
1473 else {
1474 if(H5B2__insert_leaf(hdr, &hdr->root, H5B2_POS_ROOT, hdr, udata) < 0)
1475 HGOTO_ERROR(H5E_BTREE, H5E_CANTINSERT, FAIL, "unable to insert record into B-tree leaf node")
1476 } /* end else */
1477
1478 /* Mark B-tree header as dirty */
1479 if(H5B2__hdr_dirty(hdr) < 0)
1480 HGOTO_ERROR(H5E_BTREE, H5E_CANTMARKDIRTY, FAIL, "unable to mark B-tree header dirty")
1481
1482 done:
1483 FUNC_LEAVE_NOAPI(ret_value)
1484 } /* H5B2__insert() */
1485
1486
1487 /*-------------------------------------------------------------------------
1488 * Function: H5B2__iterate_node
1489 *
1490 * Purpose: Iterate over all the records from a B-tree node, in "in-order"
1491 * order, making a callback for each record.
1492 *
1493 * If the callback returns non-zero, the iteration breaks out
1494 * without finishing all the records.
1495 *
1496 * Return: Value from callback, non-negative on success, negative on error
1497 *
1498 * Programmer: Quincey Koziol
1499 * koziol@ncsa.uiuc.edu
1500 * Feb 11 2005
1501 *
1502 *-------------------------------------------------------------------------
1503 */
1504 herr_t
H5B2__iterate_node(H5B2_hdr_t * hdr,uint16_t depth,const H5B2_node_ptr_t * curr_node,void * parent,H5B2_operator_t op,void * op_data)1505 H5B2__iterate_node(H5B2_hdr_t *hdr, uint16_t depth, const H5B2_node_ptr_t *curr_node,
1506 void *parent, H5B2_operator_t op, void *op_data)
1507 {
1508 const H5AC_class_t *curr_node_class = NULL; /* Pointer to current node's class info */
1509 void *node = NULL; /* Pointers to current node */
1510 uint8_t *node_native; /* Pointers to node's native records */
1511 uint8_t *native = NULL; /* Pointers to copy of node's native records */
1512 H5B2_node_ptr_t *node_ptrs = NULL; /* Pointers to node's node pointers */
1513 hbool_t node_pinned = FALSE; /* Whether node is pinned */
1514 unsigned u; /* Local index */
1515 herr_t ret_value = H5_ITER_CONT; /* Iterator return value */
1516
1517 FUNC_ENTER_PACKAGE
1518
1519 /* Check arguments. */
1520 HDassert(hdr);
1521 HDassert(curr_node);
1522 HDassert(op);
1523
1524 /* Protect current node & set up variables */
1525 if(depth > 0) {
1526 H5B2_internal_t *internal; /* Pointer to internal node */
1527
1528 /* Lock the current B-tree node */
1529 if(NULL == (internal = H5B2__protect_internal(hdr, parent, (H5B2_node_ptr_t *)curr_node, depth, FALSE, H5AC__READ_ONLY_FLAG))) /* Casting away const OK -QAK */
1530 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1531
1532 /* Set up information about current node */
1533 curr_node_class = H5AC_BT2_INT;
1534 node = internal;
1535 node_native = internal->int_native;
1536
1537 /* Allocate space for the node pointers in memory */
1538 if(NULL == (node_ptrs = (H5B2_node_ptr_t *)H5FL_FAC_MALLOC(hdr->node_info[depth].node_ptr_fac)))
1539 HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for B-tree internal node pointers")
1540
1541 /* Copy the node pointers */
1542 HDmemcpy(node_ptrs, internal->node_ptrs, (sizeof(H5B2_node_ptr_t) * (size_t)(curr_node->node_nrec + 1)));
1543 } /* end if */
1544 else {
1545 H5B2_leaf_t *leaf; /* Pointer to leaf node */
1546
1547 /* Lock the current B-tree node */
1548 if(NULL == (leaf = H5B2__protect_leaf(hdr, parent, (H5B2_node_ptr_t *)curr_node, FALSE, H5AC__READ_ONLY_FLAG))) /* Casting away const OK -QAK */
1549 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1550
1551 /* Set up information about current node */
1552 curr_node_class = H5AC_BT2_LEAF;
1553 node = leaf;
1554 node_native = leaf->leaf_native;
1555 } /* end else */
1556
1557 /* Allocate space for the native keys in memory */
1558 if(NULL == (native = (uint8_t *)H5FL_FAC_MALLOC(hdr->node_info[depth].nat_rec_fac)))
1559 HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for B-tree internal native keys")
1560
1561 /* Copy the native keys */
1562 HDmemcpy(native, node_native, (hdr->cls->nrec_size * curr_node->node_nrec));
1563
1564 /* Unlock the node */
1565 if(H5AC_unprotect(hdr->f, curr_node_class, curr_node->addr, node, (unsigned)(hdr->swmr_write ? H5AC__PIN_ENTRY_FLAG : H5AC__NO_FLAGS_SET)) < 0)
1566 HGOTO_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node")
1567 if(hdr->swmr_write)
1568 node_pinned = TRUE;
1569 else
1570 node = NULL;
1571
1572 /* Iterate through records, in order */
1573 for(u = 0; u < curr_node->node_nrec && !ret_value; u++) {
1574 /* Descend into child node, if current node is an internal node */
1575 if(depth > 0) {
1576 if((ret_value = H5B2__iterate_node(hdr, (uint16_t)(depth - 1), &(node_ptrs[u]), node, op, op_data)) < 0)
1577 HERROR(H5E_BTREE, H5E_CANTLIST, "node iteration failed");
1578 } /* end if */
1579
1580 /* Make callback for current record */
1581 if(!ret_value) {
1582 if((ret_value = (op)(H5B2_NAT_NREC(native, hdr, u), op_data)) < 0)
1583 HERROR(H5E_BTREE, H5E_CANTLIST, "iterator function failed");
1584 } /* end if */
1585 } /* end for */
1586
1587 /* Descend into last child node, if current node is an internal node */
1588 if(!ret_value && depth > 0) {
1589 if((ret_value = H5B2__iterate_node(hdr, (uint16_t)(depth - 1), &(node_ptrs[u]), node, op, op_data)) < 0)
1590 HERROR(H5E_BTREE, H5E_CANTLIST, "node iteration failed");
1591 } /* end if */
1592
1593 done:
1594 /* Unpin the node if it was pinned */
1595 if(node_pinned && H5AC_unpin_entry(node) < 0)
1596 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPIN, FAIL, "can't unpin node")
1597
1598 /* Release the node pointers & native records, if they were copied */
1599 if(node_ptrs)
1600 node_ptrs = (H5B2_node_ptr_t *)H5FL_FAC_FREE(hdr->node_info[depth].node_ptr_fac, node_ptrs);
1601 if(native)
1602 native = (uint8_t *)H5FL_FAC_FREE(hdr->node_info[depth].nat_rec_fac, native);
1603
1604 FUNC_LEAVE_NOAPI(ret_value)
1605 } /* H5B2__iterate_node() */
1606
1607
1608 /*-------------------------------------------------------------------------
1609 * Function: H5B2__delete_node
1610 *
1611 * Purpose: Iterate over all the nodes in a B-tree node deleting them
1612 * after they no longer have any children
1613 *
1614 * Return: Value from callback, non-negative on success, negative on error
1615 *
1616 * Programmer: Quincey Koziol
1617 * koziol@ncsa.uiuc.edu
1618 * Mar 9 2005
1619 *
1620 *-------------------------------------------------------------------------
1621 */
1622 herr_t
H5B2__delete_node(H5B2_hdr_t * hdr,uint16_t depth,const H5B2_node_ptr_t * curr_node,void * parent,H5B2_remove_t op,void * op_data)1623 H5B2__delete_node(H5B2_hdr_t *hdr, uint16_t depth, const H5B2_node_ptr_t *curr_node,
1624 void *parent, H5B2_remove_t op, void *op_data)
1625 {
1626 const H5AC_class_t *curr_node_class = NULL; /* Pointer to current node's class info */
1627 void *node = NULL; /* Pointers to current node */
1628 uint8_t *native; /* Pointers to node's native records */
1629 herr_t ret_value = SUCCEED; /* Return value */
1630
1631 FUNC_ENTER_PACKAGE
1632
1633 /* Check arguments. */
1634 HDassert(hdr);
1635 HDassert(curr_node);
1636
1637 if(depth > 0) {
1638 H5B2_internal_t *internal; /* Pointer to internal node */
1639 unsigned u; /* Local index */
1640
1641 /* Lock the current B-tree node */
1642 if(NULL == (internal = H5B2__protect_internal(hdr, parent, (H5B2_node_ptr_t *)curr_node, depth, FALSE, H5AC__NO_FLAGS_SET))) /* Casting away const OK -QAK */
1643 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1644
1645 /* Set up information about current node */
1646 curr_node_class = H5AC_BT2_INT;
1647 node = internal;
1648 native = internal->int_native;
1649
1650 /* Descend into children */
1651 for(u = 0; u < internal->nrec + (unsigned)1; u++)
1652 if(H5B2__delete_node(hdr, (uint16_t)(depth - 1), &(internal->node_ptrs[u]), internal, op, op_data) < 0)
1653 HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "node descent failed")
1654 } /* end if */
1655 else {
1656 H5B2_leaf_t *leaf; /* Pointer to leaf node */
1657
1658 /* Lock the current B-tree node */
1659 if(NULL == (leaf = H5B2__protect_leaf(hdr, parent, (H5B2_node_ptr_t *)curr_node, FALSE, H5AC__NO_FLAGS_SET))) /* Casting away const OK -QAK */
1660 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1661
1662 /* Set up information about current node */
1663 curr_node_class = H5AC_BT2_LEAF;
1664 node = leaf;
1665 native = leaf->leaf_native;
1666 } /* end else */
1667
1668 /* If there's a callback defined, iterate over the records in this node */
1669 if(op) {
1670 unsigned u; /* Local index */
1671
1672 /* Iterate through records in this node */
1673 for(u = 0; u < curr_node->node_nrec; u++) {
1674 /* Make callback for each record */
1675 if((op)(H5B2_NAT_NREC(native, hdr, u), op_data) < 0)
1676 HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "iterator function failed")
1677 } /* end for */
1678 } /* end if */
1679
1680 done:
1681 /* Unlock & delete current node */
1682 if(node && H5AC_unprotect(hdr->f, curr_node_class, curr_node->addr, node, (unsigned)(H5AC__DELETED_FLAG | (hdr->swmr_write ? 0 : H5AC__FREE_FILE_SPACE_FLAG))) < 0)
1683 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node")
1684
1685 FUNC_LEAVE_NOAPI(ret_value)
1686 } /* H5B2__delete_node() */
1687
1688
1689 /*-------------------------------------------------------------------------
1690 * Function: H5B2__node_size
1691 *
1692 * Purpose: Iterate over all the records from a B-tree node, collecting
1693 * btree storage info.
1694 *
1695 * Return: non-negative on success, negative on error
1696 *
1697 * Programmer: Vailin Choi
1698 * July 12 2007
1699 *
1700 *-------------------------------------------------------------------------
1701 */
1702 herr_t
H5B2__node_size(H5B2_hdr_t * hdr,uint16_t depth,const H5B2_node_ptr_t * curr_node,void * parent,hsize_t * btree_size)1703 H5B2__node_size(H5B2_hdr_t *hdr, uint16_t depth, const H5B2_node_ptr_t *curr_node,
1704 void *parent, hsize_t *btree_size)
1705 {
1706 H5B2_internal_t *internal = NULL; /* Pointer to internal node */
1707 herr_t ret_value = SUCCEED; /* Iterator return value */
1708
1709 FUNC_ENTER_PACKAGE
1710
1711 /* Check arguments. */
1712 HDassert(hdr);
1713 HDassert(curr_node);
1714 HDassert(btree_size);
1715 HDassert(depth > 0);
1716
1717 /* Lock the current B-tree node */
1718 if(NULL == (internal = H5B2__protect_internal(hdr, parent, (H5B2_node_ptr_t *)curr_node, depth, FALSE, H5AC__READ_ONLY_FLAG))) /* Casting away const OK -QAK */
1719 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1720
1721 /* Recursively descend into child nodes, if we are above the "twig" level in the B-tree */
1722 if(depth > 1) {
1723 unsigned u; /* Local index */
1724
1725 /* Descend into children */
1726 for(u = 0; u < internal->nrec + (unsigned)1; u++)
1727 if(H5B2__node_size(hdr, (uint16_t)(depth - 1), &(internal->node_ptrs[u]), internal, btree_size) < 0)
1728 HGOTO_ERROR(H5E_BTREE, H5E_CANTLIST, FAIL, "node iteration failed")
1729 } /* end if */
1730 else /* depth is 1: count all the leaf nodes from this node */
1731 *btree_size += (hsize_t)(internal->nrec + 1) * hdr->node_size;
1732
1733 /* Count this node */
1734 *btree_size += hdr->node_size;
1735
1736 done:
1737 if(internal && H5AC_unprotect(hdr->f, H5AC_BT2_INT, curr_node->addr, internal, H5AC__NO_FLAGS_SET) < 0)
1738 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node")
1739
1740 FUNC_LEAVE_NOAPI(ret_value)
1741 } /* H5B2__node_size() */
1742
1743
1744 /*-------------------------------------------------------------------------
1745 * Function: H5B2__create_flush_depend
1746 *
1747 * Purpose: Create a flush dependency between two data structure components
1748 *
1749 * Return: SUCCEED/FAIL
1750 *
1751 * Programmer: Dana Robinson
1752 * Fall 2012
1753 *
1754 *-------------------------------------------------------------------------
1755 */
1756 herr_t
H5B2__create_flush_depend(H5AC_info_t * parent_entry,H5AC_info_t * child_entry)1757 H5B2__create_flush_depend(H5AC_info_t *parent_entry, H5AC_info_t *child_entry)
1758 {
1759 herr_t ret_value = SUCCEED; /* Return value */
1760
1761 FUNC_ENTER_NOAPI_NOINIT
1762
1763 /* Sanity check */
1764 HDassert(parent_entry);
1765 HDassert(child_entry);
1766
1767 /* Create a flush dependency between parent and child entry */
1768 if(H5AC_create_flush_dependency(parent_entry, child_entry) < 0)
1769 HGOTO_ERROR(H5E_BTREE, H5E_CANTDEPEND, FAIL, "unable to create flush dependency")
1770
1771 done:
1772 FUNC_LEAVE_NOAPI(ret_value)
1773 } /* end H5B2__create_flush_depend() */
1774
1775
1776 /*-------------------------------------------------------------------------
1777 * Function: H5B2__update_flush_depend
1778 *
1779 * Purpose: Update flush dependencies for children of a node
1780 *
1781 * Return: SUCCEED/FAIL
1782 *
1783 * Programmer: Quincey Koziol
1784 * koziol@lbl.gov
1785 * Dec 1 2016
1786 *
1787 *-------------------------------------------------------------------------
1788 */
1789 herr_t
H5B2__update_flush_depend(H5B2_hdr_t * hdr,unsigned depth,const H5B2_node_ptr_t * node_ptr,void * old_parent,void * new_parent)1790 H5B2__update_flush_depend(H5B2_hdr_t *hdr, unsigned depth,
1791 const H5B2_node_ptr_t *node_ptr, void *old_parent, void *new_parent)
1792 {
1793 const H5AC_class_t *child_class; /* Pointer to child node's class info */
1794 void *child = NULL; /* Pointer to child node */
1795 unsigned node_status = 0; /* Node's status in the metadata cache */
1796 herr_t ret_value = SUCCEED; /* Return value */
1797
1798 FUNC_ENTER_PACKAGE
1799
1800 /* Sanity checks */
1801 HDassert(hdr);
1802 HDassert(depth > 0);
1803 HDassert(node_ptr);
1804 HDassert(old_parent);
1805 HDassert(new_parent);
1806
1807 /* Check the node's entry status in the metadata cache */
1808 if(H5AC_get_entry_status(hdr->f, node_ptr->addr, &node_status) < 0)
1809 HGOTO_ERROR(H5E_BTREE, H5E_CANTGET, FAIL, "unable to check status of B-tree node")
1810
1811 /* If the node is in the cache, check for retargeting its parent */
1812 if(node_status & H5AC_ES__IN_CACHE) {
1813 void **parent_ptr; /* Pointer to child node's parent */
1814 hbool_t update_deps = FALSE; /* Whether to update flush dependencies */
1815
1816 /* Get child node pointer */
1817 if(depth > 1) {
1818 H5B2_internal_t *child_int;
1819
1820 /* Protect child */
1821 if(NULL == (child_int = H5B2__protect_internal(hdr, new_parent, (H5B2_node_ptr_t *)node_ptr, (uint16_t)(depth - 1), FALSE, H5AC__NO_FLAGS_SET))) /* Casting away const OK -QAK */
1822 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree internal node")
1823 child_class = H5AC_BT2_INT;
1824 child = child_int;
1825
1826 if(child_int->parent == old_parent) {
1827 parent_ptr = &child_int->parent;
1828 update_deps = TRUE;
1829 } /* end if */
1830 else
1831 HDassert(child_int->parent == new_parent);
1832 } /* end if */
1833 else {
1834 H5B2_leaf_t *child_leaf;
1835
1836 /* Protect child */
1837 if(NULL == (child_leaf = H5B2__protect_leaf(hdr, new_parent, (H5B2_node_ptr_t *)node_ptr, FALSE, H5AC__NO_FLAGS_SET))) /* Casting away const OK -QAK */
1838 HGOTO_ERROR(H5E_BTREE, H5E_CANTPROTECT, FAIL, "unable to protect B-tree leaf node")
1839 child_class = H5AC_BT2_LEAF;
1840 child = child_leaf;
1841
1842 if(child_leaf->parent == old_parent) {
1843 parent_ptr = &child_leaf->parent;
1844 update_deps = TRUE;
1845 } /* end if */
1846 else
1847 HDassert(child_leaf->parent == new_parent);
1848 } /* end else */
1849
1850 /* Update flush dependencies if necessary */
1851 if(update_deps) {
1852 /* Sanity check */
1853 HDassert(parent_ptr);
1854
1855 /* Switch the flush dependency for the node */
1856 if(H5B2__destroy_flush_depend((H5AC_info_t *)old_parent, (H5AC_info_t *)child) < 0)
1857 HGOTO_ERROR(H5E_BTREE, H5E_CANTUNDEPEND, FAIL, "unable to destroy flush dependency")
1858 *parent_ptr = new_parent;
1859 if(H5B2__create_flush_depend((H5AC_info_t *)new_parent, (H5AC_info_t *)child) < 0)
1860 HGOTO_ERROR(H5E_BTREE, H5E_CANTDEPEND, FAIL, "unable to create flush dependency")
1861 } /* end if */
1862 } /* end if */
1863
1864 done:
1865 /* Unprotect the child */
1866 if(child)
1867 if(H5AC_unprotect(hdr->f, child_class, node_ptr->addr, child, H5AC__NO_FLAGS_SET) < 0)
1868 HDONE_ERROR(H5E_BTREE, H5E_CANTUNPROTECT, FAIL, "unable to release B-tree node")
1869
1870 FUNC_LEAVE_NOAPI(ret_value)
1871 } /* end H5B2__update_flush_depend() */
1872
1873
1874 /*-------------------------------------------------------------------------
1875 * Function: H5B2__update_child_flush_depends
1876 *
1877 * Purpose: Update flush dependencies for children of a node
1878 *
1879 * Return: SUCCEED/FAIL
1880 *
1881 * Programmer: Quincey Koziol
1882 * koziol@lbl.gov
1883 * Dec 1 2016
1884 *
1885 *-------------------------------------------------------------------------
1886 */
1887 static herr_t
H5B2__update_child_flush_depends(H5B2_hdr_t * hdr,unsigned depth,const H5B2_node_ptr_t * node_ptrs,unsigned start_idx,unsigned end_idx,void * old_parent,void * new_parent)1888 H5B2__update_child_flush_depends(H5B2_hdr_t *hdr, unsigned depth,
1889 const H5B2_node_ptr_t *node_ptrs, unsigned start_idx, unsigned end_idx,
1890 void *old_parent, void *new_parent)
1891 {
1892 unsigned u; /* Local index variable */
1893 herr_t ret_value = SUCCEED; /* Return value */
1894
1895 FUNC_ENTER_STATIC
1896
1897 /* Sanity checks */
1898 HDassert(hdr);
1899 HDassert(depth > 1);
1900 HDassert(node_ptrs);
1901 HDassert(start_idx <= end_idx);
1902 HDassert(old_parent);
1903 HDassert(new_parent);
1904
1905 /* Loop over children */
1906 for(u = start_idx; u < end_idx; u++)
1907 /* Update parent for children */
1908 if(H5B2__update_flush_depend(hdr, depth - 1, &node_ptrs[u], old_parent, new_parent) < 0)
1909 HGOTO_ERROR(H5E_BTREE, H5E_CANTUPDATE, FAIL, "unable to update child node to new parent")
1910
1911 done:
1912 FUNC_LEAVE_NOAPI(ret_value)
1913 } /* end H5B2__update_child_flush_depends() */
1914
1915
1916 /*-------------------------------------------------------------------------
1917 * Function: H5B2__destroy_flush_depend
1918 *
1919 * Purpose: Destroy a flush dependency between two data structure components
1920 *
1921 * Return: SUCCEED/FAIL
1922 *
1923 * Programmer: Dana Robinson
1924 * Fall 2012
1925 *
1926 *-------------------------------------------------------------------------
1927 */
1928 herr_t
H5B2__destroy_flush_depend(H5AC_info_t * parent_entry,H5AC_info_t * child_entry)1929 H5B2__destroy_flush_depend(H5AC_info_t *parent_entry, H5AC_info_t *child_entry)
1930 {
1931 herr_t ret_value = SUCCEED; /* Return value */
1932
1933 FUNC_ENTER_NOAPI_NOINIT
1934
1935 /* Sanity check */
1936 HDassert(parent_entry);
1937 HDassert(child_entry);
1938
1939 /* Destroy a flush dependency between parent and child entry */
1940 if(H5AC_destroy_flush_dependency(parent_entry, child_entry) < 0)
1941 HGOTO_ERROR(H5E_BTREE, H5E_CANTUNDEPEND, FAIL, "unable to destroy flush dependency")
1942
1943 done:
1944 FUNC_LEAVE_NOAPI(ret_value)
1945 } /* end H5B2__destroy_flush_depend() */
1946
1947