xref: /dragonfly/contrib/gdb-7/gdb/common/vec.h (revision d8082429)
1 /* Vector API for GDB.
2    Copyright (C) 2004-2013 Free Software Foundation, Inc.
3    Contributed by Nathan Sidwell <nathan@codesourcery.com>
4 
5    This file is part of GDB.
6 
7    This program is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11 
12    This program is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16 
17    You should have received a copy of the GNU General Public License
18    along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
19 
20 #if !defined (GDB_VEC_H)
21 #define GDB_VEC_H
22 
23 #include <stddef.h>
24 
25 #include "gdb_string.h"
26 #include "gdb_assert.h"
27 
28 /* The macros here implement a set of templated vector types and
29    associated interfaces.  These templates are implemented with
30    macros, as we're not in C++ land.  The interface functions are
31    typesafe and use static inline functions, sometimes backed by
32    out-of-line generic functions.
33 
34    Because of the different behavior of structure objects, scalar
35    objects and of pointers, there are three flavors, one for each of
36    these variants.  Both the structure object and pointer variants
37    pass pointers to objects around -- in the former case the pointers
38    are stored into the vector and in the latter case the pointers are
39    dereferenced and the objects copied into the vector.  The scalar
40    object variant is suitable for int-like objects, and the vector
41    elements are returned by value.
42 
43    There are both 'index' and 'iterate' accessors.  The iterator
44    returns a boolean iteration condition and updates the iteration
45    variable passed by reference.  Because the iterator will be
46    inlined, the address-of can be optimized away.
47 
48    The vectors are implemented using the trailing array idiom, thus
49    they are not resizeable without changing the address of the vector
50    object itself.  This means you cannot have variables or fields of
51    vector type -- always use a pointer to a vector.  The one exception
52    is the final field of a structure, which could be a vector type.
53    You will have to use the embedded_size & embedded_init calls to
54    create such objects, and they will probably not be resizeable (so
55    don't use the 'safe' allocation variants).  The trailing array
56    idiom is used (rather than a pointer to an array of data), because,
57    if we allow NULL to also represent an empty vector, empty vectors
58    occupy minimal space in the structure containing them.
59 
60    Each operation that increases the number of active elements is
61    available in 'quick' and 'safe' variants.  The former presumes that
62    there is sufficient allocated space for the operation to succeed
63    (it dies if there is not).  The latter will reallocate the
64    vector, if needed.  Reallocation causes an exponential increase in
65    vector size.  If you know you will be adding N elements, it would
66    be more efficient to use the reserve operation before adding the
67    elements with the 'quick' operation.  This will ensure there are at
68    least as many elements as you ask for, it will exponentially
69    increase if there are too few spare slots.  If you want to reserve a
70    specific number of slots, but do not want the exponential increase
71    (for instance, you know this is the last allocation), use a
72    negative number for reservation.  You can also create a vector of a
73    specific size from the get go.
74 
75    You should prefer the push and pop operations, as they append and
76    remove from the end of the vector.  If you need to remove several
77    items in one go, use the truncate operation.  The insert and remove
78    operations allow you to change elements in the middle of the
79    vector.  There are two remove operations, one which preserves the
80    element ordering 'ordered_remove', and one which does not
81    'unordered_remove'.  The latter function copies the end element
82    into the removed slot, rather than invoke a memmove operation.  The
83    'lower_bound' function will determine where to place an item in the
84    array using insert that will maintain sorted order.
85 
86    If you need to directly manipulate a vector, then the 'address'
87    accessor will return the address of the start of the vector.  Also
88    the 'space' predicate will tell you whether there is spare capacity
89    in the vector.  You will not normally need to use these two functions.
90 
91    Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro.
92    Variables of vector type are declared using a VEC(TYPEDEF) macro.
93    The characters O, P and I indicate whether TYPEDEF is a pointer
94    (P), object (O) or integral (I) type.  Be careful to pick the
95    correct one, as you'll get an awkward and inefficient API if you
96    use the wrong one.  There is a check, which results in a
97    compile-time warning, for the P and I versions, but there is no
98    check for the O versions, as that is not possible in plain C.
99 
100    An example of their use would be,
101 
102    DEF_VEC_P(tree);   // non-managed tree vector.
103 
104    struct my_struct {
105      VEC(tree) *v;      // A (pointer to) a vector of tree pointers.
106    };
107 
108    struct my_struct *s;
109 
110    if (VEC_length(tree, s->v)) { we have some contents }
111    VEC_safe_push(tree, s->v, decl); // append some decl onto the end
112    for (ix = 0; VEC_iterate(tree, s->v, ix, elt); ix++)
113      { do something with elt }
114 
115 */
116 
117 /* Macros to invoke API calls.  A single macro works for both pointer
118    and object vectors, but the argument and return types might well be
119    different.  In each macro, T is the typedef of the vector elements.
120    Some of these macros pass the vector, V, by reference (by taking
121    its address), this is noted in the descriptions.  */
122 
123 /* Length of vector
124    unsigned VEC_T_length(const VEC(T) *v);
125 
126    Return the number of active elements in V.  V can be NULL, in which
127    case zero is returned.  */
128 
129 #define VEC_length(T,V)	(VEC_OP(T,length)(V))
130 
131 
132 /* Check if vector is empty
133    int VEC_T_empty(const VEC(T) *v);
134 
135    Return nonzero if V is an empty vector (or V is NULL), zero otherwise.  */
136 
137 #define VEC_empty(T,V)	(VEC_length (T,V) == 0)
138 
139 
140 /* Get the final element of the vector.
141    T VEC_T_last(VEC(T) *v); // Integer
142    T VEC_T_last(VEC(T) *v); // Pointer
143    T *VEC_T_last(VEC(T) *v); // Object
144 
145    Return the final element.  V must not be empty.  */
146 
147 #define VEC_last(T,V)	(VEC_OP(T,last)(V VEC_ASSERT_INFO))
148 
149 /* Index into vector
150    T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
151    T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
152    T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
153 
154    Return the IX'th element.  IX must be in the domain of V.  */
155 
156 #define VEC_index(T,V,I) (VEC_OP(T,index)(V,I VEC_ASSERT_INFO))
157 
158 /* Iterate over vector
159    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
160    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
161    int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
162 
163    Return iteration condition and update PTR to point to the IX'th
164    element.  At the end of iteration, sets PTR to NULL.  Use this to
165    iterate over the elements of a vector as follows,
166 
167      for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
168        continue;  */
169 
170 #define VEC_iterate(T,V,I,P)	(VEC_OP(T,iterate)(V,I,&(P)))
171 
172 /* Allocate new vector.
173    VEC(T,A) *VEC_T_alloc(int reserve);
174 
175    Allocate a new vector with space for RESERVE objects.  If RESERVE
176    is zero, NO vector is created.  */
177 
178 #define VEC_alloc(T,N)	(VEC_OP(T,alloc)(N))
179 
180 /* Free a vector.
181    void VEC_T_free(VEC(T,A) *&);
182 
183    Free a vector and set it to NULL.  */
184 
185 #define VEC_free(T,V)	(VEC_OP(T,free)(&V))
186 
187 /* A cleanup function for a vector.
188    void VEC_T_cleanup(void *);
189 
190    Clean up a vector.  */
191 
192 #define VEC_cleanup(T)	(VEC_OP(T,cleanup))
193 
194 /* Use these to determine the required size and initialization of a
195    vector embedded within another structure (as the final member).
196 
197    size_t VEC_T_embedded_size(int reserve);
198    void VEC_T_embedded_init(VEC(T) *v, int reserve);
199 
200    These allow the caller to perform the memory allocation.  */
201 
/* NOTE(review): VEC_BASE, used by VEC_embedded_init below, is not
   defined anywhere in this header -- presumably supplied by another
   header; confirm before relying on VEC_embedded_init.  */
202 #define VEC_embedded_size(T,N)	 (VEC_OP(T,embedded_size)(N))
203 #define VEC_embedded_init(T,O,N) (VEC_OP(T,embedded_init)(VEC_BASE(O),N))
204 
205 /* Copy a vector.
206    VEC(T,A) *VEC_T_copy(VEC(T) *);
207 
208    Copy the live elements of a vector into a new vector.  The new and
209    old vectors need not be allocated by the same mechanism.  */
210 
211 #define VEC_copy(T,V) (VEC_OP(T,copy)(V))
212 
213 /* Merge two vectors.
214    VEC(T,A) *VEC_T_merge(VEC(T) *, VEC(T) *);
215 
216    Copy the live elements of both vectors into a new vector.  The new
217    and old vectors need not be allocated by the same mechanism.
218    Returns NULL when both inputs are NULL or empty.  */
#define VEC_merge(T,V1,V2) (VEC_OP(T,merge)(V1, V2))
219 
220 /* Determine if a vector has additional capacity.
221 
222    int VEC_T_space (VEC(T) *v,int reserve)
223 
224    If V has space for RESERVE additional entries, return nonzero.  You
225    usually only need to use this if you are doing your own vector
226    reallocation, for instance on an embedded vector.  This returns
227    nonzero in exactly the same circumstances that VEC_T_reserve
228    will.  */
229 
230 #define VEC_space(T,V,R) (VEC_OP(T,space)(V,R VEC_ASSERT_INFO))
231 
232 /* Reserve space.
233    int VEC_T_reserve(VEC(T,A) *&v, int reserve);
234 
235    Ensure that V has at least abs(RESERVE) slots available.  The
236    signedness of RESERVE determines the reallocation behavior.  A
237    negative value will not create additional headroom beyond that
238    requested.  A positive value will create additional headroom.  Note
239    this can cause V to be reallocated.  Returns nonzero iff
240    reallocation actually occurred.  */
241 
242 #define VEC_reserve(T,V,R) (VEC_OP(T,reserve)(&(V),R VEC_ASSERT_INFO))
243 
244 /* Push object with no reallocation
245    T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
246    T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
247    T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
248 
249    Push a new element onto the end, returns a pointer to the slot
250    filled in.  For object vectors, the new value can be NULL, in which
251    case NO initialization is performed.  There must
252    be sufficient space in the vector.  */
253 
254 #define VEC_quick_push(T,V,O) (VEC_OP(T,quick_push)(V,O VEC_ASSERT_INFO))
255 
256 /* Push object with reallocation
257    T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Integer
258    T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Pointer
259    T *VEC_T_safe_push (VEC(T,A) *&v, T *obj); // Object
260 
261    Push a new element onto the end, returns a pointer to the slot
262    filled in.  For object vectors, the new value can be NULL, in which
263    case NO initialization is performed.  Reallocates V, if needed.  */
264 
265 #define VEC_safe_push(T,V,O) (VEC_OP(T,safe_push)(&(V),O VEC_ASSERT_INFO))
266 
267 /* Pop element off end
268    T VEC_T_pop (VEC(T) *v);		// Integer
269    T VEC_T_pop (VEC(T) *v);		// Pointer
270    void VEC_T_pop (VEC(T) *v);		// Object
271 
272    Pop the last element off the end.  Returns the element popped, for
273    pointer vectors.  */
274 
275 #define VEC_pop(T,V)	(VEC_OP(T,pop)(V VEC_ASSERT_INFO))
276 
277 /* Truncate to specific length
278    void VEC_T_truncate (VEC(T) *v, unsigned len);
279 
280    Set the length as specified.  The new length must be less than or
281    equal to the current length.  This is an O(1) operation.  */
282 
283 #define VEC_truncate(T,V,I)		\
284 	(VEC_OP(T,truncate)(V,I VEC_ASSERT_INFO))
285 
286 /* Grow to a specific length.
287    void VEC_T_safe_grow (VEC(T,A) *&v, int len);
288 
289    Grow the vector to a specific length.  LEN must be greater than
290    or equal to the current length.  The new elements are
291    uninitialized.  */
292 
293 #define VEC_safe_grow(T,V,I)		\
294 	(VEC_OP(T,safe_grow)(&(V),I VEC_ASSERT_INFO))
295 
296 /* Replace element
297    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
298    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
299    T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val);  // Object
300 
301    Replace the IXth element of V with a new value, VAL.  For pointer
302    vectors returns the original value.  For object vectors returns a
303    pointer to the new value.  For object vectors the new value can be
304    NULL, in which case no overwriting of the slot is actually
305    performed.  */
306 
307 #define VEC_replace(T,V,I,O) (VEC_OP(T,replace)(V,I,O VEC_ASSERT_INFO))
308 
309 /* Insert object with no reallocation
310    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
311    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
312    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
313 
314    Insert an element, VAL, at the IXth position of V.  Return a pointer
315    to the slot created.  For vectors of object, the new value can be
316    NULL, in which case no initialization of the inserted slot takes
317    place.  There must be sufficient space.  */
318 
319 #define VEC_quick_insert(T,V,I,O) \
320 	(VEC_OP(T,quick_insert)(V,I,O VEC_ASSERT_INFO))
321 
322 /* Insert object with reallocation
323    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
324    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
325    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
326 
327    Insert an element, VAL, at the IXth position of V.  Return a pointer
328    to the slot created.  For vectors of object, the new value can be
329    NULL, in which case no initialization of the inserted slot takes
330    place.  Reallocate V, if necessary.  */
331 
332 #define VEC_safe_insert(T,V,I,O)	\
333 	(VEC_OP(T,safe_insert)(&(V),I,O VEC_ASSERT_INFO))
334 
335 /* Remove element retaining order
336    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
337    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
338    void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
339 
340    Remove an element from the IXth position of V.  Ordering of
341    remaining elements is preserved.  For pointer vectors returns the
342    removed object.  This is an O(N) operation due to a memmove.  */
343 
344 #define VEC_ordered_remove(T,V,I)	\
345 	(VEC_OP(T,ordered_remove)(V,I VEC_ASSERT_INFO))
346 
347 /* Remove element destroying order
348    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
349    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
350    void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
351 
352    Remove an element from the IXth position of V.  Ordering of
353    remaining elements is destroyed.  For pointer vectors returns the
354    removed object.  This is an O(1) operation.  */
355 
356 #define VEC_unordered_remove(T,V,I)	\
357 	(VEC_OP(T,unordered_remove)(V,I VEC_ASSERT_INFO))
358 
359 /* Remove a block of elements
360    void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
361 
362    Remove LEN elements starting at the IXth.  Ordering is retained.
363    This is an O(N) operation due to memmove.  */
364 
365 #define VEC_block_remove(T,V,I,L)	\
366 	(VEC_OP(T,block_remove)(V,I,L VEC_ASSERT_INFO))
367 
368 /* Get the address of the array of elements
369    T *VEC_T_address (VEC(T) *v)
370 
371    If you need to directly manipulate the array (for instance, you
372    want to feed it to qsort), use this accessor.  */
373 
374 #define VEC_address(T,V)		(VEC_OP(T,address)(V))
375 
376 /* Find the first index in the vector not less than the object.
377    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
378                                int (*lessthan) (const T, const T)); // Integer
379    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
380                                int (*lessthan) (const T, const T)); // Pointer
381    unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
382                                int (*lessthan) (const T*, const T*)); // Object
383 
384    Find the first position in which VAL could be inserted without
385    changing the ordering of V.  LESSTHAN is a function that returns
386    true if the first argument is strictly less than the second.  */
387 
388 #define VEC_lower_bound(T,V,O,LT)    \
389        (VEC_OP(T,lower_bound)(V,O,LT VEC_ASSERT_INFO))
390 
391 /* Reallocate an array of elements with prefix.  */
392 extern void *vec_p_reserve (void *, int);
393 extern void *vec_o_reserve (void *, int, size_t, size_t);
394 #define vec_free_(V) xfree (V)
395 
/* Assertion plumbing.  VEC_ASSERT_INFO appends the caller's
   __FILE__/__LINE__ to an API invocation; VEC_ASSERT_DECL declares the
   matching trailing parameters, and VEC_ASSERT_PASS forwards them, so
   vec_assert reports the *caller's* location via gdb_assert_fail on
   failure.  */
396 #define VEC_ASSERT_INFO ,__FILE__,__LINE__
397 #define VEC_ASSERT_DECL ,const char *file_,unsigned line_
398 #define VEC_ASSERT_PASS ,file_,line_
399 #define vec_assert(expr, op) \
400   ((void)((expr) ? 0 : (gdb_assert_fail (op, file_, line_, \
401 					 ASSERT_FUNCTION), 0)))
402 
/* Name mangling: VEC(foo) names the type VEC_foo; VEC_OP(foo,push)
   names the function VEC_foo_push.  */
403 #define VEC(T) VEC_##T
404 #define VEC_OP(T,OP) VEC_##T##_##OP
405 
/* Layout shared by every VEC(T): NUM live elements, ALLOC allocated
   slots, then the elements themselves as a trailing array (see the
   trailing-array discussion in the file comment above).  */
406 #define VEC_T(T)							  \
407 typedef struct VEC(T)							  \
408 {									  \
409   unsigned num;								  \
410   unsigned alloc;							  \
411   T vec[1];								  \
412 } VEC(T)
413 
414 /* Vector of integer-like object.  The helper function below is a
   compile-time type check: ~(T)0 is only valid when T is an integral
   type, so instantiating DEF_VEC_I with a non-integer T draws a
   diagnostic.  The trailing 'struct vec_swallow_trailing_semi'
   declaration lets callers write a semicolon after DEF_VEC_I(T).  */
415 #define DEF_VEC_I(T)							  \
416 static inline void VEC_OP (T,must_be_integral_type) (void)		  \
417 {									  \
418   (void)~(T)0;								  \
419 }									  \
420 									  \
421 VEC_T(T);								  \
422 DEF_VEC_FUNC_P(T)							  \
423 DEF_VEC_ALLOC_FUNC_I(T)							  \
424 struct vec_swallow_trailing_semi
425 
426 /* Vector of pointer to object.  The helper function below is a
   compile-time type check: comparing (T)1 with (void *)1 only compiles
   cleanly when T is a pointer type (per the file comment, a wrong T
   yields a warning rather than a hard error).  */
427 #define DEF_VEC_P(T)							  \
428 static inline void VEC_OP (T,must_be_pointer_type) (void)		  \
429 {									  \
430   (void)((T)1 == (void *)1);						  \
431 }									  \
432 									  \
433 VEC_T(T);								  \
434 DEF_VEC_FUNC_P(T)							  \
435 DEF_VEC_ALLOC_FUNC_P(T)							  \
436 struct vec_swallow_trailing_semi
437 
438 /* Vector of object.  No compile-time type check is possible here (see
   the file comment), so take care to use DEF_VEC_O only with genuine
   object (struct) types.  */
439 #define DEF_VEC_O(T)							  \
440 VEC_T(T);								  \
441 DEF_VEC_FUNC_O(T)							  \
442 DEF_VEC_ALLOC_FUNC_O(T)							  \
443 struct vec_swallow_trailing_semi
444 
/* Allocation suite for integer-like vectors: alloc, copy, merge, free,
   cleanup, reserve, safe_grow, safe_push, safe_insert.  Elements are
   stored by value in the trailing array, so the object allocator
   vec_o_reserve (which takes the array offset and element size) is
   used.  A negative count passed to vec_o_reserve requests exact-size
   allocation (no headroom).  */
445 #define DEF_VEC_ALLOC_FUNC_I(T)						  \
446 static inline VEC(T) *VEC_OP (T,alloc)					  \
447      (int alloc_)							  \
448 {									  \
449   /* We must request exact size allocation, hence the negation.  */	  \
450   return (VEC(T) *) vec_o_reserve (NULL, -alloc_,			  \
451                                    offsetof (VEC(T),vec), sizeof (T));	  \
452 }									  \
453 									  \
454 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)			  \
455 {									  \
456   size_t len_ = vec_ ? vec_->num : 0;					  \
457   VEC (T) *new_vec_ = NULL;						  \
458 									  \
459   if (len_)								  \
460     {									  \
461       /* We must request exact size allocation, hence the negation.  */	  \
462       new_vec_ = (VEC (T) *)						  \
463 	vec_o_reserve (NULL, -len_, offsetof (VEC(T),vec), sizeof (T));	  \
464 									  \
465       new_vec_->num = len_;						  \
466       memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);		  \
467     }									  \
468   return new_vec_;							  \
469 }									  \
470 									  \
471 static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)	  \
472 {									  \
473   if (vec1_ && vec2_)							  \
474     {									  \
475       size_t len_ = vec1_->num + vec2_->num;				  \
476       VEC (T) *new_vec_ = NULL;						  \
477 									  \
478       /* We must request exact size allocation, hence the negation.  */	  \
479       new_vec_ = (VEC (T) *)						  \
480 	vec_o_reserve (NULL, -len_, offsetof (VEC(T),vec), sizeof (T));	  \
481 									  \
482       new_vec_->num = len_;						  \
483       memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);	  \
484       memcpy (new_vec_->vec + vec1_->num, vec2_->vec,			  \
485 	      sizeof (T) * vec2_->num);					  \
486 									  \
487       return new_vec_;							  \
488     }									  \
489   else									  \
490     return VEC_copy (T, vec1_ ? vec1_ : vec2_);				  \
491 }									  \
492 									  \
493 static inline void VEC_OP (T,free)					  \
494      (VEC(T) **vec_)							  \
495 {									  \
496   if (*vec_)								  \
497     vec_free_ (*vec_);							  \
498   *vec_ = NULL;								  \
499 }									  \
500 									  \
501 static inline void VEC_OP (T,cleanup)					  \
502      (void *arg_)							  \
503 {									  \
504   VEC(T) **vec_ = arg_;							  \
505   if (*vec_)								  \
506     vec_free_ (*vec_);							  \
507   *vec_ = NULL;								  \
508 }									  \
509 									  \
510 static inline int VEC_OP (T,reserve)					  \
511      (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)			  \
512 {									  \
513   int extend = !VEC_OP (T,space)					  \
514 	(*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS);		  \
515 									  \
516   if (extend)								  \
517     *vec_ = (VEC(T) *) vec_o_reserve (*vec_, alloc_,			  \
518 				      offsetof (VEC(T),vec), sizeof (T)); \
519 									  \
520   return extend;							  \
521 }									  \
522 									  \
523 static inline void VEC_OP (T,safe_grow)					  \
524      (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)				  \
525 {									  \
526   vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
527 	"safe_grow");							  \
528   VEC_OP (T,reserve) (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_	  \
529 			VEC_ASSERT_PASS);				  \
530   (*vec_)->num = size_;							  \
531 }									  \
532 									  \
533 static inline T *VEC_OP (T,safe_push)					  \
534      (VEC(T) **vec_, const T obj_ VEC_ASSERT_DECL)			  \
535 {									  \
536   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);				  \
537 									  \
538   return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);		  \
539 }									  \
540 									  \
541 static inline T *VEC_OP (T,safe_insert)					  \
542      (VEC(T) **vec_, unsigned ix_, const T obj_ VEC_ASSERT_DECL)	  \
543 {									  \
544   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);				  \
545 									  \
546   return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);	  \
547 }
548 
/* Accessor/mutator suite shared by the integer (DEF_VEC_I) and pointer
   (DEF_VEC_P) flavors -- both pass DEF_VEC_FUNC_P; elements are passed
   and returned by value.  */
549 #define DEF_VEC_FUNC_P(T)						  \
550 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_)		  \
551 {									  \
552   return vec_ ? vec_->num : 0;						  \
553 }									  \
554 									  \
555 static inline T VEC_OP (T,last)						  \
556 	(const VEC(T) *vec_ VEC_ASSERT_DECL)				  \
557 {									  \
558   vec_assert (vec_ && vec_->num, "last");				  \
559 									  \
560   return vec_->vec[vec_->num - 1];					  \
561 }									  \
562 									  \
563 static inline T VEC_OP (T,index)					  \
564      (const VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)			  \
565 {									  \
566   vec_assert (vec_ && ix_ < vec_->num, "index");			  \
567 									  \
568   return vec_->vec[ix_];						  \
569 }									  \
570 									  \
571 static inline int VEC_OP (T,iterate)					  \
572      (const VEC(T) *vec_, unsigned ix_, T *ptr)				  \
573 {									  \
574   if (vec_ && ix_ < vec_->num)						  \
575     {									  \
576       *ptr = vec_->vec[ix_];						  \
577       return 1;								  \
578     }									  \
579   else									  \
580     {									  \
581       *ptr = 0;								  \
582       return 0;								  \
583     }									  \
584 }									  \
585 									  \
586 static inline size_t VEC_OP (T,embedded_size)				  \
587      (int alloc_)							  \
588 {									  \
589   return offsetof (VEC(T),vec) + alloc_ * sizeof(T);			  \
590 }									  \
591 									  \
592 static inline void VEC_OP (T,embedded_init)				  \
593      (VEC(T) *vec_, int alloc_)						  \
594 {									  \
595   vec_->num = 0;							  \
596   vec_->alloc = alloc_;							  \
597 }									  \
598 									  \
599 static inline int VEC_OP (T,space)					  \
600      (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL)				  \
601 {									  \
602   vec_assert (alloc_ >= 0, "space");					  \
603   return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;	  \
604 }									  \
605 									  \
606 static inline T *VEC_OP (T,quick_push)					  \
607      (VEC(T) *vec_, T obj_ VEC_ASSERT_DECL)				  \
608 {									  \
609   T *slot_;								  \
610 									  \
611   vec_assert (vec_->num < vec_->alloc, "quick_push");			  \
612   slot_ = &vec_->vec[vec_->num++];					  \
613   *slot_ = obj_;							  \
614 									  \
615   return slot_;								  \
616 }									  \
617 									  \
618 static inline T VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL)		  \
619 {									  \
620   T obj_;								  \
621 									  \
622   vec_assert (vec_->num, "pop");					  \
623   obj_ = vec_->vec[--vec_->num];					  \
624 									  \
625   return obj_;								  \
626 }									  \
627 									  \
628 static inline void VEC_OP (T,truncate)					  \
629      (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL)			  \
630 {									  \
631   vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate");		  \
632   if (vec_)								  \
633     vec_->num = size_;							  \
634 }									  \
635 									  \
636 static inline T VEC_OP (T,replace)					  \
637      (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)		  \
638 {									  \
639   T old_obj_;								  \
640 									  \
641   vec_assert (ix_ < vec_->num, "replace");				  \
642   old_obj_ = vec_->vec[ix_];						  \
643   vec_->vec[ix_] = obj_;						  \
644 									  \
645   return old_obj_;							  \
646 }									  \
647 									  \
648 static inline T *VEC_OP (T,quick_insert)				  \
649      (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)		  \
650 {									  \
651   T *slot_;								  \
652 									  \
653   vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
654   slot_ = &vec_->vec[ix_];						  \
655   memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));		  \
656   *slot_ = obj_;							  \
657 									  \
658   return slot_;								  \
659 }									  \
660 									  \
661 static inline T VEC_OP (T,ordered_remove)				  \
662      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)			  \
663 {									  \
664   T *slot_;								  \
665   T obj_;								  \
666 									  \
667   vec_assert (ix_ < vec_->num, "ordered_remove");			  \
668   slot_ = &vec_->vec[ix_];						  \
669   obj_ = *slot_;							  \
670   memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));		  \
671 									  \
672   return obj_;								  \
673 }									  \
674 									  \
675 static inline T VEC_OP (T,unordered_remove)				  \
676      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)			  \
677 {									  \
678   T *slot_;								  \
679   T obj_;								  \
680 									  \
681   vec_assert (ix_ < vec_->num, "unordered_remove");			  \
682   slot_ = &vec_->vec[ix_];						  \
683   obj_ = *slot_;							  \
684   *slot_ = vec_->vec[--vec_->num];					  \
685 									  \
686   return obj_;								  \
687 }									  \
688 									  \
689 static inline void VEC_OP (T,block_remove)				  \
690      (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL)	  \
691 {									  \
692   T *slot_;								  \
693 									  \
694   vec_assert (ix_ + len_ <= vec_->num, "block_remove");			  \
695   slot_ = &vec_->vec[ix_];						  \
696   vec_->num -= len_;							  \
697   memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T));	  \
698 }									  \
699 									  \
700 static inline T *VEC_OP (T,address)					  \
701      (VEC(T) *vec_)							  \
702 {									  \
703   return vec_ ? vec_->vec : 0;						  \
704 }									  \
705 									  \
706 static inline unsigned VEC_OP (T,lower_bound)				  \
707      (VEC(T) *vec_, const T obj_,					  \
708       int (*lessthan_)(const T, const T) VEC_ASSERT_DECL)		  \
709 {									  \
710    unsigned int len_ = VEC_OP (T, length) (vec_);			  \
711    unsigned int half_, middle_;						  \
712    unsigned int first_ = 0;						  \
713    while (len_ > 0)							  \
714      {									  \
715         T middle_elem_;							  \
716         half_ = len_ >> 1;						  \
717         middle_ = first_;						  \
718         middle_ += half_;						  \
719         middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS);  \
720         if (lessthan_ (middle_elem_, obj_))				  \
721           {								  \
722              first_ = middle_;						  \
723              ++first_;							  \
724              len_ = len_ - half_ - 1;					  \
725           }								  \
726         else								  \
727           len_ = half_;							  \
728      }									  \
729    return first_;							  \
730 }
731 
/* Allocation suite for pointer vectors: same operations as the integer
   flavor but built on vec_p_reserve, which needs no element size or
   array offset (see its declaration above).  A negative count requests
   exact-size allocation (no headroom).  */
732 #define DEF_VEC_ALLOC_FUNC_P(T)						  \
733 static inline VEC(T) *VEC_OP (T,alloc)					  \
734      (int alloc_)							  \
735 {									  \
736   /* We must request exact size allocation, hence the negation.  */	  \
737   return (VEC(T) *) vec_p_reserve (NULL, -alloc_);			  \
738 }									  \
739 									  \
740 static inline void VEC_OP (T,free)					  \
741      (VEC(T) **vec_)							  \
742 {									  \
743   if (*vec_)								  \
744     vec_free_ (*vec_);							  \
745   *vec_ = NULL;								  \
746 }									  \
747 									  \
748 static inline void VEC_OP (T,cleanup)					  \
749      (void *arg_)							  \
750 {									  \
751   VEC(T) **vec_ = arg_;							  \
752   if (*vec_)								  \
753     vec_free_ (*vec_);							  \
754   *vec_ = NULL;								  \
755 }									  \
756 									  \
757 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)			  \
758 {									  \
759   size_t len_ = vec_ ? vec_->num : 0;					  \
760   VEC (T) *new_vec_ = NULL;						  \
761 									  \
762   if (len_)								  \
763     {									  \
764       /* We must request exact size allocation, hence the negation.  */	  \
765       new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_));		  \
766 									  \
767       new_vec_->num = len_;						  \
768       memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);		  \
769     }									  \
770   return new_vec_;							  \
771 }									  \
772 									  \
773 static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)	  \
774 {									  \
775   if (vec1_ && vec2_)							  \
776     {									  \
777       size_t len_ = vec1_->num + vec2_->num;				  \
778       VEC (T) *new_vec_ = NULL;						  \
779 									  \
780       /* We must request exact size allocation, hence the negation.  */	  \
781       new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_));		  \
782 									  \
783       new_vec_->num = len_;						  \
784       memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);	  \
785       memcpy (new_vec_->vec + vec1_->num, vec2_->vec,			  \
786 	      sizeof (T) * vec2_->num);					  \
787 									  \
788       return new_vec_;							  \
789     }									  \
790   else									  \
791     return VEC_copy (T, vec1_ ? vec1_ : vec2_);				  \
792 }									  \
793 									  \
794 static inline int VEC_OP (T,reserve)					  \
795      (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)			  \
796 {									  \
797   int extend = !VEC_OP (T,space)					  \
798 	(*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS);		  \
799 									  \
800   if (extend)								  \
801     *vec_ = (VEC(T) *) vec_p_reserve (*vec_, alloc_);			  \
802 									  \
803   return extend;							  \
804 }									  \
805 									  \
806 static inline void VEC_OP (T,safe_grow)					  \
807      (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)				  \
808 {									  \
809   vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
810 	"safe_grow");							  \
811   VEC_OP (T,reserve)							  \
812 	(vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS);  \
813   (*vec_)->num = size_;							  \
814 }									  \
815 									  \
816 static inline T *VEC_OP (T,safe_push)					  \
817      (VEC(T) **vec_, T obj_ VEC_ASSERT_DECL)				  \
818 {									  \
819   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);				  \
820 									  \
821   return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);		  \
822 }									  \
823 									  \
824 static inline T *VEC_OP (T,safe_insert)					  \
825      (VEC(T) **vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)		  \
826 {									  \
827   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);				  \
828 									  \
829   return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);	  \
830 }
831 
832 #define DEF_VEC_FUNC_O(T)						  \
833 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_)		  \
834 {									  \
835   return vec_ ? vec_->num : 0;						  \
836 }									  \
837 									  \
838 static inline T *VEC_OP (T,last) (VEC(T) *vec_ VEC_ASSERT_DECL)		  \
839 {									  \
840   vec_assert (vec_ && vec_->num, "last");				  \
841 									  \
842   return &vec_->vec[vec_->num - 1];					  \
843 }									  \
844 									  \
845 static inline T *VEC_OP (T,index)					  \
846      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)			  \
847 {									  \
848   vec_assert (vec_ && ix_ < vec_->num, "index");			  \
849 									  \
850   return &vec_->vec[ix_];						  \
851 }									  \
852 									  \
853 static inline int VEC_OP (T,iterate)					  \
854      (VEC(T) *vec_, unsigned ix_, T **ptr)				  \
855 {									  \
856   if (vec_ && ix_ < vec_->num)						  \
857     {									  \
858       *ptr = &vec_->vec[ix_];						  \
859       return 1;								  \
860     }									  \
861   else									  \
862     {									  \
863       *ptr = 0;								  \
864       return 0;								  \
865     }									  \
866 }									  \
867 									  \
868 static inline size_t VEC_OP (T,embedded_size)				  \
869      (int alloc_)							  \
870 {									  \
871   return offsetof (VEC(T),vec) + alloc_ * sizeof(T);			  \
872 }									  \
873 									  \
874 static inline void VEC_OP (T,embedded_init)				  \
875      (VEC(T) *vec_, int alloc_)						  \
876 {									  \
877   vec_->num = 0;							  \
878   vec_->alloc = alloc_;							  \
879 }									  \
880 									  \
881 static inline int VEC_OP (T,space)					  \
882      (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL)				  \
883 {									  \
884   vec_assert (alloc_ >= 0, "space");					  \
885   return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;	  \
886 }									  \
887 									  \
888 static inline T *VEC_OP (T,quick_push)					  \
889      (VEC(T) *vec_, const T *obj_ VEC_ASSERT_DECL)			  \
890 {									  \
891   T *slot_;								  \
892 									  \
893   vec_assert (vec_->num < vec_->alloc, "quick_push");			  \
894   slot_ = &vec_->vec[vec_->num++];					  \
895   if (obj_)								  \
896     *slot_ = *obj_;							  \
897 									  \
898   return slot_;								  \
899 }									  \
900 									  \
901 static inline void VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL)	  \
902 {									  \
903   vec_assert (vec_->num, "pop");					  \
904   --vec_->num;								  \
905 }									  \
906 									  \
907 static inline void VEC_OP (T,truncate)					  \
908      (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL)			  \
909 {									  \
910   vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate");		  \
911   if (vec_)								  \
912     vec_->num = size_;							  \
913 }									  \
914 									  \
915 static inline T *VEC_OP (T,replace)					  \
916      (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)	  \
917 {									  \
918   T *slot_;								  \
919 									  \
920   vec_assert (ix_ < vec_->num, "replace");				  \
921   slot_ = &vec_->vec[ix_];						  \
922   if (obj_)								  \
923     *slot_ = *obj_;							  \
924 									  \
925   return slot_;								  \
926 }									  \
927 									  \
928 static inline T *VEC_OP (T,quick_insert)				  \
929      (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)	  \
930 {									  \
931   T *slot_;								  \
932 									  \
933   vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
934   slot_ = &vec_->vec[ix_];						  \
935   memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));		  \
936   if (obj_)								  \
937     *slot_ = *obj_;							  \
938 									  \
939   return slot_;								  \
940 }									  \
941 									  \
942 static inline void VEC_OP (T,ordered_remove)				  \
943      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)			  \
944 {									  \
945   T *slot_;								  \
946 									  \
947   vec_assert (ix_ < vec_->num, "ordered_remove");			  \
948   slot_ = &vec_->vec[ix_];						  \
949   memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));		  \
950 }									  \
951 									  \
952 static inline void VEC_OP (T,unordered_remove)				  \
953      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)			  \
954 {									  \
955   vec_assert (ix_ < vec_->num, "unordered_remove");			  \
956   vec_->vec[ix_] = vec_->vec[--vec_->num];				  \
957 }									  \
958 									  \
959 static inline void VEC_OP (T,block_remove)				  \
960      (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL)	  \
961 {									  \
962   T *slot_;								  \
963 									  \
964   vec_assert (ix_ + len_ <= vec_->num, "block_remove");			  \
965   slot_ = &vec_->vec[ix_];						  \
966   vec_->num -= len_;							  \
967   memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T));	  \
968 }									  \
969 									  \
970 static inline T *VEC_OP (T,address)					  \
971      (VEC(T) *vec_)							  \
972 {									  \
973   return vec_ ? vec_->vec : 0;						  \
974 }									  \
975 									  \
976 static inline unsigned VEC_OP (T,lower_bound)				  \
977      (VEC(T) *vec_, const T *obj_,					  \
978       int (*lessthan_)(const T *, const T *) VEC_ASSERT_DECL)		  \
979 {									  \
980    unsigned int len_ = VEC_OP (T, length) (vec_);			  \
981    unsigned int half_, middle_;						  \
982    unsigned int first_ = 0;						  \
983    while (len_ > 0)							  \
984      {									  \
985         T *middle_elem_;						  \
986         half_ = len_ >> 1;						  \
987         middle_ = first_;						  \
988         middle_ += half_;						  \
989         middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS);  \
990         if (lessthan_ (middle_elem_, obj_))				  \
991           {								  \
992              first_ = middle_;						  \
993              ++first_;							  \
994              len_ = len_ - half_ - 1;					  \
995           }								  \
996         else								  \
997           len_ = half_;							  \
998      }									  \
999    return first_;							  \
1000 }
1001 
/* Define the allocating operations for object ('O') vectors of
   element type T, layered on the accessors from DEF_VEC_FUNC_O.
   Storage is managed through vec_o_reserve/vec_free_; by the API's
   convention, a negative count passed to vec_o_reserve requests an
   exact-size allocation rather than the usual growth policy.  */
#define DEF_VEC_ALLOC_FUNC_O(T)						  \
/* Allocate a new vector with space for ALLOC_ elements.  */		  \
static inline VEC(T) *VEC_OP (T,alloc)					  \
     (int alloc_)							  \
{									  \
  /* We must request exact size allocation, hence the negation.  */	  \
  return (VEC(T) *) vec_o_reserve (NULL, -alloc_,			  \
                                   offsetof (VEC(T),vec), sizeof (T));	  \
}									  \
									  \
/* Return a freshly allocated copy of VEC_, or NULL if it is empty.  */	  \
static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)			  \
{									  \
  size_t len_ = vec_ ? vec_->num : 0;					  \
  VEC (T) *new_vec_ = NULL;						  \
									  \
  if (len_)								  \
    {									  \
      /* We must request exact size allocation, hence the negation.  */	  \
      new_vec_ = (VEC (T) *)						  \
	vec_o_reserve  (NULL, -len_, offsetof (VEC(T),vec), sizeof (T));  \
									  \
      new_vec_->num = len_;						  \
      memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);		  \
    }									  \
  return new_vec_;							  \
}									  \
									  \
/* Return a new vector holding the concatenation of VEC1_ and VEC2_;	  \
   if either is NULL the result is a copy of the other.  Neither	  \
   argument is modified or freed.  */					  \
static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)	  \
{									  \
  if (vec1_ && vec2_)							  \
    {									  \
      size_t len_ = vec1_->num + vec2_->num;				  \
      VEC (T) *new_vec_ = NULL;						  \
									  \
      /* We must request exact size allocation, hence the negation.  */	  \
      new_vec_ = (VEC (T) *)						  \
	vec_o_reserve (NULL, -len_, offsetof (VEC(T),vec), sizeof (T));	  \
									  \
      new_vec_->num = len_;						  \
      memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);	  \
      memcpy (new_vec_->vec + vec1_->num, vec2_->vec,			  \
	      sizeof (T) * vec2_->num);					  \
									  \
      return new_vec_;							  \
    }									  \
  else									  \
    return VEC_copy (T, vec1_ ? vec1_ : vec2_);				  \
}									  \
									  \
/* Release the vector's storage and set *VEC_ to NULL; a NULL vector	  \
   is allowed.  */							  \
static inline void VEC_OP (T,free)					  \
     (VEC(T) **vec_)							  \
{									  \
  if (*vec_)								  \
    vec_free_ (*vec_);							  \
  *vec_ = NULL;								  \
}									  \
									  \
/* Cleanup-callback form of free: ARG_ is a VEC(T) ** passed through	  \
   void * so this can be registered as a generic cleanup.  */		  \
static inline void VEC_OP (T,cleanup)					  \
     (void *arg_)							  \
{									  \
  VEC(T) **vec_ = arg_;							  \
  if (*vec_)								  \
    vec_free_ (*vec_);							  \
  *vec_ = NULL;								  \
}									  \
									  \
/* Ensure space for ALLOC_ more elements (|ALLOC_| when negative,	  \
   requesting exact rather than amortized growth).  Returns nonzero	  \
   iff the vector was reallocated, which invalidates any pointers	  \
   previously returned into its storage.  */				  \
static inline int VEC_OP (T,reserve)					  \
     (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)			  \
{									  \
  int extend = !VEC_OP (T,space) (*vec_, alloc_ < 0 ? -alloc_ : alloc_	  \
				  VEC_ASSERT_PASS);			  \
									  \
  if (extend)								  \
    *vec_ = (VEC(T) *)							  \
	vec_o_reserve (*vec_, alloc_, offsetof (VEC(T),vec), sizeof (T)); \
									  \
  return extend;							  \
}									  \
									  \
/* Grow the vector to exactly SIZE_ elements, which must be at least	  \
   the current length; new elements are left uninitialized.  The	  \
   reserve count (current length - SIZE_) is non-positive, i.e. an	  \
   exact-size request by the negation convention.  */			  \
static inline void VEC_OP (T,safe_grow)					  \
     (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)				  \
{									  \
  vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
	"safe_grow");							  \
  VEC_OP (T,reserve)							  \
	(vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS);  \
  (*vec_)->num = size_;							  \
}									  \
									  \
/* Append *OBJ_, growing the vector as needed; see quick_push for the	  \
   NULL-OBJ_ convention.  */						  \
static inline T *VEC_OP (T,safe_push)					  \
     (VEC(T) **vec_, const T *obj_ VEC_ASSERT_DECL)			  \
{									  \
  VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);				  \
									  \
  return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);		  \
}									  \
									  \
/* Insert *OBJ_ at index IX_, growing the vector as needed; see		  \
   quick_insert for the NULL-OBJ_ convention.  */			  \
static inline T *VEC_OP (T,safe_insert)					  \
     (VEC(T) **vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)	  \
{									  \
  VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);				  \
									  \
  return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);	  \
}
1105 
1106 #endif /* GDB_VEC_H */
1107