xref: /illumos-gate/usr/src/cmd/sgs/rtld/common/tsort.c (revision f808c858)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2006 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 #pragma ident	"%Z%%M%	%I%	%E% SMI"
27 
28 /*
29  * Utilities to handle shared object dependency graph.
30  *
31  * The algorithms used in this file are taken from the following book:
32  *	Algorithms in C
33  *		Robert Sedgewick
34  *		Addison-Wesley Publishing company
35  *		ISBN 0-201-51425-7
36  * 	From the following chapters:
37  *		Chapter 29 Elementary Graph Algorithms
38  *		Chapter 32 Directed Graph
39  */
40 #include	"_synonyms.h"
41 
42 #include	<sys/types.h>
43 #include	<stdarg.h>
44 #include	<stdio.h>
45 #include	<dlfcn.h>
46 #include	<signal.h>
47 #include	<locale.h>
48 #include	<string.h>
49 #include	<libintl.h>
50 #include	<debug.h>
51 #include	"_rtld.h"
52 #include	"msg.h"
53 
/*
 * Structure for maintaining sorting state.  One instance lives on tsort()'s
 * stack and is threaded through the depth-first traversal helpers.
 */
typedef struct {
	Rt_map		**s_lmpa;	/* link-map[] (returned to caller) */
	Rt_map		*s_lmp;		/* originating link-map */
	Rt_map		**s_stack;	/* strongly connected component stack */
	Alist 		*s_scc;		/* cyclic list */
	Alist		*s_queue;	/* depth queue for cyclic components */
	int		s_sndx;		/* present stack index */
	int 		s_lndx;		/* present link-map index */
	int		s_num;		/* number of objects to sort */
	int		s_initfirst;	/* no. of INITFIRST entries */
} Sort;

/* Allocation unit (element count) for the Alists built while sorting. */
#define	AL_CNT_SCC	10
70 
71 /*
72  * qsort(3c) comparison function.
73  */
74 static int
75 compare(const void * lmpp1, const void * lmpp2)
76 {
77 	Rt_map	*lmp1 = *((Rt_map **)lmpp1);
78 	Rt_map	*lmp2 = *((Rt_map **)lmpp2);
79 
80 	if (IDX(lmp1) > IDX(lmp2))
81 		return (-1);
82 	if (IDX(lmp1) < IDX(lmp2))
83 		return (1);
84 	return (0);
85 }
86 
87 /*
88  * This routine is called when a cyclic dependency is detected between strongly
89  * connected components.  The nodes within the cycle are reverse breadth-first
90  * sorted.
91  */
92 static int
93 sort_scc(Sort * sort, int fndx, int flag)
94 {
95 	static const char	*tfmt = 0, *ffmt;
96 	static int		cnt = 1;
97 	int			ndx;
98 	Rt_map			*lmp;
99 	Lm_list			*lml = LIST(sort->s_lmp);
100 	Word			lmflags = lml->lm_flags;
101 	Word			init, unref;
102 
103 	/*
104 	 * If this is the first cyclic dependency traverse the new objects that
105 	 * have been added to the link-map list and for each object establish
106 	 * a unique depth index.  We build this dynamically as we have no idea
107 	 * of the number of objects that will be inspected (logic matches that
108 	 * used by dlsym() to traverse lazy dependencies).
109 	 */
110 	if (sort->s_queue == 0) {
111 		Aliste	off;
112 		Rt_map	*lmp, **lmpp;
113 
114 		lmp = sort->s_lmp;
115 		ndx = 1;
116 
117 		if (alist_append(&(sort->s_queue), &lmp, sizeof (Rt_map *),
118 		    sort->s_num) == 0)
119 			return (0);
120 
121 		IDX(lmp) = ndx++;
122 
123 		for (ALIST_TRAVERSE(sort->s_queue, off, lmpp)) {
124 			Bnd_desc	**bdpp;
125 			Aliste		off;
126 
127 			for (ALIST_TRAVERSE(DEPENDS(*lmpp), off, bdpp)) {
128 				Rt_map	*lmp = (*bdpp)->b_depend;
129 
130 				if (IDX(lmp))
131 					continue;
132 
133 				/*
134 				 * If we're .init processing and this depend-
135 				 * encies .init has been called, skip it.
136 				 */
137 				if ((flag & RT_SORT_REV) &&
138 				    (FLAGS(lmp) & FLG_RT_INITCALL))
139 					continue;
140 
141 				if (alist_append(&(sort->s_queue), &lmp,
142 				    sizeof (Rt_map *), sort->s_num) == 0)
143 					return (0);
144 
145 				IDX(lmp) = ndx++;
146 			}
147 		}
148 	}
149 
150 	/*
151 	 * Sort the cyclics.
152 	 */
153 	qsort(&(sort->s_lmpa[fndx]), sort->s_lndx - fndx, sizeof (Rt_map *),
154 	    compare);
155 
156 	/*
157 	 * Under ldd -i, or debugging, print this cycle.  Under ldd -i/-U assign
158 	 * each object a group identifier so that cyclic dependent callers can
159 	 * be better traced (see trace_sort()), or analyzed for non-use.
160 	 */
161 	if (((init = (lmflags & LML_FLG_TRC_INIT)) == 0) &&
162 	    ((unref = (lmflags & LML_FLG_TRC_UNREF)) == 0) &&
163 	    (DBG_ENABLED == 0))
164 		return (1);
165 
166 	if (init) {
167 		if (tfmt == 0) {
168 			tfmt = MSG_INTL(MSG_LDD_INIT_FMT_01);
169 			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
170 		}
171 		(void) printf(tfmt, cnt);
172 	}
173 	DBG_CALL(Dbg_util_scc_title(lml, (flag & RT_SORT_REV)));
174 
175 	/*
176 	 * Identify this cyclic group, and under ldd -i print the cycle in the
177 	 * order its components will be run.
178 	 */
179 	if (flag & RT_SORT_REV) {
180 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
181 			lmp = sort->s_lmpa[ndx];
182 			CYCGROUP(lmp) = cnt;
183 
184 			if (init)
185 				(void) printf(ffmt, NAME(lmp));
186 			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
187 		}
188 		cnt++;
189 
190 	} else if (DBG_ENABLED) {
191 		for (ndx = sort->s_lndx - 1; ndx >= fndx; ndx--) {
192 			lmp = sort->s_lmpa[ndx];
193 			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
194 		}
195 	}
196 
197 	/*
198 	 * If we're looking for unused dependencies determine if any of these
199 	 * cyclic components are referenced from outside of the cycle.
200 	 */
201 	if (unref || DBG_ENABLED) {
202 		Bnd_desc **	bdpp;
203 
204 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
205 			Aliste	off;
206 
207 			lmp = sort->s_lmpa[ndx];
208 
209 			/*
210 			 * If this object has a handle then it can be in use by
211 			 * anyone.
212 			 */
213 			if (HANDLES(lmp))
214 				return (1);
215 
216 			/*
217 			 * Traverse this objects callers looking for outside
218 			 * references.
219 			 */
220 			for (ALIST_TRAVERSE(CALLERS(lmp), off, bdpp)) {
221 				Bnd_desc	*bdp = *bdpp;
222 				Rt_map		*clmp = bdp->b_caller;
223 
224 				if ((bdp->b_flags & BND_REFER) == 0)
225 					continue;
226 
227 				if (CYCGROUP(lmp) != CYCGROUP(clmp))
228 					return (1);
229 			}
230 		}
231 
232 		/*
233 		 * If we're here then none of the cyclic dependents have been
234 		 * referenced from outside of the cycle, mark them as unused.
235 		 */
236 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
237 			lmp = sort->s_lmpa[ndx];
238 			FLAGS1(lmp) &= ~FL1_RT_USED;
239 		}
240 	}
241 	return (1);
242 }
243 
/*
 * Take elements off of the stack and move them to the link-map array. Typically
 * this routine just pops one strongly connected component (individual link-map)
 * at a time.  When a cyclic dependency has been detected the stack will contain
 * more than just the present object to process, and will trigger the later call
 * to sort_scc() to sort these elements.
 *
 * Returns 1 on success, 0 on allocation failure.
 */
static int
visit(Lm_list *lml, Rt_map * lmp, Sort *sort, int flag)
{
	Alist		*alpp = 0;	/* SCC membership, kept only if tracing */
	int		num = sort->s_lndx;	/* first array slot used here */
	Word		tracing = lml->lm_flags & LML_FLG_TRC_ENABLE;
	Rt_map		*tlmp;

	/*
	 * Pop stack entries into the link-map array until the originating
	 * object (lmp) itself has been popped.  More than one iteration
	 * means the popped entries form a strongly connected component.
	 */
	do {
		tlmp = sort->s_stack[--(sort->s_sndx)];
		SORTVAL(tlmp) = sort->s_num;
		DBG_CALL(Dbg_util_collect(tlmp, sort->s_lndx, flag));
		sort->s_lmpa[(sort->s_lndx)++] = tlmp;

		if (flag & RT_SORT_REV) {
			/*
			 * Indicate the object has had its .init collected.
			 * Note, that regardless of the object having a .init
			 * the object is added to the tsort list, as it's from
			 * this list that any post-init flags are established.
			 */
			FLAGS(tlmp) |= FLG_RT_INITCLCT;
			lml->lm_init--;
		} else {
			/*
			 * Indicate the object has had its .fini collected.
			 * Note, that regardless of the object having a .fini,
			 * the object is added to the tsort list, as it's from
			 * this list that any audit_objclose() activity is
			 * triggered.
			 */
			FLAGS(tlmp) |= FLG_RT_FINICLCT;
		}

		/*
		 * If tracing, save the strongly connected component.
		 * NOTE(review): alpp is leaked on this error return - confirm
		 * whether allocation-failure paths are expected to clean up.
		 */
		if (tracing && (alist_append(&alpp, &tlmp, sizeof (Rt_map *),
		    AL_CNT_SCC) == 0))
			return (0);
	} while (tlmp != lmp);

	/*
	 * Determine if there are cyclic dependencies to process.  If so, sort
	 * the components, and retain them for tracing output.
	 */
	if (sort->s_lndx > (num + 1)) {
		if (sort_scc(sort, num, flag) == 0)
			return (0);

		if (tracing && (alist_append(&(sort->s_scc), &alpp,
		    sizeof (Alist *), AL_CNT_SCC) == 0))
			return (0);
	} else if (alpp)
		free(alpp);

	return (1);
}
309 
static int
dep_visit(Lm_list *, Rt_map *, uint_t, Rt_map *, Sort *, int);

/*
 * Inspect a single dependency edge, clmp -> dlmp, on behalf of dep_visit().
 * If dlmp is eligible and has not yet been inspected, recurse into it via
 * dep_visit().  Returns the smallest SORTVAL reachable along this edge
 * (propagating the caller's current minimum, min), or -1 on failure.
 */
static int
_dep_visit(Lm_list *lml, int min, Rt_map *clmp, Rt_map *dlmp, uint_t bflags,
    Sort *sort, int flag)
{
	int	_min;

	/*
	 * Only collect objects that belong to the callers link-map.  Catches
	 * cross dependencies (filtering) to ld.so.1.
	 */
	if (LIST(dlmp) != lml)
		return (min);

	/*
	 * Determine if this object hasn't been inspected.  SORTVAL() == -1
	 * marks an object not yet pushed on the sorting stack.
	 */
	if ((_min = SORTVAL(dlmp)) == -1) {
		if (flag & RT_SORT_REV) {
			/*
			 * For .init processing, only collect objects that have
			 * been relocated and haven't already been collected.
			 */
			if ((FLAGS(dlmp) & (FLG_RT_RELOCED |
			    FLG_RT_INITCLCT)) != FLG_RT_RELOCED)
				return (min);

			/*
			 * If this object contains no .init, there's no need to
			 * establish a dependency.
			 */
			if ((INIT(dlmp) == 0) && (INITARRAY(dlmp) == 0))
				return (min);
		} else {
			/*
			 * For .fini processing only collect objects that have
			 * had their .init collected, and haven't already been
			 * .fini collected.
			 */
			if ((FLAGS(dlmp) & (FLG_RT_INITCLCT |
			    FLG_RT_FINICLCT)) != FLG_RT_INITCLCT)
				return (min);

			/*
			 * If we're deleting a subset of objects, only collect
			 * those marked for deletion.
			 */
			if ((flag & RT_SORT_DELETE) &&
			    ((FLAGS(dlmp) & FLG_RT_DELETE) == 0))
				return (min);

			/*
			 * If this object contains no .fini, there's no need to
			 * establish a dependency.
			 */
			if ((FINI(dlmp) == 0) && (FINIARRAY(dlmp) == 0))
				return (min);
		}

		/*
		 * Inspect this new dependency.
		 */
		if ((_min = dep_visit(lml, clmp, bflags, dlmp,
		    sort, flag)) == -1)
			return (-1);
	}

	/*
	 * Keep track of the smallest SORTVAL that has been encountered.  If
	 * this value is smaller than the present object, then the dependency
	 * edge has cycled back to objects that have been processed earlier
	 * along this dependency edge.
	 */
	if (_min < min) {
		DBG_CALL(Dbg_util_edge_out(clmp, sort->s_stack[_min]));
		return (_min);
	} else
		return (min);
}
391 
392 /*
393  * Visit the dependencies of each object.
394  */
395 static int
396 dep_visit(Lm_list *lml, Rt_map *clmp, uint_t cbflags, Rt_map *lmp, Sort *sort,
397     int flag)
398 {
399 	int 		min;
400 	Aliste		off;
401 	Bnd_desc	**bdpp;
402 	Dyninfo		*dip;
403 
404 	min = SORTVAL(lmp) = sort->s_sndx;
405 	sort->s_stack[(sort->s_sndx)++] = lmp;
406 
407 	if (FLAGS(lmp) & FLG_RT_INITFRST)
408 		sort->s_initfirst++;
409 
410 	DBG_CALL(Dbg_util_edge_in(lml, clmp, cbflags, lmp, min, flag));
411 
412 	/*
413 	 * Traverse both explicit and implicit dependencies.
414 	 */
415 	for (ALIST_TRAVERSE(DEPENDS(lmp), off, bdpp)) {
416 		if ((min = _dep_visit(lml, min, lmp, (*bdpp)->b_depend,
417 		    (*bdpp)->b_flags, sort, flag)) == -1)
418 			return (-1);
419 	}
420 
421 	/*
422 	 * Traverse any filtee dependencies.
423 	 */
424 	if (((dip = DYNINFO(lmp)) != 0) && (FLAGS1(lmp) & MSK_RT_FILTER)) {
425 		uint_t	cnt, max = DYNINFOCNT(lmp);
426 
427 		for (cnt = 0; cnt < max; cnt++, dip++) {
428 			Pnode	*pnp = (Pnode *)dip->di_info;
429 
430 			if ((pnp == 0) ||
431 			    ((dip->di_flags & MSK_DI_FILTER) == 0))
432 				continue;
433 
434 			for (; pnp; pnp = pnp->p_next) {
435 				Grp_hdl		*ghp = (Grp_hdl *)dip->di_info;
436 				Grp_desc	*gdp;
437 
438 				if ((pnp->p_len == 0) ||
439 				    ((ghp = (Grp_hdl *)pnp->p_info) == 0))
440 					continue;
441 
442 				for (ALIST_TRAVERSE(ghp->gh_depends, off,
443 				    gdp)) {
444 
445 					if (gdp->gd_depend == lmp)
446 						continue;
447 					if ((min = _dep_visit(lml, min, lmp,
448 					    gdp->gd_depend, BND_FILTER,
449 					    sort, flag)) == -1)
450 						return (-1);
451 				}
452 			}
453 		}
454 	}
455 
456 	/*
457 	 * Having returned to where the minimum SORTVAL is equivalent to the
458 	 * object that has just been processed, collect any dependencies that
459 	 * are available on the sorting stack.
460 	 */
461 	if (min == SORTVAL(lmp)) {
462 		if (visit(lml, lmp, sort, flag) == 0)
463 			return (-1);
464 	}
465 	return (min);
466 }
467 
468 
469 #ifndef	LD_BREADTH_DISABLED
470 /*
471  * Reverse LD_BREATH search (used to fire .init's the old fashioned way).
472  */
473 static void
474 rb_visit(Rt_map * lmp, Sort * sort)
475 {
476 	Rt_map *	nlmp;
477 
478 	if ((nlmp = (Rt_map *)NEXT(lmp)) != 0)
479 		rb_visit(nlmp, sort);
480 
481 	/*
482 	 * Only collect objects that have been relocated and haven't already
483 	 * been collected.
484 	 */
485 	if ((FLAGS(lmp) & (FLG_RT_RELOCED | FLG_RT_INITCLCT)) ==
486 	    FLG_RT_RELOCED) {
487 		sort->s_lmpa[(sort->s_lndx)++] = lmp;
488 		FLAGS(lmp) |= FLG_RT_INITCLCT;
489 		LIST(lmp)->lm_init--;
490 	}
491 }
492 
493 /*
494  * Forward LD_BREATH search (used to fire .fini's the old fashioned way).
495  */
496 static void
497 fb_visit(Rt_map * lmp, Sort * sort, int flag)
498 {
499 	while (lmp) {
500 		/*
501 		 * If we're called from dlclose() then we only collect those
502 		 * objects marked for deletion.
503 		 */
504 		if (!(flag & RT_SORT_DELETE) || (FLAGS(lmp) & FLG_RT_DELETE)) {
505 			/*
506 			 * Only collect objects that have had their .init
507 			 * collected, and haven't already been .fini collected.
508 			 */
509 			if ((FLAGS(lmp) &
510 			    (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
511 			    (FLG_RT_INITCLCT)) {
512 				sort->s_lmpa[(sort->s_lndx)++] = lmp;
513 				FLAGS(lmp) |= FLG_RT_FINICLCT;
514 			}
515 		}
516 		lmp = (Rt_map *)NEXT(lmp);
517 	}
518 }
519 #endif
520 
521 /*
522  * Find corresponding strongly connected component structure.
523  */
524 static Alist *
525 trace_find_scc(Sort * sort, Rt_map * lmp)
526 {
527 	Alist		**alpp;
528 	Aliste		off1;
529 
530 	for (ALIST_TRAVERSE(sort->s_scc, off1, alpp)) {
531 		Rt_map	**lmpp;
532 		Aliste	off2;
533 
534 		for (ALIST_TRAVERSE(*alpp, off2, lmpp)) {
535 			if (lmp == *lmpp)
536 				return (*alpp);
537 		}
538 	}
539 	return (NULL);
540 }
541 
/*
 * Print out the .init dependency information (ldd).  Walks the sorted
 * link-map array, reporting each object whose .init remains to run, and -
 * for members of a cycle - the callers within the same cycle.
 */
static void
trace_sort(Sort * sort)
{
	int 		ndx = 0;
	Alist		*alp;
	Rt_map		*lmp1;

	(void) printf(MSG_ORIG(MSG_STR_NL));

	while ((lmp1 = sort->s_lmpa[ndx++]) != NULL) {
		/*
		 * Message formats are fetched once and cached across calls.
		 * cfmt and ffmt are established together below, so ffmt is
		 * always valid by the time it is used.
		 */
		static const char	*ffmt, *cfmt = 0, *sfmt = 0;
		Bnd_desc **		bdpp;
		Aliste			off1;

		/* Skip objects with no .init, or whose .init already ran. */
		if ((INIT(lmp1) == 0) || (FLAGS(lmp1) & FLG_RT_INITCALL))
			continue;

		if (sfmt == 0)
			sfmt = MSG_INTL(MSG_LDD_INIT_FMT_02);

#ifndef	LD_BREADTH_DISABLED
		/* Breadth-first mode has no cycle information to report. */
		if (rtld_flags & RT_FL_BREADTH) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}
#endif
		/*
		 * If the only component on the strongly connected list is
		 * this link-map, then there are no dependencies.
		 */
		if ((alp = trace_find_scc(sort, lmp1)) == NULL) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}

		/*
		 * Establish message formats for cyclic dependencies.
		 */
		if (cfmt == 0) {
			cfmt = MSG_INTL(MSG_LDD_INIT_FMT_03);
			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
		}

		(void) printf(cfmt, NAME(lmp1), CYCGROUP(lmp1));

		/*
		 * Report each caller of this object that is a member of the
		 * same strongly connected component.
		 */
		for (ALIST_TRAVERSE(CALLERS(lmp1), off1, bdpp)) {
			Rt_map	**lmpp3, *lmp2 = (*bdpp)->b_caller;
			Aliste	off2;

			for (ALIST_TRAVERSE(alp, off2, lmpp3)) {
				if (lmp2 != *lmpp3)
					continue;

				(void) printf(ffmt, NAME(*lmpp3));
			}
		}
	}
}
603 
604 /*
605  * A reverse ordered list (for .init's) contains INITFIRST elements.  Move each
606  * of these elements to the front of the list.
607  */
608 static void
609 r_initfirst(Sort * sort, int end)
610 {
611 	Rt_map *	tlmp;
612 	int		bgn, ifst, lifst = 0;
613 
614 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
615 		for (ifst = lifst; ifst <= end; ifst++) {
616 			tlmp = sort->s_lmpa[ifst];
617 
618 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
619 				continue;
620 
621 			/*
622 			 * If the INITFIRST element is already at the front of
623 			 * the list leave it there.
624 			 */
625 			if (ifst == bgn) {
626 				lifst = ifst + 1;
627 				break;
628 			}
629 
630 			/*
631 			 * Move the elements from the front of the list up to
632 			 * the INITFIRST element, back one position.
633 			 */
634 			(void) memmove(&sort->s_lmpa[bgn + 1],
635 			    &sort->s_lmpa[bgn],
636 			    ((ifst - bgn) * sizeof (Rt_map *)));
637 
638 			/*
639 			 * Insert INITFIRST element at the front of the list.
640 			 */
641 			sort->s_lmpa[bgn] = tlmp;
642 			lifst = ifst + 1;
643 			break;
644 		}
645 	}
646 }
647 
648 /*
649  * A forward ordered list (for .fini's) contains INITFIRST elements.  Move each
650  * of these elements to the front of the list.
651  */
652 static void
653 f_initfirst(Sort * sort, int end)
654 {
655 	Rt_map *	tlmp;
656 	int		bgn, ifst, lifst = 0;
657 
658 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
659 		for (ifst = lifst; ifst <= end; ifst++) {
660 			tlmp = sort->s_lmpa[ifst];
661 
662 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
663 				continue;
664 
665 			/*
666 			 * If the INITFIRST element is already at the end of
667 			 * the list leave it there.
668 			 */
669 			if (ifst == end)
670 				break;
671 
672 			/*
673 			 * Move the elements from after the INITFIRST element
674 			 * up to the back of the list, up one position.
675 			 */
676 			(void) memmove(&sort->s_lmpa[ifst],
677 			    &sort->s_lmpa[ifst + 1],
678 			    ((end - ifst) * sizeof (Rt_map *)));
679 
680 			/*
681 			 * Insert INITFIRST element at the back of the list.
682 			 */
683 			sort->s_lmpa[end--] = tlmp;
684 			lifst = ifst;
685 			break;
686 		}
687 	}
688 }
689 
/*
 * Sort the dependency graph rooted at lmp into .init or .fini firing order.
 *
 * lmp	- link-map from which to start collecting objects.
 * num	- number of objects expected to be collected.
 * flag	- RT_SORT_REV for .init ordering, RT_SORT_FWD (optionally with
 *	  RT_SORT_DELETE) for .fini ordering.
 *
 * Returns a NULL-terminated, calloc()'d array of link-maps in firing order
 * (the caller frees it), 0 if there is nothing to do, or S_ERROR on failure.
 */
Rt_map **
tsort(Rt_map *lmp, int num, int flag)
{
	Rt_map *	_lmp;
	Lm_list *	lml = LIST(lmp);
	Word		init = lml->lm_flags & LML_FLG_TRC_INIT;
	Sort		sort = { 0 };

	if (num == 0)
		return (0);

	/*
	 * Prior to tsorting any .init sections, insure that the `environ'
	 * symbol is initialized for this link-map list.
	 */
	if ((flag & RT_SORT_REV) && ((lml->lm_flags &
	    (LML_FLG_TRC_ENABLE | LML_FLG_ENVIRON)) == 0))
		set_environ(lml);

	/*
	 * Allocate memory for link-map list array.  Calloc the array to insure
	 * all elements are zero, we might find that no objects need processing.
	 * The extra slot (num + 1) keeps the array NULL-terminated.
	 */
	sort.s_lmp = lmp;
	sort.s_num = num + 1;
	if ((sort.s_lmpa = calloc(sort.s_num, sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

#ifndef	LD_BREADTH_DISABLED
	/*
	 * A breadth first search is easy, simply add each object to the
	 * link-map array.
	 */
	if (rtld_flags & RT_FL_BREADTH) {
		if (flag & RT_SORT_REV)
			rb_visit(lmp, &sort);
		else
			fb_visit(lmp, &sort, flag);

		/*
		 * If tracing .init sections (only meaningful for RT_SORT_REV)
		 * print out the sorted dependencies.
		 */
		if (init)
			trace_sort(&sort);

		return (sort.s_lmpa);
	}
#endif
	/*
	 * We need to topologically sort the dependencies.
	 * NOTE(review): sort.s_lmpa is leaked on this and the in-loop error
	 * returns below - confirm whether S_ERROR paths are expected to
	 * release partial state.
	 */
	if ((sort.s_stack = malloc(sort.s_num * sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

	/*
	 * Determine where to start searching for tsort() candidates.  Any call
	 * to tsort() for .init processing is passed the link-map from which to
	 * start searching.  However, if new objects have dependencies on
	 * existing objects, or existing objects have been promoted (RTLD_LAZY
	 * to RTLD_NOW), then start  searching at the head of the link-map list.
	 * These previously loaded objects will have been tagged for inclusion
	 * in this tsort() pass.  They still remain on an existing tsort() list,
	 * which must have been prempted for control to have arrived here.
	 * However, they will be ignored when encountered on any previous
	 * tsort() list if their .init has already been called.
	 */
	if (lml->lm_flags & LML_FLG_OBJREEVAL)
		_lmp = lml->lm_head;
	else
		_lmp = lmp;

	DBG_CALL(Dbg_file_bindings(_lmp, flag));
	lml->lm_flags &=
	    ~(LML_FLG_OBJREEVAL | LML_FLG_OBJADDED | LML_FLG_OBJDELETED);

	/*
	 * Kick off a depth-first traversal from each eligible object not yet
	 * collected by an earlier traversal.
	 */
	for (; _lmp; _lmp = (Rt_map *)NEXT(_lmp)) {
		if (flag & RT_SORT_REV) {
			/*
			 * For .init processing, only collect objects that have
			 * been relocated and haven't already been collected.
			 */
			if ((FLAGS(_lmp) & (FLG_RT_RELOCED |
			    FLG_RT_INITCLCT)) != FLG_RT_RELOCED)
				continue;

			if (dep_visit(lml, 0, 0, _lmp, &sort, flag) == -1)
				return ((Rt_map **)S_ERROR);

		} else if (!(flag & RT_SORT_DELETE) ||
		    (FLAGS(_lmp) & FLG_RT_DELETE)) {
			/*
			 * Only collect objects that have had their .init
			 * collected, and haven't already been .fini collected.
			 */
			if (!((FLAGS(_lmp) &
			    (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
			    (FLG_RT_INITCLCT)))
				continue;

			if (dep_visit(lml, 0, 0, _lmp, &sort, flag) == -1)
				return ((Rt_map **)S_ERROR);
		}
	}

	/*
	 * The dependencies have been collected such that they are appropriate
	 * for an .init order, for .fini order reverse them.
	 */
	if (flag & RT_SORT_FWD) {
		int	bgn = 0, end = sort.s_lndx - 1;

		while (bgn < end) {
			Rt_map *	tlmp = sort.s_lmpa[end];

			sort.s_lmpa[end] = sort.s_lmpa[bgn];
			sort.s_lmpa[bgn] = tlmp;

			bgn++, end--;
		}
	}

	/*
	 * If INITFIRST objects have been collected then move them to the front
	 * or end of the list as appropriate.
	 */
	if (sort.s_initfirst) {
		if (flag & RT_SORT_REV)
			r_initfirst(&sort, sort.s_lndx - 1);
		else
			f_initfirst(&sort, sort.s_lndx - 1);
	}

	/*
	 * If tracing .init sections (only meaningful for RT_SORT_REV), print
	 * out the sorted dependencies.
	 */
	if (init)
		trace_sort(&sort);

	/*
	 * Clean any temporary structures prior to return.
	 */
	if (sort.s_stack)
		free(sort.s_stack);

	if (sort.s_queue) {
		Aliste	off;
		Rt_map	**lmpp;

		/*
		 * Traverse the link-maps collected on the sort queue and
		 * delete the depth index.  These link-maps may be traversed
		 * again to sort other components either for inits, and almost
		 * certainly for .finis.
		 */
		for (ALIST_TRAVERSE(sort.s_queue, off, lmpp))
			IDX(*lmpp) = 0;

		free(sort.s_queue);
	}

	if (sort.s_scc) {
		Aliste	off;
		Alist	**alpp;

		for (ALIST_TRAVERSE(sort.s_scc, off, alpp))
			free(*alpp);
		free(sort.s_scc);
	}

	/*
	 * The caller is responsible for freeing the sorted link-map list once
	 * the associated .init/.fini's have been fired.
	 */
	DBG_CALL(Dbg_util_nl(lml, DBG_NL_STD));
	return (sort.s_lmpa);
}
871