1# orm/strategies.py
2# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
3# <see AUTHORS file>
4#
5# This module is part of SQLAlchemy and is released under
6# the MIT License: http://www.opensource.org/licenses/mit-license.php
7
8"""sqlalchemy.orm.interfaces.LoaderStrategy
9   implementations, and related MapperOptions."""
10
11from .. import exc as sa_exc, inspect
12from .. import util, log, event
13from ..sql import util as sql_util, visitors
14from .. import sql
15from . import (
16    attributes, interfaces, exc as orm_exc, loading,
17    unitofwork, util as orm_util
18)
19from .state import InstanceState
20from .util import _none_set
21from . import properties
22from .interfaces import (
23    LoaderStrategy, StrategizedProperty
24)
25from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
26from .session import _state_session
27import itertools
28
29
30def _register_attribute(
31    prop, mapper, useobject,
32    compare_function=None,
33    typecallable=None,
34    callable_=None,
35    proxy_property=None,
36    active_history=False,
37    impl_class=None,
38    **kw
39):
40
41    attribute_ext = list(util.to_list(prop.extension, default=[]))
42
43    listen_hooks = []
44
45    uselist = useobject and prop.uselist
46
47    if useobject and prop.single_parent:
48        listen_hooks.append(single_parent_validator)
49
50    if prop.key in prop.parent.validators:
51        fn, opts = prop.parent.validators[prop.key]
52        listen_hooks.append(
53            lambda desc, prop: orm_util._validator_events(
54                desc,
55                prop.key, fn, **opts)
56        )
57
58    if useobject:
59        listen_hooks.append(unitofwork.track_cascade_events)
60
61    # need to assemble backref listeners
62    # after the singleparentvalidator, mapper validator
63    if useobject:
64        backref = prop.back_populates
65        if backref:
66            listen_hooks.append(
67                lambda desc, prop: attributes.backref_listeners(
68                    desc,
69                    backref,
70                    uselist
71                )
72            )
73
74    # a single MapperProperty is shared down a class inheritance
75    # hierarchy, so we set up attribute instrumentation and backref event
76    # for each mapper down the hierarchy.
77
78    # typically, "mapper" is the same as prop.parent, due to the way
79    # the configure_mappers() process runs, however this is not strongly
80    # enforced, and in the case of a second configure_mappers() run the
81    # mapper here might not be prop.parent; also, a subclass mapper may
82    # be called here before a superclass mapper.  That is, can't depend
83    # on mappers not already being set up so we have to check each one.
84
85    for m in mapper.self_and_descendants:
86        if prop is m._props.get(prop.key) and \
87                not m.class_manager._attr_has_impl(prop.key):
88
89            desc = attributes.register_attribute_impl(
90                m.class_,
91                prop.key,
92                parent_token=prop,
93                uselist=uselist,
94                compare_function=compare_function,
95                useobject=useobject,
96                extension=attribute_ext,
97                trackparent=useobject and (
98                    prop.single_parent or
99                    prop.direction is interfaces.ONETOMANY),
100                typecallable=typecallable,
101                callable_=callable_,
102                active_history=active_history,
103                impl_class=impl_class,
104                send_modified_events=not useobject or not prop.viewonly,
105                doc=prop.doc,
106                **kw
107            )
108
109            for hook in listen_hooks:
110                hook(desc, prop)
111
112
113@properties.ColumnProperty.strategy_for(instrument=False, deferred=False)
114class UninstrumentedColumnLoader(LoaderStrategy):
115    """Represent a non-instrumented MapperProperty.
116
117    The polymorphic_on argument of mapper() often results in this,
118    if the argument is against the with_polymorphic selectable.
119
120    """
121    __slots__ = 'columns',
122
123    def __init__(self, parent, strategy_key):
124        super(UninstrumentedColumnLoader, self).__init__(parent, strategy_key)
125        self.columns = self.parent_property.columns
126
127    def setup_query(
128            self, context, entity, path, loadopt, adapter,
129            column_collection=None, **kwargs):
130        for c in self.columns:
131            if adapter:
132                c = adapter.columns[c]
133            column_collection.append(c)
134
135    def create_row_processor(
136            self, context, path, loadopt,
137            mapper, result, adapter, populators):
138        pass
139
140
141@log.class_logger
142@properties.ColumnProperty.strategy_for(instrument=True, deferred=False)
143class ColumnLoader(LoaderStrategy):
144    """Provide loading behavior for a :class:`.ColumnProperty`."""
145
146    __slots__ = 'columns', 'is_composite'
147
148    def __init__(self, parent, strategy_key):
149        super(ColumnLoader, self).__init__(parent, strategy_key)
150        self.columns = self.parent_property.columns
151        self.is_composite = hasattr(self.parent_property, 'composite_class')
152
153    def setup_query(
154            self, context, entity, path, loadopt,
155            adapter, column_collection, memoized_populators, **kwargs):
156
157        for c in self.columns:
158            if adapter:
159                c = adapter.columns[c]
160            column_collection.append(c)
161
162        fetch = self.columns[0]
163        if adapter:
164            fetch = adapter.columns[fetch]
165        memoized_populators[self.parent_property] = fetch
166
167    def init_class_attribute(self, mapper):
168        self.is_class_level = True
169        coltype = self.columns[0].type
170        # TODO: check all columns ?  check for foreign key as well?
171        active_history = self.parent_property.active_history or \
172            self.columns[0].primary_key or \
173            mapper.version_id_col in set(self.columns)
174
175        _register_attribute(
176            self.parent_property, mapper, useobject=False,
177            compare_function=coltype.compare_values,
178            active_history=active_history
179        )
180
181    def create_row_processor(
182            self, context, path,
183            loadopt, mapper, result, adapter, populators):
184        # look through list of columns represented here
185        # to see which, if any, is present in the row.
186        for col in self.columns:
187            if adapter:
188                col = adapter.columns[col]
189            getter = result._getter(col, False)
190            if getter:
191                populators["quick"].append((self.key, getter))
192                break
193        else:
194            populators["expire"].append((self.key, True))
195
196
197@log.class_logger
198@properties.ColumnProperty.strategy_for(deferred=True, instrument=True)
199class DeferredColumnLoader(LoaderStrategy):
200    """Provide loading behavior for a deferred :class:`.ColumnProperty`."""
201
202    __slots__ = 'columns', 'group'
203
204    def __init__(self, parent, strategy_key):
205        super(DeferredColumnLoader, self).__init__(parent, strategy_key)
206        if hasattr(self.parent_property, 'composite_class'):
207            raise NotImplementedError("Deferred loading for composite "
208                                      "types not implemented yet")
209        self.columns = self.parent_property.columns
210        self.group = self.parent_property.group
211
212    def create_row_processor(
213            self, context, path, loadopt,
214            mapper, result, adapter, populators):
215
216        # this path currently does not check the result
217        # for the column; this is because in most cases we are
218        # working just with the setup_query() directive which does
219        # not support this, and the behavior here should be consistent.
220        if not self.is_class_level:
221            set_deferred_for_local_state = \
222                self.parent_property._deferred_column_loader
223            populators["new"].append((self.key, set_deferred_for_local_state))
224        else:
225            populators["expire"].append((self.key, False))
226
227    def init_class_attribute(self, mapper):
228        self.is_class_level = True
229
230        _register_attribute(
231            self.parent_property, mapper, useobject=False,
232            compare_function=self.columns[0].type.compare_values,
233            callable_=self._load_for_state,
234            expire_missing=False
235        )
236
237    def setup_query(
238            self, context, entity, path, loadopt,
239            adapter, column_collection, memoized_populators,
240            only_load_props=None, **kw):
241
242        if (
243            (
244                loadopt and
245                'undefer_pks' in loadopt.local_opts and
246                set(self.columns).intersection(
247                    self.parent._should_undefer_in_wildcard)
248            )
249            or
250            (
251                loadopt and
252                self.group and
253                loadopt.local_opts.get('undefer_group_%s' % self.group, False)
254            )
255            or
256            (
257                only_load_props and self.key in only_load_props
258            )
259        ):
260            self.parent_property._get_strategy(
261                (("deferred", False), ("instrument", True))
262            ).setup_query(
263                context, entity,
264                path, loadopt, adapter,
265                column_collection, memoized_populators, **kw)
266        elif self.is_class_level:
267            memoized_populators[self.parent_property] = _SET_DEFERRED_EXPIRED
268        else:
269            memoized_populators[self.parent_property] = _DEFER_FOR_STATE
270
271    def _load_for_state(self, state, passive):
272        if not state.key:
273            return attributes.ATTR_EMPTY
274
275        if not passive & attributes.SQL_OK:
276            return attributes.PASSIVE_NO_RESULT
277
278        localparent = state.manager.mapper
279
280        if self.group:
281            toload = [
282                p.key for p in
283                localparent.iterate_properties
284                if isinstance(p, StrategizedProperty) and
285                isinstance(p.strategy, DeferredColumnLoader) and
286                p.group == self.group
287            ]
288        else:
289            toload = [self.key]
290
291        # narrow the keys down to just those which have no history
292        group = [k for k in toload if k in state.unmodified]
293
294        session = _state_session(state)
295        if session is None:
296            raise orm_exc.DetachedInstanceError(
297                "Parent instance %s is not bound to a Session; "
298                "deferred load operation of attribute '%s' cannot proceed" %
299                (orm_util.state_str(state), self.key)
300            )
301
302        query = session.query(localparent)
303        if loading.load_on_ident(
304                query, state.key,
305                only_load_props=group, refresh_state=state) is None:
306            raise orm_exc.ObjectDeletedError(state)
307
308        return attributes.ATTR_WAS_SET
309
310
311class LoadDeferredColumns(object):
312    """serializable loader object used by DeferredColumnLoader"""
313
314    def __init__(self, key):
315        self.key = key
316
317    def __call__(self, state, passive=attributes.PASSIVE_OFF):
318        key = self.key
319
320        localparent = state.manager.mapper
321        prop = localparent._props[key]
322        strategy = prop._strategies[DeferredColumnLoader]
323        return strategy._load_for_state(state, passive)
324
325
326class AbstractRelationshipLoader(LoaderStrategy):
327    """LoaderStratgies which deal with related objects."""
328
329    __slots__ = 'mapper', 'target', 'uselist'
330
331    def __init__(self, parent, strategy_key):
332        super(AbstractRelationshipLoader, self).__init__(parent, strategy_key)
333        self.mapper = self.parent_property.mapper
334        self.target = self.parent_property.target
335        self.uselist = self.parent_property.uselist
336
337
338@log.class_logger
339@properties.RelationshipProperty.strategy_for(lazy="noload")
340@properties.RelationshipProperty.strategy_for(lazy=None)
341class NoLoader(AbstractRelationshipLoader):
342    """Provide loading behavior for a :class:`.RelationshipProperty`
343    with "lazy=None".
344
345    """
346
347    __slots__ = ()
348
349    def init_class_attribute(self, mapper):
350        self.is_class_level = True
351
352        _register_attribute(
353            self.parent_property, mapper,
354            useobject=True,
355            typecallable=self.parent_property.collection_class,
356        )
357
358    def create_row_processor(
359            self, context, path, loadopt, mapper,
360            result, adapter, populators):
361        def invoke_no_load(state, dict_, row):
362            if self.uselist:
363                state.manager.get_impl(self.key).initialize(state, dict_)
364            else:
365                dict_[self.key] = None
366        populators["new"].append((self.key, invoke_no_load))
367
368
369@log.class_logger
370@properties.RelationshipProperty.strategy_for(lazy=True)
371@properties.RelationshipProperty.strategy_for(lazy="select")
372@properties.RelationshipProperty.strategy_for(lazy="raise")
373@properties.RelationshipProperty.strategy_for(lazy="raise_on_sql")
374class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots):
375    """Provide loading behavior for a :class:`.RelationshipProperty`
376    with "lazy=True", that is loads when first accessed.
377
378    """
379
380    __slots__ = (
381        '_lazywhere', '_rev_lazywhere', 'use_get', '_bind_to_col',
382        '_equated_columns', '_rev_bind_to_col', '_rev_equated_columns',
383        '_simple_lazy_clause', '_raise_always', '_raise_on_sql')
384
385    def __init__(self, parent, strategy_key):
386        super(LazyLoader, self).__init__(parent, strategy_key)
387        self._raise_always = self.strategy_opts["lazy"] == "raise"
388        self._raise_on_sql = self.strategy_opts["lazy"] == "raise_on_sql"
389
390        join_condition = self.parent_property._join_condition
391        self._lazywhere, \
392            self._bind_to_col, \
393            self._equated_columns = join_condition.create_lazy_clause()
394
395        self._rev_lazywhere, \
396            self._rev_bind_to_col, \
397            self._rev_equated_columns = join_condition.create_lazy_clause(
398                reverse_direction=True)
399
400        self.logger.info("%s lazy loading clause %s", self, self._lazywhere)
401
402        # determine if our "lazywhere" clause is the same as the mapper's
403        # get() clause.  then we can just use mapper.get()
404        self.use_get = not self.uselist and \
405            self.mapper._get_clause[0].compare(
406                self._lazywhere,
407                use_proxies=True,
408                equivalents=self.mapper._equivalent_columns
409            )
410
411        if self.use_get:
412            for col in list(self._equated_columns):
413                if col in self.mapper._equivalent_columns:
414                    for c in self.mapper._equivalent_columns[col]:
415                        self._equated_columns[c] = self._equated_columns[col]
416
417            self.logger.info("%s will use query.get() to "
418                             "optimize instance loads", self)
419
420    def init_class_attribute(self, mapper):
421        self.is_class_level = True
422
423        active_history = (
424            self.parent_property.active_history or
425            self.parent_property.direction is not interfaces.MANYTOONE or
426            not self.use_get
427        )
428
429        # MANYTOONE currently only needs the
430        # "old" value for delete-orphan
431        # cascades.  the required _SingleParentValidator
432        # will enable active_history
433        # in that case.  otherwise we don't need the
434        # "old" value during backref operations.
435        _register_attribute(
436            self.parent_property,
437            mapper,
438            useobject=True,
439            callable_=self._load_for_state,
440            typecallable=self.parent_property.collection_class,
441            active_history=active_history
442        )
443
444    def _memoized_attr__simple_lazy_clause(self):
445        criterion, bind_to_col = (
446            self._lazywhere,
447            self._bind_to_col
448        )
449
450        params = []
451
452        def visit_bindparam(bindparam):
453            bindparam.unique = False
454            if bindparam._identifying_key in bind_to_col:
455                params.append((
456                    bindparam.key, bind_to_col[bindparam._identifying_key],
457                    None))
458            elif bindparam.callable is None:
459                params.append((bindparam.key, None, bindparam.value))
460
461        criterion = visitors.cloned_traverse(
462            criterion, {}, {'bindparam': visit_bindparam}
463        )
464
465        return criterion, params
466
467    def _generate_lazy_clause(self, state, passive):
468        criterion, param_keys = self._simple_lazy_clause
469
470        if state is None:
471            return sql_util.adapt_criterion_to_null(
472                criterion, [key for key, ident, value in param_keys])
473
474        mapper = self.parent_property.parent
475
476        o = state.obj()  # strong ref
477        dict_ = attributes.instance_dict(o)
478
479        if passive & attributes.INIT_OK:
480            passive ^= attributes.INIT_OK
481
482        params = {}
483        for key, ident, value in param_keys:
484            if ident is not None:
485                if passive and passive & attributes.LOAD_AGAINST_COMMITTED:
486                    value = mapper._get_committed_state_attr_by_column(
487                        state, dict_, ident, passive)
488                else:
489                    value = mapper._get_state_attr_by_column(
490                        state, dict_, ident, passive)
491
492            params[key] = value
493
494        return criterion, params
495
496    def _invoke_raise_load(self, state, passive, lazy):
497        raise sa_exc.InvalidRequestError(
498            "'%s' is not available due to lazy='%s'" % (self, lazy)
499        )
500
501    def _load_for_state(self, state, passive):
502
503        if not state.key and (
504                (
505                    not self.parent_property.load_on_pending
506                    and not state._load_pending
507                )
508                or not state.session_id
509        ):
510            return attributes.ATTR_EMPTY
511
512        pending = not state.key
513        ident_key = None
514
515        if (
516            (not passive & attributes.SQL_OK and not self.use_get)
517            or
518            (not passive & attributes.NON_PERSISTENT_OK and pending)
519        ):
520            return attributes.PASSIVE_NO_RESULT
521
522        if self._raise_always:
523            self._invoke_raise_load(state, passive, "raise")
524
525        session = _state_session(state)
526        if not session:
527            raise orm_exc.DetachedInstanceError(
528                "Parent instance %s is not bound to a Session; "
529                "lazy load operation of attribute '%s' cannot proceed" %
530                (orm_util.state_str(state), self.key)
531            )
532
533        # if we have a simple primary key load, check the
534        # identity map without generating a Query at all
535        if self.use_get:
536            ident = self._get_ident_for_use_get(
537                session,
538                state,
539                passive
540            )
541            if attributes.PASSIVE_NO_RESULT in ident:
542                return attributes.PASSIVE_NO_RESULT
543            elif attributes.NEVER_SET in ident:
544                return attributes.NEVER_SET
545
546            if _none_set.issuperset(ident):
547                return None
548
549            ident_key = self.mapper.identity_key_from_primary_key(ident)
550            instance = loading.get_from_identity(session, ident_key, passive)
551            if instance is not None:
552                return instance
553            elif not passive & attributes.SQL_OK or \
554                    not passive & attributes.RELATED_OBJECT_OK:
555                return attributes.PASSIVE_NO_RESULT
556
557        return self._emit_lazyload(session, state, ident_key, passive)
558
559    def _get_ident_for_use_get(self, session, state, passive):
560        instance_mapper = state.manager.mapper
561
562        if passive & attributes.LOAD_AGAINST_COMMITTED:
563            get_attr = instance_mapper._get_committed_state_attr_by_column
564        else:
565            get_attr = instance_mapper._get_state_attr_by_column
566
567        dict_ = state.dict
568
569        return [
570            get_attr(
571                state,
572                dict_,
573                self._equated_columns[pk],
574                passive=passive)
575            for pk in self.mapper.primary_key
576        ]
577
578    @util.dependencies("sqlalchemy.orm.strategy_options")
579    def _emit_lazyload(
580            self, strategy_options, session, state, ident_key, passive):
581
582        q = session.query(self.mapper)._adapt_all_clauses()
583        if self.parent_property.secondary is not None:
584            q = q.select_from(self.mapper, self.parent_property.secondary)
585
586        q = q._with_invoke_all_eagers(False)
587
588        pending = not state.key
589
590        # don't autoflush on pending
591        if pending or passive & attributes.NO_AUTOFLUSH:
592            q = q.autoflush(False)
593
594        if state.load_path:
595            q = q._with_current_path(state.load_path[self.parent_property])
596
597        if state.load_options:
598            q = q._conditional_options(*state.load_options)
599
600        if self.use_get:
601            if self._raise_on_sql:
602                self._invoke_raise_load(state, passive, "raise_on_sql")
603            return loading.load_on_ident(q, ident_key)
604
605        if self.parent_property.order_by:
606            q = q.order_by(*util.to_list(self.parent_property.order_by))
607
608        for rev in self.parent_property._reverse_property:
609            # reverse props that are MANYTOONE are loading *this*
610            # object from get(), so don't need to eager out to those.
611            if rev.direction is interfaces.MANYTOONE and \
612                rev._use_get and \
613                    not isinstance(rev.strategy, LazyLoader):
614                q = q.options(
615                    strategy_options.Load.for_existing_path(
616                        q._current_path[rev.parent]
617                    ).lazyload(rev.key)
618                )
619
620        lazy_clause, params = self._generate_lazy_clause(
621            state, passive=passive)
622
623        if pending:
624            if util.has_intersection(
625                    orm_util._none_set, params.values()):
626                return None
627        elif util.has_intersection(orm_util._never_set, params.values()):
628            return None
629
630        if self._raise_on_sql:
631            self._invoke_raise_load(state, passive, "raise_on_sql")
632
633        q = q.filter(lazy_clause).params(params)
634
635        result = q.all()
636        if self.uselist:
637            return result
638        else:
639            l = len(result)
640            if l:
641                if l > 1:
642                    util.warn(
643                        "Multiple rows returned with "
644                        "uselist=False for lazily-loaded attribute '%s' "
645                        % self.parent_property)
646
647                return result[0]
648            else:
649                return None
650
651    def create_row_processor(
652            self, context, path, loadopt,
653            mapper, result, adapter, populators):
654        key = self.key
655        if not self.is_class_level:
656            # we are not the primary manager for this attribute
657            # on this class - set up a
658            # per-instance lazyloader, which will override the
659            # class-level behavior.
660            # this currently only happens when using a
661            # "lazyload" option on a "no load"
662            # attribute - "eager" attributes always have a
663            # class-level lazyloader installed.
664            set_lazy_callable = InstanceState._instance_level_callable_processor(
665                mapper.class_manager,
666                LoadLazyAttribute(key, self), key)
667
668            populators["new"].append((self.key, set_lazy_callable))
669        elif context.populate_existing or mapper.always_refresh:
670            def reset_for_lazy_callable(state, dict_, row):
671                # we are the primary manager for this attribute on
672                # this class - reset its
673                # per-instance attribute state, so that the class-level
674                # lazy loader is
675                # executed when next referenced on this instance.
676                # this is needed in
677                # populate_existing() types of scenarios to reset
678                # any existing state.
679                state._reset(dict_, key)
680
681            populators["new"].append((self.key, reset_for_lazy_callable))
682
683
684class LoadLazyAttribute(object):
685    """serializable loader object used by LazyLoader"""
686
687    def __init__(self, key, initiating_strategy):
688        self.key = key
689        self.strategy_key = initiating_strategy.strategy_key
690
691    def __call__(self, state, passive=attributes.PASSIVE_OFF):
692        key = self.key
693        instance_mapper = state.manager.mapper
694        prop = instance_mapper._props[key]
695        strategy = prop._strategies[self.strategy_key]
696
697        return strategy._load_for_state(state, passive)
698
699
700@properties.RelationshipProperty.strategy_for(lazy="immediate")
701class ImmediateLoader(AbstractRelationshipLoader):
702    __slots__ = ()
703
704    def init_class_attribute(self, mapper):
705        self.parent_property.\
706            _get_strategy((("lazy", "select"),)).\
707            init_class_attribute(mapper)
708
709    def setup_query(
710            self, context, entity,
711            path, loadopt, adapter, column_collection=None,
712            parentmapper=None, **kwargs):
713        pass
714
715    def create_row_processor(
716            self, context, path, loadopt,
717            mapper, result, adapter, populators):
718        def load_immediate(state, dict_, row):
719            state.get_impl(self.key).get(state, dict_)
720
721        populators["delayed"].append((self.key, load_immediate))
722
723
724@log.class_logger
725@properties.RelationshipProperty.strategy_for(lazy="subquery")
726class SubqueryLoader(AbstractRelationshipLoader):
727    __slots__ = 'join_depth',
728
729    def __init__(self, parent, strategy_key):
730        super(SubqueryLoader, self).__init__(parent, strategy_key)
731        self.join_depth = self.parent_property.join_depth
732
733    def init_class_attribute(self, mapper):
734        self.parent_property.\
735            _get_strategy((("lazy", "select"),)).\
736            init_class_attribute(mapper)
737
738    def setup_query(
739            self, context, entity,
740            path, loadopt, adapter,
741            column_collection=None,
742            parentmapper=None, **kwargs):
743
744        if not context.query._enable_eagerloads:
745            return
746        elif context.query._yield_per:
747            context.query._no_yield_per("subquery")
748
749        path = path[self.parent_property]
750
751        # build up a path indicating the path from the leftmost
752        # entity to the thing we're subquery loading.
753        with_poly_info = path.get(
754            context.attributes,
755            "path_with_polymorphic", None)
756        if with_poly_info is not None:
757            effective_entity = with_poly_info.entity
758        else:
759            effective_entity = self.mapper
760
761        subq_path = context.attributes.get(
762            ('subquery_path', None),
763            orm_util.PathRegistry.root)
764
765        subq_path = subq_path + path
766
767        # if not via query option, check for
768        # a cycle
769        if not path.contains(context.attributes, "loader"):
770            if self.join_depth:
771                if path.length / 2 > self.join_depth:
772                    return
773            elif subq_path.contains_mapper(self.mapper):
774                return
775
776        leftmost_mapper, leftmost_attr, leftmost_relationship = \
777            self._get_leftmost(subq_path)
778
779        orig_query = context.attributes.get(
780            ("orig_query", SubqueryLoader),
781            context.query)
782
783        # generate a new Query from the original, then
784        # produce a subquery from it.
785        left_alias = self._generate_from_original_query(
786            orig_query, leftmost_mapper,
787            leftmost_attr, leftmost_relationship,
788            entity.entity_zero
789        )
790
791        # generate another Query that will join the
792        # left alias to the target relationships.
793        # basically doing a longhand
794        # "from_self()".  (from_self() itself not quite industrial
795        # strength enough for all contingencies...but very close)
796        q = orig_query.session.query(effective_entity)
797        q._attributes = {
798            ("orig_query", SubqueryLoader): orig_query,
799            ('subquery_path', None): subq_path
800        }
801
802        q = q._set_enable_single_crit(False)
803        to_join, local_attr, parent_alias = \
804            self._prep_for_joins(left_alias, subq_path)
805        q = q.order_by(*local_attr)
806        q = q.add_columns(*local_attr)
807        q = self._apply_joins(
808            q, to_join, left_alias,
809            parent_alias, effective_entity)
810
811        q = self._setup_options(q, subq_path, orig_query, effective_entity)
812        q = self._setup_outermost_orderby(q)
813
814        # add new query to attributes to be picked up
815        # by create_row_processor
816        path.set(context.attributes, "subquery", q)
817
818    def _get_leftmost(self, subq_path):
819        subq_path = subq_path.path
820        subq_mapper = orm_util._class_to_mapper(subq_path[0])
821
822        # determine attributes of the leftmost mapper
823        if self.parent.isa(subq_mapper) and \
824                self.parent_property is subq_path[1]:
825            leftmost_mapper, leftmost_prop = \
826                self.parent, self.parent_property
827        else:
828            leftmost_mapper, leftmost_prop = \
829                subq_mapper, \
830                subq_path[1]
831
832        leftmost_cols = leftmost_prop.local_columns
833
834        leftmost_attr = [
835            getattr(
836                subq_path[0].entity,
837                leftmost_mapper._columntoproperty[c].key)
838            for c in leftmost_cols
839        ]
840
841        return leftmost_mapper, leftmost_attr, leftmost_prop
842
843    def _generate_from_original_query(
844        self,
845        orig_query, leftmost_mapper,
846        leftmost_attr, leftmost_relationship, orig_entity
847    ):
848        # reformat the original query
849        # to look only for significant columns
850        q = orig_query._clone().correlate(None)
851
852        # set the query's "FROM" list explicitly to what the
853        # FROM list would be in any case, as we will be limiting
854        # the columns in the SELECT list which may no longer include
855        # all entities mentioned in things like WHERE, JOIN, etc.
856        if not q._from_obj:
857            q._set_select_from(
858                list(set([
859                    ent['entity'] for ent in orig_query.column_descriptions
860                    if ent['entity'] is not None
861                ])),
862                False
863            )
864
865        # select from the identity columns of the outer (specifically, these
866        # are the 'local_cols' of the property).  This will remove
867        # other columns from the query that might suggest the right entity
868        # which is why we do _set_select_from above.
869        target_cols = q._adapt_col_list(leftmost_attr)
870        q._set_entities(target_cols)
871
872        distinct_target_key = leftmost_relationship.distinct_target_key
873
874        if distinct_target_key is True:
875            q._distinct = True
876        elif distinct_target_key is None:
877            # if target_cols refer to a non-primary key or only
878            # part of a composite primary key, set the q as distinct
879            for t in set(c.table for c in target_cols):
880                if not set(target_cols).issuperset(t.primary_key):
881                    q._distinct = True
882                    break
883
884        if q._order_by is False:
885            q._order_by = leftmost_mapper.order_by
886
887        # don't need ORDER BY if no limit/offset
888        if q._limit is None and q._offset is None:
889            q._order_by = None
890
891        # the original query now becomes a subquery
892        # which we'll join onto.
893
894        embed_q = q.with_labels().subquery()
895        left_alias = orm_util.AliasedClass(
896            leftmost_mapper, embed_q,
897            use_mapper_path=True)
898        return left_alias
899
900    def _prep_for_joins(self, left_alias, subq_path):
901        # figure out what's being joined.  a.k.a. the fun part
902        to_join = []
903        pairs = list(subq_path.pairs())
904
905        for i, (mapper, prop) in enumerate(pairs):
906            if i > 0:
907                # look at the previous mapper in the chain -
908                # if it is as or more specific than this prop's
909                # mapper, use that instead.
910                # note we have an assumption here that
911                # the non-first element is always going to be a mapper,
912                # not an AliasedClass
913
914                prev_mapper = pairs[i - 1][1].mapper
915                to_append = prev_mapper if prev_mapper.isa(mapper) else mapper
916            else:
917                to_append = mapper
918
919            to_join.append((to_append, prop.key))
920
921        # determine the immediate parent class we are joining from,
922        # which needs to be aliased.
923
924        if len(to_join) < 2:
925            # in the case of a one level eager load, this is the
926            # leftmost "left_alias".
927            parent_alias = left_alias
928        else:
929            info = inspect(to_join[-1][0])
930            if info.is_aliased_class:
931                parent_alias = info.entity
932            else:
933                # alias a plain mapper as we may be
934                # joining multiple times
935                parent_alias = orm_util.AliasedClass(
936                    info.entity,
937                    use_mapper_path=True)
938
939        local_cols = self.parent_property.local_columns
940
941        local_attr = [
942            getattr(parent_alias, self.parent._columntoproperty[c].key)
943            for c in local_cols
944        ]
945        return to_join, local_attr, parent_alias
946
947    def _apply_joins(
948            self, q, to_join, left_alias, parent_alias,
949            effective_entity):
950
951        ltj = len(to_join)
952        if ltj == 1:
953            to_join = [
954                getattr(left_alias, to_join[0][1]).of_type(effective_entity)
955            ]
956        elif ltj == 2:
957            to_join = [
958                getattr(left_alias, to_join[0][1]).of_type(parent_alias),
959                getattr(parent_alias, to_join[-1][1]).of_type(effective_entity)
960            ]
961        elif ltj > 2:
962            middle = [
963                (
964                    orm_util.AliasedClass(item[0])
965                    if not inspect(item[0]).is_aliased_class
966                    else item[0].entity,
967                    item[1]
968                ) for item in to_join[1:-1]
969            ]
970            inner = []
971
972            while middle:
973                item = middle.pop(0)
974                attr = getattr(item[0], item[1])
975                if middle:
976                    attr = attr.of_type(middle[0][0])
977                else:
978                    attr = attr.of_type(parent_alias)
979
980                inner.append(attr)
981
982            to_join = [
983                getattr(left_alias, to_join[0][1]).of_type(inner[0].parent)
984            ] + inner + [
985                getattr(parent_alias, to_join[-1][1]).of_type(effective_entity)
986            ]
987
988        for attr in to_join:
989            q = q.join(attr, from_joinpoint=True)
990        return q
991
992    def _setup_options(self, q, subq_path, orig_query, effective_entity):
993        # propagate loader options etc. to the new query.
994        # these will fire relative to subq_path.
995        q = q._with_current_path(subq_path)
996        q = q._conditional_options(*orig_query._with_options)
997        if orig_query._populate_existing:
998            q._populate_existing = orig_query._populate_existing
999
1000        return q
1001
1002    def _setup_outermost_orderby(self, q):
1003        if self.parent_property.order_by:
1004            # if there's an ORDER BY, alias it the same
1005            # way joinedloader does, but we have to pull out
1006            # the "eagerjoin" from the query.
1007            # this really only picks up the "secondary" table
1008            # right now.
1009            eagerjoin = q._from_obj[0]
1010            eager_order_by = \
1011                eagerjoin._target_adapter.\
1012                copy_and_process(
1013                    util.to_list(
1014                        self.parent_property.order_by
1015                    )
1016                )
1017            q = q.order_by(*eager_order_by)
1018        return q
1019
1020    class _SubqCollections(object):
1021        """Given a :class:`.Query` used to emit the "subquery load",
1022        provide a load interface that executes the query at the
1023        first moment a value is needed.
1024
1025        """
1026        _data = None
1027
1028        def __init__(self, subq):
1029            self.subq = subq
1030
1031        def get(self, key, default):
1032            if self._data is None:
1033                self._load()
1034            return self._data.get(key, default)
1035
1036        def _load(self):
1037            self._data = dict(
1038                (k, [vv[0] for vv in v])
1039                for k, v in itertools.groupby(
1040                    self.subq,
1041                    lambda x: x[1:]
1042                )
1043            )
1044
1045        def loader(self, state, dict_, row):
1046            if self._data is None:
1047                self._load()
1048
1049    def create_row_processor(
1050            self, context, path, loadopt,
1051            mapper, result, adapter, populators):
1052        if not self.parent.class_manager[self.key].impl.supports_population:
1053            raise sa_exc.InvalidRequestError(
1054                "'%s' does not support object "
1055                "population - eager loading cannot be applied." %
1056                self)
1057
1058        path = path[self.parent_property]
1059
1060        subq = path.get(context.attributes, 'subquery')
1061
1062        if subq is None:
1063            return
1064
1065        assert subq.session is context.session, (
1066            "Subquery session doesn't refer to that of "
1067            "our context.  Are there broken context caching "
1068            "schemes being used?"
1069        )
1070
1071        local_cols = self.parent_property.local_columns
1072
1073        # cache the loaded collections in the context
1074        # so that inheriting mappers don't re-load when they
1075        # call upon create_row_processor again
1076        collections = path.get(context.attributes, "collections")
1077        if collections is None:
1078            collections = self._SubqCollections(subq)
1079            path.set(context.attributes, 'collections', collections)
1080
1081        if adapter:
1082            local_cols = [adapter.columns[c] for c in local_cols]
1083
1084        if self.uselist:
1085            self._create_collection_loader(
1086                context, collections, local_cols, populators)
1087        else:
1088            self._create_scalar_loader(
1089                context, collections, local_cols, populators)
1090
1091    def _create_collection_loader(
1092            self, context, collections, local_cols, populators):
1093        def load_collection_from_subq(state, dict_, row):
1094            collection = collections.get(
1095                tuple([row[col] for col in local_cols]),
1096                ()
1097            )
1098            state.get_impl(self.key).\
1099                set_committed_value(state, dict_, collection)
1100
1101        def load_collection_from_subq_existing_row(state, dict_, row):
1102            if self.key not in dict_:
1103                load_collection_from_subq(state, dict_, row)
1104
1105        populators["new"].append(
1106            (self.key, load_collection_from_subq))
1107        populators["existing"].append(
1108            (self.key, load_collection_from_subq_existing_row))
1109
1110        if context.invoke_all_eagers:
1111            populators["eager"].append((self.key, collections.loader))
1112
1113    def _create_scalar_loader(
1114            self, context, collections, local_cols, populators):
1115        def load_scalar_from_subq(state, dict_, row):
1116            collection = collections.get(
1117                tuple([row[col] for col in local_cols]),
1118                (None,)
1119            )
1120            if len(collection) > 1:
1121                util.warn(
1122                    "Multiple rows returned with "
1123                    "uselist=False for eagerly-loaded attribute '%s' "
1124                    % self)
1125
1126            scalar = collection[0]
1127            state.get_impl(self.key).\
1128                set_committed_value(state, dict_, scalar)
1129
1130        def load_scalar_from_subq_existing_row(state, dict_, row):
1131            if self.key not in dict_:
1132                load_scalar_from_subq(state, dict_, row)
1133
1134        populators["new"].append(
1135            (self.key, load_scalar_from_subq))
1136        populators["existing"].append(
1137            (self.key, load_scalar_from_subq_existing_row))
1138        if context.invoke_all_eagers:
1139            populators["eager"].append((self.key, collections.loader))
1140
1141
1142@log.class_logger
1143@properties.RelationshipProperty.strategy_for(lazy="joined")
1144@properties.RelationshipProperty.strategy_for(lazy=False)
1145class JoinedLoader(AbstractRelationshipLoader):
1146    """Provide loading behavior for a :class:`.RelationshipProperty`
1147    using joined eager loading.
1148
1149    """
1150
1151    __slots__ = 'join_depth', '_aliased_class_pool'
1152
1153    def __init__(self, parent, strategy_key):
1154        super(JoinedLoader, self).__init__(parent, strategy_key)
1155        self.join_depth = self.parent_property.join_depth
1156        self._aliased_class_pool = []
1157
1158    def init_class_attribute(self, mapper):
1159        self.parent_property.\
1160            _get_strategy((("lazy", "select"),)).init_class_attribute(mapper)
1161
1162    def setup_query(
1163            self, context, entity, path, loadopt, adapter,
1164            column_collection=None, parentmapper=None,
1165            chained_from_outerjoin=False,
1166            **kwargs):
1167        """Add a left outer join to the statement that's being constructed."""
1168
1169        if not context.query._enable_eagerloads:
1170            return
1171        elif context.query._yield_per and self.uselist:
1172            context.query._no_yield_per("joined collection")
1173
1174        path = path[self.parent_property]
1175
1176        with_polymorphic = None
1177
1178        user_defined_adapter = self._init_user_defined_eager_proc(
1179            loadopt, context) if loadopt else False
1180
1181        if user_defined_adapter is not False:
1182            clauses, adapter, add_to_collection = \
1183                self._setup_query_on_user_defined_adapter(
1184                    context, entity, path, adapter,
1185                    user_defined_adapter
1186                )
1187        else:
1188            # if not via query option, check for
1189            # a cycle
1190            if not path.contains(context.attributes, "loader"):
1191                if self.join_depth:
1192                    if path.length / 2 > self.join_depth:
1193                        return
1194                elif path.contains_mapper(self.mapper):
1195                    return
1196
1197            clauses, adapter, add_to_collection, chained_from_outerjoin = \
1198                self._generate_row_adapter(
1199                    context, entity, path, loadopt, adapter,
1200                    column_collection, parentmapper, chained_from_outerjoin
1201                )
1202
1203        with_poly_info = path.get(
1204            context.attributes,
1205            "path_with_polymorphic",
1206            None
1207        )
1208        if with_poly_info is not None:
1209            with_polymorphic = with_poly_info.with_polymorphic_mappers
1210        else:
1211            with_polymorphic = None
1212
1213        path = path[self.mapper]
1214
1215        loading._setup_entity_query(
1216            context, self.mapper, entity,
1217            path, clauses, add_to_collection,
1218            with_polymorphic=with_polymorphic,
1219            parentmapper=self.mapper,
1220            chained_from_outerjoin=chained_from_outerjoin)
1221
1222        if with_poly_info is not None and \
1223                None in set(context.secondary_columns):
1224            raise sa_exc.InvalidRequestError(
1225                "Detected unaliased columns when generating joined "
1226                "load.  Make sure to use aliased=True or flat=True "
1227                "when using joined loading with with_polymorphic()."
1228            )
1229
1230    def _init_user_defined_eager_proc(self, loadopt, context):
1231
1232        # check if the opt applies at all
1233        if "eager_from_alias" not in loadopt.local_opts:
1234            # nope
1235            return False
1236
1237        path = loadopt.path.parent
1238
1239        # the option applies.  check if the "user_defined_eager_row_processor"
1240        # has been built up.
1241        adapter = path.get(
1242            context.attributes,
1243            "user_defined_eager_row_processor", False)
1244        if adapter is not False:
1245            # just return it
1246            return adapter
1247
1248        # otherwise figure it out.
1249        alias = loadopt.local_opts["eager_from_alias"]
1250
1251        root_mapper, prop = path[-2:]
1252
1253        #from .mapper import Mapper
1254        #from .interfaces import MapperProperty
1255        #assert isinstance(root_mapper, Mapper)
1256        #assert isinstance(prop, MapperProperty)
1257
1258        if alias is not None:
1259            if isinstance(alias, str):
1260                alias = prop.target.alias(alias)
1261            adapter = sql_util.ColumnAdapter(
1262                alias,
1263                equivalents=prop.mapper._equivalent_columns)
1264        else:
1265            if path.contains(context.attributes, "path_with_polymorphic"):
1266                with_poly_info = path.get(
1267                    context.attributes,
1268                    "path_with_polymorphic")
1269                adapter = orm_util.ORMAdapter(
1270                    with_poly_info.entity,
1271                    equivalents=prop.mapper._equivalent_columns)
1272            else:
1273                adapter = context.query._polymorphic_adapters.get(
1274                    prop.mapper, None)
1275        path.set(
1276            context.attributes,
1277            "user_defined_eager_row_processor",
1278            adapter)
1279
1280        return adapter
1281
1282    def _setup_query_on_user_defined_adapter(
1283            self, context, entity,
1284            path, adapter, user_defined_adapter):
1285
1286        # apply some more wrapping to the "user defined adapter"
1287        # if we are setting up the query for SQL render.
1288        adapter = entity._get_entity_clauses(context.query, context)
1289
1290        if adapter and user_defined_adapter:
1291            user_defined_adapter = user_defined_adapter.wrap(adapter)
1292            path.set(
1293                context.attributes, "user_defined_eager_row_processor",
1294                user_defined_adapter)
1295        elif adapter:
1296            user_defined_adapter = adapter
1297            path.set(
1298                context.attributes, "user_defined_eager_row_processor",
1299                user_defined_adapter)
1300
1301        add_to_collection = context.primary_columns
1302        return user_defined_adapter, adapter, add_to_collection
1303
1304    def _gen_pooled_aliased_class(self, context):
1305        # keep a local pool of AliasedClass objects that get re-used.
1306        # we need one unique AliasedClass per query per appearance of our
1307        # entity in the query.
1308
1309        key = ('joinedloader_ac', self)
1310        if key not in context.attributes:
1311            context.attributes[key] = idx = 0
1312        else:
1313            context.attributes[key] = idx = context.attributes[key] + 1
1314
1315        if idx >= len(self._aliased_class_pool):
1316            to_adapt = orm_util.AliasedClass(
1317                self.mapper,
1318                flat=True,
1319                use_mapper_path=True)
1320            # load up the .columns collection on the Alias() before
1321            # the object becomes shared among threads.  this prevents
1322            # races for column identities.
1323            inspect(to_adapt).selectable.c
1324
1325            self._aliased_class_pool.append(to_adapt)
1326
1327        return self._aliased_class_pool[idx]
1328
1329    def _generate_row_adapter(
1330            self,
1331            context, entity, path, loadopt, adapter,
1332            column_collection, parentmapper, chained_from_outerjoin):
1333        with_poly_info = path.get(
1334            context.attributes,
1335            "path_with_polymorphic",
1336            None
1337        )
1338        if with_poly_info:
1339            to_adapt = with_poly_info.entity
1340        else:
1341            to_adapt = self._gen_pooled_aliased_class(context)
1342
1343        clauses = inspect(to_adapt)._memo(
1344            ("joinedloader_ormadapter", self),
1345            orm_util.ORMAdapter,
1346            to_adapt,
1347            equivalents=self.mapper._equivalent_columns,
1348            adapt_required=True, allow_label_resolve=False,
1349            anonymize_labels=True
1350        )
1351
1352        assert clauses.aliased_class is not None
1353
1354        if self.parent_property.uselist:
1355            context.multi_row_eager_loaders = True
1356
1357        innerjoin = (
1358            loadopt.local_opts.get(
1359                'innerjoin', self.parent_property.innerjoin)
1360            if loadopt is not None
1361            else self.parent_property.innerjoin
1362        )
1363
1364        if not innerjoin:
1365            # if this is an outer join, all non-nested eager joins from
1366            # this path must also be outer joins
1367            chained_from_outerjoin = True
1368
1369        context.create_eager_joins.append(
1370            (
1371                self._create_eager_join, context,
1372                entity, path, adapter,
1373                parentmapper, clauses, innerjoin, chained_from_outerjoin
1374            )
1375        )
1376
1377        add_to_collection = context.secondary_columns
1378        path.set(context.attributes, "eager_row_processor", clauses)
1379
1380        return clauses, adapter, add_to_collection, chained_from_outerjoin
1381
1382    def _create_eager_join(
1383            self, context, entity,
1384            path, adapter, parentmapper,
1385            clauses, innerjoin, chained_from_outerjoin):
1386
1387        if parentmapper is None:
1388            localparent = entity.mapper
1389        else:
1390            localparent = parentmapper
1391
1392        # whether or not the Query will wrap the selectable in a subquery,
1393        # and then attach eager load joins to that (i.e., in the case of
1394        # LIMIT/OFFSET etc.)
1395        should_nest_selectable = context.multi_row_eager_loaders and \
1396            context.query._should_nest_selectable
1397
1398        entity_key = None
1399
1400        if entity not in context.eager_joins and \
1401            not should_nest_selectable and \
1402                context.from_clause:
1403            index, clause = sql_util.find_join_source(
1404                context.from_clause, entity.selectable)
1405            if clause is not None:
1406                # join to an existing FROM clause on the query.
1407                # key it to its list index in the eager_joins dict.
1408                # Query._compile_context will adapt as needed and
1409                # append to the FROM clause of the select().
1410                entity_key, default_towrap = index, clause
1411
1412        if entity_key is None:
1413            entity_key, default_towrap = entity, entity.selectable
1414
1415        towrap = context.eager_joins.setdefault(entity_key, default_towrap)
1416
1417        if adapter:
1418            if getattr(adapter, 'aliased_class', None):
1419                # joining from an adapted entity.  The adapted entity
1420                # might be a "with_polymorphic", so resolve that to our
1421                # specific mapper's entity before looking for our attribute
1422                # name on it.
1423                efm = inspect(adapter.aliased_class).\
1424                    _entity_for_mapper(
1425                        localparent
1426                        if localparent.isa(self.parent) else self.parent)
1427
1428                # look for our attribute on the adapted entity, else fall back
1429                # to our straight property
1430                onclause = getattr(
1431                    efm.entity, self.key,
1432                    self.parent_property)
1433            else:
1434                onclause = getattr(
1435                    orm_util.AliasedClass(
1436                        self.parent,
1437                        adapter.selectable,
1438                        use_mapper_path=True
1439                    ),
1440                    self.key, self.parent_property
1441                )
1442
1443        else:
1444            onclause = self.parent_property
1445
1446        assert clauses.aliased_class is not None
1447
1448        attach_on_outside = (
1449            not chained_from_outerjoin or
1450            not innerjoin or innerjoin == 'unnested')
1451
1452        if attach_on_outside:
1453            # this is the "classic" eager join case.
1454            eagerjoin = orm_util._ORMJoin(
1455                towrap,
1456                clauses.aliased_class,
1457                onclause,
1458                isouter=not innerjoin or (
1459                    chained_from_outerjoin and isinstance(towrap, sql.Join)
1460                ), _left_memo=self.parent, _right_memo=self.mapper
1461            )
1462        else:
1463            # all other cases are innerjoin=='nested' approach
1464            eagerjoin = self._splice_nested_inner_join(
1465                path, towrap, clauses, onclause)
1466
1467        context.eager_joins[entity_key] = eagerjoin
1468
1469        # send a hint to the Query as to where it may "splice" this join
1470        eagerjoin.stop_on = entity.selectable
1471
1472        if not parentmapper:
1473            # for parentclause that is the non-eager end of the join,
1474            # ensure all the parent cols in the primaryjoin are actually
1475            # in the
1476            # columns clause (i.e. are not deferred), so that aliasing applied
1477            # by the Query propagates those columns outward.
1478            # This has the effect
1479            # of "undefering" those columns.
1480            for col in sql_util._find_columns(
1481                    self.parent_property.primaryjoin):
1482                if localparent.mapped_table.c.contains_column(col):
1483                    if adapter:
1484                        col = adapter.columns[col]
1485                    context.primary_columns.append(col)
1486
1487        if self.parent_property.order_by:
1488            context.eager_order_by += eagerjoin._target_adapter.\
1489                copy_and_process(
1490                    util.to_list(
1491                        self.parent_property.order_by
1492                    )
1493                )
1494
1495    def _splice_nested_inner_join(
1496            self, path, join_obj, clauses, onclause, splicing=False):
1497
1498        if splicing is False:
1499            # first call is always handed a join object
1500            # from the outside
1501            assert isinstance(join_obj, orm_util._ORMJoin)
1502        elif isinstance(join_obj, sql.selectable.FromGrouping):
1503            return self._splice_nested_inner_join(
1504                path, join_obj.element, clauses, onclause, splicing
1505            )
1506        elif not isinstance(join_obj, orm_util._ORMJoin):
1507            if path[-2] is splicing:
1508                return orm_util._ORMJoin(
1509                    join_obj, clauses.aliased_class,
1510                    onclause, isouter=False,
1511                    _left_memo=splicing,
1512                    _right_memo=path[-1].mapper
1513                )
1514            else:
1515                # only here if splicing == True
1516                return None
1517
1518        target_join = self._splice_nested_inner_join(
1519            path, join_obj.right, clauses,
1520            onclause, join_obj._right_memo)
1521        if target_join is None:
1522            right_splice = False
1523            target_join = self._splice_nested_inner_join(
1524                path, join_obj.left, clauses,
1525                onclause, join_obj._left_memo)
1526            if target_join is None:
1527                # should only return None when recursively called,
1528                # e.g. splicing==True
1529                assert splicing is not False, \
1530                    "assertion failed attempting to produce joined eager loads"
1531                return None
1532        else:
1533            right_splice = True
1534
1535        if right_splice:
1536            # for a right splice, attempt to flatten out
1537            # a JOIN b JOIN c JOIN .. to avoid needless
1538            # parenthesis nesting
1539            if not join_obj.isouter and not target_join.isouter:
1540                eagerjoin = join_obj._splice_into_center(target_join)
1541            else:
1542                eagerjoin = orm_util._ORMJoin(
1543                    join_obj.left, target_join,
1544                    join_obj.onclause, isouter=join_obj.isouter,
1545                    _left_memo=join_obj._left_memo)
1546        else:
1547            eagerjoin = orm_util._ORMJoin(
1548                target_join, join_obj.right,
1549                join_obj.onclause, isouter=join_obj.isouter,
1550                _right_memo=join_obj._right_memo)
1551
1552        eagerjoin._target_adapter = target_join._target_adapter
1553        return eagerjoin
1554
1555    def _create_eager_adapter(self, context, result, adapter, path, loadopt):
1556        user_defined_adapter = self._init_user_defined_eager_proc(
1557            loadopt, context) if loadopt else False
1558
1559        if user_defined_adapter is not False:
1560            decorator = user_defined_adapter
1561            # user defined eagerloads are part of the "primary"
1562            # portion of the load.
1563            # the adapters applied to the Query should be honored.
1564            if context.adapter and decorator:
1565                decorator = decorator.wrap(context.adapter)
1566            elif context.adapter:
1567                decorator = context.adapter
1568        else:
1569            decorator = path.get(context.attributes, "eager_row_processor")
1570            if decorator is None:
1571                return False
1572
1573        if self.mapper._result_has_identity_key(result, decorator):
1574            return decorator
1575        else:
1576            # no identity key - don't return a row
1577            # processor, will cause a degrade to lazy
1578            return False
1579
1580    def create_row_processor(
1581            self, context, path, loadopt, mapper,
1582            result, adapter, populators):
1583        if not self.parent.class_manager[self.key].impl.supports_population:
1584            raise sa_exc.InvalidRequestError(
1585                "'%s' does not support object "
1586                "population - eager loading cannot be applied." %
1587                self
1588            )
1589
1590        our_path = path[self.parent_property]
1591
1592        eager_adapter = self._create_eager_adapter(
1593            context,
1594            result,
1595            adapter, our_path, loadopt)
1596
1597        if eager_adapter is not False:
1598            key = self.key
1599
1600            _instance = loading._instance_processor(
1601                self.mapper,
1602                context,
1603                result,
1604                our_path[self.mapper],
1605                eager_adapter)
1606
1607            if not self.uselist:
1608                self._create_scalar_loader(context, key, _instance, populators)
1609            else:
1610                self._create_collection_loader(
1611                    context, key, _instance, populators)
1612        else:
1613            self.parent_property._get_strategy((("lazy", "select"),)).\
1614                create_row_processor(
1615                    context, path, loadopt,
1616                    mapper, result, adapter, populators)
1617
1618    def _create_collection_loader(self, context, key, _instance, populators):
1619        def load_collection_from_joined_new_row(state, dict_, row):
1620            collection = attributes.init_state_collection(
1621                state, dict_, key)
1622            result_list = util.UniqueAppender(collection,
1623                                              'append_without_event')
1624            context.attributes[(state, key)] = result_list
1625            inst = _instance(row)
1626            if inst is not None:
1627                result_list.append(inst)
1628
1629        def load_collection_from_joined_existing_row(state, dict_, row):
1630            if (state, key) in context.attributes:
1631                result_list = context.attributes[(state, key)]
1632            else:
1633                # appender_key can be absent from context.attributes
1634                # with isnew=False when self-referential eager loading
1635                # is used; the same instance may be present in two
1636                # distinct sets of result columns
1637                collection = attributes.init_state_collection(
1638                    state, dict_, key)
1639                result_list = util.UniqueAppender(
1640                    collection,
1641                    'append_without_event')
1642                context.attributes[(state, key)] = result_list
1643            inst = _instance(row)
1644            if inst is not None:
1645                result_list.append(inst)
1646
1647        def load_collection_from_joined_exec(state, dict_, row):
1648            _instance(row)
1649
1650        populators["new"].append((self.key, load_collection_from_joined_new_row))
1651        populators["existing"].append(
1652            (self.key, load_collection_from_joined_existing_row))
1653        if context.invoke_all_eagers:
1654            populators["eager"].append(
1655                (self.key, load_collection_from_joined_exec))
1656
1657    def _create_scalar_loader(self, context, key, _instance, populators):
1658        def load_scalar_from_joined_new_row(state, dict_, row):
1659            # set a scalar object instance directly on the parent
1660            # object, bypassing InstrumentedAttribute event handlers.
1661            dict_[key] = _instance(row)
1662
1663        def load_scalar_from_joined_existing_row(state, dict_, row):
1664            # call _instance on the row, even though the object has
1665            # been created, so that we further descend into properties
1666            existing = _instance(row)
1667
1668            # conflicting value already loaded, this shouldn't happen
1669            if key in dict_:
1670                if existing is not dict_[key]:
1671                    util.warn(
1672                        "Multiple rows returned with "
1673                        "uselist=False for eagerly-loaded attribute '%s' "
1674                        % self)
1675            else:
1676                # this case is when one row has multiple loads of the
1677                # same entity (e.g. via aliasing), one has an attribute
1678                # that the other doesn't.
1679                dict_[key] = existing
1680
1681        def load_scalar_from_joined_exec(state, dict_, row):
1682            _instance(row)
1683
1684        populators["new"].append((self.key, load_scalar_from_joined_new_row))
1685        populators["existing"].append(
1686            (self.key, load_scalar_from_joined_existing_row))
1687        if context.invoke_all_eagers:
1688            populators["eager"].append((self.key, load_scalar_from_joined_exec))
1689
1690
1691def single_parent_validator(desc, prop):
1692    def _do_check(state, value, oldvalue, initiator):
1693        if value is not None and initiator.key == prop.key:
1694            hasparent = initiator.hasparent(attributes.instance_state(value))
1695            if hasparent and oldvalue is not value:
1696                raise sa_exc.InvalidRequestError(
1697                    "Instance %s is already associated with an instance "
1698                    "of %s via its %s attribute, and is only allowed a "
1699                    "single parent." %
1700                    (orm_util.instance_str(value), state.class_, prop)
1701                )
1702        return value
1703
1704    def append(state, value, initiator):
1705        return _do_check(state, value, None, initiator)
1706
1707    def set_(state, value, oldvalue, initiator):
1708        return _do_check(state, value, oldvalue, initiator)
1709
1710    event.listen(
1711        desc, 'append', append, raw=True, retval=True,
1712        active_history=True)
1713    event.listen(
1714        desc, 'set', set_, raw=True, retval=True,
1715        active_history=True)
1716