first add files

This commit is contained in:
2023-10-08 20:59:00 +08:00
parent b494be364b
commit 1dac226337
991 changed files with 368151 additions and 40 deletions

View File

@@ -0,0 +1,344 @@
# orm/__init__.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""
Functional constructs for ORM configuration.
See the SQLAlchemy object relational tutorial and mapper configuration
documentation for an overview of how this module is used.
"""
from . import exc
from . import mapper as mapperlib
from . import strategy_options
from .attributes import AttributeEvent
from .attributes import InstrumentedAttribute
from .attributes import Mapped
from .attributes import QueryableAttribute
from .context import QueryContext
from .decl_api import as_declarative
from .decl_api import declarative_base
from .decl_api import declarative_mixin
from .decl_api import DeclarativeMeta
from .decl_api import declared_attr
from .decl_api import has_inherited_table
from .decl_api import registry
from .decl_api import synonym_for
from .descriptor_props import CompositeProperty
from .descriptor_props import SynonymProperty
from .identity import IdentityMap
from .instrumentation import ClassManager
from .interfaces import EXT_CONTINUE
from .interfaces import EXT_SKIP
from .interfaces import EXT_STOP
from .interfaces import InspectionAttr
from .interfaces import InspectionAttrInfo
from .interfaces import MANYTOMANY
from .interfaces import MANYTOONE
from .interfaces import MapperProperty
from .interfaces import NOT_EXTENSION
from .interfaces import ONETOMANY
from .interfaces import PropComparator
from .interfaces import UserDefinedOption
from .loading import merge_frozen_result
from .loading import merge_result
from .mapper import class_mapper
from .mapper import configure_mappers
from .mapper import Mapper
from .mapper import reconstructor
from .mapper import validates
from .properties import ColumnProperty
from .query import AliasOption
from .query import FromStatement
from .query import Query
from .relationships import foreign
from .relationships import RelationshipProperty
from .relationships import remote
from .scoping import scoped_session
from .session import close_all_sessions
from .session import make_transient
from .session import make_transient_to_detached
from .session import object_session
from .session import ORMExecuteState
from .session import Session
from .session import sessionmaker
from .session import SessionTransaction
from .state import AttributeState
from .state import InstanceState
from .strategy_options import Load
from .unitofwork import UOWTransaction
from .util import aliased
from .util import Bundle
from .util import CascadeOptions
from .util import join
from .util import LoaderCriteriaOption
from .util import object_mapper
from .util import outerjoin
from .util import polymorphic_union
from .util import was_deleted
from .util import with_parent
from .util import with_polymorphic
from .. import sql as _sql
from .. import util as _sa_util
from ..util.langhelpers import public_factory
def create_session(bind=None, **kwargs):
    r"""Create a new :class:`.Session` with no automation enabled by default.

    Primarily a testing helper; the usual route to :class:`.Session`
    creation is its constructor or :func:`.sessionmaker`.

    :param bind: optional, a single Connectable used for all database
     access in the created :class:`~sqlalchemy.orm.session.Session`.
    :param \*\*kwargs: optional, passed through to the
     :class:`.Session` constructor.
    :returns: an :class:`~sqlalchemy.orm.session.Session` instance

    The defaults here are the opposite of :func:`sessionmaker`:
    ``autoflush`` and ``expire_on_commit`` default to False, and
    ``autocommit`` defaults to True (unless ``future=True`` is given).

    .. deprecated:: 1.4 The "autocommit" parameter will be removed in
       SQLAlchemy 2.0. :func:`_orm.create_session` will return a
       :class:`_orm.Session` that does not include "autocommit' behavior
       in release 2.0.

    Usage::

        >>> from sqlalchemy.orm import create_session
        >>> session = create_session()

    It is recommended to use :func:`sessionmaker` instead of
    create_session().
    """
    # "future" sessions get the 2.0-style autocommit=False default;
    # legacy sessions keep the historical autocommit=True.
    defaults = {
        "autocommit": False if kwargs.get("future", False) else True,
        "autoflush": False,
        "expire_on_commit": False,
    }
    for option, value in defaults.items():
        kwargs.setdefault(option, value)
    return Session(bind=bind, **kwargs)
# Public factories: expose the option/property constructors as documented
# module-level callables under the sqlalchemy.orm namespace.
with_loader_criteria = public_factory(LoaderCriteriaOption, ".orm")
relationship = public_factory(RelationshipProperty, ".orm.relationship")
# Deprecated 1.x spelling of relationship(); emits a 2.0 deprecation
# warning via the decorator and forwards all arguments unchanged.
@_sa_util.deprecated_20("relation", "Please use :func:`.relationship`.")
def relation(*arg, **kw):
    """A synonym for :func:`relationship`."""
    return relationship(*arg, **kw)
def dynamic_loader(argument, **kw):
    """Construct a dynamically-loading mapper property.

    Shorthand for :func:`relationship` with ``lazy="dynamic"`` forced on
    (any caller-supplied ``lazy`` value is overridden)::

        dynamic_loader(SomeClass)
        # is the same as
        relationship(SomeClass, lazy="dynamic")

    See the section :ref:`dynamic_relationship` for more details
    on dynamic loading.
    """
    kw.update(lazy="dynamic")
    return relationship(argument, **kw)
# Public factories for the column-based mapped property constructors.
column_property = public_factory(ColumnProperty, ".orm.column_property")
composite = public_factory(CompositeProperty, ".orm.composite")
def backref(name, **kwargs):
    """Build the ``(name, options)`` pair consumed by the
    :paramref:`_orm.relationship.backref` parameter.

    E.g.::

        'items':relationship(
            SomeItem, backref=backref('parent', lazy='subquery'))

    The :paramref:`_orm.relationship.backref` parameter is generally
    considered to be legacy; modern applications should prefer two
    explicit :func:`_orm.relationship` constructs linked together with
    :paramref:`_orm.relationship.back_populates`.

    .. seealso::

        :ref:`relationships_backref` - background on backrefs
    """
    # No processing happens here; relationship() interprets the pair.
    return name, kwargs
def deferred(*columns, **kw):
    r"""Indicate a column-based mapped attribute that by default will
    not load unless accessed.

    :param \*columns: columns to be mapped; typically a single
     :class:`_schema.Column` object, though a collection is supported to
     map multiple columns under the same attribute.
    :param raiseload: boolean, if True, indicates an exception should be
     raised if the load operation is to take place.

     .. versionadded:: 1.4

     .. seealso::

        :ref:`deferred_raiseload`

    :param \**kw: additional keyword arguments passed to
     :class:`.ColumnProperty`.

    .. seealso::

        :ref:`deferred`
    """
    # Equivalent to ColumnProperty(deferred=True, *columns, **kw); a
    # caller-supplied "deferred" kwarg raises TypeError in both forms.
    return ColumnProperty(*columns, deferred=True, **kw)
# NOTE: default_expr is evaluated once at import time; _sql.null() is
# presumably safe to share across calls — confirm it is never mutated.
def query_expression(default_expr=_sql.null()):
    """Indicate an attribute that populates from a query-time SQL expression.

    :param default_expr: Optional SQL expression object that will be used in
        all cases if not assigned later with :func:`_orm.with_expression`.
        E.g.::

            from sqlalchemy.sql import literal

            class C(Base):
                #...
                my_expr = query_expression(literal(1))

        .. versionadded:: 1.3.18

    .. versionadded:: 1.2

    .. seealso::

        :ref:`mapper_querytime_expression`

    """
    prop = ColumnProperty(default_expr)
    # Force the "query_expression" loader strategy for this property.
    prop.strategy_key = (("query_expression", True),)
    return prop
# Public factories for the classical mapper() and synonym() constructors.
mapper = public_factory(Mapper, ".orm.mapper")
synonym = public_factory(SynonymProperty, ".orm.synonym")
def clear_mappers():
    """Remove all mappers from all classes.

    .. versionchanged:: 1.4  This function now locates all
       :class:`_orm.registry` objects and calls upon the
       :meth:`_orm.registry.dispose` method of each.

    This function removes all instrumentation from classes and disposes
    of their associated mappers.  Once called, the classes are unmapped
    and can be later re-mapped with new mappers.

    :func:`.clear_mappers` is *not* for normal use, as there is literally no
    valid usage for it outside of very specific testing scenarios. Normally,
    mappers are permanent structural components of user-defined classes, and
    are never discarded independently of their class.  If a mapped class
    itself is garbage collected, its mapper is automatically disposed of as
    well. As such, :func:`.clear_mappers` is only for usage in test suites
    that re-use the same classes with different mappings, which is itself an
    extremely rare use case - the only such use case is in fact SQLAlchemy's
    own test suite, and possibly the test suites of other ORM extension
    libraries which intend to test various combinations of mapper construction
    upon a fixed set of classes.

    """
    # Dispose every registry known to the mapper module.
    # NOTE(review): the second argument is presumably a "cascade"-style
    # flag on _dispose_registries — confirm its meaning in mapperlib.
    mapperlib._dispose_registries(mapperlib._all_registries(), False)
# Re-export the unbound form of each loader-option generator from
# strategy_options as a module-level function (joinedload(), defer(), ...).
joinedload = strategy_options.joinedload._unbound_fn
contains_eager = strategy_options.contains_eager._unbound_fn
defer = strategy_options.defer._unbound_fn
undefer = strategy_options.undefer._unbound_fn
undefer_group = strategy_options.undefer_group._unbound_fn
with_expression = strategy_options.with_expression._unbound_fn
load_only = strategy_options.load_only._unbound_fn
lazyload = strategy_options.lazyload._unbound_fn
subqueryload = strategy_options.subqueryload._unbound_fn
selectinload = strategy_options.selectinload._unbound_fn
immediateload = strategy_options.immediateload._unbound_fn
noload = strategy_options.noload._unbound_fn
raiseload = strategy_options.raiseload._unbound_fn
defaultload = strategy_options.defaultload._unbound_fn
selectin_polymorphic = strategy_options.selectin_polymorphic._unbound_fn
@_sa_util.deprecated_20("eagerload", "Please use :func:`_orm.joinedload`.")
def eagerload(*arg, **kw):
    """Deprecated alias; forwards all arguments to :func:`joinedload`."""
    return joinedload(*arg, **kw)
contains_alias = public_factory(AliasOption, ".orm.contains_alias")
# NOTE(review): the "if True:" wrapper appears to exist only to group these
# event-class imports (e.g. for import-ordering tools) — confirm before
# flattening; it has no runtime effect.
if True:
    from .events import AttributeEvents
    from .events import MapperEvents
    from .events import InstanceEvents
    from .events import InstrumentationEvents
    from .events import QueryEvents
    from .events import SessionEvents
def __go(lcls):
    """Module bootstrap: build ``__all__`` from the module namespace and
    trigger deferred ("preloaded") imports.

    :param lcls: the module's ``locals()`` mapping at call time.
    """
    global __all__
    global AppenderQuery
    from .. import util as sa_util
    from . import dynamic
    from . import events
    from . import loading
    import inspect as _inspect

    from .dynamic import AppenderQuery

    # Public API = every non-underscored, non-module name defined above.
    __all__ = sorted(
        name
        for name, obj in lcls.items()
        if not (name.startswith("_") or _inspect.ismodule(obj))
    )

    # Resolve modules registered via util.preloaded now that orm is loaded.
    _sa_util.preloaded.import_prefix("sqlalchemy.orm")
    _sa_util.preloaded.import_prefix("sqlalchemy.ext")


__go(locals())

File diff suppressed because it is too large Load Diff

572
lib/sqlalchemy/orm/base.py Normal file
View File

@@ -0,0 +1,572 @@
# orm/base.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Constants and rudimental functions used throughout the ORM.
"""
import operator
from . import exc
from .. import exc as sa_exc
from .. import inspection
from .. import util
PASSIVE_NO_RESULT = util.symbol(
"PASSIVE_NO_RESULT",
"""Symbol returned by a loader callable or other attribute/history
retrieval operation when a value could not be determined, based
on loader callable flags.
""",
)
PASSIVE_CLASS_MISMATCH = util.symbol(
"PASSIVE_CLASS_MISMATCH",
"""Symbol indicating that an object is locally present for a given
primary key identity but it is not of the requested class. The
return value is therefore None and no SQL should be emitted.""",
)
ATTR_WAS_SET = util.symbol(
"ATTR_WAS_SET",
"""Symbol returned by a loader callable to indicate the
retrieved value, or values, were assigned to their attributes
on the target object.
""",
)
ATTR_EMPTY = util.symbol(
"ATTR_EMPTY",
"""Symbol used internally to indicate an attribute had no callable.""",
)
NO_VALUE = util.symbol(
"NO_VALUE",
"""Symbol which may be placed as the 'previous' value of an attribute,
indicating no value was loaded for an attribute when it was modified,
and flags indicated we were not to load it.
""",
)
NEVER_SET = NO_VALUE
"""
Synonymous with NO_VALUE
.. versionchanged:: 1.4 NEVER_SET was merged with NO_VALUE
"""
NO_CHANGE = util.symbol(
"NO_CHANGE",
"""No callables or SQL should be emitted on attribute access
and no state should change
""",
canonical=0,
)
CALLABLES_OK = util.symbol(
"CALLABLES_OK",
"""Loader callables can be fired off if a value
is not present.
""",
canonical=1,
)
SQL_OK = util.symbol(
"SQL_OK",
"""Loader callables can emit SQL at least on scalar value attributes.""",
canonical=2,
)
RELATED_OBJECT_OK = util.symbol(
"RELATED_OBJECT_OK",
"""Callables can use SQL to load related objects as well
as scalar value attributes.
""",
canonical=4,
)
INIT_OK = util.symbol(
"INIT_OK",
"""Attributes should be initialized with a blank
value (None or an empty collection) upon get, if no other
value can be obtained.
""",
canonical=8,
)
NON_PERSISTENT_OK = util.symbol(
"NON_PERSISTENT_OK",
"""Callables can be emitted if the parent is not persistent.""",
canonical=16,
)
LOAD_AGAINST_COMMITTED = util.symbol(
"LOAD_AGAINST_COMMITTED",
"""Callables should use committed values as primary/foreign keys during a
load.
""",
canonical=32,
)
NO_AUTOFLUSH = util.symbol(
"NO_AUTOFLUSH",
"""Loader callables should disable autoflush.""",
canonical=64,
)
NO_RAISE = util.symbol(
"NO_RAISE",
"""Loader callables should not raise any assertions""",
canonical=128,
)
DEFERRED_HISTORY_LOAD = util.symbol(
"DEFERRED_HISTORY_LOAD",
"""indicates special load of the previous value of an attribute""",
canonical=256,
)
# pre-packaged sets of flags used as inputs
PASSIVE_OFF = util.symbol(
"PASSIVE_OFF",
"Callables can be emitted in all cases.",
canonical=(
RELATED_OBJECT_OK | NON_PERSISTENT_OK | INIT_OK | CALLABLES_OK | SQL_OK
),
)
PASSIVE_RETURN_NO_VALUE = util.symbol(
"PASSIVE_RETURN_NO_VALUE",
"""PASSIVE_OFF ^ INIT_OK""",
canonical=PASSIVE_OFF ^ INIT_OK,
)
PASSIVE_NO_INITIALIZE = util.symbol(
"PASSIVE_NO_INITIALIZE",
"PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK",
canonical=PASSIVE_RETURN_NO_VALUE ^ CALLABLES_OK,
)
PASSIVE_NO_FETCH = util.symbol(
"PASSIVE_NO_FETCH", "PASSIVE_OFF ^ SQL_OK", canonical=PASSIVE_OFF ^ SQL_OK
)
PASSIVE_NO_FETCH_RELATED = util.symbol(
"PASSIVE_NO_FETCH_RELATED",
"PASSIVE_OFF ^ RELATED_OBJECT_OK",
canonical=PASSIVE_OFF ^ RELATED_OBJECT_OK,
)
PASSIVE_ONLY_PERSISTENT = util.symbol(
"PASSIVE_ONLY_PERSISTENT",
"PASSIVE_OFF ^ NON_PERSISTENT_OK",
canonical=PASSIVE_OFF ^ NON_PERSISTENT_OK,
)
DEFAULT_MANAGER_ATTR = "_sa_class_manager"
DEFAULT_STATE_ATTR = "_sa_instance_state"
EXT_CONTINUE = util.symbol("EXT_CONTINUE")
EXT_STOP = util.symbol("EXT_STOP")
EXT_SKIP = util.symbol("EXT_SKIP")
ONETOMANY = util.symbol(
"ONETOMANY",
"""Indicates the one-to-many direction for a :func:`_orm.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
""",
)
MANYTOONE = util.symbol(
"MANYTOONE",
"""Indicates the many-to-one direction for a :func:`_orm.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
""",
)
MANYTOMANY = util.symbol(
"MANYTOMANY",
"""Indicates the many-to-many direction for a :func:`_orm.relationship`.
This symbol is typically used by the internals but may be exposed within
certain API features.
""",
)
NOT_EXTENSION = util.symbol(
"NOT_EXTENSION",
"""Symbol indicating an :class:`InspectionAttr` that's
not part of sqlalchemy.ext.
Is assigned to the :attr:`.InspectionAttr.extension_type`
attribute.
""",
)
_never_set = frozenset([NEVER_SET])
_none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT])
_SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED")
_DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE")
_RAISE_FOR_STATE = util.symbol("RAISE_FOR_STATE")
def _assertions(*assertions):
    """Method-decorator factory: each ``assertion(self, method_name)``
    callback is invoked before the wrapped method runs.

    :param \\*assertions: callables taking ``(self, fn_name)``; expected to
     raise on violation.
    """

    @util.decorator
    def generate(fn, *args, **kw):
        self = args[0]
        for assertion in assertions:
            assertion(self, fn.__name__)
        # NOTE(review): fn's return value is discarded — presumably these
        # wrap mutator methods that return None; confirm before reusing
        # this decorator on value-returning methods.
        fn(self, *args[1:], **kw)

    return generate
# these can be replaced by sqlalchemy.ext.instrumentation
# if augmented class instrumentation is enabled.
def manager_of_class(cls):
    """Return the ClassManager stored in ``cls``'s own namespace, or None.

    Only ``cls.__dict__`` is consulted (via :func:`vars`), so an
    instrumented superclass is not mistaken for instrumentation on
    ``cls`` itself.
    """
    return vars(cls).get(DEFAULT_MANAGER_ATTR)
# Fast accessors: obj -> obj._sa_instance_state and obj -> obj.__dict__.
instance_state = operator.attrgetter(DEFAULT_STATE_ATTR)
instance_dict = operator.attrgetter("__dict__")
def instance_str(instance):
    """Return a string describing an instance."""
    # Delegates to state_str via the instance's InstanceState.
    return state_str(instance_state(instance))
def state_str(state):
    """Describe an :class:`.InstanceState` as ``"<ClassName at 0xADDR>"``,
    or ``"None"`` when no state is given.
    """
    if state is None:
        return "None"
    return "<{} at 0x{:x}>".format(state.class_.__name__, id(state.obj()))
def state_class_str(state):
    """Describe an instance's class via its InstanceState, as
    ``"<ClassName>"``; ``"None"`` when no state is given.
    """
    if state is None:
        return "None"
    return "<{}>".format(state.class_.__name__)
def attribute_str(instance, attribute):
    """Describe ``instance.attribute`` as ``"<repr>.attrname"``."""
    return "{}.{}".format(instance_str(instance), attribute)
def state_attribute_str(state, attribute):
    """Describe a state's attribute as ``"<repr>.attrname"``."""
    return "{}.{}".format(state_str(state), attribute)
def object_mapper(instance):
    """Given an object, return the primary Mapper associated with the object
    instance.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    This function is available via the inspection system as::

        inspect(instance).mapper

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.
    """
    # object_state() performs the unmapped-instance check for us.
    return object_state(instance).mapper
def object_state(instance):
    """Return the :class:`.InstanceState` for the given object.

    Raises :class:`sqlalchemy.orm.exc.UnmappedInstanceError`
    if no mapping is configured.

    Equivalent functionality is available via the :func:`_sa.inspect`
    function as::

        inspect(instance)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the instance is
    not part of a mapping.
    """
    state = _inspect_mapped_object(instance)
    # Guard clause: no state means the object is not mapped.
    if state is None:
        raise exc.UnmappedInstanceError(instance)
    return state
# Registered as the inspect() handler for plain objects; returns the
# object's InstanceState, or None when the object is not mapped.
@inspection._inspects(object)
def _inspect_mapped_object(instance):
    try:
        return instance_state(instance)
    except (exc.UnmappedClassError,) + exc.NO_STATE:
        # not instrumented / no state attribute present
        return None
def _class_to_mapper(class_or_mapper):
    """Coerce a mapped class or mapper to its :class:`_orm.Mapper`,
    raising ``UnmappedClassError`` if the target is not mapped.
    """
    insp = inspection.inspect(class_or_mapper, False)
    if insp is None:
        raise exc.UnmappedClassError(class_or_mapper)
    return insp.mapper
def _mapper_or_none(entity):
    """Return the :class:`_orm.Mapper` for the given class or None if the
    class is not mapped.
    """
    insp = inspection.inspect(entity, False)
    return insp.mapper if insp is not None else None
def _is_mapped_class(entity):
    """Return True if the given object is a mapped class,
    :class:`_orm.Mapper`, or :class:`.AliasedClass`.
    """
    insp = inspection.inspect(entity, False)
    if insp is None or insp.is_clause_element:
        return False
    return insp.is_mapper or insp.is_aliased_class
def _orm_columns(entity):
    """Return the columns of the entity's selectable, or the entity
    itself wrapped in a list when no column collection is available.
    """
    insp = inspection.inspect(entity, False)
    selectable = getattr(insp, "selectable", None)
    if hasattr(selectable, "c"):
        return list(selectable.c)
    return [entity]
def _is_aliased_class(entity):
    """Return whether the entity inspects as an aliased class."""
    insp = inspection.inspect(entity, False)
    if insp is None:
        return False
    return getattr(insp, "is_aliased_class", False)
def _entity_descriptor(entity, key):
    """Return a class attribute given an entity and string name.

    May return :class:`.InstrumentedAttribute` or user-defined
    attribute.
    """
    insp = inspection.inspect(entity)
    if insp.is_selectable:
        # Core selectable: look the name up in its column collection.
        description = entity
        entity = insp.c
    elif insp.is_aliased_class:
        entity = insp.entity
        description = entity
    elif hasattr(insp, "mapper"):
        # Mapped class or Mapper: resolve against the mapped class itself.
        description = entity = insp.mapper.class_
    else:
        description = entity
    try:
        return getattr(entity, key)
    except AttributeError as err:
        # util.raise_ is the py2/py3 compat shim for "raise ... from err".
        util.raise_(
            sa_exc.InvalidRequestError(
                "Entity '%s' has no property '%s'" % (description, key)
            ),
            replace_context=err,
        )
# Fast path: state -> state.manager.mapper.
_state_mapper = util.dottedgetter("manager.mapper")
# Registered as the inspect() handler for classes; returns the class's
# Mapper, or None when the class is not mapped.
@inspection._inspects(type)
def _inspect_mapped_class(class_, configure=False):
    try:
        class_manager = manager_of_class(class_)
        # NOTE(review): when the class is uninstrumented class_manager is
        # None and the attribute access raises AttributeError — presumably
        # exc.NO_STATE includes AttributeError so this is caught below;
        # confirm in orm/exc.py.
        if not class_manager.is_mapped:
            return None
        mapper = class_manager.mapper
    except exc.NO_STATE:
        return None
    else:
        if configure:
            # Ensure mapper configuration has run before handing it out.
            mapper._check_configure()
        return mapper
def class_mapper(class_, configure=True):
    """Given a class, return the primary :class:`_orm.Mapper` associated
    with the key.

    Raises :exc:`.UnmappedClassError` if no mapping is configured
    on the given class, or :exc:`.ArgumentError` if a non-class
    object is passed.

    Equivalent functionality is available via the :func:`_sa.inspect`
    function as::

        inspect(some_mapped_class)

    Using the inspection system will raise
    :class:`sqlalchemy.exc.NoInspectionAvailable` if the class is not mapped.
    """
    mapper = _inspect_mapped_class(class_, configure=configure)
    if mapper is not None:
        return mapper
    # No mapper: distinguish "not a class" from "class but unmapped".
    if not isinstance(class_, type):
        raise sa_exc.ArgumentError(
            "Class object expected, got '%r'." % (class_,)
        )
    raise exc.UnmappedClassError(class_)
class InspectionAttr(object):
    """A base class applied to all ORM objects that can be returned
    by the :func:`_sa.inspect` function.

    The attributes defined here allow the usage of simple boolean
    checks to test basic facts about the object returned.

    While the boolean checks here are basically the same as using
    the Python isinstance() function, the flags here can be used without
    the need to import all of these classes, and also such that
    the SQLAlchemy class system can change while leaving the flags
    here intact for forwards-compatibility.
    """

    # empty __slots__ keeps this mixable into slotted subclasses
    __slots__ = ()

    is_selectable = False
    """Return True if this object is an instance of
    :class:`_expression.Selectable`."""

    is_aliased_class = False
    """True if this object is an instance of :class:`.AliasedClass`."""

    is_instance = False
    """True if this object is an instance of :class:`.InstanceState`."""

    is_mapper = False
    """True if this object is an instance of :class:`_orm.Mapper`."""

    is_bundle = False
    """True if this object is an instance of :class:`.Bundle`."""

    is_property = False
    """True if this object is an instance of :class:`.MapperProperty`."""

    is_attribute = False
    """True if this object is a Python :term:`descriptor`.

    This can refer to one of many types.   Usually a
    :class:`.QueryableAttribute` which handles attributes events on behalf
    of a :class:`.MapperProperty`.   But can also be an extension type
    such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
    The :attr:`.InspectionAttr.extension_type` will refer to a constant
    identifying the specific subtype.

    .. seealso::

        :attr:`_orm.Mapper.all_orm_descriptors`

    """

    _is_internal_proxy = False
    """True if this object is an internal proxy object.

    .. versionadded:: 1.2.12

    """

    is_clause_element = False
    """True if this object is an instance of
    :class:`_expression.ClauseElement`."""

    extension_type = NOT_EXTENSION
    """The extension type, if any.
    Defaults to :data:`.interfaces.NOT_EXTENSION`

    .. seealso::

        :data:`.HYBRID_METHOD`

        :data:`.HYBRID_PROPERTY`

        :data:`.ASSOCIATION_PROXY`

    """
class InspectionAttrInfo(InspectionAttr):
    """Adds the ``.info`` attribute to :class:`.InspectionAttr`.

    The rationale for :class:`.InspectionAttr` vs. :class:`.InspectionAttrInfo`
    is that the former is compatible as a mixin for classes that specify
    ``__slots__``; this is essentially an implementation artifact.
    """

    @util.memoized_property
    def info(self):
        """Info dictionary associated with the object, allowing user-defined
        data to be associated with this :class:`.InspectionAttr`.

        The dictionary is generated when first accessed.  Alternatively,
        it can be specified as a constructor argument to the
        :func:`.column_property`, :func:`_orm.relationship`, or
        :func:`.composite`
        functions.

        .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also
           available on extension types via the
           :attr:`.InspectionAttrInfo.info` attribute, so that it can apply
           to a wider variety of ORM and extension constructs.

        .. seealso::

            :attr:`.QueryableAttribute.info`

            :attr:`.SchemaItem.info`

        """
        # memoized_property replaces this descriptor with the dict on
        # first access, so each instance gets exactly one dict.
        return {}
class _MappedAttribute(object):
    """Mixin for attributes which should be replaced by mapper-assigned
    attributes.
    """

    # marker mixin only; no state of its own
    __slots__ = ()

View File

@@ -0,0 +1,441 @@
# ext/declarative/clsregistry.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Routines to handle the string class registry used by declarative.
This system allows specification of classes and expressions used in
:func:`_orm.relationship` using strings.
"""
import weakref
from . import attributes
from . import interfaces
from .descriptor_props import SynonymProperty
from .properties import ColumnProperty
from .util import class_mapper
from .. import exc
from .. import inspection
from .. import util
from ..sql.schema import _get_table_key
# strong references to registries which we place in
# the _decl_class_registry, which is usually weak referencing.
# the internal registries here link to classes with weakrefs and remove
# themselves when all references to contained classes are removed.
_registries = set()
def add_class(classname, cls, decl_class_registry):
    """Add a class to the _decl_class_registry associated with the
    given declarative class.

    :param classname: the class's unqualified name, used as registry key.
    :param cls: the mapped class being registered.
    :param decl_class_registry: the registry dict of the declarative base.
    """
    if classname in decl_class_registry:
        # class already exists.
        existing = decl_class_registry[classname]
        if not isinstance(existing, _MultipleClassMarker):
            # promote the single entry to a marker holding both classes
            existing = decl_class_registry[classname] = _MultipleClassMarker(
                [cls, existing]
            )
    else:
        decl_class_registry[classname] = cls
    try:
        root_module = decl_class_registry["_sa_module_registry"]
    except KeyError:
        # first registration: create the module-path tree root
        decl_class_registry[
            "_sa_module_registry"
        ] = root_module = _ModuleMarker("_sa_module_registry", None)
    tokens = cls.__module__.split(".")
    # build up a tree like this:
    # modulename: myapp.snacks.nuts
    #
    # myapp->snack->nuts->(classes)
    # snack->nuts->(classes)
    # nuts->(classes)
    #
    # this allows partial token paths to be used.
    while tokens:
        token = tokens.pop(0)
        module = root_module.get_module(token)
        for token in tokens:
            module = module.get_module(token)
        module.add_class(classname, cls)
def remove_class(classname, cls, decl_class_registry):
    """Remove a class from the registry; inverse of :func:`add_class`."""
    if classname in decl_class_registry:
        existing = decl_class_registry[classname]
        if isinstance(existing, _MultipleClassMarker):
            existing.remove_item(cls)
        else:
            del decl_class_registry[classname]
    try:
        root_module = decl_class_registry["_sa_module_registry"]
    except KeyError:
        # no module tree was ever built; nothing further to clean up
        return
    tokens = cls.__module__.split(".")
    # walk every partial token path, mirroring add_class's registrations
    while tokens:
        token = tokens.pop(0)
        module = root_module.get_module(token)
        for token in tokens:
            module = module.get_module(token)
        module.remove_class(classname, cls)
def _key_is_empty(key, decl_class_registry, test):
    """test if a key is empty of a certain object.

    used for unit tests against the registry to see if garbage collection
    is working.

    "test" is a callable that will be passed an object should return True
    if the given object is the one we were looking for.

    We can't pass the actual object itself b.c. this is for testing garbage
    collection; the caller will have to have removed references to the
    object itself.
    """
    if key not in decl_class_registry:
        return True
    thing = decl_class_registry[key]
    if isinstance(thing, _MultipleClassMarker):
        for sub_thing in thing.contents:
            if test(sub_thing):
                return False
        # NOTE(review): falls through and returns None (falsy) when no
        # entry matches — presumably acceptable for the test-suite caller,
        # but an explicit return would be clearer; confirm before changing.
    else:
        return not test(thing)
class _MultipleClassMarker(object):
    """refers to multiple classes of the same name
    within _decl_class_registry.

    """

    # on_remove: optional callback fired when the last weakref dies;
    # contents: set of weakref.ref to the registered classes.
    __slots__ = "on_remove", "contents", "__weakref__"

    def __init__(self, classes, on_remove=None):
        self.on_remove = on_remove
        # each weakref removes itself from contents when its class is GC'ed
        self.contents = set(
            [weakref.ref(item, self._remove_item) for item in classes]
        )
        # strong-reference this marker so the weakref callbacks stay alive
        _registries.add(self)

    def remove_item(self, cls):
        self._remove_item(weakref.ref(cls))

    def __iter__(self):
        # yields the referenced classes (may yield None for dead refs)
        return (ref() for ref in self.contents)

    def attempt_get(self, path, key):
        """Return the single registered class, or raise if the name is
        ambiguous (more than one live entry) or the class was collected.
        """
        if len(self.contents) > 1:
            raise exc.InvalidRequestError(
                'Multiple classes found for path "%s" '
                "in the registry of this declarative "
                "base. Please use a fully module-qualified path."
                % (".".join(path + [key]))
            )
        else:
            ref = list(self.contents)[0]
            cls = ref()
            if cls is None:
                # class was garbage collected
                raise NameError(key)
            return cls

    def _remove_item(self, ref):
        self.contents.discard(ref)
        if not self.contents:
            # last entry gone: release the marker and notify the owner
            _registries.discard(self)
            if self.on_remove:
                self.on_remove()

    def add_item(self, item):
        # protect against class registration race condition against
        # asynchronous garbage collection calling _remove_item,
        # [ticket:3208]
        modules = set(
            [
                cls.__module__
                for cls in [ref() for ref in self.contents]
                if cls is not None
            ]
        )
        if item.__module__ in modules:
            util.warn(
                "This declarative base already contains a class with the "
                "same class name and module name as %s.%s, and will "
                "be replaced in the string-lookup table."
                % (item.__module__, item.__name__)
            )
        self.contents.add(weakref.ref(item, self._remove_item))
class _ModuleMarker(object):
    """Refers to a module name within
    _decl_class_registry.

    """

    # parent/name form a tree of module-path tokens; contents maps a token
    # to a child _ModuleMarker or a _MultipleClassMarker of classes.
    __slots__ = "parent", "name", "contents", "mod_ns", "path", "__weakref__"

    def __init__(self, name, parent):
        self.parent = parent
        self.name = name
        self.contents = {}
        # namespace proxy used by string-based relationship() resolution
        self.mod_ns = _ModNS(self)
        if self.parent:
            self.path = self.parent.path + [self.name]
        else:
            self.path = []
        _registries.add(self)

    def __contains__(self, name):
        return name in self.contents

    def __getitem__(self, name):
        return self.contents[name]

    def _remove_item(self, name):
        self.contents.pop(name, None)
        if not self.contents and self.parent is not None:
            # prune empty branches up the tree
            self.parent._remove_item(self.name)
            _registries.discard(self)

    def resolve_attr(self, key):
        return getattr(self.mod_ns, key)

    def get_module(self, name):
        # create-or-return the child marker for this path token
        if name not in self.contents:
            marker = _ModuleMarker(name, self)
            self.contents[name] = marker
        else:
            marker = self.contents[name]
        return marker

    def add_class(self, name, cls):
        if name in self.contents:
            existing = self.contents[name]
            existing.add_item(cls)
        else:
            # removal of the last class prunes this entry from the tree
            existing = self.contents[name] = _MultipleClassMarker(
                [cls], on_remove=lambda: self._remove_item(name)
            )

    def remove_class(self, name, cls):
        if name in self.contents:
            existing = self.contents[name]
            existing.remove_item(cls)
class _ModNS(object):
    """Attribute-access proxy over a _ModuleMarker, so that string
    expressions like ``"myapp.models.User"`` resolve via getattr chains.
    """

    # name-mangled to _ModNS__parent; holds the owning _ModuleMarker
    __slots__ = ("__parent",)

    def __init__(self, parent):
        self.__parent = parent

    def __getattr__(self, key):
        try:
            value = self.__parent.contents[key]
        except KeyError:
            pass
        else:
            if value is not None:
                if isinstance(value, _ModuleMarker):
                    # descend into the sub-module namespace
                    return value.mod_ns
                else:
                    assert isinstance(value, _MultipleClassMarker)
                    # leaf: resolve to the actual class (may raise if
                    # ambiguous or garbage collected)
                    return value.attempt_get(self.__parent.path, key)
        raise NameError(
            "Module %r has no mapped classes "
            "registered under the name %r" % (self.__parent.name, key)
        )
class _GetColumns(object):
    """Attribute proxy over a mapped class that restricts access to
    column-oriented attributes (used during string expression resolution).
    """

    __slots__ = ("cls",)

    def __init__(self, cls):
        self.cls = cls

    def __getattr__(self, key):
        mp = class_mapper(self.cls, configure=False)
        if mp:
            if key not in mp.all_orm_descriptors:
                raise AttributeError(
                    "Class %r does not have a mapped column named %r"
                    % (self.cls, key)
                )
            desc = mp.all_orm_descriptors[key]
            if desc.extension_type is interfaces.NOT_EXTENSION:
                prop = desc.property
                if isinstance(prop, SynonymProperty):
                    # follow the synonym to its target attribute name
                    key = prop.name
                elif not isinstance(prop, ColumnProperty):
                    raise exc.InvalidRequestError(
                        "Property %r is not an instance of"
                        " ColumnProperty (i.e. does not correspond"
                        " directly to a Column)." % key
                    )
        return getattr(self.cls, key)
# Make inspect() on a _GetColumns proxy behave as inspect() on the
# wrapped class itself.
inspection._inspects(_GetColumns)(
    lambda target: inspection.inspect(target.cls)
)
class _GetTable(object):
    """Attribute proxy resolving ``schema.tablename`` lookups against a
    MetaData collection; ``key`` here is the schema name.
    """

    __slots__ = "key", "metadata"

    def __init__(self, key, metadata):
        self.key = key
        self.metadata = metadata

    def __getattr__(self, key):
        # attribute name is the table name within self.key's schema
        return self.metadata.tables[_get_table_key(key, self.key)]
def _determine_container(key, value):
    """Wrap a registry entry in a _GetColumns proxy, resolving a
    _MultipleClassMarker to its single class first.
    """
    if isinstance(value, _MultipleClassMarker):
        value = value.attempt_get([], key)
    return _GetColumns(value)
class _class_resolver(object):
    """Late-binding resolver for string arguments to relationship()
    and similar, evaluated against the declarative class registry.
    """

    # cls/prop identify where the string came from (for error messages);
    # arg is the string expression; fallback is the namespace consulted
    # when nothing in the registry matches.
    __slots__ = (
        "cls",
        "prop",
        "arg",
        "fallback",
        "_dict",
        "_resolvers",
        "favor_tables",
    )

    def __init__(self, cls, prop, fallback, arg, favor_tables=False):
        self.cls = cls
        self.prop = prop
        self.arg = arg
        self.fallback = fallback
        # lazily-populated namespace; missing keys route to _access_cls
        self._dict = util.PopulateDict(self._access_cls)
        self._resolvers = ()
        # favor_tables: check MetaData tables before mapped-class names
        self.favor_tables = favor_tables

    def _access_cls(self, key):
        """Resolve a single dotted-path token against tables, the class
        registry, the module-path tree, extra resolvers, then fallback.
        """
        cls = self.cls
        manager = attributes.manager_of_class(cls)
        decl_base = manager.registry
        decl_class_registry = decl_base._class_registry
        metadata = decl_base.metadata
        if self.favor_tables:
            if key in metadata.tables:
                return metadata.tables[key]
            elif key in metadata._schemas:
                return _GetTable(key, cls.metadata)
        if key in decl_class_registry:
            return _determine_container(key, decl_class_registry[key])
        if not self.favor_tables:
            if key in metadata.tables:
                return metadata.tables[key]
            elif key in metadata._schemas:
                return _GetTable(key, cls.metadata)
        if (
            "_sa_module_registry" in decl_class_registry
            and key in decl_class_registry["_sa_module_registry"]
        ):
            registry = decl_class_registry["_sa_module_registry"]
            return registry.resolve_attr(key)
        elif self._resolvers:
            for resolv in self._resolvers:
                value = resolv(key)
                if value is not None:
                    return value
        # raises KeyError if the fallback namespace lacks the name
        return self.fallback[key]

    def _raise_for_name(self, name, err):
        # util.raise_ is the py2/py3 compat shim for "raise ... from err"
        util.raise_(
            exc.InvalidRequestError(
                "When initializing mapper %s, expression %r failed to "
                "locate a name (%r). If this is a class name, consider "
                "adding this relationship() to the %r class after "
                "both dependent classes have been defined."
                % (self.prop.parent, self.arg, name, self.cls)
            ),
            from_=err,
        )

    def _resolve_name(self):
        """Resolve a plain dotted name (no expression syntax) by walking
        tokens through the registry namespace.
        """
        name = self.arg
        d = self._dict
        rval = None
        try:
            for token in name.split("."):
                if rval is None:
                    rval = d[token]
                else:
                    rval = getattr(rval, token)
        except KeyError as err:
            self._raise_for_name(name, err)
        except NameError as n:
            self._raise_for_name(n.args[0], n)
        else:
            if isinstance(rval, _GetColumns):
                # unwrap the proxy back to the mapped class
                return rval.cls
            else:
                return rval

    def __call__(self):
        try:
            # SECURITY NOTE: the string argument is evaluated with eval();
            # relationship() strings must come from trusted application
            # code, never from external input.
            x = eval(self.arg, globals(), self._dict)

            if isinstance(x, _GetColumns):
                return x.cls
            else:
                return x
        except NameError as n:
            self._raise_for_name(n.args[0], n)
# cached fallback namespace for string resolution; built lazily on
# first use from the top-level ``sqlalchemy`` module namespace plus the
# ``foreign()`` / ``remote()`` annotators
_fallback_dict = None


def _resolver(cls, prop):
    """Return ``(resolve_name, resolve_arg)`` callables used to resolve
    string-based arguments for ``prop`` against ``cls``'s declarative
    registry."""
    global _fallback_dict

    if _fallback_dict is None:
        import sqlalchemy
        from sqlalchemy.orm import foreign, remote

        _fallback_dict = util.immutabledict(sqlalchemy.__dict__).union(
            {"foreign": foreign, "remote": remote}
        )

    def resolve_arg(arg, favor_tables=False):
        # full-expression resolver; the string is eval()'ed on call
        return _class_resolver(
            cls, prop, _fallback_dict, arg, favor_tables=favor_tables
        )

    def resolve_name(arg):
        # dotted-name-only resolver; no eval of arbitrary expressions
        return _class_resolver(cls, prop, _fallback_dict, arg)._resolve_name

    return resolve_name, resolve_arg

# ---------------------------------------------------------------------------
# (diff-viewer residue: five intervening file diffs were suppressed by the
# viewer as too large.  The section below is the newly added file
# orm/descriptor_props.py, +745 lines.)
# ---------------------------------------------------------------------------
# orm/descriptor_props.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Descriptor properties are more "auxiliary" properties
that exist as configurational elements, but don't participate
as actively in the load/persist ORM loop.
"""
from . import attributes
from . import util as orm_util
from .interfaces import MapperProperty
from .interfaces import PropComparator
from .util import _none_set
from .. import event
from .. import exc as sa_exc
from .. import schema
from .. import sql
from .. import util
from ..sql import expression
from ..sql import operators
class DescriptorProperty(MapperProperty):
    """:class:`.MapperProperty` which proxies access to a
    user-defined descriptor."""

    # docstring applied to the class-bound descriptor, if any
    doc = None

    uses_objects = False
    _links_to_entity = False

    def instrument_class(self, mapper):
        """Install the descriptor on the mapped class, wrapped in a
        proxying instrumented attribute."""
        prop = self

        # minimal AttributeImpl stand-in so the proxied attribute has a
        # usable ``.impl``
        class _ProxyImpl(object):
            accepts_scalar_loader = False
            load_on_unexpire = True
            collection = False

            @property
            def uses_objects(self):
                return prop.uses_objects

            def __init__(self, key):
                self.key = key

            # only proxy get_history() when the owning property
            # actually defines one
            if hasattr(prop, "get_history"):

                def get_history(
                    self, state, dict_, passive=attributes.PASSIVE_OFF
                ):
                    return prop.get_history(state, dict_, passive)

        # adopt a descriptor the user already placed on the class
        if self.descriptor is None:
            desc = getattr(mapper.class_, self.key, None)
            if mapper._is_userland_descriptor(self.key, desc):
                self.descriptor = desc

        # otherwise synthesize a plain property that forwards to
        # ``self.name``
        if self.descriptor is None:

            def fset(obj, value):
                setattr(obj, self.name, value)

            def fdel(obj):
                delattr(obj, self.name)

            def fget(obj):
                return getattr(obj, self.name)

            self.descriptor = property(fget=fget, fset=fset, fdel=fdel)

        proxy_attr = attributes.create_proxied_attribute(self.descriptor)(
            self.parent.class_,
            self.key,
            self.descriptor,
            lambda: self._comparator_factory(mapper),
            doc=self.doc,
            original_property=self,
        )
        proxy_attr.impl = _ProxyImpl(self.key)
        mapper.class_manager.instrument_attribute(self.key, proxy_attr)
class CompositeProperty(DescriptorProperty):
    """Defines a "composite" mapped attribute, representing a collection
    of columns as one attribute.

    :class:`.CompositeProperty` is constructed using the :func:`.composite`
    function.

    .. seealso::

        :ref:`mapper_composite`

    """

    def __init__(self, class_, *attrs, **kwargs):
        r"""Return a composite column-based property for use with a Mapper.

        See the mapping documentation section :ref:`mapper_composite` for a
        full usage example.

        The :class:`.MapperProperty` returned by :func:`.composite`
        is the :class:`.CompositeProperty`.

        :param class\_:
          The "composite type" class, or any classmethod or callable which
          will produce a new instance of the composite object given the
          column values in order.

        :param \*cols:
          List of Column objects to be mapped.

        :param active_history=False:
          When ``True``, indicates that the "previous" value for a
          scalar attribute should be loaded when replaced, if not
          already loaded. See the same flag on :func:`.column_property`.

        :param group:
          A group name for this property when marked as deferred.

        :param deferred:
          When True, the column property is "deferred", meaning that it does
          not load immediately, and is instead loaded when the attribute is
          first accessed on an instance.  See also
          :func:`~sqlalchemy.orm.deferred`.

        :param comparator_factory:  a class which extends
          :class:`.CompositeProperty.Comparator` which provides custom SQL
          clause generation for comparison operations.

        :param doc:
          optional string that will be applied as the doc on the
          class-bound descriptor.

        :param info: Optional data dictionary which will be populated into the
            :attr:`.MapperProperty.info` attribute of this object.

        """
        super(CompositeProperty, self).__init__()

        self.attrs = attrs
        self.composite_class = class_
        self.active_history = kwargs.get("active_history", False)
        self.deferred = kwargs.get("deferred", False)
        self.group = kwargs.get("group", None)
        self.comparator_factory = kwargs.pop(
            "comparator_factory", self.__class__.Comparator
        )
        if "info" in kwargs:
            self.info = kwargs.pop("info")

        util.set_creation_order(self)
        self._create_descriptor()

    def instrument_class(self, mapper):
        super(CompositeProperty, self).instrument_class(mapper)
        self._setup_event_handlers()

    def do_init(self):
        """Initialization which occurs after the :class:`.CompositeProperty`
        has been associated with its parent mapper.

        """
        self._setup_arguments_on_columns()

    # sentinel passed as the "context" of the refresh event fired from
    # within fget(), so the load/refresh handlers can recognize it
    _COMPOSITE_FGET = object()

    def _create_descriptor(self):
        """Create the Python descriptor that will serve as
        the access point on instances of the mapped class.

        """

        def fget(instance):
            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)

            if self.key not in dict_:
                # key not present.  Iterate through related
                # attributes, retrieve their values.  This
                # ensures they all load.
                values = [
                    getattr(instance, key) for key in self._attribute_keys
                ]

                # current expected behavior here is that the composite is
                # created on access if the object is persistent or if
                # col attributes have non-None.  This would be better
                # if the composite were created unconditionally,
                # but that would be a behavioral change.
                if self.key not in dict_ and (
                    state.key is not None or not _none_set.issuperset(values)
                ):
                    dict_[self.key] = self.composite_class(*values)
                    state.manager.dispatch.refresh(
                        state, self._COMPOSITE_FGET, [self.key]
                    )

            return dict_.get(self.key, None)

        def fset(instance, value):
            dict_ = attributes.instance_dict(instance)
            state = attributes.instance_state(instance)
            attr = state.manager[self.key]
            previous = dict_.get(self.key, attributes.NO_VALUE)
            for fn in attr.dispatch.set:
                value = fn(state, value, previous, attr.impl)
            dict_[self.key] = value
            # propagate the composite's values to the individual
            # column-mapped attributes
            if value is None:
                for key in self._attribute_keys:
                    setattr(instance, key, None)
            else:
                for key, value in zip(
                    self._attribute_keys, value.__composite_values__()
                ):
                    setattr(instance, key, value)

        def fdel(instance):
            state = attributes.instance_state(instance)
            dict_ = attributes.instance_dict(instance)
            previous = dict_.pop(self.key, attributes.NO_VALUE)
            attr = state.manager[self.key]
            attr.dispatch.remove(state, previous, attr.impl)
            for key in self._attribute_keys:
                setattr(instance, key, None)

        self.descriptor = property(fget, fset, fdel)

    @util.memoized_property
    def _comparable_elements(self):
        return [getattr(self.parent.class_, prop.key) for prop in self.props]

    @util.memoized_property
    def props(self):
        # resolve each configured attr (string name, Column, or
        # instrumented attribute) to its MapperProperty
        props = []
        for attr in self.attrs:
            if isinstance(attr, str):
                prop = self.parent.get_property(attr, _configure_mappers=False)
            elif isinstance(attr, schema.Column):
                prop = self.parent._columntoproperty[attr]
            elif isinstance(attr, attributes.InstrumentedAttribute):
                prop = attr.property
            else:
                raise sa_exc.ArgumentError(
                    "Composite expects Column objects or mapped "
                    "attributes/attribute names as arguments, got: %r"
                    % (attr,)
                )
            props.append(prop)
        return props

    @property
    def columns(self):
        return [a for a in self.attrs if isinstance(a, schema.Column)]

    def _setup_arguments_on_columns(self):
        """Propagate configuration arguments made on this composite
        to the target columns, for those that apply.

        """
        for prop in self.props:
            prop.active_history = self.active_history
            if self.deferred:
                prop.deferred = self.deferred
                prop.strategy_key = (("deferred", True), ("instrument", True))
            prop.group = self.group

    def _setup_event_handlers(self):
        """Establish events that populate/expire the composite attribute."""

        def load_handler(state, context):
            _load_refresh_handler(state, context, None, is_refresh=False)

        def refresh_handler(state, context, to_load):
            # note this corresponds to sqlalchemy.ext.mutable load_attrs()

            if not to_load or (
                {self.key}.union(self._attribute_keys)
            ).intersection(to_load):
                _load_refresh_handler(state, context, to_load, is_refresh=True)

        def _load_refresh_handler(state, context, to_load, is_refresh):
            dict_ = state.dict

            # if context indicates we are coming from the
            # fget() handler, this already set the value; skip the
            # handler here. (other handlers like mutablecomposite will still
            # want to catch it)
            # there's an insufficiency here in that the fget() handler
            # really should not be using the refresh event and there should
            # be some other event that mutablecomposite can subscribe
            # towards for this.

            if (
                not is_refresh or context is self._COMPOSITE_FGET
            ) and self.key in dict_:
                return

            # if column elements aren't loaded, skip.
            # __get__() will initiate a load for those
            # columns
            for k in self._attribute_keys:
                if k not in dict_:
                    return

            dict_[self.key] = self.composite_class(
                *[state.dict[key] for key in self._attribute_keys]
            )

        def expire_handler(state, keys):
            if keys is None or set(self._attribute_keys).intersection(keys):
                state.dict.pop(self.key, None)

        def insert_update_handler(mapper, connection, state):
            """After an insert or update, some columns may be expired due
            to server side defaults, or re-populated due to client side
            defaults.  Pop out the composite value here so that it
            recreates.

            """
            state.dict.pop(self.key, None)

        event.listen(
            self.parent, "after_insert", insert_update_handler, raw=True
        )
        event.listen(
            self.parent, "after_update", insert_update_handler, raw=True
        )
        event.listen(
            self.parent, "load", load_handler, raw=True, propagate=True
        )
        event.listen(
            self.parent, "refresh", refresh_handler, raw=True, propagate=True
        )
        event.listen(
            self.parent, "expire", expire_handler, raw=True, propagate=True
        )

        # TODO: need a deserialize hook here

    @util.memoized_property
    def _attribute_keys(self):
        return [prop.key for prop in self.props]

    def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
        """Provided for userland code that uses attributes.get_history()."""

        added = []
        deleted = []

        has_history = False
        for prop in self.props:
            key = prop.key
            hist = state.manager[key].impl.get_history(state, dict_)
            if hist.has_changes():
                has_history = True

            non_deleted = hist.non_deleted()
            if non_deleted:
                added.extend(non_deleted)
            else:
                added.append(None)
            if hist.deleted:
                deleted.extend(hist.deleted)
            else:
                deleted.append(None)

        if has_history:
            return attributes.History(
                [self.composite_class(*added)],
                (),
                [self.composite_class(*deleted)],
            )
        else:
            return attributes.History((), [self.composite_class(*added)], ())

    def _comparator_factory(self, mapper):
        return self.comparator_factory(self, mapper)

    class CompositeBundle(orm_util.Bundle):
        # Bundle that reconstructs the composite object from its
        # individual column values in each result row
        def __init__(self, property_, expr):
            self.property = property_
            super(CompositeProperty.CompositeBundle, self).__init__(
                property_.key, *expr
            )

        def create_row_processor(self, query, procs, labels):
            def proc(row):
                return self.property.composite_class(
                    *[proc(row) for proc in procs]
                )

            return proc

    class Comparator(PropComparator):
        """Produce boolean, comparison, and other operators for
        :class:`.CompositeProperty` attributes.

        See the example in :ref:`composite_operations` for an overview
        of usage , as well as the documentation for :class:`.PropComparator`.

        .. seealso::

            :class:`.PropComparator`

            :class:`.ColumnOperators`

            :ref:`types_operators`

            :attr:`.TypeEngine.comparator_factory`

        """

        # defining __eq__ without __hash__ makes the comparator
        # unhashable by design
        __hash__ = None

        @util.memoized_property
        def clauses(self):
            return expression.ClauseList(
                group=False, *self._comparable_elements
            )

        def __clause_element__(self):
            return self.expression

        @util.memoized_property
        def expression(self):
            clauses = self.clauses._annotate(
                {
                    "parententity": self._parententity,
                    "parentmapper": self._parententity,
                    "proxy_key": self.prop.key,
                }
            )
            return CompositeProperty.CompositeBundle(self.prop, clauses)

        def _bulk_update_tuples(self, value):
            # expand a composite value into per-column (column, value)
            # pairs for bulk UPDATE
            if isinstance(value, sql.elements.BindParameter):
                value = value.value

            if value is None:
                values = [None for key in self.prop._attribute_keys]
            elif isinstance(value, self.prop.composite_class):
                values = value.__composite_values__()
            else:
                raise sa_exc.ArgumentError(
                    "Can't UPDATE composite attribute %s to %r"
                    % (self.prop, value)
                )

            return zip(self._comparable_elements, values)

        @util.memoized_property
        def _comparable_elements(self):
            if self._adapt_to_entity:
                return [
                    getattr(self._adapt_to_entity.entity, prop.key)
                    for prop in self.prop._comparable_elements
                ]
            else:
                return self.prop._comparable_elements

        def __eq__(self, other):
            # compare column-by-column, ANDed together
            if other is None:
                values = [None] * len(self.prop._comparable_elements)
            else:
                values = other.__composite_values__()
            comparisons = [
                a == b for a, b in zip(self.prop._comparable_elements, values)
            ]
            if self._adapt_to_entity:
                comparisons = [self.adapter(x) for x in comparisons]
            return sql.and_(*comparisons)

        def __ne__(self, other):
            return sql.not_(self.__eq__(other))

    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key
class ConcreteInheritedProperty(DescriptorProperty):
    """A 'do nothing' :class:`.MapperProperty` that disables
    an attribute on a concrete subclass that is only present
    on the inherited mapper, not the concrete classes' mapper.

    Cases where this occurs include:

    * When the superclass mapper is mapped against a
      "polymorphic union", which includes all attributes from
      all subclasses.
    * When a relationship() is configured on an inherited mapper,
      but not on the subclass mapper.  Concrete mappers require
      that relationship() is configured explicitly on each
      subclass.

    """

    def _comparator_factory(self, mapper):
        # search up the inheritance chain for the mapper that defines
        # the real property, and use its comparator at the class level
        comparator_callable = None

        for m in self.parent.iterate_to_root():
            p = m._props[self.key]
            if not isinstance(p, ConcreteInheritedProperty):
                comparator_callable = p.comparator_factory
                break
        return comparator_callable

    def __init__(self):
        super(ConcreteInheritedProperty, self).__init__()

        def warn():
            raise AttributeError(
                "Concrete %s does not implement "
                "attribute %r at the instance level.  Add "
                "this property explicitly to %s."
                % (self.parent, self.key, self.parent)
            )

        # descriptor that permits class-level access (for queries) but
        # raises on any instance-level get/set/delete
        class NoninheritedConcreteProp(object):
            def __set__(s, obj, value):
                warn()

            def __delete__(s, obj):
                warn()

            def __get__(s, obj, owner):
                if obj is None:
                    return self.descriptor
                warn()

        self.descriptor = NoninheritedConcreteProp()
class SynonymProperty(DescriptorProperty):
    """Mapped attribute that mirrors the value and expression behavior
    of another mapped attribute; constructed via :func:`.synonym`."""

    def __init__(
        self,
        name,
        map_column=None,
        descriptor=None,
        comparator_factory=None,
        doc=None,
        info=None,
    ):
        """Denote an attribute name as a synonym to a mapped property,
        in that the attribute will mirror the value and expression behavior
        of another attribute.

        e.g.::

            class MyClass(Base):
                __tablename__ = 'my_table'

                id = Column(Integer, primary_key=True)
                job_status = Column(String(50))

                status = synonym("job_status")

        :param name: the name of the existing mapped property.  This
          can refer to the string name ORM-mapped attribute
          configured on the class, including column-bound attributes
          and relationships.

        :param descriptor: a Python :term:`descriptor` that will be used
          as a getter (and potentially a setter) when this attribute is
          accessed at the instance level.

        :param map_column: **For classical mappings and mappings against
          an existing Table object only**.  if ``True``, the :func:`.synonym`
          construct will locate the :class:`_schema.Column`
          object upon the mapped
          table that would normally be associated with the attribute name of
          this synonym, and produce a new :class:`.ColumnProperty` that instead
          maps this :class:`_schema.Column`
          to the alternate name given as the "name"
          argument of the synonym; in this way, the usual step of redefining
          the mapping of the :class:`_schema.Column`
          to be under a different name is
          unnecessary. This is usually intended to be used when a
          :class:`_schema.Column`
          is to be replaced with an attribute that also uses a
          descriptor, that is, in conjunction with the
          :paramref:`.synonym.descriptor` parameter::

            my_table = Table(
                "my_table", metadata,
                Column('id', Integer, primary_key=True),
                Column('job_status', String(50))
            )

            class MyClass(object):
                @property
                def _job_status_descriptor(self):
                    return "Status: %s" % self._job_status


            mapper(
                MyClass, my_table, properties={
                    "job_status": synonym(
                        "_job_status", map_column=True,
                        descriptor=MyClass._job_status_descriptor)
                }
            )

          Above, the attribute named ``_job_status`` is automatically
          mapped to the ``job_status`` column::

            >>> j1 = MyClass()
            >>> j1._job_status = "employed"
            >>> j1.job_status
            Status: employed

          When using Declarative, in order to provide a descriptor in
          conjunction with a synonym, use the
          :func:`sqlalchemy.ext.declarative.synonym_for` helper.  However,
          note that the :ref:`hybrid properties <mapper_hybrids>` feature
          should usually be preferred, particularly when redefining attribute
          behavior.

        :param info: Optional data dictionary which will be populated into the
            :attr:`.InspectionAttr.info` attribute of this object.

            .. versionadded:: 1.0.0

        :param comparator_factory: A subclass of :class:`.PropComparator`
          that will provide custom comparison behavior at the SQL expression
          level.

          .. note::

            For the use case of providing an attribute which redefines both
            Python-level and SQL-expression level behavior of an attribute,
            please refer to the Hybrid attribute introduced at
            :ref:`mapper_hybrids` for a more effective technique.

        .. seealso::

            :ref:`synonyms` - Overview of synonyms

            :func:`.synonym_for` - a helper oriented towards Declarative

            :ref:`mapper_hybrids` - The Hybrid Attribute extension provides an
            updated approach to augmenting attribute behavior more flexibly
            than can be achieved with synonyms.

        """
        super(SynonymProperty, self).__init__()

        self.name = name
        self.map_column = map_column
        self.descriptor = descriptor
        self.comparator_factory = comparator_factory
        # prefer explicit doc, then the descriptor's own docstring
        self.doc = doc or (descriptor and descriptor.__doc__) or None
        if info:
            self.info = info

        util.set_creation_order(self)

    @property
    def uses_objects(self):
        return getattr(self.parent.class_, self.name).impl.uses_objects

    # TODO: when initialized, check _proxied_object,
    # emit a warning if its not a column-based property

    @util.memoized_property
    def _proxied_object(self):
        attr = getattr(self.parent.class_, self.name)
        if not hasattr(attr, "property") or not isinstance(
            attr.property, MapperProperty
        ):
            # attribute is a non-MapperProprerty proxy such as
            # hybrid or association proxy
            if isinstance(attr, attributes.QueryableAttribute):
                return attr.comparator
            elif isinstance(attr, operators.ColumnOperators):
                return attr

            raise sa_exc.InvalidRequestError(
                """synonym() attribute "%s.%s" only supports """
                """ORM mapped attributes, got %r"""
                % (self.parent.class_.__name__, self.name, attr)
            )
        return attr.property

    def _comparator_factory(self, mapper):
        prop = self._proxied_object

        if isinstance(prop, MapperProperty):
            if self.comparator_factory:
                comp = self.comparator_factory(prop, mapper)
            else:
                comp = prop.comparator_factory(prop, mapper)
            return comp
        else:
            return prop

    def get_history(self, *arg, **kw):
        # delegate history tracking to the mirrored attribute
        attr = getattr(self.parent.class_, self.name)
        return attr.impl.get_history(*arg, **kw)

    @util.preload_module("sqlalchemy.orm.properties")
    def set_parent(self, parent, init):
        properties = util.preloaded.orm_properties

        if self.map_column:
            # implement the 'map_column' option.
            if self.key not in parent.persist_selectable.c:
                raise sa_exc.ArgumentError(
                    "Can't compile synonym '%s': no column on table "
                    "'%s' named '%s'"
                    % (
                        self.name,
                        parent.persist_selectable.description,
                        self.key,
                    )
                )
            elif (
                parent.persist_selectable.c[self.key]
                in parent._columntoproperty
                and parent._columntoproperty[
                    parent.persist_selectable.c[self.key]
                ].key
                == self.name
            ):
                raise sa_exc.ArgumentError(
                    "Can't call map_column=True for synonym %r=%r, "
                    "a ColumnProperty already exists keyed to the name "
                    "%r for column %r"
                    % (self.key, self.name, self.name, self.key)
                )
            # map the located column under the synonym's target name
            p = properties.ColumnProperty(
                parent.persist_selectable.c[self.key]
            )
            parent._configure_property(self.name, p, init=init, setparent=True)
            p._mapped_by_synonym = self.key

        self.parent = parent

# ---------------------------------------------------------------------------
# (diff-viewer residue: the section below is the newly added file
# orm/dynamic.py, +491 lines.)
# ---------------------------------------------------------------------------
# orm/dynamic.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Dynamic collection API.
Dynamic collections act like Query() objects for read operations and support
basic add/delete mutation.
"""
from . import attributes
from . import exc as orm_exc
from . import interfaces
from . import object_mapper
from . import object_session
from . import relationships
from . import strategies
from . import util as orm_util
from .query import Query
from .. import exc
from .. import log
from .. import util
from ..engine import result
@log.class_logger
@relationships.RelationshipProperty.strategy_for(lazy="dynamic")
class DynaLoader(strategies.AbstractRelationshipLoader):
    """Loader strategy for ``relationship(lazy="dynamic")``; instruments
    the attribute with :class:`.DynamicAttributeImpl`."""

    def init_class_attribute(self, mapper):
        self.is_class_level = True
        if not self.uselist:
            # a query-returning collection is incompatible with
            # uselist=False / scalar relationships
            raise exc.InvalidRequestError(
                "On relationship %s, 'dynamic' loaders cannot be used with "
                "many-to-one/one-to-one relationships and/or "
                "uselist=False." % self.parent_property
            )
        elif self.parent_property.direction not in (
            interfaces.ONETOMANY,
            interfaces.MANYTOMANY,
        ):
            # currently only a warning; scheduled to become an exception
            util.warn(
                "On relationship %s, 'dynamic' loaders cannot be used with "
                "many-to-one/one-to-one relationships and/or "
                "uselist=False. This warning will be an exception in a "
                "future release." % self.parent_property
            )

        strategies._register_attribute(
            self.parent_property,
            mapper,
            useobject=True,
            impl_class=DynamicAttributeImpl,
            target_mapper=self.parent_property.mapper,
            order_by=self.parent_property.order_by,
            query_class=self.parent_property.query_class,
        )
class DynamicAttributeImpl(attributes.AttributeImpl):
    """Attribute implementation for 'dynamic' relationships.

    Attribute access returns an instance of ``query_class`` (by default
    :class:`.AppenderQuery`) rather than a loaded collection; pending
    appends/removes are tracked per-state in a ``CollectionHistory``.
    """

    uses_objects = True
    default_accepts_scalar_loader = False
    supports_population = False
    collection = False
    dynamic = True
    order_by = ()

    def __init__(
        self,
        class_,
        key,
        typecallable,
        dispatch,
        target_mapper,
        order_by,
        query_class=None,
        **kw
    ):
        super(DynamicAttributeImpl, self).__init__(
            class_, key, typecallable, dispatch, **kw
        )
        self.target_mapper = target_mapper
        if order_by:
            self.order_by = tuple(order_by)
        if not query_class:
            self.query_class = AppenderQuery
        elif AppenderMixin in query_class.mro():
            # user-supplied query class already embeds the mixin
            self.query_class = query_class
        else:
            self.query_class = mixin_user_query(query_class)

    def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
        if not passive & attributes.SQL_OK:
            # SQL not permitted; return only pending in-memory items
            return self._get_collection_history(
                state, attributes.PASSIVE_NO_INITIALIZE
            ).added_items
        else:
            return self.query_class(self, state)

    def get_collection(
        self,
        state,
        dict_,
        user_data=None,
        passive=attributes.PASSIVE_NO_INITIALIZE,
    ):
        if not passive & attributes.SQL_OK:
            data = self._get_collection_history(state, passive).added_items
        else:
            history = self._get_collection_history(state, passive)
            data = history.added_plus_unchanged
        return DynamicCollectionAdapter(data)

    @util.memoized_property
    def _append_token(self):
        return attributes.Event(self, attributes.OP_APPEND)

    @util.memoized_property
    def _remove_token(self):
        return attributes.Event(self, attributes.OP_REMOVE)

    def fire_append_event(
        self, state, dict_, value, initiator, collection_history=None
    ):
        if collection_history is None:
            collection_history = self._modified_event(state, dict_)

        collection_history.add_added(value)

        for fn in self.dispatch.append:
            value = fn(state, value, initiator or self._append_token)

        if self.trackparent and value is not None:
            self.sethasparent(attributes.instance_state(value), state, True)

    def fire_remove_event(
        self, state, dict_, value, initiator, collection_history=None
    ):
        if collection_history is None:
            collection_history = self._modified_event(state, dict_)

        collection_history.add_removed(value)

        if self.trackparent and value is not None:
            self.sethasparent(attributes.instance_state(value), state, False)

        for fn in self.dispatch.remove:
            fn(state, value, initiator or self._remove_token)

    def _modified_event(self, state, dict_):
        # ensure a CollectionHistory exists for this attribute and mark
        # the state modified; returns the history
        if self.key not in state.committed_state:
            state.committed_state[self.key] = CollectionHistory(self, state)

        state._modified_event(dict_, self, attributes.NEVER_SET)

        # this is a hack to allow the fixtures.ComparableEntity fixture
        # to work
        dict_[self.key] = True
        return state.committed_state[self.key]

    def set(
        self,
        state,
        dict_,
        value,
        initiator=None,
        passive=attributes.PASSIVE_OFF,
        check_old=None,
        pop=False,
        _adapt=True,
    ):
        if initiator and initiator.parent_token is self.parent_token:
            return

        if pop and value is None:
            return

        iterable = value
        new_values = list(iterable)
        if state.has_identity:
            # persistent object: baseline is the current loaded contents
            old_collection = util.IdentitySet(self.get(state, dict_))

        collection_history = self._modified_event(state, dict_)
        if not state.has_identity:
            old_collection = collection_history.added_items
        else:
            old_collection = old_collection.union(
                collection_history.added_items
            )

        # diff old vs. new by identity, then fire events for the delta
        idset = util.IdentitySet
        constants = old_collection.intersection(new_values)
        additions = idset(new_values).difference(constants)
        removals = old_collection.difference(constants)

        for member in new_values:
            if member in additions:
                self.fire_append_event(
                    state,
                    dict_,
                    member,
                    None,
                    collection_history=collection_history,
                )

        for member in removals:
            self.fire_remove_event(
                state,
                dict_,
                member,
                None,
                collection_history=collection_history,
            )

    def delete(self, *args, **kwargs):
        raise NotImplementedError()

    def set_committed_value(self, state, dict_, value):
        raise NotImplementedError(
            "Dynamic attributes don't support " "collection population."
        )

    def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
        c = self._get_collection_history(state, passive)
        return c.as_history()

    def get_all_pending(
        self, state, dict_, passive=attributes.PASSIVE_NO_INITIALIZE
    ):
        c = self._get_collection_history(state, passive)
        return [(attributes.instance_state(x), x) for x in c.all_items]

    def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF):
        if self.key in state.committed_state:
            c = state.committed_state[self.key]
        else:
            c = CollectionHistory(self, state)

        if state.has_identity and (passive & attributes.INIT_OK):
            # layer a reconciling history over the pending one
            return CollectionHistory(self, state, apply_to=c)
        else:
            return c

    def append(
        self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF
    ):
        if initiator is not self:
            self.fire_append_event(state, dict_, value, initiator)

    def remove(
        self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF
    ):
        if initiator is not self:
            self.fire_remove_event(state, dict_, value, initiator)

    def pop(
        self, state, dict_, value, initiator, passive=attributes.PASSIVE_OFF
    ):
        self.remove(state, dict_, value, initiator, passive=passive)
class DynamicCollectionAdapter(object):
    """Minimal stand-in for a collection adapter, for internal API
    consistency.

    Wraps a plain sequence of items so callers can iterate and measure
    it through the same interface as a full adapter.
    """

    def __init__(self, data):
        self.data = data

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __bool__(self):
        # always truthy, even when the wrapped data is empty
        return True

    __nonzero__ = __bool__

    def _reset_empty(self):
        # no-op: there is no backing collection state to clear
        pass
class AppenderMixin(object):
query_class = None
def __init__(self, attr, state):
super(AppenderMixin, self).__init__(attr.target_mapper, None)
self.instance = instance = state.obj()
self.attr = attr
mapper = object_mapper(instance)
prop = mapper._props[self.attr.key]
if prop.secondary is not None:
# this is a hack right now. The Query only knows how to
# make subsequent joins() without a given left-hand side
# from self._from_obj[0]. We need to ensure prop.secondary
# is in the FROM. So we purposely put the mapper selectable
# in _from_obj[0] to ensure a user-defined join() later on
# doesn't fail, and secondary is then in _from_obj[1].
# note also, we are using the official ORM-annotated selectable
# from __clause_element__(), see #7868
self._from_obj = (prop.mapper.__clause_element__(), prop.secondary)
self._where_criteria = (
prop._with_parent(instance, alias_secondary=False),
)
if self.attr.order_by:
self._order_by_clauses = self.attr.order_by
def session(self):
sess = object_session(self.instance)
if (
sess is not None
and self.autoflush
and sess.autoflush
and self.instance in sess
):
sess.flush()
if not orm_util.has_identity(self.instance):
return None
else:
return sess
session = property(session, lambda s, x: None)
def _iter(self):
sess = self.session
if sess is None:
state = attributes.instance_state(self.instance)
if state.detached:
util.warn(
"Instance %s is detached, dynamic relationship cannot "
"return a correct result. This warning will become "
"a DetachedInstanceError in a future release."
% (orm_util.state_str(state))
)
return result.IteratorResult(
result.SimpleResultMetaData([self.attr.class_.__name__]),
self.attr._get_collection_history(
attributes.instance_state(self.instance),
attributes.PASSIVE_NO_INITIALIZE,
).added_items,
_source_supports_scalars=True,
).scalars()
else:
return self._generate(sess)._iter()
def __getitem__(self, index):
sess = self.session
if sess is None:
return self.attr._get_collection_history(
attributes.instance_state(self.instance),
attributes.PASSIVE_NO_INITIALIZE,
).indexed(index)
else:
return self._generate(sess).__getitem__(index)
def count(self):
sess = self.session
if sess is None:
return len(
self.attr._get_collection_history(
attributes.instance_state(self.instance),
attributes.PASSIVE_NO_INITIALIZE,
).added_items
)
else:
return self._generate(sess).count()
def _generate(self, sess=None):
# note we're returning an entirely new Query class instance
# here without any assignment capabilities; the class of this
# query is determined by the session.
instance = self.instance
if sess is None:
sess = object_session(instance)
if sess is None:
raise orm_exc.DetachedInstanceError(
"Parent instance %s is not bound to a Session, and no "
"contextual session is established; lazy load operation "
"of attribute '%s' cannot proceed"
% (orm_util.instance_str(instance), self.attr.key)
)
if self.query_class:
query = self.query_class(self.attr.target_mapper, session=sess)
else:
query = sess.query(self.attr.target_mapper)
query._where_criteria = self._where_criteria
query._from_obj = self._from_obj
query._order_by_clauses = self._order_by_clauses
return query
def extend(self, iterator):
    """Append each item from *iterator* to the dynamic collection.

    The instance state/dict lookups are loop-invariant, so they are
    resolved once up front rather than once per item as before.
    """
    attr = self.attr
    state = attributes.instance_state(self.instance)
    dict_ = attributes.instance_dict(self.instance)
    for item in iterator:
        attr.append(state, dict_, item, None)
def append(self, item):
    """Add *item* to the collection through the attribute event system."""
    instance = self.instance
    self.attr.append(
        attributes.instance_state(instance),
        attributes.instance_dict(instance),
        item,
        None,
    )
def remove(self, item):
    """Remove *item* from the collection through the attribute event system."""
    instance = self.instance
    self.attr.remove(
        attributes.instance_state(instance),
        attributes.instance_dict(instance),
        item,
        None,
    )
class AppenderQuery(AppenderMixin, Query):
    """A dynamic query that supports basic collection storage operations.

    Collection mutators (``append()``, ``remove()``, ``extend()``) come
    from :class:`.AppenderMixin`; query behavior comes from
    :class:`.Query`.
    """
def mixin_user_query(cls):
    """Return a new class with AppenderQuery functionality layered over."""
    appender_name = "Appender" + cls.__name__
    bases = (AppenderMixin, cls)
    return type(appender_name, bases, {"query_class": cls})
class CollectionHistory(object):
    """Overrides AttributeHistory to receive append/remove events directly.

    Tracks three identity-based sets: ``added_items``, ``deleted_items``
    and ``unchanged_items``.  When built with ``apply_to``, the current
    database contents are loaded as the "unchanged" baseline and later
    reconciled against the pending adds/deletes in :meth:`.as_history`.
    """

    def __init__(self, attr, state, apply_to=None):
        if apply_to:
            # snapshot current database contents (autoflush disabled so
            # the load itself doesn't flush the pending changes)
            coll = AppenderQuery(attr, state).autoflush(False)
            self.unchanged_items = util.OrderedIdentitySet(coll)
            self.added_items = apply_to.added_items
            self.deleted_items = apply_to.deleted_items
            self._reconcile_collection = True
        else:
            self.deleted_items = util.OrderedIdentitySet()
            self.added_items = util.OrderedIdentitySet()
            self.unchanged_items = util.OrderedIdentitySet()
            self._reconcile_collection = False

    @property
    def added_plus_unchanged(self):
        # members currently "in" the collection
        return list(self.added_items.union(self.unchanged_items))

    @property
    def all_items(self):
        # every member ever seen, including deleted ones
        return list(
            self.added_items.union(self.unchanged_items).union(
                self.deleted_items
            )
        )

    def as_history(self):
        """Return an :class:`.attributes.History` tuple, reconciling
        pending changes against the loaded baseline when present."""
        if self._reconcile_collection:
            # an item both "added" and already present is not truly added;
            # a delete only counts if the item was actually present
            added = self.added_items.difference(self.unchanged_items)
            deleted = self.deleted_items.intersection(self.unchanged_items)
            unchanged = self.unchanged_items.difference(deleted)
        else:
            added, unchanged, deleted = (
                self.added_items,
                self.unchanged_items,
                self.deleted_items,
            )
        return attributes.History(list(added), list(unchanged), list(deleted))

    def indexed(self, index):
        # index into pending adds only (no database state available)
        return list(self.added_items)[index]

    def add_added(self, value):
        self.added_items.add(value)

    def add_removed(self, value):
        # removing a pending add cancels it; otherwise record a delete
        if value in self.added_items:
            self.added_items.remove(value)
        else:
            self.deleted_items.add(value)

View File

@@ -0,0 +1,241 @@
# orm/evaluator.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
import operator
from .. import inspect
from .. import util
from ..sql import and_
from ..sql import operators
class UnevaluatableError(Exception):
    """Raised when a SQL clause construct cannot be evaluated in Python."""

    pass
class _NoObject(operators.ColumnOperators):
    """Sentinel type standing in for "no object present"; every
    operation against it evaluates to None."""

    def operate(self, *arg, **kw):
        return None

    def reverse_operate(self, *arg, **kw):
        return None
# singleton sentinel propagated through evaluators when no object is present
_NO_OBJECT = _NoObject()

# operators that can be applied directly to two Python values
_straight_ops = set(
    getattr(operators, op)
    for op in (
        "add",
        "mul",
        "sub",
        "div",
        "mod",
        "truediv",
        "lt",
        "le",
        "ne",
        "gt",
        "ge",
        "eq",
    )
)

# operators needing special handling of the _NO_OBJECT sentinel
_extended_ops = {
    operators.in_op: (lambda a, b: a in b if a is not _NO_OBJECT else None),
    operators.not_in_op: (
        lambda a, b: a not in b if a is not _NO_OBJECT else None
    ),
}

# string-matching/range operators with no Python-side equivalent here;
# NOTE(review): not referenced in this chunk — presumably consulted by
# callers elsewhere; confirm before removing.
_notimplemented_ops = set(
    getattr(operators, op)
    for op in (
        "like_op",
        "not_like_op",
        "ilike_op",
        "not_ilike_op",
        "startswith_op",
        "between_op",
        "endswith_op",
        "concat_op",
    )
)
class EvaluatorCompiler(object):
    """Compile SQL expression constructs into Python evaluator callables.

    Each ``visit_*`` method returns a callable ``evaluate(obj)`` that
    applies the clause to a mapped instance *obj* in Python.  The
    ``_NO_OBJECT`` sentinel propagates "no object present" through
    compound expressions; ``None`` models SQL NULL three-valued logic.
    """

    def __init__(self, target_cls=None):
        # target_cls: mapped class criteria will be evaluated against;
        # used to validate column references in visit_column()
        self.target_cls = target_cls

    def process(self, *clauses):
        """Compile one or more clauses (ANDed together) into an evaluator.

        :raises UnevaluatableError: if no clauses are given, or a clause
         type / operator is unsupported.
        """
        if not clauses:
            # previously fell through to an UnboundLocalError on
            # ``clause``; raise an explicit, catchable error instead
            raise UnevaluatableError("no clauses supplied to process()")
        if len(clauses) > 1:
            clause = and_(*clauses)
        else:
            clause = clauses[0]
        meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
        if not meth:
            raise UnevaluatableError(
                "Cannot evaluate %s" % type(clause).__name__
            )
        return meth(clause)

    def visit_grouping(self, clause):
        # parenthesization has no Python-side meaning; unwrap
        return self.process(clause.element)

    def visit_null(self, clause):
        return lambda obj: None

    def visit_false(self, clause):
        return lambda obj: False

    def visit_true(self, clause):
        return lambda obj: True

    def visit_column(self, clause):
        """Return an evaluator fetching the mapped attribute for a column."""
        if "parentmapper" in clause._annotations:
            parentmapper = clause._annotations["parentmapper"]
            if self.target_cls and not issubclass(
                self.target_cls, parentmapper.class_
            ):
                raise UnevaluatableError(
                    "Can't evaluate criteria against alternate class %s"
                    % parentmapper.class_
                )
            key = parentmapper._columntoproperty[clause].key
        else:
            key = clause.key
            if (
                self.target_cls
                and key in inspect(self.target_cls).column_attrs
            ):
                util.warn(
                    "Evaluating non-mapped column expression '%s' onto "
                    "ORM instances; this is a deprecated use case. Please "
                    "make use of the actual mapped columns in ORM-evaluated "
                    "UPDATE / DELETE expressions." % clause
                )
            else:
                raise UnevaluatableError("Cannot evaluate column: %s" % clause)

        get_corresponding_attr = operator.attrgetter(key)
        return (
            lambda obj: get_corresponding_attr(obj)
            if obj is not None
            else _NO_OBJECT
        )

    def visit_tuple(self, clause):
        return self.visit_clauselist(clause)

    def visit_clauselist(self, clause):
        """Return an evaluator for AND / OR / comma lists, applying SQL
        three-valued logic (None short-circuits appropriately)."""
        evaluators = list(map(self.process, clause.clauses))
        if clause.operator is operators.or_:

            def evaluate(obj):
                has_null = False
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if value:
                        return True
                    has_null = has_null or value is None
                if has_null:
                    return None
                return False

        elif clause.operator is operators.and_:

            def evaluate(obj):
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if not value:
                        if value is None or value is _NO_OBJECT:
                            return None
                        return False
                return True

        elif clause.operator is operators.comma_op:

            def evaluate(obj):
                values = []
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if value is None or value is _NO_OBJECT:
                        return None
                    values.append(value)
                return tuple(values)

        else:
            raise UnevaluatableError(
                "Cannot evaluate clauselist with operator %s" % clause.operator
            )

        return evaluate

    def visit_binary(self, clause):
        """Return an evaluator for a binary expression."""
        eval_left = self.process(clause.left)
        eval_right = self.process(clause.right)
        # local renamed from ``operator`` so it no longer shadows the
        # stdlib ``operator`` module used by visit_column()
        op = clause.operator
        if op is operators.is_:

            def evaluate(obj):
                return eval_left(obj) == eval_right(obj)

        elif op is operators.is_not:

            def evaluate(obj):
                return eval_left(obj) != eval_right(obj)

        elif op in _extended_ops:

            def evaluate(obj):
                left_val = eval_left(obj)
                right_val = eval_right(obj)
                if left_val is None or right_val is None:
                    return None
                return _extended_ops[op](left_val, right_val)

        elif op in _straight_ops:

            def evaluate(obj):
                left_val = eval_left(obj)
                right_val = eval_right(obj)
                if left_val is None or right_val is None:
                    return None
                # apply to the values already computed; previously the
                # operands were re-evaluated here, doubling the work and
                # any side effects of the sub-evaluators
                return op(left_val, right_val)

        else:
            raise UnevaluatableError(
                "Cannot evaluate %s with operator %s"
                % (type(clause).__name__, clause.operator)
            )

        return evaluate

    def visit_unary(self, clause):
        """Return an evaluator for NOT; other unary operators are rejected."""
        eval_inner = self.process(clause.element)
        if clause.operator is operators.inv:

            def evaluate(obj):
                value = eval_inner(obj)
                if value is None:
                    return None
                return not value

            return evaluate
        raise UnevaluatableError(
            "Cannot evaluate %s with operator %s"
            % (type(clause).__name__, clause.operator)
        )

    def visit_bindparam(self, clause):
        """Return an evaluator producing the bound parameter's value,
        resolving a callable-style parameter once at compile time."""
        if clause.callable:
            val = clause.callable()
        else:
            val = clause.value
        return lambda obj: val

2876
lib/sqlalchemy/orm/events.py Normal file

File diff suppressed because it is too large Load Diff

204
lib/sqlalchemy/orm/exc.py Normal file
View File

@@ -0,0 +1,204 @@
# orm/exc.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""SQLAlchemy ORM exceptions."""
from .. import exc as sa_exc
from .. import util
from ..exc import MultipleResultsFound # noqa
from ..exc import NoResultFound # noqa
# caught together wherever instrumentation lookup may fail
NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
class StaleDataError(sa_exc.SQLAlchemyError):
    """An operation encountered database state that is unaccounted for.

    Conditions which cause this to happen include:

    * A flush may have attempted to update or delete rows
      and an unexpected number of rows were matched during
      the UPDATE or DELETE statement. Note that when
      version_id_col is used, rows in UPDATE or DELETE statements
      are also matched against the current known version
      identifier.

    * A mapped object with version_id_col was refreshed,
      and the version number coming back from the database does
      not match that of the object itself.

    * An object is detached from its parent object, however
      the object was previously attached to a different parent
      identity which was garbage collected, and a decision
      cannot be made if the new parent was really the most
      recent "parent".

    """
# legacy alias retained for backwards compatibility
ConcurrentModificationError = StaleDataError
class FlushError(sa_exc.SQLAlchemyError):
    """An invalid condition was detected during flush()."""
class UnmappedError(sa_exc.InvalidRequestError):
    """Base for exceptions that involve expected mappings not present."""
class ObjectDereferencedError(sa_exc.SQLAlchemyError):
    """An operation cannot complete due to an object being garbage
    collected.

    """
class DetachedInstanceError(sa_exc.SQLAlchemyError):
    """An attempt to access unloaded attributes on a
    mapped instance that is detached."""

    # error-code slug used to build the documentation link in messages
    code = "bhk3"
class UnmappedInstanceError(UnmappedError):
    """A mapping operation was requested for an unknown instance."""

    @util.preload_module("sqlalchemy.orm.base")
    def __init__(self, obj, msg=None):
        base = util.preloaded.orm_base

        if not msg:
            try:
                # if the class itself IS mapped, the instance merely
                # lacks instrumentation (created before mapping)
                base.class_mapper(type(obj))
                name = _safe_cls_name(type(obj))
                msg = (
                    "Class %r is mapped, but this instance lacks "
                    "instrumentation. This occurs when the instance "
                    "is created before sqlalchemy.orm.mapper(%s) "
                    "was called." % (name, name)
                )
            except UnmappedClassError:
                msg = _default_unmapped(type(obj))
                if isinstance(obj, type):
                    # common user error: passed the class, not an instance
                    msg += (
                        "; was a class (%s) supplied where an instance was "
                        "required?" % _safe_cls_name(obj)
                    )
        UnmappedError.__init__(self, msg)

    def __reduce__(self):
        # pickle with the message only; ``obj`` may not be picklable
        return self.__class__, (None, self.args[0])
class UnmappedClassError(UnmappedError):
    """A mapping operation was requested for an unknown class."""

    def __init__(self, cls, msg=None):
        if not msg:
            msg = _default_unmapped(cls)
        UnmappedError.__init__(self, msg)

    def __reduce__(self):
        # pickle with the message only; ``cls`` may not be picklable
        return self.__class__, (None, self.args[0])
class ObjectDeletedError(sa_exc.InvalidRequestError):
    """A refresh operation failed to retrieve the database
    row corresponding to an object's known primary key identity.

    A refresh operation proceeds when an expired attribute is
    accessed on an object, or when :meth:`_query.Query.get` is
    used to retrieve an object which is, upon retrieval, detected
    as expired. A SELECT is emitted for the target row
    based on primary key; if no row is returned, this
    exception is raised.

    The true meaning of this exception is simply that
    no row exists for the primary key identifier associated
    with a persistent object. The row may have been
    deleted, or in some cases the primary key updated
    to a new value, outside of the ORM's management of the target
    object.

    """

    @util.preload_module("sqlalchemy.orm.base")
    def __init__(self, state, msg=None):
        base = util.preloaded.orm_base

        if not msg:
            msg = (
                "Instance '%s' has been deleted, or its "
                "row is otherwise not present." % base.state_str(state)
            )

        sa_exc.InvalidRequestError.__init__(self, msg)

    def __reduce__(self):
        # pickle with the message only; ``state`` is not picklable
        return self.__class__, (None, self.args[0])
class UnmappedColumnError(sa_exc.InvalidRequestError):
    """Mapping operation was requested on an unknown column."""
class LoaderStrategyException(sa_exc.InvalidRequestError):
    """A loader strategy for an attribute does not exist.

    Two message forms: "not found" when no strategy object exists for
    the requested key, "wrong property type" when a strategy exists but
    was applied to an incompatible property kind.
    """

    def __init__(
        self,
        applied_to_property_type,
        requesting_property,
        applies_to,
        actual_strategy_type,
        strategy_key,
    ):
        if actual_strategy_type is None:
            sa_exc.InvalidRequestError.__init__(
                self,
                "Can't find strategy %s for %s"
                % (strategy_key, requesting_property),
            )
        else:
            sa_exc.InvalidRequestError.__init__(
                self,
                'Can\'t apply "%s" strategy to property "%s", '
                'which is a "%s"; this loader strategy is intended '
                'to be used with a "%s".'
                % (
                    util.clsname_as_plain_name(actual_strategy_type),
                    requesting_property,
                    util.clsname_as_plain_name(applied_to_property_type),
                    util.clsname_as_plain_name(applies_to),
                ),
            )
def _safe_cls_name(cls):
    """Return a best-effort dotted path for *cls*, falling back to its
    bare ``__name__`` and finally to ``repr(cls)``."""
    try:
        return ".".join((cls.__module__, cls.__name__))
    except AttributeError:
        fallback = getattr(cls, "__name__", None)
        return repr(cls) if fallback is None else fallback
@util.preload_module("sqlalchemy.orm.base")
def _default_unmapped(cls):
    """Build the default "not mapped" message for *cls*."""
    base = util.preloaded.orm_base

    try:
        mappers = base.manager_of_class(cls).mappers
    except (TypeError,) + NO_STATE:
        # manager_of_class returned None / class not instrumented
        mappers = {}
    name = _safe_cls_name(cls)
    if not mappers:
        return "Class '%s' is not mapped" % name
    # NOTE(review): falls through returning None when mappers exist —
    # confirm this is the intended contract (the chunk may be truncated
    # here).

View File

@@ -0,0 +1,254 @@
# orm/identity.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
import weakref
from . import util as orm_util
from .. import exc as sa_exc
from .. import util
class IdentityMap(object):
    """Base class for per-Session identity maps.

    Maps identity keys to ``InstanceState`` objects via ``_dict``.
    Mutation goes through ``add()`` / subclass-provided removal methods;
    the standard dict-style mutators are deliberately disabled.
    """

    def __init__(self):
        self._dict = {}  # identity key -> InstanceState
        self._modified = set()  # states currently flagged as modified
        self._wr = weakref.ref(self)  # handed to states as a backref

    def _kill(self):
        # once the owning Session is closed, adding new persistent
        # states becomes an error
        self._add_unpresent = _killed

    # -- read accessors -------------------------------------------------

    def keys(self):
        return self._dict.keys()

    def __len__(self):
        return len(self._dict)

    def has_key(self, key):
        return key in self

    def _dirty_states(self):
        return self._modified

    def check_modified(self):
        """return True if any InstanceStates present have been marked
        as 'modified'.

        """
        return bool(self._modified)

    # -- subclass responsibilities --------------------------------------

    def replace(self, state):
        raise NotImplementedError()

    def add(self, state):
        raise NotImplementedError()

    def copy(self):
        raise NotImplementedError()

    def _add_unpresent(self, state, key):
        """optional inlined form of add() which can assume item isn't present
        in the map"""
        self.add(state)

    # -- internal state bookkeeping -------------------------------------

    def _manage_incoming_state(self, state):
        state._instance_dict = self._wr

        if state.modified:
            self._modified.add(state)

    def _manage_removed_state(self, state):
        del state._instance_dict
        if state.modified:
            self._modified.discard(state)

    # -- disabled dict-style mutators -----------------------------------

    def update(self, dict_):
        raise NotImplementedError("IdentityMap uses add() to insert data")

    def setdefault(self, key, default=None):
        raise NotImplementedError("IdentityMap uses add() to insert data")

    def __setitem__(self, key, value):
        raise NotImplementedError("IdentityMap uses add() to insert data")

    def clear(self):
        raise NotImplementedError("IdentityMap uses remove() to remove data")

    def popitem(self):
        raise NotImplementedError("IdentityMap uses remove() to remove data")

    def pop(self, key, *args):
        raise NotImplementedError("IdentityMap uses remove() to remove data")

    def __delitem__(self, key):
        raise NotImplementedError("IdentityMap uses remove() to remove data")
class WeakInstanceDict(IdentityMap):
    """IdentityMap implementation holding states whose objects are
    referenced weakly; an entry whose object has been garbage collected
    behaves as absent.  The try/except KeyError blocks guard against the
    gc removing an entry between the membership check and the lookup.
    """

    def __getitem__(self, key):
        state = self._dict[key]
        o = state.obj()  # weakref dereference; None if collected
        if o is None:
            raise KeyError(key)
        return o

    def __contains__(self, key):
        try:
            if key in self._dict:
                state = self._dict[key]
                o = state.obj()
            else:
                return False
        except KeyError:
            return False
        else:
            return o is not None

    def contains_state(self, state):
        if state.key in self._dict:
            try:
                return self._dict[state.key] is state
            except KeyError:
                return False
        else:
            return False

    def replace(self, state):
        if state.key in self._dict:
            try:
                existing = self._dict[state.key]
            except KeyError:
                # catch gc removed the key after we just checked for it
                pass
            else:
                if existing is not state:
                    self._manage_removed_state(existing)
                else:
                    return None
        else:
            existing = None

        self._dict[state.key] = state
        self._manage_incoming_state(state)
        return existing

    def add(self, state):
        key = state.key
        # inline of self.__contains__
        if key in self._dict:
            try:
                existing_state = self._dict[key]
            except KeyError:
                # catch gc removed the key after we just checked for it
                pass
            else:
                if existing_state is not state:
                    o = existing_state.obj()
                    if o is not None:
                        raise sa_exc.InvalidRequestError(
                            "Can't attach instance "
                            "%s; another instance with key %s is already "
                            "present in this session."
                            % (orm_util.state_str(state), state.key)
                        )
                else:
                    return False

        self._dict[key] = state
        self._manage_incoming_state(state)
        return True

    def _add_unpresent(self, state, key):
        # inlined form of add() called by loading.py
        self._dict[key] = state
        state._instance_dict = self._wr

    def get(self, key, default=None):
        if key not in self._dict:
            return default
        try:
            state = self._dict[key]
        except KeyError:
            # catch gc removed the key after we just checked for it
            return default
        else:
            o = state.obj()
            if o is None:
                return default
            return o

    def items(self):
        # skip entries whose objects have been collected
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            if value is not None:
                result.append((state.key, value))
        return result

    def values(self):
        # skip entries whose objects have been collected
        values = self.all_states()
        result = []
        for state in values:
            value = state.obj()
            if value is not None:
                result.append(value)
        return result

    def __iter__(self):
        return iter(self.keys())

    if util.py2k:
        # Python 2 iterator-style accessors

        def iteritems(self):
            return iter(self.items())

        def itervalues(self):
            return iter(self.values())

    def all_states(self):
        if util.py2k:
            return self._dict.values()
        else:
            # snapshot, so callers may mutate during iteration
            return list(self._dict.values())

    def _fast_discard(self, state):
        # used by InstanceState for state being
        # GC'ed, inlines _managed_removed_state
        try:
            st = self._dict[state.key]
        except KeyError:
            # catch gc removed the key after we just checked for it
            pass
        else:
            if st is state:
                self._dict.pop(state.key, None)

    def discard(self, state):
        self.safe_discard(state)

    def safe_discard(self, state):
        # only remove if the mapped entry is this exact state
        if state.key in self._dict:
            try:
                st = self._dict[state.key]
            except KeyError:
                # catch gc removed the key after we just checked for it
                pass
            else:
                if st is state:
                    self._dict.pop(state.key, None)
                    self._manage_removed_state(state)
def _killed(state, key):
    # external function to avoid creating cycles when assigned to
    # the IdentityMap; installed as _add_unpresent by IdentityMap._kill()
    raise sa_exc.InvalidRequestError(
        "Object %s cannot be converted to 'persistent' state, as this "
        "identity map is no longer valid. Has the owning Session "
        "been closed?" % orm_util.state_str(state),
        code="lkrp",
    )

View File

@@ -0,0 +1,652 @@
# orm/instrumentation.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Defines SQLAlchemy's system of class instrumentation.
This module is usually not directly visible to user applications, but
defines a large part of the ORM's interactivity.
instrumentation.py deals with registration of end-user classes
for state tracking. It interacts closely with state.py
and attributes.py which establish per-instance and per-class-attribute
instrumentation, respectively.
The class instrumentation system can be customized on a per-class
or global basis using the :mod:`sqlalchemy.ext.instrumentation`
module, which provides the means to build and specify
alternate instrumentation forms.
.. versionchanged: 0.8
The instrumentation extension system was moved out of the
ORM and into the external :mod:`sqlalchemy.ext.instrumentation`
package. When that package is imported, it installs
itself within sqlalchemy.orm so that its more comprehensive
resolution mechanics take effect.
"""
import weakref
from . import base
from . import collections
from . import exc
from . import interfaces
from . import state
from .. import util
from ..util import HasMemoized
# sentinel stored in ClassManager.originals meaning "attribute was absent
# before instrumentation; delete it on uninstall"
DEL_ATTR = util.symbol("DEL_ATTR")
class ClassManager(HasMemoized, dict):
    """Tracks state information at the class level.

    A dict subclass mapping attribute key -> instrumented attribute;
    installed on each mapped class under ``MANAGER_ATTR``.
    """

    MANAGER_ATTR = base.DEFAULT_MANAGER_ATTR
    STATE_ATTR = base.DEFAULT_STATE_ATTR

    _state_setter = staticmethod(util.attrsetter(STATE_ATTR))

    expired_attribute_loader = None
    "previously known as deferred_scalar_loader"

    init_method = None

    factory = None
    mapper = None
    declarative_scan = None
    registry = None

    @property
    @util.deprecated(
        "1.4",
        message="The ClassManager.deferred_scalar_loader attribute is now "
        "named expired_attribute_loader",
    )
    def deferred_scalar_loader(self):
        return self.expired_attribute_loader

    @deferred_scalar_loader.setter
    @util.deprecated(
        "1.4",
        message="The ClassManager.deferred_scalar_loader attribute is now "
        "named expired_attribute_loader",
    )
    def deferred_scalar_loader(self, obj):
        self.expired_attribute_loader = obj

    def __init__(self, class_):
        self.class_ = class_
        self.info = {}
        self.new_init = None
        self.local_attrs = {}
        self.originals = {}
        self._finalized = False

        # inherit instrumented attributes from any instrumented bases
        self._bases = [
            mgr
            for mgr in [
                manager_of_class(base)
                for base in self.class_.__bases__
                if isinstance(base, type)
            ]
            if mgr is not None
        ]

        for base_ in self._bases:
            self.update(base_)

        self.dispatch._events._new_classmanager_instance(class_, self)

        # merge event dispatch from managers throughout the MRO
        for basecls in class_.__mro__:
            mgr = manager_of_class(basecls)
            if mgr is not None:
                self.dispatch._update(mgr.dispatch)

        self.manage()

        if "__del__" in class_.__dict__:
            util.warn(
                "__del__() method on class %s will "
                "cause unreachable cycles and memory leaks, "
                "as SQLAlchemy instrumentation often creates "
                "reference cycles. Please remove this method." % class_
            )

    def _update_state(
        self,
        finalize=False,
        mapper=None,
        registry=None,
        declarative_scan=None,
        expired_attribute_loader=None,
        init_method=None,
    ):
        # apply only the aspects supplied; may be called multiple times
        # as registration proceeds
        if mapper:
            self.mapper = mapper
        if registry:
            registry._add_manager(self)
        if declarative_scan:
            self.declarative_scan = weakref.ref(declarative_scan)
        if expired_attribute_loader:
            self.expired_attribute_loader = expired_attribute_loader

        if init_method:
            assert not self._finalized, (
                "class is already instrumented, "
                "init_method %s can't be applied" % init_method
            )
            self.init_method = init_method

        if not self._finalized:
            self.original_init = (
                self.init_method
                if self.init_method is not None
                and self.class_.__init__ is object.__init__
                else self.class_.__init__
            )

        if finalize and not self._finalized:
            self._finalize()

    def _finalize(self):
        if self._finalized:
            return
        self._finalized = True

        self._instrument_init()

        _instrumentation_factory.dispatch.class_instrument(self.class_)

    def __hash__(self):
        # identity-based; a ClassManager is unique per class
        return id(self)

    def __eq__(self, other):
        return other is self

    @property
    def is_mapped(self):
        return "mapper" in self.__dict__

    @HasMemoized.memoized_attribute
    def _all_key_set(self):
        return frozenset(self)

    @HasMemoized.memoized_attribute
    def _collection_impl_keys(self):
        return frozenset(
            [attr.key for attr in self.values() if attr.impl.collection]
        )

    @HasMemoized.memoized_attribute
    def _scalar_loader_impls(self):
        return frozenset(
            [
                attr.impl
                for attr in self.values()
                if attr.impl.accepts_scalar_loader
            ]
        )

    @HasMemoized.memoized_attribute
    def _loader_impls(self):
        return frozenset([attr.impl for attr in self.values()])

    @util.memoized_property
    def mapper(self):
        # raises unless self.mapper has been assigned
        raise exc.UnmappedClassError(self.class_)

    def _all_sqla_attributes(self, exclude=None):
        """return an iterator of all classbound attributes that
        implement :class:`.InspectionAttr`.

        This includes :class:`.QueryableAttribute` as well as extension
        types such as :class:`.hybrid_property` and
        :class:`.AssociationProxy`.

        """

        found = {}

        # constraints:
        # 1. yield keys in cls.__dict__ order
        # 2. if a subclass has the same key as a superclass, include that
        #    key as part of the ordering of the superclass, because an
        #    overridden key is usually installed by the mapper which is going
        #    on a different ordering
        # 3. don't use getattr() as this fires off descriptors
        for supercls in self.class_.__mro__[0:-1]:
            inherits = supercls.__mro__[1]
            for key in supercls.__dict__:
                found.setdefault(key, supercls)
                if key in inherits.__dict__:
                    continue
                val = found[key].__dict__[key]
                if (
                    isinstance(val, interfaces.InspectionAttr)
                    and val.is_attribute
                ):
                    yield key, val

    def _get_class_attr_mro(self, key, default=None):
        """return an attribute on the class without tripping it."""

        for supercls in self.class_.__mro__:
            if key in supercls.__dict__:
                return supercls.__dict__[key]
        else:
            return default

    def _attr_has_impl(self, key):
        """Return True if the given attribute is fully initialized.

        i.e. has an impl.
        """

        return key in self and self[key].impl is not None

    def _subclass_manager(self, cls):
        """Create a new ClassManager for a subclass of this ClassManager's
        class.

        This is called automatically when attributes are instrumented so that
        the attributes can be propagated to subclasses against their own
        class-local manager, without the need for mappers etc. to have already
        pre-configured managers for the full class hierarchy.   Mappers
        can post-configure the auto-generated ClassManager when needed.

        """
        return register_class(cls, finalize=False)

    def _instrument_init(self):
        self.new_init = _generate_init(self.class_, self, self.original_init)
        self.install_member("__init__", self.new_init)

    @util.memoized_property
    def _state_constructor(self):
        self.dispatch.first_init(self, self.class_)
        return state.InstanceState

    def manage(self):
        """Mark this instance as the manager for its class."""

        setattr(self.class_, self.MANAGER_ATTR, self)

    @util.hybridmethod
    def manager_getter(self):
        return _default_manager_getter

    @util.hybridmethod
    def state_getter(self):
        """Return a (instance) -> InstanceState callable.

        "state getter" callables should raise either KeyError or
        AttributeError if no InstanceState could be found for the
        instance.
        """

        return _default_state_getter

    @util.hybridmethod
    def dict_getter(self):
        return _default_dict_getter

    def instrument_attribute(self, key, inst, propagated=False):
        if propagated:
            if key in self.local_attrs:
                return  # don't override local attr with inherited attr
        else:
            self.local_attrs[key] = inst
            self.install_descriptor(key, inst)
        self._reset_memoizations()
        self[key] = inst

        for cls in self.class_.__subclasses__():
            manager = self._subclass_manager(cls)
            manager.instrument_attribute(key, inst, True)

    def subclass_managers(self, recursive):
        for cls in self.class_.__subclasses__():
            mgr = manager_of_class(cls)
            if mgr is not None and mgr is not self:
                yield mgr
                if recursive:
                    for m in mgr.subclass_managers(True):
                        yield m

    def post_configure_attribute(self, key):
        _instrumentation_factory.dispatch.attribute_instrument(
            self.class_, key, self[key]
        )

    def uninstrument_attribute(self, key, propagated=False):
        if key not in self:
            return
        if propagated:
            if key in self.local_attrs:
                return  # don't get rid of local attr
        else:
            del self.local_attrs[key]
            self.uninstall_descriptor(key)
        self._reset_memoizations()
        del self[key]
        for cls in self.class_.__subclasses__():
            manager = manager_of_class(cls)
            if manager:
                manager.uninstrument_attribute(key, True)

    def unregister(self):
        """remove all instrumentation established by this ClassManager."""

        for key in list(self.originals):
            self.uninstall_member(key)

        self.mapper = self.dispatch = self.new_init = None
        self.info.clear()

        for key in list(self):
            if key in self.local_attrs:
                self.uninstrument_attribute(key)

        if self.MANAGER_ATTR in self.class_.__dict__:
            delattr(self.class_, self.MANAGER_ATTR)

    def install_descriptor(self, key, inst):
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError(
                "%r: requested attribute name conflicts with "
                "instrumentation attribute of the same name." % key
            )
        setattr(self.class_, key, inst)

    def uninstall_descriptor(self, key):
        delattr(self.class_, key)

    def install_member(self, key, implementation):
        if key in (self.STATE_ATTR, self.MANAGER_ATTR):
            raise KeyError(
                "%r: requested attribute name conflicts with "
                "instrumentation attribute of the same name." % key
            )
        # remember the pre-instrumentation value (or DEL_ATTR if absent)
        # so uninstall_member can restore it
        self.originals.setdefault(key, self.class_.__dict__.get(key, DEL_ATTR))
        setattr(self.class_, key, implementation)

    def uninstall_member(self, key):
        original = self.originals.pop(key, None)
        if original is not DEL_ATTR:
            setattr(self.class_, key, original)
        else:
            delattr(self.class_, key)

    def instrument_collection_class(self, key, collection_class):
        return collections.prepare_instrumentation(collection_class)

    def initialize_collection(self, key, state, factory):
        user_data = factory()
        adapter = collections.CollectionAdapter(
            self.get_impl(key), state, user_data
        )
        return adapter, user_data

    def is_instrumented(self, key, search=False):
        if search:
            return key in self
        else:
            return key in self.local_attrs

    def get_impl(self, key):
        return self[key].impl

    @property
    def attributes(self):
        return iter(self.values())

    # InstanceState management

    def new_instance(self, state=None):
        # bypass __init__ entirely; state is attached directly
        instance = self.class_.__new__(self.class_)
        if state is None:
            state = self._state_constructor(instance, self)
        self._state_setter(instance, state)
        return instance

    def setup_instance(self, instance, state=None):
        if state is None:
            state = self._state_constructor(instance, self)
        self._state_setter(instance, state)

    def teardown_instance(self, instance):
        delattr(instance, self.STATE_ATTR)

    def _serialize(self, state, state_dict):
        return _SerializeManager(state, state_dict)

    def _new_state_if_none(self, instance):
        """Install a default InstanceState if none is present.

        A private convenience method used by the __init__ decorator.

        """
        if hasattr(instance, self.STATE_ATTR):
            return False
        elif self.class_ is not instance.__class__ and self.is_mapped:
            # this will create a new ClassManager for the
            # subclass, without a mapper.  This is likely a
            # user error situation but allow the object
            # to be constructed, so that it is usable
            # in a non-ORM context at least.
            return self._subclass_manager(
                instance.__class__
            )._new_state_if_none(instance)
        else:
            state = self._state_constructor(instance, self)
            self._state_setter(instance, state)
            return state

    def has_state(self, instance):
        return hasattr(instance, self.STATE_ATTR)

    def has_parent(self, state, key, optimistic=False):
        """TODO"""
        return self.get_impl(key).hasparent(state, optimistic=optimistic)

    def __bool__(self):
        """All ClassManagers are non-zero regardless of attribute state."""
        return True

    __nonzero__ = __bool__

    def __repr__(self):
        return "<%s of %r at %x>" % (
            self.__class__.__name__,
            self.class_,
            id(self),
        )
class _SerializeManager(object):
    """Provide serialization of a :class:`.ClassManager`.

    The :class:`.InstanceState` uses ``__init__()`` on serialize
    and ``__call__()`` on deserialize.

    """

    def __init__(self, state, d):
        self.class_ = state.class_
        manager = state.manager
        manager.dispatch.pickle(state, d)

    def __call__(self, state, inst, state_dict):
        # look up the live manager for the class in this process
        state.manager = manager = manager_of_class(self.class_)
        if manager is None:
            raise exc.UnmappedInstanceError(
                inst,
                "Cannot deserialize object of type %r - "
                "no mapper() has "
                "been configured for this class within the current "
                "Python process!" % self.class_,
            )
        elif manager.is_mapped and not manager.mapper.configured:
            manager.mapper._check_configure()

        # setup _sa_instance_state ahead of time so that
        # unpickle events can access the object normally.
        # see [ticket:2362]
        if inst is not None:
            manager.setup_instance(inst, state)
        manager.dispatch.unpickle(state, state_dict)
class InstrumentationFactory(object):
    """Factory for new ClassManager instances."""

    def create_manager_for_cls(self, class_):
        """Create, register and return a ClassManager for *class_*,
        which must not already be instrumented."""
        assert class_ is not None
        assert manager_of_class(class_) is None

        # give a more complicated subclass
        # a chance to do what it wants here
        manager, factory = self._locate_extended_factory(class_)

        if factory is None:
            factory = ClassManager
            manager = ClassManager(class_)

        self._check_conflicts(class_, factory)
        manager.factory = factory
        return manager

    def _locate_extended_factory(self, class_):
        """Overridden by a subclass to do an extended lookup."""
        return None, None

    def _check_conflicts(self, class_, factory):
        """Overridden by a subclass to test for conflicting factories."""
        return

    def unregister(self, class_):
        """Tear down the manager for *class_* and emit the
        class_uninstrument event."""
        manager_of_class(class_).unregister()
        self.dispatch.class_uninstrument(class_)
# this attribute is replaced by sqlalchemy.ext.instrumentation
# when imported.
_instrumentation_factory = InstrumentationFactory()

# these attributes are replaced by sqlalchemy.ext.instrumentation
# when a non-standard InstrumentationManager class is first
# used to instrument a class.
instance_state = _default_state_getter = base.instance_state

instance_dict = _default_dict_getter = base.instance_dict

manager_of_class = _default_manager_getter = base.manager_of_class
def register_class(
    class_,
    finalize=True,
    mapper=None,
    registry=None,
    declarative_scan=None,
    expired_attribute_loader=None,
    init_method=None,
):
    """Register class instrumentation.

    Returns the existing or newly created class manager.
    """
    existing = manager_of_class(class_)
    manager = (
        existing
        if existing is not None
        else _instrumentation_factory.create_manager_for_cls(class_)
    )

    # push the registration-time state onto the manager whether it is
    # new or pre-existing
    manager._update_state(
        mapper=mapper,
        registry=registry,
        declarative_scan=declarative_scan,
        expired_attribute_loader=expired_attribute_loader,
        init_method=init_method,
        finalize=finalize,
    )

    return manager
def unregister_class(class_):
    """Unregister class instrumentation.

    Delegates to the module-level instrumentation factory, which removes
    the ClassManager and emits the uninstrument event.
    """
    _instrumentation_factory.unregister(class_)
def is_instrumented(instance, key):
    """Return True if the given attribute on the given instance is
    instrumented by the attributes package.

    This function may be used regardless of instrumentation
    applied directly to the class, i.e. no descriptors are required.
    """
    manager = manager_of_class(instance.__class__)
    return manager.is_instrumented(key, search=True)
def _generate_init(class_, class_manager, original_init):
    """Build an __init__ decorator that triggers ClassManager events.

    Generates (via ``exec``) a replacement ``__init__`` whose signature
    mirrors ``original_init`` exactly; the generated function initializes
    ORM state before delegating to the user's constructor.
    """
    # TODO: we should use the ClassManager's notion of the
    # original '__init__' method, once ClassManager is fixed
    # to always reference that.
    if original_init is None:
        original_init = class_.__init__
    # Go through some effort here and don't change the user's __init__
    # calling signature, including the unlikely case that it has
    # a return value.
    # FIXME: need to juggle local names to avoid constructor argument
    # clashes.
    func_body = """\
def __init__(%(apply_pos)s):
    new_state = class_manager._new_state_if_none(%(self_arg)s)
    if new_state:
        return new_state._initialize_instance(%(apply_kw)s)
    else:
        return original_init(%(apply_kw)s)
"""
    # %-interpolate the original signature into the template above
    func_vars = util.format_argspec_init(original_init, grouped=False)
    func_text = func_body % func_vars
    if util.py2k:
        # Python 2: defaults live on the underlying function object
        func = getattr(original_init, "im_func", original_init)
        func_defaults = getattr(func, "func_defaults", None)
    else:
        func_defaults = getattr(original_init, "__defaults__", None)
        func_kw_defaults = getattr(original_init, "__kwdefaults__", None)
    # exec in a copy of locals() so the generated function closes over
    # class_manager and original_init via its globals
    env = locals().copy()
    env["__name__"] = __name__
    exec(func_text, env)
    __init__ = env["__init__"]
    __init__.__doc__ = original_init.__doc__
    # keep a handle on the wrapped constructor for introspection
    __init__._sa_original_init = original_init
    # carry over default argument values so the signature behaves the same
    if func_defaults:
        __init__.__defaults__ = func_defaults
    if not util.py2k and func_kw_defaults:
        __init__.__kwdefaults__ = func_kw_defaults
    return __init__

View File

@@ -0,0 +1,978 @@
# orm/interfaces.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""
Contains various base classes used throughout the ORM.
Defines some key base classes prominent within the internals.
This module and the classes within are mostly private, though some attributes
are exposed when inspecting mappings.
"""
from __future__ import absolute_import
import collections
from . import exc as orm_exc
from . import path_registry
from .base import _MappedAttribute # noqa
from .base import EXT_CONTINUE
from .base import EXT_SKIP
from .base import EXT_STOP
from .base import InspectionAttr # noqa
from .base import InspectionAttrInfo # noqa
from .base import MANYTOMANY
from .base import MANYTOONE
from .base import NOT_EXTENSION
from .base import ONETOMANY
from .. import inspect
from .. import inspection
from .. import util
from ..sql import operators
from ..sql import roles
from ..sql import visitors
from ..sql.base import ExecutableOption
from ..sql.traversals import HasCacheKey
__all__ = (
"EXT_CONTINUE",
"EXT_STOP",
"EXT_SKIP",
"ONETOMANY",
"MANYTOMANY",
"MANYTOONE",
"NOT_EXTENSION",
"LoaderStrategy",
"MapperOption",
"LoaderOption",
"MapperProperty",
"PropComparator",
"StrategizedProperty",
)
class ORMStatementRole(roles.StatementRole):
    # role name used in coercion error messages when an argument cannot
    # act as an executable statement
    _role_name = (
        "Executable SQL or text() construct, including ORM " "aware objects"
    )
class ORMColumnsClauseRole(roles.ColumnsClauseRole):
    # role name used in coercion error messages for columns-clause arguments
    _role_name = "ORM mapped entity, aliased entity, or Column expression"
class ORMEntityColumnsClauseRole(ORMColumnsClauseRole):
    # narrower variant of ORMColumnsClauseRole: entities only, no columns
    _role_name = "ORM mapped or aliased entity"
class ORMFromClauseRole(roles.StrictFromClauseRole):
    # role name used in coercion error messages for FROM-clause arguments
    _role_name = "ORM mapped entity, aliased entity, or FROM expression"
@inspection._self_inspects
class MapperProperty(
    HasCacheKey, _MappedAttribute, InspectionAttr, util.MemoizedSlots
):
    """Represent a particular class attribute mapped by :class:`_orm.Mapper`.
    The most common occurrences of :class:`.MapperProperty` are the
    mapped :class:`_schema.Column`, which is represented in a mapping as
    an instance of :class:`.ColumnProperty`,
    and a reference to another class produced by :func:`_orm.relationship`,
    represented in the mapping as an instance of
    :class:`.RelationshipProperty`.
    """
    __slots__ = (
        "_configure_started",
        "_configure_finished",
        "parent",
        "key",
        "info",
    )
    # cache key is derived from the owning mapper ("parent") plus the
    # attribute name ("key")
    _cache_key_traversal = [
        ("parent", visitors.ExtendedInternalTraversal.dp_has_cache_key),
        ("key", visitors.ExtendedInternalTraversal.dp_string),
    ]
    cascade = frozenset()
    """The set of 'cascade' attribute names.
    This collection is checked before the 'cascade_iterator' method is called.
    The collection typically only applies to a RelationshipProperty.
    """
    is_property = True
    """Part of the InspectionAttr interface; states this object is a
    mapper property.
    """
    @property
    def _links_to_entity(self):
        """True if this MapperProperty refers to a mapped entity.
        Should only be True for RelationshipProperty, False for all others.
        """
        raise NotImplementedError()
    def _memoized_attr_info(self):
        """Info dictionary associated with the object, allowing user-defined
        data to be associated with this :class:`.InspectionAttr`.
        The dictionary is generated when first accessed.  Alternatively,
        it can be specified as a constructor argument to the
        :func:`.column_property`, :func:`_orm.relationship`, or
        :func:`.composite`
        functions.
        .. versionchanged:: 1.0.0 :attr:`.MapperProperty.info` is also
           available on extension types via the
           :attr:`.InspectionAttrInfo.info` attribute, so that it can apply
           to a wider variety of ORM and extension constructs.
        .. seealso::
            :attr:`.QueryableAttribute.info`
            :attr:`.SchemaItem.info`
        """
        # invoked by util.MemoizedSlots on first access of .info
        return {}
    def setup(self, context, query_entity, path, adapter, **kwargs):
        """Called by Query for the purposes of constructing a SQL statement.
        Each MapperProperty associated with the target mapper processes the
        statement referenced by the query context, adding columns and/or
        criterion as appropriate.
        """
    def create_row_processor(
        self, context, query_entity, path, mapper, result, adapter, populators
    ):
        """Produce row processing functions and append to the given
        set of populators lists.
        """
    def cascade_iterator(
        self, type_, state, dict_, visited_states, halt_on=None
    ):
        """Iterate through instances related to the given instance for
        a particular 'cascade', starting with this MapperProperty.
        Return an iterator of 3-tuples (instance, mapper, state).
        Note that the 'cascade' collection on this MapperProperty is
        checked first for the given type before cascade_iterator is called.
        This method typically only applies to RelationshipProperty.
        """
        return iter(())
    def set_parent(self, parent, init):
        """Set the parent mapper that references this MapperProperty.
        This method is overridden by some subclasses to perform extra
        setup when the mapper is first known.
        """
        self.parent = parent
    def instrument_class(self, mapper):
        """Hook called by the Mapper to the property to initiate
        instrumentation of the class attribute managed by this
        MapperProperty.
        The MapperProperty here will typically call out to the
        attributes module to set up an InstrumentedAttribute.
        This step is the first of two steps to set up an InstrumentedAttribute,
        and is called early in the mapper setup process.
        The second step is typically the init_class_attribute step,
        called from StrategizedProperty via the post_instrument_class()
        hook.  This step assigns additional state to the InstrumentedAttribute
        (specifically the "impl") which has been determined after the
        MapperProperty has determined what kind of persistence
        management it needs to do (e.g. scalar, object, collection, etc).
        """
    def __init__(self):
        self._configure_started = False
        self._configure_finished = False
    def init(self):
        """Called after all mappers are created to assemble
        relationships between mappers and perform other post-mapper-creation
        initialization steps.
        """
        self._configure_started = True
        self.do_init()
        self._configure_finished = True
    @property
    def class_attribute(self):
        """Return the class-bound descriptor corresponding to this
        :class:`.MapperProperty`.
        This is basically a ``getattr()`` call::
            return getattr(self.parent.class_, self.key)
        I.e. if this :class:`.MapperProperty` were named ``addresses``,
        and the class to which it is mapped is ``User``, this sequence
        is possible::
            >>> from sqlalchemy import inspect
            >>> mapper = inspect(User)
            >>> addresses_property = mapper.attrs.addresses
            >>> addresses_property.class_attribute is User.addresses
            True
            >>> User.addresses.property is addresses_property
            True
        """
        return getattr(self.parent.class_, self.key)
    def do_init(self):
        """Perform subclass-specific initialization post-mapper-creation
        steps.
        This is a template method called by the ``MapperProperty``
        object's init() method.
        """
    def post_instrument_class(self, mapper):
        """Perform instrumentation adjustments that need to occur
        after init() has completed.
        The given Mapper is the Mapper invoking the operation, which
        may not be the same Mapper as self.parent in an inheritance
        scenario; however, Mapper will always at least be a sub-mapper of
        self.parent.
        This method is typically used by StrategizedProperty, which delegates
        it to LoaderStrategy.init_class_attribute() to perform final setup
        on the class-bound InstrumentedAttribute.
        """
    def merge(
        self,
        session,
        source_state,
        source_dict,
        dest_state,
        dest_dict,
        load,
        _recursive,
        _resolve_conflict_map,
    ):
        """Merge the attribute represented by this ``MapperProperty``
        from source to destination object.
        """
    def __repr__(self):
        return "<%s at 0x%x; %s>" % (
            self.__class__.__name__,
            id(self),
            getattr(self, "key", "no key"),
        )
@inspection._self_inspects
class PropComparator(operators.ColumnOperators):
    r"""Defines SQL operators for :class:`.MapperProperty` objects.
    SQLAlchemy allows for operators to
    be redefined at both the Core and ORM level.  :class:`.PropComparator`
    is the base class of operator redefinition for ORM-level operations,
    including those of :class:`.ColumnProperty`,
    :class:`.RelationshipProperty`, and :class:`.CompositeProperty`.
    .. note:: With the advent of Hybrid properties introduced in SQLAlchemy
       0.7, as well as Core-level operator redefinition in
       SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator`
       instances is extremely rare.  See :ref:`hybrids_toplevel` as well
       as :ref:`types_operators`.
    User-defined subclasses of :class:`.PropComparator` may be created. The
    built-in Python comparison and math operator methods, such as
    :meth:`.operators.ColumnOperators.__eq__`,
    :meth:`.operators.ColumnOperators.__lt__`, and
    :meth:`.operators.ColumnOperators.__add__`, can be overridden to provide
    new operator behavior. The custom :class:`.PropComparator` is passed to
    the :class:`.MapperProperty` instance via the ``comparator_factory``
    argument. In each case,
    the appropriate subclass of :class:`.PropComparator` should be used::
        # definition of custom PropComparator subclasses
        from sqlalchemy.orm.properties import \
                                ColumnProperty,\
                                CompositeProperty,\
                                RelationshipProperty
        class MyColumnComparator(ColumnProperty.Comparator):
            def __eq__(self, other):
                return self.__clause_element__() == other
        class MyRelationshipComparator(RelationshipProperty.Comparator):
            def any(self, expression):
                "define the 'any' operation"
                # ...
        class MyCompositeComparator(CompositeProperty.Comparator):
            def __gt__(self, other):
                "redefine the 'greater than' operation"
                return sql.and_(*[a>b for a, b in
                                  zip(self.__clause_element__().clauses,
                                      other.__composite_values__())])
        # application of custom PropComparator subclasses
        from sqlalchemy.orm import column_property, relationship, composite
        from sqlalchemy import Column, String
        class SomeMappedClass(Base):
            some_column = column_property(Column("some_column", String),
                                comparator_factory=MyColumnComparator)
            some_relationship = relationship(SomeOtherClass,
                                comparator_factory=MyRelationshipComparator)
            some_composite = composite(
                    Column("a", String), Column("b", String),
                    comparator_factory=MyCompositeComparator
                )
    Note that for column-level operator redefinition, it's usually
    simpler to define the operators at the Core level, using the
    :attr:`.TypeEngine.comparator_factory` attribute.  See
    :ref:`types_operators` for more detail.
    .. seealso::
        :class:`.ColumnProperty.Comparator`
        :class:`.RelationshipProperty.Comparator`
        :class:`.CompositeProperty.Comparator`
        :class:`.ColumnOperators`
        :ref:`types_operators`
        :attr:`.TypeEngine.comparator_factory`
    """
    __slots__ = "prop", "property", "_parententity", "_adapt_to_entity"
    __visit_name__ = "orm_prop_comparator"
    def __init__(
        self,
        prop,
        parentmapper,
        adapt_to_entity=None,
    ):
        # ``prop`` and ``property`` are the same object under two names
        self.prop = self.property = prop
        self._parententity = adapt_to_entity or parentmapper
        self._adapt_to_entity = adapt_to_entity
    def __clause_element__(self):
        # subclasses must supply the SQL expression form of this attribute
        raise NotImplementedError("%r" % self)
    def _bulk_update_tuples(self, value):
        """Receive a SQL expression that represents a value in the SET
        clause of an UPDATE statement.
        Return a tuple that can be passed to a :class:`_expression.Update`
        construct.
        """
        return [(self.__clause_element__(), value)]
    def adapt_to_entity(self, adapt_to_entity):
        """Return a copy of this PropComparator which will use the given
        :class:`.AliasedInsp` to produce corresponding expressions.
        """
        return self.__class__(self.prop, self._parententity, adapt_to_entity)
    @property
    def _parentmapper(self):
        """legacy; this is renamed to _parententity to be
        compatible with QueryableAttribute."""
        return inspect(self._parententity).mapper
    @property
    def _propagate_attrs(self):
        # this suits the case in coercions where we don't actually
        # call ``__clause_element__()`` but still need to get
        # resolved._propagate_attrs.  See #6558.
        return util.immutabledict(
            {
                "compile_state_plugin": "orm",
                "plugin_subject": self._parentmapper,
            }
        )
    @property
    def adapter(self):
        """Produce a callable that adapts column expressions
        to suit an aliased version of this comparator.
        """
        if self._adapt_to_entity is None:
            return None
        else:
            return self._adapt_to_entity._adapt_element
    @property
    def info(self):
        return self.property.info
    @staticmethod
    def any_op(a, b, **kwargs):
        return a.any(b, **kwargs)
    @staticmethod
    def has_op(a, b, **kwargs):
        return a.has(b, **kwargs)
    @staticmethod
    def of_type_op(a, class_):
        return a.of_type(class_)
    def of_type(self, class_):
        r"""Redefine this object in terms of a polymorphic subclass,
        :func:`_orm.with_polymorphic` construct, or :func:`_orm.aliased`
        construct.
        Returns a new PropComparator from which further criterion can be
        evaluated.
        e.g.::
            query.join(Company.employees.of_type(Engineer)).\
               filter(Engineer.name=='foo')
        :param \class_: a class or mapper indicating that criterion will be
            against this specific subclass.
        .. seealso::
            :ref:`queryguide_join_onclause` - in the :ref:`queryguide_toplevel`
            :ref:`inheritance_of_type`
        """
        return self.operate(PropComparator.of_type_op, class_)
    def and_(self, *criteria):
        """Add additional criteria to the ON clause that's represented by this
        relationship attribute.
        E.g.::
            stmt = select(User).join(
                User.addresses.and_(Address.email_address != 'foo')
            )
            stmt = select(User).options(
                joinedload(User.addresses.and_(Address.email_address != 'foo'))
            )
        .. versionadded:: 1.4
        .. seealso::
            :ref:`orm_queryguide_join_on_augmented`
            :ref:`loader_option_criteria`
            :func:`.with_loader_criteria`
        """
        return self.operate(operators.and_, *criteria)
    def any(self, criterion=None, **kwargs):
        r"""Return true if this collection contains any member that meets the
        given criterion.
        The usual implementation of ``any()`` is
        :meth:`.RelationshipProperty.Comparator.any`.
        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.
        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.
        """
        return self.operate(PropComparator.any_op, criterion, **kwargs)
    def has(self, criterion=None, **kwargs):
        r"""Return true if this element references a member which meets the
        given criterion.
        The usual implementation of ``has()`` is
        :meth:`.RelationshipProperty.Comparator.has`.
        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.
        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.
        """
        return self.operate(PropComparator.has_op, criterion, **kwargs)
class StrategizedProperty(MapperProperty):
    """A MapperProperty which uses selectable strategies to affect
    loading behavior.
    There is a single strategy selected by default.  Alternate
    strategies can be selected at Query time through the usage of
    ``StrategizedOption`` objects via the Query.options() method.
    The mechanics of StrategizedProperty are used for every Query
    invocation for every mapped attribute participating in that Query,
    to determine first how the attribute will be rendered in SQL
    and secondly how the attribute will retrieve a value from a result
    row and apply it to a mapped object.  The routines here are very
    performance-critical.
    """
    __slots__ = (
        "_strategies",
        "strategy",
        "_wildcard_token",
        "_default_path_loader_key",
    )
    inherit_cache = True
    strategy_wildcard_key = None
    def _memoized_attr__wildcard_token(self):
        # note the trailing comma: this is a one-element tuple, e.g.
        # ("relationship:*",)
        return (
            "%s:%s"
            % (self.strategy_wildcard_key, path_registry._WILDCARD_TOKEN),
        )
    def _memoized_attr__default_path_loader_key(self):
        return (
            "loader",
            (
                "%s:%s"
                % (self.strategy_wildcard_key, path_registry._DEFAULT_TOKEN),
            ),
        )
    def _get_context_loader(self, context, path):
        """Locate the loader option in effect for this property along
        ``path``, or None if no option applies."""
        load = None
        # search among: exact match, "attr.*", "default" strategy
        # if any.
        for path_key in (
            search_path._loader_key,
            search_path._wildcard_path_loader_key,
            search_path._default_path_loader_key,
        ):
            if path_key in context.attributes:
                load = context.attributes[path_key]
                break
        return load
    def _get_strategy(self, key):
        """Return the LoaderStrategy instance for ``key``, creating and
        caching it on first use."""
        try:
            return self._strategies[key]
        except KeyError:
            pass
        # run outside to prevent transfer of exception context
        cls = self._strategy_lookup(self, *key)
        # this previously was setting self._strategies[cls], that's
        # a bad idea; should use strategy key at all times because every
        # strategy has multiple keys at this point
        self._strategies[key] = strategy = cls(self, key)
        return strategy
    def setup(self, context, query_entity, path, adapter, **kwargs):
        # choose the per-query strategy if an option applies, else the
        # configured default, then delegate statement setup to it
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        strat.setup_query(
            context, query_entity, path, loader, adapter, **kwargs
        )
    def create_row_processor(
        self, context, query_entity, path, mapper, result, adapter, populators
    ):
        # same strategy selection as setup(), delegated to row processing
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        strat.create_row_processor(
            context,
            query_entity,
            path,
            loader,
            mapper,
            result,
            adapter,
            populators,
        )
    def do_init(self):
        self._strategies = {}
        self.strategy = self._get_strategy(self.strategy_key)
    def post_instrument_class(self, mapper):
        if (
            not self.parent.non_primary
            and not mapper.class_manager._attr_has_impl(self.key)
        ):
            self.strategy.init_class_attribute(mapper)
    # registry of {property class: {strategy key: strategy class}},
    # populated by the strategy_for() decorator
    _all_strategies = collections.defaultdict(dict)
    @classmethod
    def strategy_for(cls, **kw):
        """Class decorator registering a LoaderStrategy subclass under the
        strategy key derived from ``**kw``."""
        def decorate(dec_cls):
            # ensure each subclass of the strategy has its
            # own _strategy_keys collection
            if "_strategy_keys" not in dec_cls.__dict__:
                dec_cls._strategy_keys = []
            key = tuple(sorted(kw.items()))
            cls._all_strategies[cls][key] = dec_cls
            dec_cls._strategy_keys.append(key)
            return dec_cls
        return decorate
    @classmethod
    def _strategy_lookup(cls, requesting_property, *key):
        # NOTE(review): attribute access for its side effect — presumably
        # forces configuration of polymorphic mappers; confirm
        requesting_property.parent._with_polymorphic_mappers
        for prop_cls in cls.__mro__:
            if prop_cls in cls._all_strategies:
                strategies = cls._all_strategies[prop_cls]
                try:
                    return strategies[key]
                except KeyError:
                    pass
        # lookup failed; scan all registrations to produce a helpful error
        for property_type, strats in cls._all_strategies.items():
            if key in strats:
                intended_property_type = property_type
                actual_strategy = strats[key]
                break
        else:
            intended_property_type = None
            actual_strategy = None
        raise orm_exc.LoaderStrategyException(
            cls,
            requesting_property,
            intended_property_type,
            actual_strategy,
            key,
        )
class ORMOption(ExecutableOption):
    """Base class for option objects that are passed to ORM queries.
    These options may be consumed by :meth:`.Query.options`,
    :meth:`.Select.options`, or in a more general sense by any
    :meth:`.Executable.options` method.   They are interpreted at
    statement compile time or execution time in modern use.  The
    deprecated :class:`.MapperOption` is consumed at ORM query construction
    time.
    .. versionadded:: 1.4
    """
    __slots__ = ()
    _is_legacy_option = False
    propagate_to_loaders = False
    """if True, indicate this option should be carried along
    to "secondary" SELECT statements that occur for relationship
    lazy loaders as well as attribute load / refresh operations.
    """
    # classification flags overridden by the subclass hierarchy below
    _is_compile_state = False
    _is_criteria_option = False
    _is_strategy_option = False
class CompileStateOption(HasCacheKey, ORMOption):
    """base for :class:`.ORMOption` classes that affect the compilation of
    a SQL query and therefore need to be part of the cache key.
    .. note::  :class:`.CompileStateOption` is generally non-public and
       should not be used as a base class for user-defined options; instead,
       use :class:`.UserDefinedOption`, which is easier to use as it does not
       interact with ORM compilation internals or caching.
    :class:`.CompileStateOption` defines an internal attribute
    ``_is_compile_state=True`` which has the effect of the ORM compilation
    routines for SELECT and other statements will call upon these options when
    a SQL string is being compiled. As such, these classes implement
    :class:`.HasCacheKey` and need to provide robust ``_cache_key_traversal``
    structures.
    The :class:`.CompileStateOption` class is used to implement the ORM
    :class:`.LoaderOption` and :class:`.CriteriaOption` classes.
    .. versionadded:: 1.4.28
    """
    _is_compile_state = True
    def process_compile_state(self, compile_state):
        """Apply a modification to a given :class:`.CompileState`."""
    def process_compile_state_replaced_entities(
        self, compile_state, mapper_entities
    ):
        """Apply a modification to a given :class:`.CompileState`,
        given entities that were replaced by with_only_columns() or
        with_entities().
        .. versionadded:: 1.4.19
        """
class LoaderOption(CompileStateOption):
    """Describe a loader modification to an ORM statement at compilation time.
    .. versionadded:: 1.4
    """
    def process_compile_state_replaced_entities(
        self, compile_state, mapper_entities
    ):
        """Apply a modification to a given :class:`.CompileState`,
        given entities that were replaced by with_only_columns() or
        with_entities().
        .. versionadded:: 1.4.19
        """
        # by default the replaced-entities variant simply defers to the
        # plain compile-state hook
        self.process_compile_state(compile_state)
    def process_compile_state(self, compile_state):
        """Apply a modification to a given :class:`.CompileState`."""
class CriteriaOption(CompileStateOption):
    """Describe a WHERE criteria modification to an ORM statement at
    compilation time.
    .. versionadded:: 1.4
    """
    _is_criteria_option = True
    def process_compile_state(self, compile_state):
        """Apply a modification to a given :class:`.CompileState`."""
    def get_global_criteria(self, attributes):
        """update additional entity criteria options in the given
        attributes dictionary.
        """
class UserDefinedOption(ORMOption):
    """Base class for a user-defined option that can be consumed from the
    :meth:`.SessionEvents.do_orm_execute` event hook.
    """
    _is_legacy_option = False
    propagate_to_loaders = False
    """if True, indicate this option should be carried along
    to "secondary" Query objects produced during lazy loads
    or refresh operations.
    """
    def __init__(self, payload=None):
        # arbitrary user data retrieved later from the event hook
        self.payload = payload
@util.deprecated_cls(
    "1.4",
    # message fixes: closing backtick added to the :class: role, and the
    # hook name corrected to the real event, SessionEvents.do_orm_execute
    # (the name UserDefinedOption's docstring also references);
    # "before_orm_execute" does not exist
    "The :class:`.MapperOption` class is deprecated and will be removed "
    "in a future release. For "
    "modifications to queries on a per-execution basis, use the "
    ":class:`.UserDefinedOption` class to establish state within a "
    ":class:`.Query` or other Core statement, then use the "
    ":meth:`.SessionEvents.do_orm_execute` hook to consume them.",
    constructor=None,
)
class MapperOption(ORMOption):
    """Describe a modification to a Query"""
    _is_legacy_option = True
    propagate_to_loaders = False
    """if True, indicate this option should be carried along
    to "secondary" Query objects produced during lazy loads
    or refresh operations.
    """
    def process_query(self, query):
        """Apply a modification to the given :class:`_query.Query`."""
    def process_query_conditionally(self, query):
        """same as process_query(), except that this option may not
        apply to the given query.
        This is typically applied during a lazy load or scalar refresh
        operation to propagate options stated in the original Query to the
        new Query being used for the load.  It occurs for those options that
        specify propagate_to_loaders=True.
        """
        # default behavior: apply unconditionally
        self.process_query(query)
class LoaderStrategy(object):
    """Describe the loading behavior of a StrategizedProperty object.

    A ``LoaderStrategy`` participates in the query process at three
    points:

    * class instrumentation time — it configures the behavior of the
      ``InstrumentedAttribute`` placed on the class, e.g. installing a
      class-level callable that emits a SELECT on first access (lazy
      loading);

    * statement construction time — it may alter the SQL being produced,
      e.g. a column strategy adds its column to the selected columns,
      while a joined eager loader contributes JOIN clauses;

    * result fetching time — it produces "row processor" callables that
      populate an attribute on a mapped instance from a result row.
    """

    __slots__ = (
        "parent_property",
        "is_class_level",
        "parent",
        "key",
        "strategy_key",
        "strategy_opts",
    )

    def __init__(self, parent, strategy_key):
        """Associate this strategy with its owning property.

        :param parent: the StrategizedProperty this strategy serves.
        :param strategy_key: tuple of (name, value) pairs identifying
         the strategy; also retained as a dict in ``strategy_opts``.
        """
        self.parent_property = parent
        # mirror the owning property's mapper and attribute key locally
        self.parent = parent.parent
        self.key = parent.key
        # becomes True once installed as the class-level loader
        self.is_class_level = False
        self.strategy_key = strategy_key
        self.strategy_opts = dict(strategy_key)

    def init_class_attribute(self, mapper):
        """Finalize the class-bound InstrumentedAttribute; no-op here."""
        pass

    def setup_query(
        self, compile_state, query_entity, path, loadopt, adapter, **kwargs
    ):
        """Establish column and other state for a given QueryContext.

        Fulfills the contract of MapperProperty.setup();
        StrategizedProperty delegates its setup() directly here.
        """

    def create_row_processor(
        self,
        context,
        query_entity,
        path,
        loadopt,
        mapper,
        result,
        adapter,
        populators,
    ):
        """Establish row processing functions for a given QueryContext.

        Fulfills the contract of MapperProperty.create_row_processor();
        StrategizedProperty delegates that method directly here.
        """

    def __str__(self):
        return str(self.parent_property)

File diff suppressed because it is too large Load Diff

3658
lib/sqlalchemy/orm/mapper.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,519 @@
# orm/path_registry.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Path tracking utilities, representing mapper graph traversals.
"""
from itertools import chain
import logging
from . import base as orm_base
from .. import exc
from .. import inspection
from .. import util
from ..sql import visitors
from ..sql.traversals import HasCacheKey
log = logging.getLogger(__name__)
def _unreduce_path(path):
    # pickle support: reconstruct a PathRegistry from its serialized form
    # (counterpart of PathRegistry.__reduce__)
    return PathRegistry.deserialize(path)
# token suffix matching any attribute, e.g. "relationship:*"
_WILDCARD_TOKEN = "*"
# token suffix naming the default loader strategy
_DEFAULT_TOKEN = "_sa_default"
class PathRegistry(HasCacheKey):
"""Represent query load paths and registry functions.
Basically represents structures like:
(<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
These structures are generated by things like
query options (joinedload(), subqueryload(), etc.) and are
used to compose keys stored in the query._attributes dictionary
for various options.
They are then re-composed at query compile/result row time as
the query is formed and as rows are fetched, where they again
serve to compose keys to look up options in the context.attributes
dictionary, which is copied from query._attributes.
The path structure has a limited amount of caching, where each
"root" ultimately pulls from a fixed registry associated with
the first mapper, that also contains elements for each of its
property keys. However paths longer than two elements, which
are the exception rather than the rule, are generated on an
as-needed basis.
"""
__slots__ = ()
is_token = False
is_root = False
_cache_key_traversal = [
("path", visitors.ExtendedInternalTraversal.dp_has_cache_key_list)
]
def __eq__(self, other):
try:
return other is not None and self.path == other._path_for_compare
except AttributeError:
util.warn(
"Comparison of PathRegistry to %r is not supported"
% (type(other))
)
return False
def __ne__(self, other):
try:
return other is None or self.path != other._path_for_compare
except AttributeError:
util.warn(
"Comparison of PathRegistry to %r is not supported"
% (type(other))
)
return True
@property
def _path_for_compare(self):
return self.path
def set(self, attributes, key, value):
log.debug("set '%s' on path '%s' to '%s'", key, self, value)
attributes[(key, self.natural_path)] = value
def setdefault(self, attributes, key, value):
log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
attributes.setdefault((key, self.natural_path), value)
def get(self, attributes, key, value=None):
key = (key, self.natural_path)
if key in attributes:
return attributes[key]
else:
return value
def __len__(self):
return len(self.path)
def __hash__(self):
return id(self)
@property
def length(self):
return len(self.path)
def pairs(self):
path = self.path
for i in range(0, len(path), 2):
yield path[i], path[i + 1]
def contains_mapper(self, mapper):
for path_mapper in [self.path[i] for i in range(0, len(self.path), 2)]:
if path_mapper.is_mapper and path_mapper.isa(mapper):
return True
else:
return False
def contains(self, attributes, key):
return (key, self.path) in attributes
def __reduce__(self):
return _unreduce_path, (self.serialize(),)
@classmethod
def _serialize_path(cls, path):
return list(
zip(
[
m.class_ if (m.is_mapper or m.is_aliased_class) else str(m)
for m in [path[i] for i in range(0, len(path), 2)]
],
[
path[i].key if (path[i].is_property) else str(path[i])
for i in range(1, len(path), 2)
]
+ [None],
)
)
@classmethod
def _deserialize_path(cls, path):
def _deserialize_mapper_token(mcls):
return (
# note: we likely dont want configure=True here however
# this is maintained at the moment for backwards compatibility
orm_base._inspect_mapped_class(mcls, configure=True)
if mcls not in PathToken._intern
else PathToken._intern[mcls]
)
def _deserialize_key_token(mcls, key):
if key is None:
return None
elif key in PathToken._intern:
return PathToken._intern[key]
else:
return orm_base._inspect_mapped_class(
mcls, configure=True
).attrs[key]
p = tuple(
chain(
*[
(
_deserialize_mapper_token(mcls),
_deserialize_key_token(mcls, key),
)
for mcls, key in path
]
)
)
if p and p[-1] is None:
p = p[0:-1]
return p
@classmethod
def serialize_context_dict(cls, dict_, tokens):
return [
((key, cls._serialize_path(path)), value)
for (key, path), value in [
(k, v)
for k, v in dict_.items()
if isinstance(k, tuple) and k[0] in tokens
]
]
    @classmethod
    def deserialize_context_dict(cls, serialized):
        """Inverse of :meth:`.serialize_context_dict`: rebuild an ordered
        mapping keyed on ``(key, raw_path_tuple)``."""
        return util.OrderedDict(
            ((key, tuple(cls._deserialize_path(path))), value)
            for (key, path), value in serialized
        )
    def serialize(self):
        """Return this path in the picklable pair-list form."""
        path = self.path
        return self._serialize_path(path)
    @classmethod
    def deserialize(cls, path):
        """Rebuild a PathRegistry from its serialized form, or None."""
        if path is None:
            return None
        p = cls._deserialize_path(path)
        return cls.coerce(p)
    @classmethod
    def per_mapper(cls, mapper):
        """Return the root-level registry for the given mapper or
        aliased entity.

        Plain mappers are long lived, so get the dict-backed caching
        registry (creates reference cycles); aliased classes get the
        lightweight slots-based registry.
        """
        if mapper.is_mapper:
            return CachingEntityRegistry(cls.root, mapper)
        else:
            return SlotsEntityRegistry(cls.root, mapper)
    @classmethod
    def coerce(cls, raw):
        """Convert a raw path tuple into a PathRegistry by successive
        indexing starting from the root registry."""
        return util.reduce(lambda prev, next: prev[next], raw, cls.root)
    def token(self, token):
        """Append a wildcard/default string token to this path.

        Default tokens anchor at the root rather than at this path.
        """
        if token.endswith(":" + _WILDCARD_TOKEN):
            return TokenRegistry(self, token)
        elif token.endswith(":" + _DEFAULT_TOKEN):
            return TokenRegistry(self.root, token)
        else:
            raise exc.ArgumentError("invalid token: %s" % token)
    def __add__(self, other):
        # concatenate: index this registry by each element of the other
        # registry's raw path in turn
        return util.reduce(lambda prev, next: prev[next], other.path, self)
    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.path)
class RootRegistry(PathRegistry):
    """Root registry, defers to mappers so that
    paths are maintained per-root-mapper.
    """
    inherit_cache = True
    # the root has an empty path; all real paths grow from here
    path = natural_path = ()
    has_entity = False
    is_aliased_class = False
    is_root = True
    def __getitem__(self, entity):
        # interned string tokens are their own path entries; mapped
        # entities supply their own per-mapper registry
        if entity in PathToken._intern:
            return PathToken._intern[entity]
        else:
            return entity._path_registry
# module-level singleton; all PathRegistry instances share this root
PathRegistry.root = RootRegistry()
class PathToken(orm_base.InspectionAttr, HasCacheKey, str):
    """cacheable string token"""
    # global intern table: token string -> PathToken instance, so that
    # equal tokens are always the same object
    _intern = {}
    def _gen_cache_key(self, anon_map, bindparams):
        # cache key is just the string value itself
        return (str(self),)
    @property
    def _path_for_compare(self):
        # tokens do not participate in path comparison
        return None
    @classmethod
    def intern(cls, strvalue):
        """Return the canonical PathToken for ``strvalue``, creating and
        interning it on first use."""
        if strvalue in cls._intern:
            return cls._intern[strvalue]
        else:
            cls._intern[strvalue] = result = PathToken(strvalue)
            return result
class TokenRegistry(PathRegistry):
    """A path element that is a string token (wildcard / default marker)
    rather than a mapped entity or property."""
    __slots__ = ("token", "parent", "path", "natural_path")
    inherit_cache = True
    def __init__(self, parent, token):
        # tokens are interned so equal tokens share one object
        token = PathToken.intern(token)
        self.token = token
        self.parent = parent
        self.path = parent.path + (token,)
        self.natural_path = parent.natural_path + (token,)
    has_entity = False
    is_token = True
    def generate_for_superclasses(self):
        """Yield this token path re-rooted at each superclass mapper of the
        parent entity, so wildcard options apply across an inheritance
        hierarchy (and across with_polymorphic sub-entities)."""
        if not self.parent.is_aliased_class and not self.parent.is_root:
            for ent in self.parent.mapper.iterate_to_root():
                yield TokenRegistry(self.parent.parent[ent], self.token)
        elif (
            self.parent.is_aliased_class
            and self.parent.entity._is_with_polymorphic
        ):
            yield self
            for ent in self.parent.entity._with_polymorphic_entities:
                yield TokenRegistry(self.parent.parent[ent], self.token)
        else:
            yield self
    def __getitem__(self, entity):
        # a token terminates a path; nothing may be appended after it
        raise NotImplementedError()
class PropRegistry(PathRegistry):
    """A path element that is a :class:`.MapperProperty`, appended to an
    entity path element."""
    # True when self.natural_path diverges from self.path due to
    # with_polymorphic subclass entities; see __init__ below
    is_unnatural = False
    inherit_cache = True
    def __init__(self, parent, prop):
        # restate this path in terms of the
        # given MapperProperty's parent.
        insp = inspection.inspect(parent[-1])
        natural_parent = parent
        if not insp.is_aliased_class or insp._use_mapper_path:
            parent = natural_parent = parent.parent[prop.parent]
        elif (
            insp.is_aliased_class
            and insp.with_polymorphic_mappers
            and prop.parent in insp.with_polymorphic_mappers
        ):
            subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
            parent = parent.parent[subclass_entity]
            # when building a path where with_polymorphic() is in use,
            # special logic to determine the "natural path" when subclass
            # entities are used.
            #
            # here we are trying to distinguish between a path that starts
            # on a the with_polymorhpic entity vs. one that starts on a
            # normal entity that introduces a with_polymorphic() in the
            # middle using of_type():
            #
            # # as in test_polymorphic_rel->
            # #    test_subqueryload_on_subclass_uses_path_correctly
            # wp = with_polymorphic(RegularEntity, "*")
            # sess.query(wp).options(someload(wp.SomeSubEntity.foos))
            #
            # vs
            #
            # # as in test_relationship->JoinedloadWPolyOfTypeContinued
            # wp = with_polymorphic(SomeFoo, "*")
            # sess.query(RegularEntity).options(
            #       someload(RegularEntity.foos.of_type(wp))
            #       .someload(wp.SubFoo.bar)
            # )
            #
            # in the former case, the Query as it generates a path that we
            # want to match will be in terms of the with_polymorphic at the
            # beginning.  in the latter case, Query will generate simple
            # paths that don't know about this with_polymorphic, so we must
            # use a separate natural path.
            #
            #
            if parent.parent:
                natural_parent = parent.parent[subclass_entity.mapper]
                self.is_unnatural = True
            else:
                natural_parent = parent
        elif (
            natural_parent.parent
            and insp.is_aliased_class
            and prop.parent  # this should always be the case here
            is not insp.mapper
            and insp.mapper.isa(prop.parent)
        ):
            natural_parent = parent.parent[prop.parent]
        self.prop = prop
        self.parent = parent
        self.path = parent.path + (prop,)
        self.natural_path = natural_parent.natural_path + (prop,)
        # precomputed keys used by loader strategies for wildcard /
        # default / per-path loader option lookups
        self._wildcard_path_loader_key = (
            "loader",
            parent.path + self.prop._wildcard_token,
        )
        self._default_path_loader_key = self.prop._default_path_loader_key
        self._loader_key = ("loader", self.natural_path)
    def __str__(self):
        return " -> ".join(str(elem) for elem in self.path)
    @util.memoized_property
    def has_entity(self):
        # True for relationship-like properties that lead to another entity
        return self.prop._links_to_entity
    @util.memoized_property
    def entity(self):
        return self.prop.entity
    @property
    def mapper(self):
        return self.prop.mapper
    @property
    def entity_path(self):
        # extend this path with the property's target entity
        return self[self.entity]
    def __getitem__(self, entity):
        # integer/slice indexing addresses the raw path tuple; otherwise
        # append the given entity as the next path element
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return SlotsEntityRegistry(self, entity)
class AbstractEntityRegistry(PathRegistry):
    """Common behavior for path elements that are mapped entities
    (mappers or aliased classes), appended to a property path element
    or to the root."""
    __slots__ = ()
    has_entity = True
    def __init__(self, parent, entity):
        self.key = entity
        self.parent = parent
        self.is_aliased_class = entity.is_aliased_class
        self.entity = entity
        self.path = parent.path + (entity,)
        # the "natural path" is the path that we get when Query is traversing
        # from the lead entities into the various relationships; it corresponds
        # to the structure of mappers and relationships. when we are given a
        # path that comes from loader options, as of 1.3 it can have ac-hoc
        # with_polymorphic() and other AliasedInsp objects inside of it, which
        # are usually not present in mappings.  So here we track both the
        # "enhanced" path in self.path and the "natural" path that doesn't
        # include those objects so these two traversals can be matched up.
        # the test here for "(self.is_aliased_class or parent.is_unnatural)"
        # are to avoid the more expensive conditional logic that follows if we
        # know we don't have to do it.  This conditional can just as well be
        # "if parent.path:", it just is more function calls.
        if parent.path and (self.is_aliased_class or parent.is_unnatural):
            # this is an infrequent code path used only for loader strategies
            # that also make use of of_type().
            if entity.mapper.isa(parent.natural_path[-1].entity):
                self.natural_path = parent.natural_path + (entity.mapper,)
            else:
                self.natural_path = parent.natural_path + (
                    parent.natural_path[-1].entity,
                )
        # it seems to make sense that since these paths get mixed up
        # with statements that are cached or not, we should make
        # sure the natural path is cacheable across different occurrences
        # of equivalent AliasedClass objects.  however, so far this
        # does not seem to be needed for whatever reason.
        # elif not parent.path and self.is_aliased_class:
        #     self.natural_path = (self.entity._generate_cache_key()[0], )
        else:
            # self.natural_path = parent.natural_path + (entity, )
            self.natural_path = self.path
    @property
    def entity_path(self):
        # this element is itself an entity, so the entity path is self
        return self
    @property
    def mapper(self):
        return inspection.inspect(self.entity).mapper
    def __bool__(self):
        # always truthy, even though __len__ may be zero-adjacent;
        # registries are never treated as empty containers
        return True
    __nonzero__ = __bool__
    def __getitem__(self, entity):
        # integer/slice indexing addresses the raw path tuple; interned
        # string tokens become TokenRegistry elements; anything else is a
        # MapperProperty extending the path
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        elif entity in PathToken._intern:
            return TokenRegistry(self, PathToken._intern[entity])
        else:
            return PropRegistry(self, entity)
class SlotsEntityRegistry(AbstractEntityRegistry):
    """Entity path element for short-lived entities such as aliased
    classes; child path elements are not cached."""
    # for aliased class, return lightweight, no-cycles created
    # version
    inherit_cache = True
    __slots__ = (
        "key",
        "parent",
        "is_aliased_class",
        "entity",
        "path",
        "natural_path",
    )
class CachingEntityRegistry(AbstractEntityRegistry, dict):
    """Entity path element for long-lived mappers; caches child
    PropRegistry elements in itself (a dict), at the cost of creating
    reference cycles."""
    # for long lived mapper, return dict based caching
    # version that creates reference cycles
    inherit_cache = True
    def __getitem__(self, entity):
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            # dict lookup; a miss falls through to __missing__ below
            return dict.__getitem__(self, entity)
    def __missing__(self, key):
        # build and memoize the child PropRegistry on first access
        self[key] = item = PropRegistry(self, key)
        return item

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,430 @@
# orm/properties.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""MapperProperty implementations.
This is a private module which defines the behavior of individual ORM-
mapped attributes.
"""
from __future__ import absolute_import
from . import attributes
from .descriptor_props import CompositeProperty
from .descriptor_props import ConcreteInheritedProperty
from .descriptor_props import SynonymProperty
from .interfaces import PropComparator
from .interfaces import StrategizedProperty
from .relationships import RelationshipProperty
from .. import log
from .. import util
from ..sql import coercions
from ..sql import roles
__all__ = [
"ColumnProperty",
"CompositeProperty",
"ConcreteInheritedProperty",
"RelationshipProperty",
"SynonymProperty",
]
@log.class_logger
class ColumnProperty(StrategizedProperty):
    """Describes an object attribute that corresponds to a table column.

    Public constructor is the :func:`_orm.column_property` function.
    """
    strategy_wildcard_key = "column"
    inherit_cache = True
    # a plain column does not lead to another mapped entity
    _links_to_entity = False
    __slots__ = (
        "columns",
        "group",
        "deferred",
        "instrument",
        "comparator_factory",
        "descriptor",
        "active_history",
        "expire_on_flush",
        "info",
        "doc",
        "strategy_key",
        "_creation_order",
        "_is_polymorphic_discriminator",
        "_mapped_by_synonym",
        "_deferred_column_loader",
        "_raise_column_loader",
        "_renders_in_subqueries",
        "raiseload",
    )
    def __init__(self, *columns, **kwargs):
        r"""Provide a column-level property for use with a mapping.

        Column-based properties can normally be applied to the mapper's
        ``properties`` dictionary using the :class:`_schema.Column`
        element directly.
        Use this function when the given column is not directly present within
        the mapper's selectable; examples include SQL expressions, functions,
        and scalar SELECT queries.
        The :func:`_orm.column_property` function returns an instance of
        :class:`.ColumnProperty`.
        Columns that aren't present in the mapper's selectable won't be
        persisted by the mapper and are effectively "read-only" attributes.

        :param \*cols:
              list of Column objects to be mapped.
        :param active_history=False:
          When ``True``, indicates that the "previous" value for a
          scalar attribute should be loaded when replaced, if not
          already loaded. Normally, history tracking logic for
          simple non-primary-key scalar values only needs to be
          aware of the "new" value in order to perform a flush. This
          flag is available for applications that make use of
          :func:`.attributes.get_history` or :meth:`.Session.is_modified`
          which also need to know
          the "previous" value of the attribute.
        :param comparator_factory: a class which extends
           :class:`.ColumnProperty.Comparator` which provides custom SQL
           clause generation for comparison operations.
        :param group:
            a group name for this property when marked as deferred.
        :param deferred:
              when True, the column property is "deferred", meaning that
              it does not load immediately, and is instead loaded when the
              attribute is first accessed on an instance.  See also
              :func:`~sqlalchemy.orm.deferred`.
        :param doc:
              optional string that will be applied as the doc on the
              class-bound descriptor.
        :param expire_on_flush=True:
            Disable expiry on flush.   A column_property() which refers
            to a SQL expression (and not a single table-bound column)
            is considered to be a "read only" property; populating it
            has no effect on the state of data, and it can only return
            database state.   For this reason a column_property()'s value
            is expired whenever the parent object is involved in a
            flush, that is, has any kind of "dirty" state within a flush.
            Setting this parameter to ``False`` will have the effect of
            leaving any existing value present after the flush proceeds.
            Note however that the :class:`.Session` with default expiration
            settings still expires
            all attributes after a :meth:`.Session.commit` call, however.
        :param info: Optional data dictionary which will be populated into the
            :attr:`.MapperProperty.info` attribute of this object.
        :param raiseload: if True, indicates the column should raise an error
            when undeferred, rather than loading the value.  This can be
            altered at query time by using the :func:`.deferred` option with
            raiseload=False.

            .. versionadded:: 1.4

            .. seealso::

                :ref:`deferred_raiseload`

        .. seealso::

            :ref:`column_property_options` - to map columns while including
            mapping options

            :ref:`mapper_column_property_sql_expressions` - to map SQL
            expressions
        """
        super(ColumnProperty, self).__init__()
        # coerce each positional argument into a labeled column expression
        self.columns = [
            coercions.expect(roles.LabeledColumnExprRole, c) for c in columns
        ]
        self.group = kwargs.pop("group", None)
        self.deferred = kwargs.pop("deferred", False)
        self.raiseload = kwargs.pop("raiseload", False)
        self.instrument = kwargs.pop("_instrument", True)
        self.comparator_factory = kwargs.pop(
            "comparator_factory", self.__class__.Comparator
        )
        self.descriptor = kwargs.pop("descriptor", None)
        self.active_history = kwargs.pop("active_history", False)
        self.expire_on_flush = kwargs.pop("expire_on_flush", True)
        if "info" in kwargs:
            self.info = kwargs.pop("info")
        if "doc" in kwargs:
            self.doc = kwargs.pop("doc")
        else:
            # fall back to the doc of the last column that has one
            for col in reversed(self.columns):
                doc = getattr(col, "doc", None)
                if doc is not None:
                    self.doc = doc
                    break
            else:
                self.doc = None
        # any keyword left over at this point is unknown
        if kwargs:
            raise TypeError(
                "%s received unexpected keyword argument(s): %s"
                % (self.__class__.__name__, ", ".join(sorted(kwargs.keys())))
            )
        util.set_creation_order(self)
        # loader-strategy selector key; extended with raiseload if set
        self.strategy_key = (
            ("deferred", self.deferred),
            ("instrument", self.instrument),
        )
        if self.raiseload:
            self.strategy_key += (("raiseload", True),)
    def _memoized_attr__renders_in_subqueries(self):
        # a deferred, non-persisted (read-only) column expression is not
        # rendered inside subqueries
        return ("deferred", True) not in self.strategy_key or (
            self not in self.parent._readonly_props
        )
    @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
    def _memoized_attr__deferred_column_loader(self):
        # per-instance callable that loads this deferred column on access
        state = util.preloaded.orm_state
        strategies = util.preloaded.orm_strategies
        return state.InstanceState._instance_level_callable_processor(
            self.parent.class_manager,
            strategies.LoadDeferredColumns(self.key),
            self.key,
        )
    @util.preload_module("sqlalchemy.orm.state", "sqlalchemy.orm.strategies")
    def _memoized_attr__raise_column_loader(self):
        # as above, but the loader raises (raiseload=True variant)
        state = util.preloaded.orm_state
        strategies = util.preloaded.orm_strategies
        return state.InstanceState._instance_level_callable_processor(
            self.parent.class_manager,
            strategies.LoadDeferredColumns(self.key, True),
            self.key,
        )
    def __clause_element__(self):
        """Allow the ColumnProperty to work in expression before it is turned
        into an instrumented attribute.
        """
        return self.expression
    @property
    def expression(self):
        """Return the primary column or expression for this ColumnProperty.

        E.g.::

            class File(Base):
                # ...
                name = Column(String(64))
                extension = Column(String(8))
                filename = column_property(name + '.' + extension)
                path = column_property('C:/' + filename.expression)

        .. seealso::

            :ref:`mapper_column_property_sql_expressions_composed`
        """
        return self.columns[0]
    def instrument_class(self, mapper):
        # install the class-bound descriptor, unless instrumentation is
        # disabled (_instrument=False)
        if not self.instrument:
            return
        attributes.register_descriptor(
            mapper.class_,
            self.key,
            comparator=self.comparator_factory(self, mapper),
            parententity=mapper,
            doc=self.doc,
        )
    def do_init(self):
        super(ColumnProperty, self).do_init()
        # warn when two distinct primary key columns are implicitly merged
        # under one attribute name
        if len(self.columns) > 1 and set(self.parent.primary_key).issuperset(
            self.columns
        ):
            util.warn(
                (
                    "On mapper %s, primary key column '%s' is being combined "
                    "with distinct primary key column '%s' in attribute '%s'. "
                    "Use explicit properties to give each column its own "
                    "mapped attribute name."
                )
                % (self.parent, self.columns[1], self.columns[0], self.key)
            )
    def copy(self):
        # NOTE(review): only deferred/group/active_history are carried over;
        # raiseload, comparator_factory etc. presumably revert to defaults
        # on the copy — confirm this is intended
        return ColumnProperty(
            deferred=self.deferred,
            group=self.group,
            active_history=self.active_history,
            *self.columns
        )
    def _getcommitted(
        self, state, dict_, column, passive=attributes.PASSIVE_OFF
    ):
        # return the last-committed (pre-flush) value for this attribute
        return state.get_impl(self.key).get_committed_value(
            state, dict_, passive=passive
        )
    def merge(
        self,
        session,
        source_state,
        source_dict,
        dest_state,
        dest_dict,
        load,
        _recursive,
        _resolve_conflict_map,
    ):
        # Session.merge() support: transfer this attribute's value from the
        # source instance state to the destination instance state
        if not self.instrument:
            return
        elif self.key in source_dict:
            value = source_dict[self.key]
            if not load:
                # "load=False" merge: copy raw value without attribute events
                dest_dict[self.key] = value
            else:
                impl = dest_state.get_impl(self.key)
                impl.set(dest_state, dest_dict, value, None)
        elif dest_state.has_identity and self.key not in dest_dict:
            # source has no value; expire on the destination so it reloads
            dest_state._expire_attributes(
                dest_dict, [self.key], no_loader=True
            )
    class Comparator(util.MemoizedSlots, PropComparator):
        """Produce boolean, comparison, and other operators for
        :class:`.ColumnProperty` attributes.

        See the documentation for :class:`.PropComparator` for a brief
        overview.

        .. seealso::

            :class:`.PropComparator`

            :class:`.ColumnOperators`

            :ref:`types_operators`

            :attr:`.TypeEngine.comparator_factory`

        """
        __slots__ = "__clause_element__", "info", "expressions"
        def _orm_annotate_column(self, column):
            """annotate and possibly adapt a column to be returned
            as the mapped-attribute exposed version of the column.

            The column in this context needs to act as much like the
            column in an ORM mapped context as possible, so includes
            annotations to give hints to various ORM functions as to
            the source entity of this column.   It also adapts it
            to the mapper's with_polymorphic selectable if one is
            present.

            """
            pe = self._parententity
            annotations = {
                "entity_namespace": pe,
                "parententity": pe,
                "parentmapper": pe,
                "proxy_key": self.prop.key,
            }
            col = column
            # for a mapper with polymorphic_on and an adapter, return
            # the column against the polymorphic selectable.
            # see also orm.util._orm_downgrade_polymorphic_columns
            # for the reverse operation.
            if self._parentmapper._polymorphic_adapter:
                mapper_local_col = col
                col = self._parentmapper._polymorphic_adapter.traverse(col)
                # this is a clue to the ORM Query etc. that this column
                # was adapted to the mapper's polymorphic_adapter.  the
                # ORM uses this hint to know which column its adapting.
                annotations["adapt_column"] = mapper_local_col
            return col._annotate(annotations)._set_propagate_attrs(
                {"compile_state_plugin": "orm", "plugin_subject": pe}
            )
        def _memoized_method___clause_element__(self):
            # the SQL expression for this attribute, adapted if an aliasing
            # adapter is in play
            if self.adapter:
                return self.adapter(self.prop.columns[0], self.prop.key)
            else:
                return self._orm_annotate_column(self.prop.columns[0])
        def _memoized_attr_info(self):
            """The .info dictionary for this attribute."""
            ce = self.__clause_element__()
            try:
                return ce.info
            except AttributeError:
                return self.prop.info
        def _memoized_attr_expressions(self):
            """The full sequence of columns referenced by this
            attribute, adjusted for any aliasing in progress.

            .. versionadded:: 1.3.17

            """
            if self.adapter:
                return [
                    self.adapter(col, self.prop.key)
                    for col in self.prop.columns
                ]
            else:
                return [
                    self._orm_annotate_column(col) for col in self.prop.columns
                ]
        def _fallback_getattr(self, key):
            """proxy attribute access down to the mapped column.

            this allows user-defined comparison methods to be accessed.
            """
            return getattr(self.__clause_element__(), key)
        def operate(self, op, *other, **kwargs):
            # delegate operators to the underlying column expression
            return op(self.__clause_element__(), *other, **kwargs)
        def reverse_operate(self, op, other, **kwargs):
            # reversed operator: bind the left operand as a parameter
            col = self.__clause_element__()
            return op(col._bind_param(op, other), col, **kwargs)
    def __str__(self):
        return str(self.parent.class_.__name__) + "." + self.key

3508
lib/sqlalchemy/orm/query.py Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,228 @@
# orm/scoping.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from . import class_mapper
from . import exc as orm_exc
from .session import Session
from .. import exc as sa_exc
from ..util import create_proxy_methods
from ..util import ScopedRegistry
from ..util import ThreadLocalRegistry
from ..util import warn
from ..util import warn_deprecated
__all__ = ["scoped_session", "ScopedSessionMixin"]
class ScopedSessionMixin(object):
    """Behavior common to :class:`.scoped_session` and its asyncio
    counterpart: registry-backed session access and configuration."""
    @property
    def _proxied(self):
        # the Session instance that proxy methods forward to
        return self.registry()
    def __call__(self, **kw):
        r"""Return the current :class:`.Session`, creating it
        using the :attr:`.scoped_session.session_factory` if not present.

        :param \**kw: Keyword arguments will be passed to the
         :attr:`.scoped_session.session_factory` callable, if an existing
         :class:`.Session` is not present.  If the :class:`.Session` is present
         and keyword arguments have been passed,
         :exc:`~sqlalchemy.exc.InvalidRequestError` is raised.

        """
        if kw:
            # keyword arguments only make sense when creating a new Session
            if self.registry.has():
                raise sa_exc.InvalidRequestError(
                    "Scoped session is already present; "
                    "no new arguments may be specified."
                )
            else:
                sess = self.session_factory(**kw)
                self.registry.set(sess)
        else:
            sess = self.registry()
        if not self._support_async and sess._is_asyncio:
            warn_deprecated(
                "Using `scoped_session` with asyncio is deprecated and "
                "will raise an error in a future version. "
                "Please use `async_scoped_session` instead.",
                "1.4.23",
            )
        return sess
    def configure(self, **kwargs):
        """reconfigure the :class:`.sessionmaker` used by this
        :class:`.scoped_session`.

        See :meth:`.sessionmaker.configure`.

        """
        if self.registry.has():
            # reconfiguration affects only sessions created after this point
            warn(
                "At least one scoped session is already present. "
                " configure() can not affect sessions that have "
                "already been created."
            )
        self.session_factory.configure(**kwargs)
# the decorator generates proxy methods/attributes on scoped_session that
# forward to the current Session (via ``_proxied``), so a scoped_session
# can be used interchangeably with a Session for these members.
@create_proxy_methods(
    Session,
    ":class:`_orm.Session`",
    ":class:`_orm.scoping.scoped_session`",
    classmethods=["close_all", "object_session", "identity_key"],
    methods=[
        "__contains__",
        "__iter__",
        "add",
        "add_all",
        "begin",
        "begin_nested",
        "close",
        "commit",
        "connection",
        "delete",
        "execute",
        "expire",
        "expire_all",
        "expunge",
        "expunge_all",
        "flush",
        "get",
        "get_bind",
        "is_modified",
        "bulk_save_objects",
        "bulk_insert_mappings",
        "bulk_update_mappings",
        "merge",
        "query",
        "refresh",
        "rollback",
        "scalar",
        "scalars",
    ],
    attributes=[
        "bind",
        "dirty",
        "deleted",
        "new",
        "identity_map",
        "is_active",
        "autoflush",
        "no_autoflush",
        "info",
        "autocommit",
    ],
)
class scoped_session(ScopedSessionMixin):
    """Provides scoped management of :class:`.Session` objects.

    See :ref:`unitofwork_contextual` for a tutorial.

    .. note::

       When using :ref:`asyncio_toplevel`, the async-compatible
       :class:`_asyncio.async_scoped_session` class should be
       used in place of :class:`.scoped_session`.

    """
    # plain scoped_session warns when used with asyncio sessions
    _support_async = False
    session_factory = None
    """The `session_factory` provided to `__init__` is stored in this
    attribute and may be accessed at a later time.  This can be useful when
    a new non-scoped :class:`.Session` or :class:`_engine.Connection` to the
    database is needed."""
    def __init__(self, session_factory, scopefunc=None):
        """Construct a new :class:`.scoped_session`.

        :param session_factory: a factory to create new :class:`.Session`
         instances. This is usually, but not necessarily, an instance
         of :class:`.sessionmaker`.

        :param scopefunc: optional function which defines
         the current scope.   If not passed, the :class:`.scoped_session`
         object assumes "thread-local" scope, and will use
         a Python ``threading.local()`` in order to maintain the current
         :class:`.Session`.  If passed, the function should return
         a hashable token; this token will be used as the key in a
         dictionary in order to store and retrieve the current
         :class:`.Session`.

        """
        self.session_factory = session_factory
        if scopefunc:
            self.registry = ScopedRegistry(session_factory, scopefunc)
        else:
            self.registry = ThreadLocalRegistry(session_factory)
    def remove(self):
        """Dispose of the current :class:`.Session`, if present.

        This will first call :meth:`.Session.close` method
        on the current :class:`.Session`, which releases any existing
        transactional/connection resources still being held; transactions
        specifically are rolled back.  The :class:`.Session` is then
        discarded.   Upon next usage within the same scope,
        the :class:`.scoped_session` will produce a new
        :class:`.Session` object.

        """
        if self.registry.has():
            self.registry().close()
        self.registry.clear()
    def query_property(self, query_cls=None):
        """return a class property which produces a :class:`_query.Query`
        object
        against the class and the current :class:`.Session` when called.

        e.g.::

            Session = scoped_session(sessionmaker())

            class MyClass(object):
                query = Session.query_property()

            # after mappers are defined
            result = MyClass.query.filter(MyClass.name=='foo').all()

        Produces instances of the session's configured query class by
        default.  To override and use a custom implementation, provide
        a ``query_cls`` callable.  The callable will be invoked with
        the class's mapper as a positional argument and a session
        keyword argument.

        There is no limit to the number of query properties placed on
        a class.

        """
        class query(object):
            # descriptor bound onto the mapped class; resolves a Query
            # against the current scoped Session on each access
            def __get__(s, instance, owner):
                try:
                    mapper = class_mapper(owner)
                    if mapper:
                        if query_cls:
                            # custom query class
                            return query_cls(mapper, session=self.registry())
                        else:
                            # session's configured query class
                            return self.registry().query(mapper)
                except orm_exc.UnmappedClassError:
                    # accessed on an unmapped class: no query available
                    return None
        return query()
# legacy alias retained for code written against pre-0.5 naming
ScopedSession = scoped_session
"""Old name for backwards compatibility."""

File diff suppressed because it is too large Load Diff

1025
lib/sqlalchemy/orm/state.py Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

167
lib/sqlalchemy/orm/sync.py Normal file
View File

@@ -0,0 +1,167 @@
# orm/sync.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""private module containing functions used for copying data
between instances based on join conditions.
"""
from . import attributes
from . import exc
from . import util as orm_util
from .. import util
def populate(
    source,
    source_mapper,
    dest,
    dest_mapper,
    synchronize_pairs,
    uowcommit,
    flag_cascaded_pks,
):
    """Copy attribute values from the ``source`` instance state to the
    ``dest`` instance state for each ``(l, r)`` column pair.

    ``l`` is resolved against ``source_mapper`` and read from ``source``;
    ``r`` is resolved against ``dest_mapper`` and written to ``dest``.
    When ``flag_cascaded_pks`` is set, destination primary key columns
    that reference their source are recorded in ``uowcommit.attributes``
    so post-update logic can see cascaded PK changes.
    """
    source_dict = source.dict
    dest_dict = dest.dict
    for l, r in synchronize_pairs:
        try:
            # inline of source_mapper._get_state_attr_by_column
            prop = source_mapper._columntoproperty[l]
            value = source.manager[prop.key].impl.get(
                source, source_dict, attributes.PASSIVE_OFF
            )
        except exc.UnmappedColumnError as err:
            # always raises; ``value`` below is therefore never unbound
            _raise_col_to_prop(False, source_mapper, l, dest_mapper, r, err)
        try:
            # inline of dest_mapper._set_state_attr_by_column
            prop = dest_mapper._columntoproperty[r]
            dest.manager[prop.key].impl.set(dest, dest_dict, value, None)
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(True, source_mapper, l, dest_mapper, r, err)
        # technically the "r.primary_key" check isn't
        # needed here, but we check for this condition to limit
        # how often this logic is invoked for memory/performance
        # reasons, since we only need this info for a primary key
        # destination.
        if (
            flag_cascaded_pks
            and l.primary_key
            and r.primary_key
            and r.references(l)
        ):
            uowcommit.attributes[("pk_cascaded", dest, r)] = True
def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs):
    """Copy inherited-key values within a single parameter dictionary.

    A simplified version of populate() used by bulk insert mode: for each
    ``(l, r)`` column pair, the value stored under the key mapped from
    column ``l`` is copied to the key mapped from column ``r``, all within
    ``source_dict``.
    """
    for l, r in synchronize_pairs:
        try:
            prop = source_mapper._columntoproperty[l]
            value = source_dict[prop.key]
        except exc.UnmappedColumnError as err:
            # always raises
            _raise_col_to_prop(False, source_mapper, l, source_mapper, r, err)
        try:
            prop = source_mapper._columntoproperty[r]
            source_dict[prop.key] = value
        except exc.UnmappedColumnError as err:
            # bug fix: the exception was previously not captured and
            # _raise_col_to_prop() was called without its required ``err``
            # argument, producing a TypeError instead of the intended
            # UnmappedColumnError.
            _raise_col_to_prop(True, source_mapper, l, source_mapper, r, err)
def clear(dest, dest_mapper, synchronize_pairs):
    """Set the destination-side column attribute of each ``(l, r)`` pair
    to ``None`` on the ``dest`` instance state.

    Raises AssertionError rather than silently blanking out a primary key
    column that currently holds a non-None value.
    """
    for l, r in synchronize_pairs:
        if (
            r.primary_key
            and dest_mapper._get_state_attr_by_column(dest, dest.dict, r)
            not in orm_util._none_set
        ):
            raise AssertionError(
                "Dependency rule tried to blank-out primary key "
                "column '%s' on instance '%s'" % (r, orm_util.state_str(dest))
            )
        try:
            dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None)
        except exc.UnmappedColumnError as err:
            _raise_col_to_prop(True, None, l, dest_mapper, r, err)
def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
    """Populate the ``dest`` dictionary with both the current and the
    last-committed value of each source column.

    For each ``(l, r)`` pair, ``dest[r.key]`` receives the current value
    and ``dest[old_prefix + r.key]`` the committed (pre-flush) value,
    as used for UPDATE..WHERE criteria against prior values.
    """
    for l, r in synchronize_pairs:
        try:
            oldvalue = source_mapper._get_committed_attr_by_column(
                source.obj(), l
            )
            value = source_mapper._get_state_attr_by_column(
                source, source.dict, l, passive=attributes.PASSIVE_OFF
            )
        except exc.UnmappedColumnError as err:
            # always raises; values below are therefore never unbound
            _raise_col_to_prop(False, source_mapper, l, None, r, err)
        dest[r.key] = value
        dest[old_prefix + r.key] = oldvalue
def populate_dict(source, source_mapper, dict_, synchronize_pairs):
    """Copy the current value of each source-side column attribute into
    ``dict_`` under the destination column's key.
    """
    state_dict = source.dict
    for source_col, dest_col in synchronize_pairs:
        try:
            fetched = source_mapper._get_state_attr_by_column(
                source, state_dict, source_col, passive=attributes.PASSIVE_OFF
            )
        except exc.UnmappedColumnError as err:
            # always raises
            _raise_col_to_prop(
                False, source_mapper, source_col, None, dest_col, err
            )
        dict_[dest_col.key] = fetched
def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
    """return true if the source object has changes from an old to a
    new value on the given synchronize pairs

    Every pair is inspected; the function returns True as soon as any
    pair's source attribute shows deleted (replaced) history, and False
    only after all pairs have been checked.
    """
    for l, r in synchronize_pairs:
        try:
            prop = source_mapper._columntoproperty[l]
        except exc.UnmappedColumnError as err:
            # always raises
            _raise_col_to_prop(False, source_mapper, l, None, r, err)
        history = uowcommit.get_attribute_history(
            source, prop.key, attributes.PASSIVE_NO_INITIALIZE
        )
        if bool(history.deleted):
            return True
    # restructured from an ambiguous trailing ``else: return False`` that
    # read as bound to the ``if`` (deciding on the first pair only); the
    # intended semantics are "False when no pair is modified".
    return False
def _raise_col_to_prop(
    isdest, source_mapper, source_column, dest_mapper, dest_column, err
):
    """Raise an UnmappedColumnError describing a sync-rule failure,
    chaining from the original error via ``replace_context``.

    ``isdest`` selects whether the unmappable column is on the
    destination or the source side of the rule.
    """
    if isdest:
        message = (
            "Can't execute sync rule for "
            "destination column '%s'; mapper '%s' does not map "
            "this column.  Try using an explicit `foreign_keys` "
            "collection which does not include this column (or use "
            "a viewonly=True relation)." % (dest_column, dest_mapper)
        )
    else:
        message = (
            "Can't execute sync rule for "
            "source column '%s'; mapper '%s' does not map this "
            "column.  Try using an explicit `foreign_keys` "
            "collection which does not include destination column "
            "'%s' (or use a viewonly=True relation)."
            % (source_column, source_mapper, dest_column)
        )
    # single raise site shared by both branches
    util.raise_(exc.UnmappedColumnError(message), replace_context=err)

View File

@@ -0,0 +1,784 @@
# orm/unitofwork.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""The internals for the unit of work system.
The session's flush() process passes objects to a contextual object
here, which assembles flush tasks based on mappers and their properties,
organizes them in order of dependency, and executes.
"""
from . import attributes
from . import exc as orm_exc
from . import util as orm_util
from .. import event
from .. import util
from ..util import topological
def _warn_for_cascade_backrefs(state, prop):
    """Emit the 2.0 deprecation warning for a merge occurring along the
    backref side of a save-update cascade.

    ``state`` is the instance state being cascaded into the Session;
    ``prop`` is the relationship along which the cascade occurred.
    """
    util.warn_deprecated_20(
        '"%s" object is being merged into a Session along the backref '
        'cascade path for relationship "%s"; in SQLAlchemy 2.0, this '
        "reverse cascade will not take place.  Set cascade_backrefs to "
        "False in either the relationship() or backref() function for "
        "the 2.0 behavior; or to set globally for the whole "
        "Session, set the future=True flag" % (state.class_.__name__, prop),
        code="s9r1",
    )
def track_cascade_events(descriptor, prop):
    """Establish event listeners on object attributes which handle
    cascade-on-set/append.

    Four listeners are attached to *descriptor*: the ``append`` handler
    (for both "append" and "append_wo_mutation" events), ``remove``, and
    ``set``; they apply the "save-update" and "delete-orphan" cascade
    behavior of *prop* as the attribute changes.
    """
    key = prop.key
    def append(state, item, initiator):
        # process "save_update" cascade rules for when
        # an instance is appended to the list of another instance
        if item is None:
            return
        sess = state.session
        if sess:
            if sess._warn_on_events:
                sess._flush_warning("collection append")
            # rebind ``prop`` to the property as found on the state's own
            # mapper for this key
            prop = state.manager.mapper._props[key]
            item_state = attributes.instance_state(item)
            # cascade the appended item into the session when save-update
            # cascade applies and the event arrived either on the forward
            # side (key == initiator.key) or, legacy behavior, via the
            # backref with cascade_backrefs enabled on a non-future session
            if (
                prop._cascade.save_update
                and (
                    (prop.cascade_backrefs and not sess.future)
                    or key == initiator.key
                )
                and not sess._contains_state(item_state)
            ):
                if key != initiator.key:
                    # event arrived via the backref side; deprecated in 2.0
                    _warn_for_cascade_backrefs(item_state, prop)
                sess._save_or_update_state(item_state)
        return item
    def remove(state, item, initiator):
        # process "delete-orphan" cascade rules when an item is removed
        # from a collection / scalar relationship
        if item is None:
            return
        sess = state.session
        prop = state.manager.mapper._props[key]
        if sess and sess._warn_on_events:
            sess._flush_warning(
                "collection remove"
                if prop.uselist
                else "related attribute delete"
            )
        if (
            item is not None
            and item is not attributes.NEVER_SET
            and item is not attributes.PASSIVE_NO_RESULT
            and prop._cascade.delete_orphan
        ):
            # expunge pending orphans
            item_state = attributes.instance_state(item)
            if prop.mapper._is_orphan(item_state):
                if sess and item_state in sess._new:
                    sess.expunge(item)
                else:
                    # the related item may or may not itself be in a
                    # Session, however the parent for which we are catching
                    # the event is not in a session, so memoize this on the
                    # item
                    item_state._orphaned_outside_of_session = True
    def set_(state, newvalue, oldvalue, initiator):
        # process "save_update" cascade rules for when an instance
        # is attached to another instance
        if oldvalue is newvalue:
            return newvalue
        sess = state.session
        if sess:
            if sess._warn_on_events:
                sess._flush_warning("related attribute set")
            prop = state.manager.mapper._props[key]
            if newvalue is not None:
                newvalue_state = attributes.instance_state(newvalue)
                # same save-update / backref-cascade logic as append()
                if (
                    prop._cascade.save_update
                    and (
                        (prop.cascade_backrefs and not sess.future)
                        or key == initiator.key
                    )
                    and not sess._contains_state(newvalue_state)
                ):
                    if key != initiator.key:
                        _warn_for_cascade_backrefs(newvalue_state, prop)
                    sess._save_or_update_state(newvalue_state)
            # expunge the replaced value if it is a pending orphan
            if (
                oldvalue is not None
                and oldvalue is not attributes.NEVER_SET
                and oldvalue is not attributes.PASSIVE_NO_RESULT
                and prop._cascade.delete_orphan
            ):
                # possible to reach here with attributes.NEVER_SET ?
                oldvalue_state = attributes.instance_state(oldvalue)
                if oldvalue_state in sess._new and prop.mapper._is_orphan(
                    oldvalue_state
                ):
                    sess.expunge(oldvalue)
        return newvalue
    # raw=True delivers InstanceState; retval=True lets handlers replace
    # the value being appended/set
    event.listen(descriptor, "append_wo_mutation", append, raw=True)
    event.listen(descriptor, "append", append, raw=True, retval=True)
    event.listen(descriptor, "remove", remove, raw=True, retval=True)
    event.listen(descriptor, "set", set_, raw=True, retval=True)
class UOWTransaction(object):
    """Orchestrates a single flush: collects pending states per mapper,
    assembles presort and postsort flush actions, topologically orders
    them, and executes them.
    """
    def __init__(self, session):
        self.session = session
        # dictionary used by external actors to
        # store arbitrary state information.
        self.attributes = {}
        # dictionary of mappers to sets of
        # DependencyProcessors, which are also
        # set to be part of the sorted flush actions,
        # which have that mapper as a parent.
        self.deps = util.defaultdict(set)
        # dictionary of mappers to sets of InstanceState
        # items pending for flush which have that mapper
        # as a parent.
        self.mappers = util.defaultdict(set)
        # a dictionary of Preprocess objects, which gather
        # additional states impacted by the flush
        # and determine if a flush action is needed
        self.presort_actions = {}
        # dictionary of PostSortRec objects, each
        # one issues work during the flush within
        # a certain ordering.
        self.postsort_actions = {}
        # a set of 2-tuples, each containing two
        # PostSortRec objects where the second
        # is dependent on the first being executed
        # first
        self.dependencies = set()
        # dictionary of InstanceState-> (isdelete, listonly)
        # tuples, indicating if this state is to be deleted
        # or insert/updated, or just refreshed
        self.states = {}
        # tracks InstanceStates which will be receiving
        # a "post update" call.  Keys are mappers,
        # values are a set of states and a set of the
        # columns which should be included in the update.
        self.post_update_states = util.defaultdict(lambda: (set(), set()))
    @property
    def has_work(self):
        """True if any states are pending within this transaction."""
        return bool(self.states)
    def was_already_deleted(self, state):
        """Return ``True`` if the given state is expired and was deleted
        previously.
        """
        if state.expired:
            try:
                state._load_expired(state, attributes.PASSIVE_OFF)
            except orm_exc.ObjectDeletedError:
                self.session._remove_newly_deleted([state])
                return True
        return False
    def is_deleted(self, state):
        """Return ``True`` if the given state is marked as deleted
        within this uowtransaction."""
        return state in self.states and self.states[state][0]
    def memo(self, key, callable_):
        """Return the value memoized in ``self.attributes`` under *key*,
        invoking *callable_* and caching its result on first use."""
        if key in self.attributes:
            return self.attributes[key]
        else:
            self.attributes[key] = ret = callable_()
            return ret
    def remove_state_actions(self, state):
        """Remove pending actions for a state from the uowtransaction."""
        # mark as listonly; the isdelete flag is retained
        isdelete = self.states[state][0]
        self.states[state] = (isdelete, True)
    def get_attribute_history(
        self, state, key, passive=attributes.PASSIVE_NO_INITIALIZE
    ):
        """Facade to attributes.get_state_history(), including
        caching of results."""
        hashkey = ("history", state, key)
        # cache the objects, not the states; the strong reference here
        # prevents newly loaded objects from being dereferenced during the
        # flush process
        if hashkey in self.attributes:
            history, state_history, cached_passive = self.attributes[hashkey]
            # if the cached lookup was "passive" and now
            # we want non-passive, do a non-passive lookup and re-cache
            if (
                not cached_passive & attributes.SQL_OK
                and passive & attributes.SQL_OK
            ):
                impl = state.manager[key].impl
                history = impl.get_history(
                    state,
                    state.dict,
                    attributes.PASSIVE_OFF
                    | attributes.LOAD_AGAINST_COMMITTED
                    | attributes.NO_RAISE,
                )
                if history and impl.uses_objects:
                    state_history = history.as_state()
                else:
                    state_history = history
                self.attributes[hashkey] = (history, state_history, passive)
        else:
            impl = state.manager[key].impl
            # TODO: store the history as (state, object) tuples
            # so we don't have to keep converting here
            history = impl.get_history(
                state,
                state.dict,
                passive
                | attributes.LOAD_AGAINST_COMMITTED
                | attributes.NO_RAISE,
            )
            if history and impl.uses_objects:
                state_history = history.as_state()
            else:
                state_history = history
            self.attributes[hashkey] = (history, state_history, passive)
        return state_history
    def has_dep(self, processor):
        """True if *processor* was registered as a from-parent
        preprocessor."""
        return (processor, True) in self.presort_actions
    def register_preprocessor(self, processor, fromparent):
        """Register a Preprocess action for (processor, fromparent),
        if not already present."""
        key = (processor, fromparent)
        if key not in self.presort_actions:
            self.presort_actions[key] = Preprocess(processor, fromparent)
    def register_object(
        self,
        state,
        isdelete=False,
        listonly=False,
        cancel_delete=False,
        operation=None,
        prop=None,
    ):
        """Add *state* to the flush, establishing per-mapper flush
        actions on first sight of its mapper.

        Returns False (and optionally warns) if the state is not in the
        Session; True otherwise.
        """
        if not self.session._contains_state(state):
            # this condition is normal when objects are registered
            # as part of a relationship cascade operation.  it should
            # not occur for the top-level register from Session.flush().
            if not state.deleted and operation is not None:
                util.warn(
                    "Object of type %s not in session, %s operation "
                    "along '%s' will not proceed"
                    % (orm_util.state_class_str(state), operation, prop)
                )
            return False
        if state not in self.states:
            mapper = state.manager.mapper
            if mapper not in self.mappers:
                self._per_mapper_flush_actions(mapper)
            self.mappers[mapper].add(state)
            self.states[state] = (isdelete, listonly)
        else:
            # already present; a non-listonly registration may upgrade
            # the existing entry's delete flag / clear listonly
            if not listonly and (isdelete or cancel_delete):
                self.states[state] = (isdelete, False)
        return True
    def register_post_update(self, state, post_update_cols):
        """Accumulate *state* and columns for a post-update statement
        keyed on the state's base mapper."""
        mapper = state.manager.mapper.base_mapper
        states, cols = self.post_update_states[mapper]
        states.add(state)
        cols.update(post_update_cols)
    def _per_mapper_flush_actions(self, mapper):
        """Establish SaveUpdateAll/DeleteAll actions plus relationship
        preprocessors for a mapper newly seen in this flush."""
        saves = SaveUpdateAll(self, mapper.base_mapper)
        deletes = DeleteAll(self, mapper.base_mapper)
        # saves are ordered before deletes for the same mapper
        self.dependencies.add((saves, deletes))
        for dep in mapper._dependency_processors:
            dep.per_property_preprocessors(self)
        for prop in mapper.relationships:
            if prop.viewonly:
                continue
            dep = prop._dependency_processor
            dep.per_property_preprocessors(self)
    @util.memoized_property
    def _mapper_for_dep(self):
        """return a dynamic mapping of (Mapper, DependencyProcessor) to
        True or False, indicating if the DependencyProcessor operates
        on objects of that Mapper.
        The result is stored in the dictionary persistently once
        calculated.
        """
        return util.PopulateDict(
            lambda tup: tup[0]._props.get(tup[1].key) is tup[1].prop
        )
    def filter_states_for_dep(self, dep, states):
        """Filter the given list of InstanceStates to those relevant to the
        given DependencyProcessor.
        """
        mapper_for_dep = self._mapper_for_dep
        return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]
    def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
        """Yield pending states across the mapper's base hierarchy whose
        (isdelete, listonly) flags match exactly."""
        checktup = (isdelete, listonly)
        for mapper in mapper.base_mapper.self_and_descendants:
            for state in self.mappers[mapper]:
                if self.states[state] == checktup:
                    yield state
    def _generate_actions(self):
        """Generate the full, unsorted collection of PostSortRecs as
        well as dependency pairs for this UOWTransaction.
        """
        # execute presort_actions, until all states
        # have been processed.   a presort_action might
        # add new states to the uow.
        while True:
            ret = False
            for action in list(self.presort_actions.values()):
                if action.execute(self):
                    ret = True
            if not ret:
                break
        # see if the graph of mapper dependencies has cycles.
        self.cycles = cycles = topological.find_cycles(
            self.dependencies, list(self.postsort_actions.values())
        )
        if cycles:
            # if yes, break the per-mapper actions into
            # per-state actions
            convert = dict(
                (rec, set(rec.per_state_flush_actions(self))) for rec in cycles
            )
            # rewrite the existing dependencies to point to
            # the per-state actions for those per-mapper actions
            # that were broken up.
            for edge in list(self.dependencies):
                if (
                    None in edge
                    or edge[0].disabled
                    or edge[1].disabled
                    or cycles.issuperset(edge)
                ):
                    self.dependencies.remove(edge)
                elif edge[0] in cycles:
                    self.dependencies.remove(edge)
                    for dep in convert[edge[0]]:
                        self.dependencies.add((dep, edge[1]))
                elif edge[1] in cycles:
                    self.dependencies.remove(edge)
                    for dep in convert[edge[1]]:
                        self.dependencies.add((edge[0], dep))
        return set(
            [a for a in self.postsort_actions.values() if not a.disabled]
        ).difference(cycles)
    def execute(self):
        """Run all flush actions in topologically sorted order."""
        postsort_actions = self._generate_actions()
        # sort_key gives a deterministic ordering among actions that the
        # topological sort considers equivalent
        postsort_actions = sorted(
            postsort_actions,
            key=lambda item: item.sort_key,
        )
        # sort = topological.sort(self.dependencies, postsort_actions)
        # print "--------------"
        # print "\ndependencies:", self.dependencies
        # print "\ncycles:", self.cycles
        # print "\nsort:", list(sort)
        # print "\nCOUNT OF POSTSORT ACTIONS", len(postsort_actions)
        # execute
        if self.cycles:
            # with cycles present, execute in dependency subsets so each
            # record may aggregate its set-mates via execute_aggregate()
            for subset in topological.sort_as_subsets(
                self.dependencies, postsort_actions
            ):
                set_ = set(subset)
                while set_:
                    n = set_.pop()
                    n.execute_aggregate(self, set_)
        else:
            for rec in topological.sort(self.dependencies, postsort_actions):
                rec.execute(self)
    def finalize_flush_changes(self):
        """Mark processed objects as clean / deleted after a successful
        flush().
        This method is called within the flush() method after the
        execute() method has succeeded and the transaction has been
        committed.
        """
        if not self.states:
            return
        states = set(self.states)
        isdel = set(
            s for (s, (isdelete, listonly)) in self.states.items() if isdelete
        )
        other = states.difference(isdel)
        if isdel:
            self.session._remove_newly_deleted(isdel)
        if other:
            self.session._register_persistent(other)
class IterateMappersMixin(object):
    """Mixin supplying the set of mappers a flush record's dependency
    processor spans."""

    def _mappers(self, uow):
        """Return an iterable of mappers relevant to this record's
        dependency processor.

        From the parent side, the parent mapper's hierarchy is filtered
        to those mappers the processor actually operates on; otherwise
        the child mapper's hierarchy is returned unfiltered.
        """
        dep = self.dependency_processor
        if not self.fromparent:
            return dep.mapper.self_and_descendants
        return iter(
            mapper
            for mapper in dep.parent.self_and_descendants
            if uow._mapper_for_dep[(mapper, dep)]
        )
class Preprocess(IterateMappersMixin):
    """Presort action which hands newly-seen flush states to a
    DependencyProcessor and establishes its flush actions once changes
    are detected.
    """
    __slots__ = (
        "dependency_processor",
        "fromparent",
        "processed",
        "setup_flush_actions",
    )
    def __init__(self, dependency_processor, fromparent):
        self.dependency_processor = dependency_processor
        self.fromparent = fromparent
        # states already passed to presort_deletes / presort_saves
        self.processed = set()
        # True once per_property_flush_actions() has been invoked
        self.setup_flush_actions = False
    def execute(self, uow):
        """Run one presort pass; return True if any new states were
        handled.  The caller loops over all Preprocess objects until no
        pass reports progress, since presorting may add states."""
        delete_states = set()
        save_states = set()
        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper].difference(self.processed):
                (isdelete, listonly) = uow.states[state]
                if not listonly:
                    if isdelete:
                        delete_states.add(state)
                    else:
                        save_states.add(state)
        if delete_states:
            self.dependency_processor.presort_deletes(uow, delete_states)
            self.processed.update(delete_states)
        if save_states:
            self.dependency_processor.presort_saves(uow, save_states)
            self.processed.update(save_states)
        if delete_states or save_states:
            # establish flush actions at most once, and only when the
            # processor reports actual changes along its property
            if not self.setup_flush_actions and (
                self.dependency_processor.prop_has_changes(
                    uow, delete_states, True
                )
                or self.dependency_processor.prop_has_changes(
                    uow, save_states, False
                )
            ):
                self.dependency_processor.per_property_flush_actions(uow)
                self.setup_flush_actions = True
            return True
        else:
            return False
class PostSortRec(object):
    """Base class for steps executed during the flush, interned per
    UOWTransaction.

    Instances are memoized in ``uow.postsort_actions`` keyed on
    ``(class, *constructor args)``, so constructing the "same" record
    twice against one transaction yields the identical object.
    """

    __slots__ = ("disabled",)

    def __new__(cls, uow, *args):
        registry = uow.postsort_actions
        key = (cls,) + args
        try:
            return registry[key]
        except KeyError:
            rec = object.__new__(cls)
            rec.disabled = False
            registry[key] = rec
            return rec

    def execute_aggregate(self, uow, recs):
        # default: no batching with other records; run this one alone
        self.execute(uow)
class ProcessAll(IterateMappersMixin, PostSortRec):
    """Run a DependencyProcessor's process_saves / process_deletes
    against all applicable pending states.
    """
    __slots__ = "dependency_processor", "isdelete", "fromparent", "sort_key"
    def __init__(self, uow, dependency_processor, isdelete, fromparent):
        self.dependency_processor = dependency_processor
        self.sort_key = (
            "ProcessAll",
            self.dependency_processor.sort_key,
            isdelete,
        )
        self.isdelete = isdelete
        self.fromparent = fromparent
        # register the processor under its parent's base mapper so that
        # per-state breakups (SaveUpdateAll/DeleteAll) can locate it
        uow.deps[dependency_processor.parent.base_mapper].add(
            dependency_processor
        )
    def execute(self, uow):
        states = self._elements(uow)
        if self.isdelete:
            self.dependency_processor.process_deletes(uow, states)
        else:
            self.dependency_processor.process_saves(uow, states)
    def per_state_flush_actions(self, uow):
        # this is handled by SaveUpdateAll and DeleteAll,
        # since a ProcessAll should unconditionally be pulled
        # into per-state if either the parent/child mappers
        # are part of a cycle
        return iter([])
    def __repr__(self):
        return "%s(%s, isdelete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            self.isdelete,
        )
    def _elements(self, uow):
        """Yield the non-listonly states across the relevant mappers
        whose delete flag matches this record's ``isdelete``."""
        for mapper in self._mappers(uow):
            for state in uow.mappers[mapper]:
                (isdelete, listonly) = uow.states[state]
                if isdelete == self.isdelete and not listonly:
                    yield state
class PostUpdateAll(PostSortRec):
    """Issue the "post update" statement for all registered states of a
    mapper whose delete flag matches this record's phase."""

    __slots__ = "mapper", "isdelete", "sort_key"

    def __init__(self, uow, mapper, isdelete):
        self.mapper = mapper
        self.isdelete = isdelete
        self.sort_key = ("PostUpdateAll", mapper._sort_key, isdelete)

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute(self, uow):
        persistence = util.preloaded.orm_persistence
        states, cols = uow.post_update_states[self.mapper]
        # restrict to states whose isdelete flag matches this record's
        matching = [
            state for state in states if uow.states[state][0] == self.isdelete
        ]
        persistence.post_update(self.mapper, matching, uow, cols)
class SaveUpdateAll(PostSortRec):
    """INSERT/UPDATE all non-delete, non-listonly states within a
    mapper hierarchy.
    """
    __slots__ = ("mapper", "sort_key")
    def __init__(self, uow, mapper):
        self.mapper = mapper
        self.sort_key = ("SaveUpdateAll", mapper._sort_key)
        # actions of this type are always keyed on the base mapper
        assert mapper is mapper.base_mapper
    @util.preload_module("sqlalchemy.orm.persistence")
    def execute(self, uow):
        util.preloaded.orm_persistence.save_obj(
            self.mapper,
            uow.states_for_mapper_hierarchy(self.mapper, False, False),
            uow,
        )
    def per_state_flush_actions(self, uow):
        """Break this per-mapper action into per-state SaveUpdateState
        actions; invoked when the dependency graph contains cycles."""
        states = list(
            uow.states_for_mapper_hierarchy(self.mapper, False, False)
        )
        base_mapper = self.mapper.base_mapper
        delete_all = DeleteAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = SaveUpdateState(uow, state)
            uow.dependencies.add((action, delete_all))
            yield action
        # also break up the dependency processors registered for this
        # mapper (see ProcessAll.__init__)
        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, False)
    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.mapper)
class DeleteAll(PostSortRec):
    """DELETE all delete-flagged, non-listonly states within a mapper
    hierarchy.
    """
    __slots__ = ("mapper", "sort_key")
    def __init__(self, uow, mapper):
        self.mapper = mapper
        self.sort_key = ("DeleteAll", mapper._sort_key)
        # actions of this type are always keyed on the base mapper
        assert mapper is mapper.base_mapper
    @util.preload_module("sqlalchemy.orm.persistence")
    def execute(self, uow):
        util.preloaded.orm_persistence.delete_obj(
            self.mapper,
            uow.states_for_mapper_hierarchy(self.mapper, True, False),
            uow,
        )
    def per_state_flush_actions(self, uow):
        """Break this per-mapper action into per-state DeleteState
        actions; invoked when the dependency graph contains cycles."""
        states = list(
            uow.states_for_mapper_hierarchy(self.mapper, True, False)
        )
        base_mapper = self.mapper.base_mapper
        save_all = SaveUpdateAll(uow, base_mapper)
        for state in states:
            # keep saves before deletes -
            # this ensures 'row switch' operations work
            action = DeleteState(uow, state)
            uow.dependencies.add((save_all, action))
            yield action
        # also break up the dependency processors registered for this
        # mapper (see ProcessAll.__init__)
        for dep in uow.deps[self.mapper]:
            states_for_prop = uow.filter_states_for_dep(dep, states)
            dep.per_state_flush_actions(uow, states_for_prop, True)
    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.mapper)
class ProcessState(PostSortRec):
    """Run a dependency processor for a single state; the per-state
    form of ProcessAll used when dependency cycles are present."""

    __slots__ = "dependency_processor", "isdelete", "state", "sort_key"

    def __init__(self, uow, dependency_processor, isdelete, state):
        self.dependency_processor = dependency_processor
        self.sort_key = ("ProcessState", dependency_processor.sort_key)
        self.isdelete = isdelete
        self.state = state

    def execute_aggregate(self, uow, recs):
        """Batch together all records in *recs* sharing our class,
        processor and delete flag, then process them in one call."""
        dep = self.dependency_processor
        delete_flag = self.isdelete
        batched = []
        for rec in recs:
            if (
                rec.__class__ is self.__class__
                and rec.dependency_processor is dep
                and rec.isdelete is delete_flag
            ):
                batched.append(rec)
        recs.difference_update(batched)
        states = [self.state] + [rec.state for rec in batched]
        if delete_flag:
            dep.process_deletes(uow, states)
        else:
            dep.process_saves(uow, states)

    def __repr__(self):
        return "%s(%s, %s, delete=%s)" % (
            self.__class__.__name__,
            self.dependency_processor,
            orm_util.state_str(self.state),
            self.isdelete,
        )
class SaveUpdateState(PostSortRec):
    """INSERT/UPDATE a single state; the per-state form of
    SaveUpdateAll used when dependency cycles are present."""

    __slots__ = "state", "mapper", "sort_key"

    def __init__(self, uow, state):
        self.state = state
        self.mapper = state.mapper.base_mapper
        # NOTE(review): the "ProcessState" prefix (rather than
        # "SaveUpdateState") matches upstream; kept as-is to preserve
        # the established sort ordering.
        self.sort_key = ("ProcessState", self.mapper._sort_key)

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute_aggregate(self, uow, recs):
        """Save this state together with all records in *recs* for the
        same mapper, in a single save_obj() call."""
        persistence = util.preloaded.orm_persistence
        mapper = self.mapper
        siblings = [
            rec
            for rec in recs
            if rec.__class__ is self.__class__ and rec.mapper is mapper
        ]
        recs.difference_update(siblings)
        all_states = [self.state] + [rec.state for rec in siblings]
        persistence.save_obj(mapper, all_states, uow)

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state),
        )
class DeleteState(PostSortRec):
    """DELETE a single state; the per-state form of DeleteAll used when
    dependency cycles are present."""

    __slots__ = "state", "mapper", "sort_key"

    def __init__(self, uow, state):
        self.state = state
        self.mapper = state.mapper.base_mapper
        self.sort_key = ("DeleteState", self.mapper._sort_key)

    @util.preload_module("sqlalchemy.orm.persistence")
    def execute_aggregate(self, uow, recs):
        """Delete this state together with all records in *recs* for
        the same mapper, skipping any state no longer flagged for
        deletion."""
        persistence = util.preloaded.orm_persistence
        mapper = self.mapper
        siblings = [
            rec
            for rec in recs
            if rec.__class__ is self.__class__ and rec.mapper is mapper
        ]
        recs.difference_update(siblings)
        candidates = [self.state] + [rec.state for rec in siblings]
        to_delete = [state for state in candidates if uow.states[state][0]]
        persistence.delete_obj(mapper, to_delete, uow)

    def __repr__(self):
        return "%s(%s)" % (
            self.__class__.__name__,
            orm_util.state_str(self.state),
        )

2149
lib/sqlalchemy/orm/util.py Normal file

File diff suppressed because it is too large Load Diff