Mirror of https://gitlab.com/MoonTestUse1/AdministrationItDepartmens.git
(synced 2025-08-14 00:25:46 +02:00)
Commit: Check 09.02.2025
@@ -1,5 +1,5 @@
 # orm/__init__.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # orm/_orm_constructors.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -28,8 +28,8 @@ from .properties import MappedColumn
 from .properties import MappedSQLExpression
 from .query import AliasOption
 from .relationships import _RelationshipArgumentType
+from .relationships import _RelationshipDeclared
 from .relationships import _RelationshipSecondaryArgument
-from .relationships import Relationship
 from .relationships import RelationshipProperty
 from .session import Session
 from .util import _ORMJoin
@@ -70,7 +70,7 @@ if TYPE_CHECKING:
     from ..sql._typing import _TypeEngineArgument
     from ..sql.elements import ColumnElement
     from ..sql.schema import _ServerDefaultArgument
-    from ..sql.schema import FetchedValue
+    from ..sql.schema import _ServerOnUpdateArgument
     from ..sql.selectable import Alias
     from ..sql.selectable import Subquery

@@ -108,6 +108,7 @@ def mapped_column(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     nullable: Optional[
         Union[bool, Literal[SchemaConst.NULL_UNSPECIFIED]]
     ] = SchemaConst.NULL_UNSPECIFIED,
@@ -127,7 +128,7 @@ def mapped_column(
     onupdate: Optional[Any] = None,
     insert_default: Optional[Any] = _NoArg.NO_ARG,
     server_default: Optional[_ServerDefaultArgument] = None,
-    server_onupdate: Optional[FetchedValue] = None,
+    server_onupdate: Optional[_ServerOnUpdateArgument] = None,
     active_history: bool = False,
     quote: Optional[bool] = None,
     system: bool = False,
@@ -255,12 +256,28 @@ def mapped_column(
       be used instead**. This is necessary to disambiguate the callable from
       being interpreted as a dataclass level default.

+      .. seealso::
+
+          :ref:`defaults_default_factory_insert_default`
+
+          :paramref:`_orm.mapped_column.insert_default`
+
+          :paramref:`_orm.mapped_column.default_factory`
+
     :param insert_default: Passed directly to the
       :paramref:`_schema.Column.default` parameter; will supersede the value
       of :paramref:`_orm.mapped_column.default` when present, however
       :paramref:`_orm.mapped_column.default` will always apply to the
       constructor default for a dataclasses mapping.

+      .. seealso::
+
+          :ref:`defaults_default_factory_insert_default`
+
+          :paramref:`_orm.mapped_column.default`
+
+          :paramref:`_orm.mapped_column.default_factory`
+
     :param sort_order: An integer that indicates how this mapped column
       should be sorted compared to the others when the ORM is creating a
       :class:`_schema.Table`. Among mapped columns that have the same
@@ -295,6 +312,15 @@ def mapped_column(
       specifies a default-value generation function that will take place
       as part of the ``__init__()``
       method as generated by the dataclass process.

+      .. seealso::
+
+          :ref:`defaults_default_factory_insert_default`
+
+          :paramref:`_orm.mapped_column.default`
+
+          :paramref:`_orm.mapped_column.insert_default`
+
     :param compare: Specific to
       :ref:`orm_declarative_native_dataclasses`, indicates if this field
       should be included in comparison operations when generating the
@@ -306,6 +332,13 @@ def mapped_column(
       :ref:`orm_declarative_native_dataclasses`, indicates if this field
       should be marked as keyword-only when generating the ``__init__()``.

+    :param hash: Specific to
+      :ref:`orm_declarative_native_dataclasses`, controls if this field
+      is included when generating the ``__hash__()`` method for the mapped
+      class.
+
+      .. versionadded:: 2.0.36
+
     :param \**kw: All remaining keyword arguments are passed through to the
       constructor for the :class:`_schema.Column`.

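For context, the ``hash`` parameter threaded through this diff is the new
per-attribute dataclass option from SQLAlchemy 2.0.36. A minimal sketch of
how it is used, not part of this commit, with hypothetical class and column
names::

    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        MappedAsDataclass,
        mapped_column,
    )


    class Base(DeclarativeBase):
        pass


    # unsafe_hash=True asks the generated dataclass to emit __hash__();
    # the field-level hash= flag then controls which columns participate
    # (by default a field follows its compare= setting)
    class User(MappedAsDataclass, Base, unsafe_hash=True):
        __tablename__ = "user_account"

        id: Mapped[int] = mapped_column(primary_key=True, init=False, hash=True)
        name: Mapped[str] = mapped_column(hash=False)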
@@ -320,7 +353,7 @@ def mapped_column(
         autoincrement=autoincrement,
         insert_default=insert_default,
         attribute_options=_AttributeOptions(
-            init, repr, default, default_factory, compare, kw_only
+            init, repr, default, default_factory, compare, kw_only, hash
         ),
         doc=doc,
         key=key,
@@ -415,12 +448,13 @@ def column_property(
     deferred: bool = False,
     raiseload: bool = False,
     comparator_factory: Optional[Type[PropComparator[_T]]] = None,
-    init: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
+    init: Union[_NoArg, bool] = _NoArg.NO_ARG,
     repr: Union[_NoArg, bool] = _NoArg.NO_ARG,  # noqa: A002
     default: Optional[Any] = _NoArg.NO_ARG,
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     active_history: bool = False,
     expire_on_flush: bool = True,
     info: Optional[_InfoType] = None,
@@ -509,13 +543,43 @@ def column_property(

         :ref:`orm_queryguide_deferred_raiseload`

-    :param init:
+    :param init: Specific to :ref:`orm_declarative_native_dataclasses`,
+      specifies if the mapped attribute should be part of the ``__init__()``
+      method as generated by the dataclass process.
+    :param repr: Specific to :ref:`orm_declarative_native_dataclasses`,
+      specifies if the mapped attribute should be part of the ``__repr__()``
+      method as generated by the dataclass process.
+    :param default_factory: Specific to
+      :ref:`orm_declarative_native_dataclasses`,
+      specifies a default-value generation function that will take place
+      as part of the ``__init__()``
+      method as generated by the dataclass process.

-    :param default:
+      .. seealso::

-    :param default_factory:
+          :ref:`defaults_default_factory_insert_default`

-    :param kw_only:
+          :paramref:`_orm.mapped_column.default`
+
+          :paramref:`_orm.mapped_column.insert_default`
+
+    :param compare: Specific to
+      :ref:`orm_declarative_native_dataclasses`, indicates if this field
+      should be included in comparison operations when generating the
+      ``__eq__()`` and ``__ne__()`` methods for the mapped class.
+
+      .. versionadded:: 2.0.0b4
+
+    :param kw_only: Specific to
+      :ref:`orm_declarative_native_dataclasses`, indicates if this field
+      should be marked as keyword-only when generating the ``__init__()``.
+
+    :param hash: Specific to
+      :ref:`orm_declarative_native_dataclasses`, controls if this field
+      is included when generating the ``__hash__()`` method for the mapped
+      class.
+
+      .. versionadded:: 2.0.36

     """
     return MappedSQLExpression(
@@ -528,6 +592,7 @@ def column_property(
             default_factory,
             compare,
             kw_only,
+            hash,
         ),
         group=group,
         deferred=deferred,
@@ -556,6 +621,7 @@ def composite(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     info: Optional[_InfoType] = None,
     doc: Optional[str] = None,
     **__kw: Any,
@@ -577,6 +643,7 @@ def composite(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     info: Optional[_InfoType] = None,
     doc: Optional[str] = None,
     **__kw: Any,
@@ -598,6 +665,7 @@ def composite(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     info: Optional[_InfoType] = None,
     doc: Optional[str] = None,
     **__kw: Any,
@@ -620,6 +688,7 @@ def composite(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     info: Optional[_InfoType] = None,
     doc: Optional[str] = None,
     **__kw: Any,
@@ -694,6 +763,12 @@ def composite(
       :ref:`orm_declarative_native_dataclasses`, indicates if this field
       should be marked as keyword-only when generating the ``__init__()``.

+    :param hash: Specific to
+      :ref:`orm_declarative_native_dataclasses`, controls if this field
+      is included when generating the ``__hash__()`` method for the mapped
+      class.
+
+      .. versionadded:: 2.0.36
     """
     if __kw:
         raise _no_kw()
@@ -702,7 +777,7 @@ def composite(
         _class_or_attr,
         *attrs,
         attribute_options=_AttributeOptions(
-            init, repr, default, default_factory, compare, kw_only
+            init, repr, default, default_factory, compare, kw_only, hash
         ),
         group=group,
         deferred=deferred,
@@ -716,7 +791,10 @@ def composite(

 def with_loader_criteria(
     entity_or_base: _EntityType[Any],
-    where_criteria: _ColumnExpressionArgument[bool],
+    where_criteria: Union[
+        _ColumnExpressionArgument[bool],
+        Callable[[Any], _ColumnExpressionArgument[bool]],
+    ],
     loader_only: bool = False,
     include_aliases: bool = False,
     propagate_to_loaders: bool = True,
@@ -745,7 +823,7 @@ def with_loader_criteria(

         stmt = select(User).options(
             selectinload(User.addresses),
-            with_loader_criteria(Address, Address.email_address != 'foo'))
+            with_loader_criteria(Address, Address.email_address != "foo"),
+        )

     Above, the "selectinload" for ``User.addresses`` will apply the
@@ -755,8 +833,10 @@ def with_loader_criteria(
     ON clause of the join, in this example using :term:`1.x style`
     queries::

-        q = session.query(User).outerjoin(User.addresses).options(
-            with_loader_criteria(Address, Address.email_address != 'foo'))
+        q = (
+            session.query(User)
+            .outerjoin(User.addresses)
+            .options(with_loader_criteria(Address, Address.email_address != "foo"))
+        )

     The primary purpose of :func:`_orm.with_loader_criteria` is to use
@@ -769,6 +849,7 @@ def with_loader_criteria(

         session = Session(bind=engine)

+
         @event.listens_for(session, "do_orm_execute")
         def _add_filtering_criteria(execute_state):

@@ -780,8 +861,8 @@ def with_loader_criteria(
             execute_state.statement = execute_state.statement.options(
                 with_loader_criteria(
                     SecurityRole,
-                    lambda cls: cls.role.in_(['some_role']),
-                    include_aliases=True
+                    lambda cls: cls.role.in_(["some_role"]),
+                    include_aliases=True,
                 )
             )

@@ -818,16 +899,19 @@ def with_loader_criteria(
     ``A -> A.bs -> B``, the given :func:`_orm.with_loader_criteria`
     option will affect the way in which the JOIN is rendered::

-        stmt = select(A).join(A.bs).options(
-            contains_eager(A.bs),
-            with_loader_criteria(B, B.flag == 1)
+        stmt = (
+            select(A)
+            .join(A.bs)
+            .options(contains_eager(A.bs), with_loader_criteria(B, B.flag == 1))
         )

     Above, the given :func:`_orm.with_loader_criteria` option will
     affect the ON clause of the JOIN that is specified by
     ``.join(A.bs)``, so is applied as expected. The
     :func:`_orm.contains_eager` option has the effect that columns from
-    ``B`` are added to the columns clause::
+    ``B`` are added to the columns clause:
+
+    .. sourcecode:: sql

         SELECT
             b.id, b.a_id, b.data, b.flag,
@@ -893,7 +977,7 @@ def with_loader_criteria(

     .. versionadded:: 1.4.0b2

-    """
+    """  # noqa: E501
     return LoaderCriteriaOption(
         entity_or_base,
         where_criteria,
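The widened ``where_criteria`` annotation above makes the already-supported
lambda form explicit in the typing. A hedged sketch of that form, where
``Order``, ``HasTenant`` and ``current_tenant_id`` are hypothetical names::

    from sqlalchemy import select
    from sqlalchemy.orm import with_loader_criteria

    # the lambda receives each matching entity class and returns the
    # criteria; it is re-evaluated per execution rather than captured once
    stmt = select(Order).options(
        with_loader_criteria(
            HasTenant,
            lambda cls: cls.tenant_id == current_tenant_id,
            include_aliases=True,
        )
    )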
@@ -927,6 +1011,7 @@ def relationship(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     lazy: _LazyLoadArgumentType = "select",
     passive_deletes: Union[Literal["all"], bool] = False,
     passive_updates: bool = True,
@@ -947,7 +1032,7 @@ def relationship(
     omit_join: Literal[None, False] = None,
     sync_backref: Optional[bool] = None,
     **kw: Any,
-) -> Relationship[Any]:
+) -> _RelationshipDeclared[Any]:
     """Provide a relationship between two mapped classes.

     This corresponds to a parent-child or associative table relationship.
@@ -1750,10 +1835,15 @@ def relationship(
       :ref:`orm_declarative_native_dataclasses`, indicates if this field
       should be marked as keyword-only when generating the ``__init__()``.

+    :param hash: Specific to
+      :ref:`orm_declarative_native_dataclasses`, controls if this field
+      is included when generating the ``__hash__()`` method for the mapped
+      class.
+
+      .. versionadded:: 2.0.36
     """

-    return Relationship(
+    return _RelationshipDeclared(
         argument,
         secondary=secondary,
         uselist=uselist,
@@ -1768,7 +1858,7 @@ def relationship(
         cascade=cascade,
         viewonly=viewonly,
         attribute_options=_AttributeOptions(
-            init, repr, default, default_factory, compare, kw_only
+            init, repr, default, default_factory, compare, kw_only, hash
         ),
         lazy=lazy,
         passive_deletes=passive_deletes,
@@ -1803,6 +1893,7 @@ def synonym(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     info: Optional[_InfoType] = None,
     doc: Optional[str] = None,
 ) -> Synonym[Any]:
@@ -1813,14 +1904,13 @@ def synonym(
     e.g.::

         class MyClass(Base):
-            __tablename__ = 'my_table'
+            __tablename__ = "my_table"

             id = Column(Integer, primary_key=True)
             job_status = Column(String(50))

             status = synonym("job_status")

-
     :param name: the name of the existing mapped property. This
       can refer to the string name ORM-mapped attribute
       configured on the class, including column-bound attributes
@@ -1848,11 +1938,13 @@ def synonym(
     :paramref:`.synonym.descriptor` parameter::

         my_table = Table(
-            "my_table", metadata,
-            Column('id', Integer, primary_key=True),
-            Column('job_status', String(50))
+            "my_table",
+            metadata,
+            Column("id", Integer, primary_key=True),
+            Column("job_status", String(50)),
         )

+
         class MyClass:
             @property
             def _job_status_descriptor(self):
@@ -1860,11 +1952,15 @@ def synonym(


         mapper(
-            MyClass, my_table, properties={
+            MyClass,
+            my_table,
+            properties={
                 "job_status": synonym(
-                    "_job_status", map_column=True,
-                    descriptor=MyClass._job_status_descriptor)
-            }
+                    "_job_status",
+                    map_column=True,
+                    descriptor=MyClass._job_status_descriptor,
+                )
+            },
         )

     Above, the attribute named ``_job_status`` is automatically
@@ -1913,7 +2009,7 @@ def synonym(
         descriptor=descriptor,
         comparator_factory=comparator_factory,
         attribute_options=_AttributeOptions(
-            init, repr, default, default_factory, compare, kw_only
+            init, repr, default, default_factory, compare, kw_only, hash
         ),
         doc=doc,
         info=info,
@@ -2014,8 +2110,7 @@ def backref(name: str, **kwargs: Any) -> ORMBackrefArgument:

     E.g.::

-        'items':relationship(
-            SomeItem, backref=backref('parent', lazy='subquery'))
+        "items": relationship(SomeItem, backref=backref("parent", lazy="subquery"))

     The :paramref:`_orm.relationship.backref` parameter is generally
     considered to be legacy; for modern applications, using
@@ -2027,7 +2122,7 @@ def backref(name: str, **kwargs: Any) -> ORMBackrefArgument:

     :ref:`relationships_backref` - background on backrefs

-    """
+    """  # noqa: E501

     return (name, kwargs)

@@ -2044,6 +2139,7 @@ def deferred(
     default_factory: Union[_NoArg, Callable[[], _T]] = _NoArg.NO_ARG,
     compare: Union[_NoArg, bool] = _NoArg.NO_ARG,
     kw_only: Union[_NoArg, bool] = _NoArg.NO_ARG,
+    hash: Union[_NoArg, bool, None] = _NoArg.NO_ARG,  # noqa: A002
     active_history: bool = False,
     expire_on_flush: bool = True,
     info: Optional[_InfoType] = None,
@@ -2078,7 +2174,7 @@ def deferred(
         column,
         *additional_columns,
         attribute_options=_AttributeOptions(
-            init, repr, default, default_factory, compare, kw_only
+            init, repr, default, default_factory, compare, kw_only, hash
         ),
         group=group,
         deferred=True,
@@ -2121,6 +2217,7 @@ def query_expression(
             _NoArg.NO_ARG,
             compare,
             _NoArg.NO_ARG,
+            _NoArg.NO_ARG,
         ),
         expire_on_flush=expire_on_flush,
         info=info,
@@ -2267,6 +2364,16 @@ def aliased(
       supported by all modern databases with regards to right-nested joins
       and generally produces more efficient queries.

+      When :paramref:`_orm.aliased.flat` is combined with
+      :paramref:`_orm.aliased.name`, the resulting joins will alias individual
+      tables using a naming scheme similar to ``<prefix>_<tablename>``. This
+      naming scheme is for visibility / debugging purposes only and the
+      specific scheme is subject to change without notice.
+
+      .. versionadded:: 2.0.32 added support for combining
+         :paramref:`_orm.aliased.name` with :paramref:`_orm.aliased.flat`.
+         Previously, this would raise ``NotImplementedError``.
+
     :param adapt_on_names: if True, more liberal "matching" will be used when
       mapping the mapped columns of the ORM entity to those of the
       given selectable - a name-based match will be performed if the
@@ -2276,17 +2383,21 @@ def aliased(
      aggregate functions::

         class UnitPrice(Base):
-            __tablename__ = 'unit_price'
+            __tablename__ = "unit_price"
             ...
             unit_id = Column(Integer)
             price = Column(Numeric)

-        aggregated_unit_price = Session.query(
-            func.sum(UnitPrice.price).label('price')
-        ).group_by(UnitPrice.unit_id).subquery()
-
-        aggregated_unit_price = aliased(UnitPrice,
-            alias=aggregated_unit_price, adapt_on_names=True)
+        aggregated_unit_price = (
+            Session.query(func.sum(UnitPrice.price).label("price"))
+            .group_by(UnitPrice.unit_id)
+            .subquery()
+        )
+
+        aggregated_unit_price = aliased(
+            UnitPrice, alias=aggregated_unit_price, adapt_on_names=True
+        )

     Above, functions on ``aggregated_unit_price`` which refer to
     ``.price`` will return the
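A usage sketch for the flat+name combination documented in the hunk above,
assuming SQLAlchemy 2.0.32+; the ``Engineer`` joined-inheritance mapping and
its ``primary_language`` column are hypothetical::

    from sqlalchemy import select
    from sqlalchemy.orm import aliased

    # flat=True aliases the individual tables of a joined-inheritance
    # entity instead of wrapping them in a subquery; name= prefixes those
    # per-table aliases, e.g. "e1_employee" / "e1_engineer"
    e1 = aliased(Engineer, flat=True, name="e1")
    stmt = select(e1).where(e1.primary_language == "python")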
@@ -2314,6 +2425,7 @@ def with_polymorphic(
     aliased: bool = False,
     innerjoin: bool = False,
     adapt_on_names: bool = False,
+    name: Optional[str] = None,
     _use_mapper_path: bool = False,
 ) -> AliasedClass[_O]:
     """Produce an :class:`.AliasedClass` construct which specifies
@@ -2385,6 +2497,10 @@ def with_polymorphic(

        .. versionadded:: 1.4.33

+    :param name: Name given to the generated :class:`.AliasedClass`.
+
+       .. versionadded:: 2.0.31
+
     """
     return AliasedInsp._with_polymorphic_factory(
         base,
@@ -2395,6 +2511,7 @@ def with_polymorphic(
         adapt_on_names=adapt_on_names,
         aliased=aliased,
         innerjoin=innerjoin,
+        name=name,
         _use_mapper_path=_use_mapper_path,
     )

@@ -2426,16 +2543,21 @@ def join(
     :meth:`_sql.Select.select_from` method, as in::

         from sqlalchemy.orm import join
-        stmt = select(User).\
-            select_from(join(User, Address, User.addresses)).\
-            filter(Address.email_address=='foo@bar.com')
+
+        stmt = (
+            select(User)
+            .select_from(join(User, Address, User.addresses))
+            .filter(Address.email_address == "foo@bar.com")
+        )

     In modern SQLAlchemy the above join can be written more
     succinctly as::

-        stmt = select(User).\
-            join(User.addresses).\
-            filter(Address.email_address=='foo@bar.com')
+        stmt = (
+            select(User)
+            .join(User.addresses)
+            .filter(Address.email_address == "foo@bar.com")
+        )

     .. warning:: using :func:`_orm.join` directly may not work properly
         with modern ORM options such as :func:`_orm.with_loader_criteria`.
@@ -1,5 +1,5 @@
 # orm/_typing.py
-# Copyright (C) 2022-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2022-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
 # orm/attributes.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -401,7 +401,7 @@ class QueryableAttribute(
             parententity=adapt_to_entity,
         )

-    def of_type(self, entity: _EntityType[Any]) -> QueryableAttribute[_T]:
+    def of_type(self, entity: _EntityType[_T]) -> QueryableAttribute[_T]:
         return QueryableAttribute(
             self.class_,
             self.key,
@@ -503,7 +503,7 @@ def _queryable_attribute_unreduce(
     return getattr(entity, key)


-class InstrumentedAttribute(QueryableAttribute[_T]):
+class InstrumentedAttribute(QueryableAttribute[_T_co]):
     """Class bound instrumented attribute which adds basic
     :term:`descriptor` methods.

@@ -544,14 +544,14 @@ class InstrumentedAttribute(QueryableAttribute[_T]):
     @overload
     def __get__(
         self, instance: None, owner: Any
-    ) -> InstrumentedAttribute[_T]: ...
+    ) -> InstrumentedAttribute[_T_co]: ...

     @overload
-    def __get__(self, instance: object, owner: Any) -> _T: ...
+    def __get__(self, instance: object, owner: Any) -> _T_co: ...

     def __get__(
         self, instance: Optional[object], owner: Any
-    ) -> Union[InstrumentedAttribute[_T], _T]:
+    ) -> Union[InstrumentedAttribute[_T_co], _T_co]:
         if instance is None:
             return self

@@ -2663,7 +2663,7 @@ def init_collection(obj: object, key: str) -> CollectionAdapter:
     This function is used to provide direct access to collection internals
     for a previously unloaded attribute. e.g.::

-        collection_adapter = init_collection(someobject, 'elements')
+        collection_adapter = init_collection(someobject, "elements")
         for elem in values:
             collection_adapter.append_without_event(elem)

@@ -1,5 +1,5 @@
 # orm/base.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -21,6 +21,7 @@ from typing import Generic
 from typing import no_type_check
 from typing import Optional
 from typing import overload
+from typing import Tuple
 from typing import Type
 from typing import TYPE_CHECKING
 from typing import TypeVar
@@ -144,7 +145,7 @@ class PassiveFlag(FastIntFlag):
     """

     NO_AUTOFLUSH = 64
-    """Loader callables should disable autoflush.""",
+    """Loader callables should disable autoflush."""

     NO_RAISE = 128
     """Loader callables should not raise any assertions"""
@@ -282,6 +283,8 @@ _never_set = frozenset([NEVER_SET])

 _none_set = frozenset([None, NEVER_SET, PASSIVE_NO_RESULT])

+_none_only_set = frozenset([None])
+
 _SET_DEFERRED_EXPIRED = util.symbol("SET_DEFERRED_EXPIRED")

 _DEFER_FOR_STATE = util.symbol("DEFER_FOR_STATE")
@@ -579,7 +582,7 @@ class InspectionAttr:

     """

-    __slots__ = ()
+    __slots__: Tuple[str, ...] = ()

     is_selectable = False
     """Return True if this object is an instance of
@@ -1,5 +1,5 @@
 # orm/bulk_persistence.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -76,6 +76,7 @@ def _bulk_insert(
     mapper: Mapper[_O],
     mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
     session_transaction: SessionTransaction,
+    *,
     isstates: bool,
     return_defaults: bool,
     render_nulls: bool,
@@ -89,6 +90,7 @@ def _bulk_insert(
     mapper: Mapper[_O],
     mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
     session_transaction: SessionTransaction,
+    *,
     isstates: bool,
     return_defaults: bool,
     render_nulls: bool,
@@ -101,6 +103,7 @@ def _bulk_insert(
     mapper: Mapper[_O],
     mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
     session_transaction: SessionTransaction,
+    *,
     isstates: bool,
     return_defaults: bool,
     render_nulls: bool,
@@ -116,13 +119,35 @@ def _bulk_insert(
         )

     if isstates:
+        if TYPE_CHECKING:
+            mappings = cast(Iterable[InstanceState[_O]], mappings)
+
         if return_defaults:
             # list of states allows us to attach .key for return_defaults case
             states = [(state, state.dict) for state in mappings]
             mappings = [dict_ for (state, dict_) in states]
         else:
             mappings = [state.dict for state in mappings]
     else:
-        mappings = [dict(m) for m in mappings]
+        if TYPE_CHECKING:
+            mappings = cast(Iterable[Dict[str, Any]], mappings)
+
+        if return_defaults:
+            # use dictionaries given, so that newly populated defaults
+            # can be delivered back to the caller (see #11661). This is **not**
+            # compatible with other use cases such as a session-executed
+            # insert() construct, as this will confuse the case of
+            # insert-per-subclass for joined inheritance cases (see
+            # test_bulk_statements.py::BulkDMLReturningJoinedInhTest).
+            #
+            # So in this conditional, we have **only** called
+            # session.bulk_insert_mappings() which does not have this
+            # requirement
+            mappings = list(mappings)
+        else:
+            # for all other cases we need to establish a local dictionary
+            # so that the incoming dictionaries aren't mutated
+            mappings = [dict(m) for m in mappings]
     _expand_composites(mapper, mappings)

     connection = session_transaction.connection(base_mapper)
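The ``return_defaults`` branch above (issue #11661) keeps the caller's own
dictionaries so that generated values can be handed back. A sketch of the only
code path that reaches it, ``Session.bulk_insert_mappings()``, where ``User``
is a hypothetical mapped class with a database-generated primary key::

    rows = [{"name": "spongebob"}, {"name": "sandy"}]
    session.bulk_insert_mappings(User, rows, return_defaults=True)

    # the same dict objects were used for the INSERT, so the newly
    # generated primary keys are now present in them
    print(rows[0]["id"], rows[1]["id"])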
@@ -218,6 +243,7 @@ def _bulk_insert(
                 state.key = (
                     identity_cls,
                     tuple([dict_[key] for key in identity_props]),
+                    None,
                 )

         if use_orm_insert_stmt is not None:
@@ -230,6 +256,7 @@ def _bulk_update(
     mapper: Mapper[Any],
     mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
     session_transaction: SessionTransaction,
+    *,
     isstates: bool,
     update_changed_only: bool,
     use_orm_update_stmt: Literal[None] = ...,
@@ -242,6 +269,7 @@ def _bulk_update(
     mapper: Mapper[Any],
     mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
     session_transaction: SessionTransaction,
+    *,
     isstates: bool,
     update_changed_only: bool,
     use_orm_update_stmt: Optional[dml.Update] = ...,
@@ -253,6 +281,7 @@ def _bulk_update(
     mapper: Mapper[Any],
     mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
     session_transaction: SessionTransaction,
+    *,
     isstates: bool,
     update_changed_only: bool,
     use_orm_update_stmt: Optional[dml.Update] = None,
@@ -590,6 +619,7 @@ class ORMDMLState(AbstractORMCompileState):
         querycontext = QueryContext(
             compile_state.from_statement_ctx,
             compile_state.select_statement,
+            statement,
             params,
             session,
             load_options,
@@ -614,6 +644,7 @@ class BulkUDCompileState(ORMDMLState):
     _eval_condition = None
     _matched_rows = None
     _identity_token = None
+    _populate_existing: bool = False

     @classmethod
     def can_use_returning(
@@ -646,6 +677,7 @@ class BulkUDCompileState(ORMDMLState):
             {
                 "synchronize_session",
                 "autoflush",
+                "populate_existing",
                 "identity_token",
                 "is_delete_using",
                 "is_update_from",
@@ -830,53 +862,39 @@ class BulkUDCompileState(ORMDMLState):
         return return_crit

     @classmethod
-    def _interpret_returning_rows(cls, mapper, rows):
-        """translate from local inherited table columns to base mapper
-        primary key columns.
+    def _interpret_returning_rows(cls, result, mapper, rows):
+        """return rows that indicate PK cols in mapper.primary_key position
+        for RETURNING rows.

-        Joined inheritance mappers always establish the primary key in terms of
-        the base table. When we UPDATE a sub-table, we can only get
-        RETURNING for the sub-table's columns.
+        Prior to 2.0.36, this method seemed to be written for some kind of
+        inheritance scenario but the scenario was unused for actual joined
+        inheritance, and the function instead seemed to perform some kind of
+        partial translation that would remove non-PK cols if the PK cols
+        happened to be first in the row, but not otherwise. The joined
+        inheritance walk feature here seems to have never been used as it was
+        always skipped by the "local_table" check.

-        Here, we create a lookup from the local sub table's primary key
-        columns to the base table PK columns so that we can get identity
-        key values from RETURNING that's against the joined inheritance
-        sub-table.
-
-        the complexity here is to support more than one level deep of
-        inheritance, where we have to link columns to each other across
-        the inheritance hierarchy.
+        As of 2.0.36 the function strips away non-PK cols and provides the
+        PK cols for the table in mapper PK order.

         """

-        if mapper.local_table is not mapper.base_mapper.local_table:
-            return rows
+        try:
+            if mapper.local_table is not mapper.base_mapper.local_table:
+                # TODO: dive more into how a local table PK is used for fetch
+                # sync, not clear if this is correct as it depends on the
+                # downstream routine to fetch rows using
+                # local_table.primary_key order
+                pk_keys = result._tuple_getter(mapper.local_table.primary_key)
+            else:
+                pk_keys = result._tuple_getter(mapper.primary_key)
+        except KeyError:
+            # can't use these rows, they don't have PK cols in them
+            # this is an unusual case where the user would have used
+            # .return_defaults()
+            return []

-        # this starts as a mapping of
-        # local_pk_col: local_pk_col.
-        # we will then iteratively rewrite the "value" of the dict with
-        # each successive superclass column
-        local_pk_to_base_pk = {pk: pk for pk in mapper.local_table.primary_key}
-
-        for mp in mapper.iterate_to_root():
-            if mp.inherits is None:
-                break
-            elif mp.local_table is mp.inherits.local_table:
-                continue
-
-            t_to_e = dict(mp._table_to_equated[mp.inherits.local_table])
-            col_to_col = {sub_pk: super_pk for super_pk, sub_pk in t_to_e[mp]}
-            for pk, super_ in local_pk_to_base_pk.items():
-                local_pk_to_base_pk[pk] = col_to_col[super_]
-
-        lookup = {
-            local_pk_to_base_pk[lpk]: idx
-            for idx, lpk in enumerate(mapper.local_table.primary_key)
-        }
-        primary_key_convert = [
-            lookup[bpk] for bpk in mapper.base_mapper.primary_key
-        ]
-        return [tuple(row[idx] for idx in primary_key_convert) for row in rows]
+        return [pk_keys(row) for row in rows]

     @classmethod
     def _get_matched_objects_on_criteria(cls, update_options, states):
@@ -1439,6 +1457,9 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):

         new_stmt = statement._clone()

+        if new_stmt.table._annotations["parententity"] is mapper:
+            new_stmt.table = mapper.local_table
+
         # note if the statement has _multi_values, these
         # are passed through to the new statement, which will then raise
         # InvalidRequestError because UPDATE doesn't support multi_values
@@ -1557,10 +1578,20 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
         bind_arguments: _BindArguments,
         conn: Connection,
     ) -> _result.Result:
-
         update_options = execution_options.get(
             "_sa_orm_update_options", cls.default_update_options
         )

+        if update_options._populate_existing:
+            load_options = execution_options.get(
+                "_sa_orm_load_options", QueryContext.default_load_options
+            )
+            load_options += {"_populate_existing": True}
+            execution_options = execution_options.union(
+                {"_sa_orm_load_options": load_options}
+            )
+
         if update_options._dml_strategy not in (
             "orm",
             "auto",
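The ``_populate_existing`` handling above corresponds to the
``populate_existing`` key added to the accepted execution options earlier in
this diff. A hedged sketch of how it would be passed to an ORM-enabled
UPDATE, with a hypothetical ``User`` mapping::

    from sqlalchemy import update

    session.execute(
        update(User).where(User.name == "sandy").values(status="archived"),
        execution_options={"populate_existing": True},
    )
    # already-loaded User instances are refreshed from the statement's
    # results instead of being left with their prior loaded state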
@@ -1716,7 +1747,10 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
             session,
             update_options,
             statement,
+            result.context.compiled_parameters[0],
             [(obj, state, dict_) for obj, state, dict_, _ in matched_objects],
+            result.prefetch_cols(),
+            result.postfetch_cols(),
         )

     @classmethod
@@ -1728,9 +1762,8 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
         returned_defaults_rows = result.returned_defaults_rows
         if returned_defaults_rows:
             pk_rows = cls._interpret_returning_rows(
-                target_mapper, returned_defaults_rows
+                result, target_mapper, returned_defaults_rows
             )
-
             matched_rows = [
                 tuple(row) + (update_options._identity_token,)
                 for row in pk_rows
@@ -1761,6 +1794,7 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
             session,
             update_options,
             statement,
+            result.context.compiled_parameters[0],
             [
                 (
                     obj,
@@ -1769,16 +1803,26 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
                 )
                 for obj in objs
             ],
+            result.prefetch_cols(),
+            result.postfetch_cols(),
         )

     @classmethod
     def _apply_update_set_values_to_objects(
-        cls, session, update_options, statement, matched_objects
+        cls,
+        session,
+        update_options,
+        statement,
+        effective_params,
+        matched_objects,
+        prefetch_cols,
+        postfetch_cols,
     ):
         """apply values to objects derived from an update statement, e.g.
         UPDATE..SET <values>

         """
-
         mapper = update_options._subject_mapper
         target_cls = mapper.class_
         evaluator_compiler = evaluator._EvaluatorCompiler(target_cls)
@@ -1801,7 +1845,35 @@ class BulkORMUpdate(BulkUDCompileState, UpdateDMLState):
         attrib = {k for k, v in resolved_keys_as_propnames}

         states = set()
+
+        to_prefetch = {
+            c
+            for c in prefetch_cols
+            if c.key in effective_params
+            and c in mapper._columntoproperty
+            and c.key not in evaluated_keys
+        }
+        to_expire = {
+            mapper._columntoproperty[c].key
+            for c in postfetch_cols
+            if c in mapper._columntoproperty
+        }.difference(evaluated_keys)
+
+        prefetch_transfer = [
+            (mapper._columntoproperty[c].key, c.key) for c in to_prefetch
+        ]
+
         for obj, state, dict_ in matched_objects:
+            dict_.update(
+                {
+                    col_to_prop: effective_params[c_key]
+                    for col_to_prop, c_key in prefetch_transfer
+                }
+            )
+
+            state._expire_attributes(state.dict, to_expire)
+
             to_evaluate = state.unmodified.intersection(evaluated_keys)

             for key in to_evaluate:
@@ -1858,6 +1930,9 @@ class BulkORMDelete(BulkUDCompileState, DeleteDMLState):

         new_stmt = statement._clone()

+        if new_stmt.table._annotations["parententity"] is mapper:
+            new_stmt.table = mapper.local_table
+
         new_crit = cls._adjust_for_extra_criteria(
             self.global_attributes, mapper
         )
@@ -2018,7 +2093,7 @@ class BulkORMDelete(BulkUDCompileState, DeleteDMLState):

         if returned_defaults_rows:
             pk_rows = cls._interpret_returning_rows(
-                target_mapper, returned_defaults_rows
+                result, target_mapper, returned_defaults_rows
             )

             matched_rows = [
@@ -1,5 +1,5 @@
 # orm/clsregistry.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -287,8 +287,9 @@ class _ModuleMarker(ClsRegistryToken):

     def _remove_item(self, name: str) -> None:
         self.contents.pop(name, None)
-        if not self.contents and self.parent is not None:
-            self.parent._remove_item(self.name)
+        if not self.contents:
+            if self.parent is not None:
+                self.parent._remove_item(self.name)
+            _registries.discard(self)

     def resolve_attr(self, key: str) -> Union[_ModNS, Type[Any]]:
@@ -1,5 +1,5 @@
 # orm/collections.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -21,6 +21,8 @@ provided. One is a bundle of generic decorators that map function arguments
 and return values to events::

     from sqlalchemy.orm.collections import collection
+
+
     class MyClass:
         # ...

@@ -32,7 +34,6 @@ and return values to events::
         def pop(self):
             return self.data.pop()

-
 The second approach is a bundle of targeted decorators that wrap appropriate
 append and remove notifiers around the mutation methods present in the
 standard Python ``list``, ``set`` and ``dict`` interfaces. These could be
@@ -73,10 +74,11 @@ generally not needed. Odds are, the extension method will delegate to a
 method that's already instrumented. For example::

     class QueueIsh(list):
-        def push(self, item):
-            self.append(item)
-        def shift(self):
-            return self.pop(0)
+        def push(self, item):
+            self.append(item)
+
+        def shift(self):
+            return self.pop(0)

 There's no need to decorate these methods. ``append`` and ``pop`` are already
 instrumented as part of the ``list`` interface. Decorating them would fire
@@ -148,10 +150,12 @@ __all__ = [
     "keyfunc_mapping",
     "column_keyed_dict",
     "attribute_keyed_dict",
+    "column_keyed_dict",
+    "attribute_keyed_dict",
+    "MappedCollection",
     "KeyFuncDict",
     # old names in < 2.0
     "mapped_collection",
     "column_mapped_collection",
     "attribute_mapped_collection",
-    "MappedCollection",
 ]

 __instrumentation_mutex = threading.Lock()
@@ -193,9 +197,10 @@ class collection:
     The recipe decorators all require parens, even those that take no
     arguments::

-        @collection.adds('entity')
+        @collection.adds("entity")
         def insert(self, position, entity): ...

+
         @collection.removes_return()
         def popitem(self): ...

@@ -215,11 +220,13 @@ class collection:
         @collection.appender
         def add(self, append): ...

+
         # or, equivalently
         @collection.appender
         @collection.adds(1)
         def add(self, append): ...

+
         # for mapping type, an 'append' may kick out a previous value
         # that occupies that slot. consider d['a'] = 'foo'- any previous
         # value in d['a'] is discarded.
@@ -259,10 +266,11 @@ class collection:
         @collection.remover
         def zap(self, entity): ...

+
         # or, equivalently
         @collection.remover
         @collection.removes_return()
-        def zap(self, ): ...
+        def zap(self): ...

     If the value to remove is not present in the collection, you may
     raise an exception or return None to ignore the error.
@@ -362,7 +370,8 @@ class collection:
         @collection.adds(1)
         def push(self, item): ...

-        @collection.adds('entity')
+
+        @collection.adds("entity")
         def do_stuff(self, thing, entity=None): ...

     """
@@ -1,5 +1,5 @@
 # orm/context.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -104,6 +104,7 @@ class QueryContext:
         "top_level_context",
         "compile_state",
         "query",
+        "user_passed_query",
         "params",
         "load_options",
         "bind_arguments",
@@ -148,6 +149,10 @@ class QueryContext:
         self,
         compile_state: CompileState,
         statement: Union[Select[Any], FromStatement[Any]],
+        user_passed_query: Union[
+            Select[Any],
+            FromStatement[Any],
+        ],
         params: _CoreSingleExecuteParams,
         session: Session,
         load_options: Union[
@@ -162,6 +167,13 @@ class QueryContext:
         self.bind_arguments = bind_arguments or _EMPTY_DICT
         self.compile_state = compile_state
         self.query = statement
+
+        # the query that the end user passed to Session.execute() or similar.
+        # this is usually the same as .query, except in the bulk_persistence
+        # routines where a separate FromStatement is manufactured in the
+        # compile stage; this allows differentiation in that case.
+        self.user_passed_query = user_passed_query
+
         self.session = session
         self.loaders_require_buffering = False
         self.loaders_require_uniquing = False
@@ -169,7 +181,7 @@ class QueryContext:
         self.top_level_context = load_options._sa_top_level_orm_context

         cached_options = compile_state.select_statement._with_options
-        uncached_options = statement._with_options
+        uncached_options = user_passed_query._with_options

         # see issue #7447 , #8399 for some background
         # propagated loader options will be present on loaded InstanceState
@@ -578,6 +590,7 @@ class ORMCompileState(AbstractORMCompileState):
         querycontext = QueryContext(
             compile_state,
             statement,
+            statement,
             params,
             session,
             load_options,
@@ -888,6 +901,8 @@ class FromStatement(GroupedElement, Generative, TypedReturnsRows[_TP]):
         ("_compile_options", InternalTraversal.dp_has_cache_key)
     ]

+    is_from_statement = True
+
     def __init__(
         self,
         entities: Iterable[_ColumnsClauseArgument[Any]],
@@ -905,6 +920,10 @@ class FromStatement(GroupedElement, Generative, TypedReturnsRows[_TP]):
         ]
         self.element = element
         self.is_dml = element.is_dml
+        self.is_select = element.is_select
+        self.is_delete = element.is_delete
+        self.is_insert = element.is_insert
+        self.is_update = element.is_update
         self._label_style = (
             element._label_style if is_select_base(element) else None
         )
@@ -1551,10 +1570,10 @@ class ORMSelectCompileState(ORMCompileState, SelectState):
         )
         statement._label_style = self.label_style

-        # Oracle however does not allow FOR UPDATE on the subquery,
-        # and the Oracle dialect ignores it, plus for PostgreSQL, MySQL
-        # we expect that all elements of the row are locked, so also put it
-        # on the outside (except in the case of PG when OF is used)
+        # Oracle Database however does not allow FOR UPDATE on the subquery,
+        # and the Oracle Database dialects ignore it, plus for PostgreSQL,
+        # MySQL we expect that all elements of the row are locked, so also put
+        # it on the outside (except in the case of PG when OF is used)
         if (
             self._for_update_arg is not None
             and self._for_update_arg.of is None
@@ -3044,7 +3063,10 @@ class _RawColumnEntity(_ColumnEntity):
         if not is_current_entities or column._is_text_clause:
             self._label_name = None
         else:
-            self._label_name = compile_state._label_convention(column)
+            if parent_bundle:
+                self._label_name = column._proxy_key
+            else:
+                self._label_name = compile_state._label_convention(column)

         if parent_bundle:
             parent_bundle._entities.append(self)
@@ -3138,9 +3160,12 @@ class _ORMColumnEntity(_ColumnEntity):
         self.raw_column_index = raw_column_index

         if is_current_entities:
-            self._label_name = compile_state._label_convention(
-                column, col_name=orm_key
-            )
+            if parent_bundle:
+                self._label_name = orm_key if orm_key else column._proxy_key
+            else:
+                self._label_name = compile_state._label_convention(
+                    column, col_name=orm_key
+                )
         else:
             self._label_name = None

@@ -1,5 +1,5 @@
 # orm/decl_api.py
-# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
+# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
 # <see AUTHORS file>
 #
 # This module is part of SQLAlchemy and is released under
@@ -14,7 +14,6 @@ import re
 import typing
 from typing import Any
 from typing import Callable
-from typing import cast
 from typing import ClassVar
 from typing import Dict
 from typing import FrozenSet
@@ -72,13 +71,16 @@ from ..sql.selectable import FromClause
 from ..util import hybridmethod
 from ..util import hybridproperty
 from ..util import typing as compat_typing
+from ..util import warn_deprecated
 from ..util.typing import CallableReference
+from ..util.typing import de_optionalize_union_types
 from ..util.typing import flatten_newtype
 from ..util.typing import is_generic
+from ..util.typing import is_literal
 from ..util.typing import is_newtype
+from ..util.typing import is_pep695
 from ..util.typing import Literal
+from ..util.typing import LITERAL_TYPES
 from ..util.typing import Self

 if TYPE_CHECKING:
@@ -207,7 +209,7 @@ def synonym_for(
     :paramref:`.orm.synonym.descriptor` parameter::

         class MyClass(Base):
-            __tablename__ = 'my_table'
+            __tablename__ = "my_table"

             id = Column(Integer, primary_key=True)
             _job_status = Column("job_status", String(50))
@@ -373,20 +375,21 @@ class declared_attr(interfaces._MappedAttribute[_T], _declared_attr_common):
     for subclasses::

         class Employee(Base):
-            __tablename__ = 'employee'
+            __tablename__ = "employee"

             id: Mapped[int] = mapped_column(primary_key=True)
             type: Mapped[str] = mapped_column(String(50))

             @declared_attr.directive
             def __mapper_args__(cls) -> Dict[str, Any]:
-                if cls.__name__ == 'Employee':
+                if cls.__name__ == "Employee":
                     return {
-                        "polymorphic_on":cls.type,
-                        "polymorphic_identity":"Employee"
+                        "polymorphic_on": cls.type,
+                        "polymorphic_identity": "Employee",
                     }
                 else:
-                    return {"polymorphic_identity":cls.__name__}
+                    return {"polymorphic_identity": cls.__name__}

+
         class Engineer(Employee):
             pass
@@ -485,6 +488,7 @@ def declarative_mixin(cls: Type[_T]) -> Type[_T]:
         from sqlalchemy.orm import declared_attr
         from sqlalchemy.orm import declarative_mixin

+
         @declarative_mixin
         class MyMixin:

@@ -492,10 +496,11 @@ def declarative_mixin(cls: Type[_T]) -> Type[_T]:
         def __tablename__(cls):
             return cls.__name__.lower()

-        __table_args__ = {'mysql_engine': 'InnoDB'}
-        __mapper_args__= {'always_refresh': True}
+        __table_args__ = {"mysql_engine": "InnoDB"}
+        __mapper_args__ = {"always_refresh": True}

         id = Column(Integer, primary_key=True)

+
     class MyModel(MyMixin, Base):
         name = Column(String(1000))
@@ -638,10 +643,10 @@ class DeclarativeBase(

         from sqlalchemy.orm import DeclarativeBase

+
         class Base(DeclarativeBase):
             pass

-
     The above ``Base`` class is now usable as the base for new declarative
     mappings.  The superclass makes use of the ``__init_subclass__()``
     method to set up new classes and metaclasses aren't used.
@@ -664,11 +669,12 @@ class DeclarativeBase(
         bigint = Annotated[int, "bigint"]
         my_metadata = MetaData()

+
         class Base(DeclarativeBase):
             metadata = my_metadata
             type_annotation_map = {
                 str: String().with_variant(String(255), "mysql", "mariadb"),
-                bigint: BigInteger()
+                bigint: BigInteger(),
             }

     Class-level attributes which may be specified include:
@@ -1221,42 +1227,34 @@ class registry:

         self.type_annotation_map.update(
             {
-                sub_type: sqltype
+                de_optionalize_union_types(typ): sqltype
                 for typ, sqltype in type_annotation_map.items()
-                for sub_type in compat_typing.expand_unions(
-                    typ, include_union=True, discard_none=True
-                )
             }
         )

     def _resolve_type(
-        self, python_type: _MatchedOnType
+        self, python_type: _MatchedOnType, _do_fallbacks: bool = True
     ) -> Optional[sqltypes.TypeEngine[Any]]:
-        search: Iterable[Tuple[_MatchedOnType, Type[Any]]]
         python_type_type: Type[Any]
+        search: Iterable[Tuple[_MatchedOnType, Type[Any]]]

         if is_generic(python_type):
             if is_literal(python_type):
-                python_type_type = cast("Type[Any]", python_type)
+                python_type_type = python_type  # type: ignore[assignment]

-                search = (  # type: ignore[assignment]
+                search = (
                     (python_type, python_type_type),
-                    (Literal, python_type_type),
+                    *((lt, python_type_type) for lt in LITERAL_TYPES),  # type: ignore[arg-type] # noqa: E501
                 )
             else:
                 python_type_type = python_type.__origin__
                 search = ((python_type, python_type_type),)
         elif is_newtype(python_type):
             python_type_type = flatten_newtype(python_type)
             search = ((python_type, python_type_type),)
         elif is_pep695(python_type):
             python_type_type = python_type.__value__
-            flattened = None
             search = ((python_type, python_type_type),)
-        else:
-            python_type_type = cast("Type[Any]", python_type)
-            flattened = None
+        elif isinstance(python_type, type):
+            python_type_type = python_type
             search = ((pt, pt) for pt in python_type_type.__mro__)
+        else:
+            python_type_type = python_type  # type: ignore[assignment]
+            search = ((python_type, python_type_type),)

         for pt, flattened in search:
             # we search through full __mro__ for types. however...
@@ -1280,6 +1278,39 @@ class registry:
             if resolved_sql_type is not None:
                 return resolved_sql_type

+        # 2.0 fallbacks
+        if _do_fallbacks:
+            python_type_to_check: Any = None
+            kind = None
+            if is_pep695(python_type):
+                # NOTE: assume there aren't type alias types of new types.
+                python_type_to_check = python_type
+                while is_pep695(python_type_to_check):
+                    python_type_to_check = python_type_to_check.__value__
+                python_type_to_check = de_optionalize_union_types(
+                    python_type_to_check
+                )
+                kind = "TypeAliasType"
+            if is_newtype(python_type):
+                python_type_to_check = flatten_newtype(python_type)
+                kind = "NewType"
+
+            if python_type_to_check is not None:
+                res_after_fallback = self._resolve_type(
+                    python_type_to_check, False
+                )
+                if res_after_fallback is not None:
+                    assert kind is not None
+                    warn_deprecated(
+                        f"Matching the provided {kind} '{python_type}' on "
+                        "its resolved value without matching it in the "
+                        "type_annotation_map is deprecated; add this type to "
+                        "the type_annotation_map to allow it to match "
+                        "explicitly.",
+                        "2.0",
+                    )
+                    return res_after_fallback
+
         return None

     @property
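The deprecation added above means a PEP 695 ``TypeAliasType`` or a ``NewType``
should now be registered explicitly. A sketch using Python 3.12+ syntax, with
illustrative names::

    from sqlalchemy import String
    from sqlalchemy.orm import DeclarativeBase

    type str50 = str  # PEP 695 TypeAliasType


    class Base(DeclarativeBase):
        # list the alias itself in the map rather than relying on the
        # deprecated fallback to its resolved value
        type_annotation_map = {
            str50: String(50),
        }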
@@ -1472,6 +1503,7 @@ class registry:
|
||||
|
||||
Base = mapper_registry.generate_base()
|
||||
|
||||
|
||||
class MyClass(Base):
|
||||
__tablename__ = "my_table"
|
||||
id = Column(Integer, primary_key=True)
|
||||
@@ -1484,6 +1516,7 @@ class registry:
|
||||
|
||||
mapper_registry = registry()
|
||||
|
||||
|
||||
class Base(metaclass=DeclarativeMeta):
|
||||
__abstract__ = True
|
||||
registry = mapper_registry
|
||||
@@ -1649,9 +1682,10 @@ class registry:
|
||||
|
||||
mapper_registry = registry()
|
||||
|
||||
|
||||
@mapper_registry.mapped
|
||||
class Foo:
|
||||
__tablename__ = 'some_table'
|
||||
__tablename__ = "some_table"
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
name = Column(String)
|
||||
@@ -1691,15 +1725,17 @@ class registry:
|
||||
|
||||
mapper_registry = registry()
|
||||
|
||||
|
||||
@mapper_registry.as_declarative_base()
|
||||
class Base:
|
||||
@declared_attr
|
||||
def __tablename__(cls):
|
||||
return cls.__name__.lower()
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
||||
class MyMappedClass(Base):
|
||||
# ...
|
||||
|
||||
class MyMappedClass(Base): ...
|
||||
|
||||
All keyword arguments passed to
|
||||
:meth:`_orm.registry.as_declarative_base` are passed
|
||||
@@ -1729,12 +1765,14 @@ class registry:
|
||||
|
||||
mapper_registry = registry()
|
||||
|
||||
|
||||
class Foo:
|
||||
__tablename__ = 'some_table'
|
||||
__tablename__ = "some_table"
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
name = Column(String)
|
||||
|
||||
|
||||
mapper = mapper_registry.map_declaratively(Foo)
|
||||
|
||||
This function is more conveniently invoked indirectly via either the
|
||||
@@ -1787,12 +1825,14 @@ class registry:
|
||||
my_table = Table(
|
||||
"my_table",
|
||||
mapper_registry.metadata,
|
||||
Column('id', Integer, primary_key=True)
|
||||
Column("id", Integer, primary_key=True),
|
||||
)
|
||||
|
||||
|
||||
class MyClass:
|
||||
pass
|
||||
|
||||
|
||||
mapper_registry.map_imperatively(MyClass, my_table)
|
||||
|
||||
See the section :ref:`orm_imperative_mapping` for complete background
|
||||
@@ -1839,15 +1879,17 @@ def as_declarative(**kw: Any) -> Callable[[Type[_T]], Type[_T]]:
|
||||
|
||||
from sqlalchemy.orm import as_declarative
|
||||
|
||||
|
||||
@as_declarative()
|
||||
class Base:
|
||||
@declared_attr
|
||||
def __tablename__(cls):
|
||||
return cls.__name__.lower()
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
||||
class MyMappedClass(Base):
|
||||
# ...
|
||||
|
||||
class MyMappedClass(Base): ...
|
||||
|
||||
.. seealso::
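
[Reviewer note] Given the deprecation added in the `_resolve_type` hunk above, a `NewType` or PEP 695 alias should now be listed in `type_annotation_map` explicitly rather than matched through its resolved value. A minimal sketch, assuming Python 3.12+ for the `type` statement; the names are illustrative, not part of this patch:

    from typing import NewType

    from sqlalchemy import Integer, String
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    type Str50 = str  # PEP 695 TypeAliasType
    UserId = NewType("UserId", int)


    class Base(DeclarativeBase):
        # listing the alias objects themselves makes them match
        # explicitly, avoiding the deprecated resolved-value fallback
        type_annotation_map = {
            Str50: String(50),
            UserId: Integer(),
        }


    class User(Base):
        __tablename__ = "user_account"

        id: Mapped[UserId] = mapped_column(primary_key=True)
        name: Mapped[Str50]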
@@ -1,5 +1,5 @@
# orm/decl_base.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -65,11 +65,11 @@ from ..sql.schema import Column
from ..sql.schema import Table
from ..util import topological
from ..util.typing import _AnnotationScanType
from ..util.typing import get_args
from ..util.typing import is_fwd_ref
from ..util.typing import is_literal
from ..util.typing import Protocol
from ..util.typing import TypedDict
from ..util.typing import typing_get_args

if TYPE_CHECKING:
    from ._typing import _ClassDict
@@ -431,7 +431,7 @@ class _ImperativeMapperConfig(_MapperConfig):
class _CollectedAnnotation(NamedTuple):
    raw_annotation: _AnnotationScanType
    mapped_container: Optional[Type[Mapped[Any]]]
    extracted_mapped_annotation: Union[Type[Any], str]
    extracted_mapped_annotation: Union[_AnnotationScanType, str]
    is_dataclass: bool
    attr_value: Any
    originating_module: str
@@ -453,6 +453,7 @@ class _ClassScanMapperConfig(_MapperConfig):
        "tablename",
        "mapper_args",
        "mapper_args_fn",
        "table_fn",
        "inherits",
        "single",
        "allow_dataclass_fields",
@@ -759,7 +760,7 @@ class _ClassScanMapperConfig(_MapperConfig):
        _include_dunders = self._include_dunders
        mapper_args_fn = None
        table_args = inherited_table_args = None

        table_fn = None
        tablename = None
        fixed_table = "__table__" in clsdict_view

@@ -840,6 +841,22 @@ class _ClassScanMapperConfig(_MapperConfig):
                    )
                    if not tablename and (not class_mapped or check_decl):
                        tablename = cls_as_Decl.__tablename__
                elif name == "__table__":
                    check_decl = _check_declared_props_nocascade(
                        obj, name, cls
                    )
                    # if a @declared_attr using "__table__" is detected,
                    # wrap up a callable to look for "__table__" from
                    # the final concrete class when we set up a table.
                    # this was fixed by
                    # #11509, regression in 2.0 from version 1.4.
                    if check_decl and not table_fn:
                        # don't even invoke __table__ until we're ready
                        def _table_fn() -> FromClause:
                            return cls_as_Decl.__table__

                        table_fn = _table_fn

                elif name == "__table_args__":
                    check_decl = _check_declared_props_nocascade(
                        obj, name, cls
@@ -856,9 +873,10 @@ class _ClassScanMapperConfig(_MapperConfig):
                        if base is not cls:
                            inherited_table_args = True
                else:
                    # skip all other dunder names, which at the moment
                    # should only be __table__
                    continue
                    # any other dunder names; should not be here
                    # as we have tested for all four names in
                    # _include_dunders
                    assert False
            elif class_mapped:
                if _is_declarative_props(obj) and not obj._quiet:
                    util.warn(
@@ -1031,6 +1049,7 @@ class _ClassScanMapperConfig(_MapperConfig):
        self.table_args = table_args
        self.tablename = tablename
        self.mapper_args_fn = mapper_args_fn
        self.table_fn = table_fn

    def _setup_dataclasses_transforms(self) -> None:
        dataclass_setup_arguments = self.dataclass_setup_arguments
@@ -1048,6 +1067,16 @@ class _ClassScanMapperConfig(_MapperConfig):
                "'@registry.mapped_as_dataclass'"
            )

        # can't create a dataclass if __table__ is already there. This would
        # fail an assertion when calling _get_arguments_for_make_dataclass:
        # assert False, "Mapped[] received without a mapping declaration"
        if "__table__" in self.cls.__dict__:
            raise exc.InvalidRequestError(
                f"Class {self.cls} already defines a '__table__'. "
                "ORM Annotated Dataclasses do not support a pre-existing "
                "'__table__' element"
            )

        warn_for_non_dc_attrs = collections.defaultdict(list)

        def _allow_dataclass_field(
@@ -1279,10 +1308,8 @@ class _ClassScanMapperConfig(_MapperConfig):
            type(attr_value),
            required=False,
            is_dataclass_field=is_dataclass_field,
            expect_mapped=expect_mapped
            and not is_dataclass,  # self.allow_dataclass_fields,
            expect_mapped=expect_mapped and not is_dataclass,
        )

        if extracted is None:
            # ClassVar can come out here
            return None
@@ -1290,9 +1317,9 @@ class _ClassScanMapperConfig(_MapperConfig):
        extracted_mapped_annotation, mapped_container = extracted

        if attr_value is None and not is_literal(extracted_mapped_annotation):
            for elem in typing_get_args(extracted_mapped_annotation):
                if isinstance(elem, str) or is_fwd_ref(
                    elem, check_generic=True
            for elem in get_args(extracted_mapped_annotation):
                if is_fwd_ref(
                    elem, check_generic=True, check_for_plain_string=True
                ):
                    elem = de_stringify_annotation(
                        self.cls,
@@ -1687,7 +1714,11 @@ class _ClassScanMapperConfig(_MapperConfig):

        manager = attributes.manager_of_class(cls)

        if "__table__" not in clsdict_view and table is None:
        if (
            self.table_fn is None
            and "__table__" not in clsdict_view
            and table is None
        ):
            if hasattr(cls, "__table_cls__"):
                table_cls = cast(
                    Type[Table],
@@ -1733,7 +1764,12 @@ class _ClassScanMapperConfig(_MapperConfig):
            )
        else:
            if table is None:
                table = cls_as_Decl.__table__
                if self.table_fn:
                    table = self.set_cls_attribute(
                        "__table__", self.table_fn()
                    )
                else:
                    table = cls_as_Decl.__table__
        if declared_columns:
            for c in declared_columns:
                if not table.c.contains_column(c):
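
[Reviewer note] The `table_fn` machinery added above restores the 1.4 pattern of supplying `__table__` via `@declared_attr` on a mixin (issue #11509). A minimal sketch of the pattern being fixed; the model names are illustrative:

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.orm import DeclarativeBase, declared_attr

    metadata = MetaData()


    class Base(DeclarativeBase):
        metadata = metadata


    class HasTable:
        @declared_attr
        def __table__(cls) -> Table:
            # invoked against the final concrete class once declarative
            # setup is ready, not at class-body evaluation time
            return Table(
                cls.__name__.lower(),
                metadata,
                Column("id", Integer, primary_key=True),
            )


    class Widget(HasTable, Base):
        pass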
@@ -1,5 +1,5 @@
# orm/dependency.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# orm/descriptor_props.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -53,9 +53,10 @@ from .. import util
from ..sql import expression
from ..sql import operators
from ..sql.elements import BindParameter
from ..util.typing import get_args
from ..util.typing import is_fwd_ref
from ..util.typing import is_pep593
from ..util.typing import typing_get_args


if typing.TYPE_CHECKING:
    from ._typing import _InstanceDict
@@ -364,7 +365,7 @@ class CompositeProperty(
        argument = extracted_mapped_annotation

        if is_pep593(argument):
            argument = typing_get_args(argument)[0]
            argument = get_args(argument)[0]

        if argument and self.composite_class is None:
            if isinstance(argument, str) or is_fwd_ref(
@@ -781,7 +782,9 @@ class CompositeProperty(
        elif isinstance(self.prop.composite_class, type) and isinstance(
            value, self.prop.composite_class
        ):
            values = self.prop._composite_values_from_instance(value)
            values = self.prop._composite_values_from_instance(
                value  # type: ignore[arg-type]
            )
        else:
            raise sa_exc.ArgumentError(
                "Can't UPDATE composite attribute %s to %r"
@@ -1,5 +1,5 @@
# orm/dynamic.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -161,10 +161,12 @@ class AppenderMixin(AbstractCollectionWriter[_T]):

            return result.IteratorResult(
                result.SimpleResultMetaData([self.attr.class_.__name__]),
                self.attr._get_collection_history(  # type: ignore[arg-type]
                    attributes.instance_state(self.instance),
                    PassiveFlag.PASSIVE_NO_INITIALIZE,
                ).added_items,
                iter(
                    self.attr._get_collection_history(
                        attributes.instance_state(self.instance),
                        PassiveFlag.PASSIVE_NO_INITIALIZE,
                    ).added_items
                ),
                _source_supports_scalars=True,
            ).scalars()
        else:
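
[Reviewer note] The `iter()` wrapper above exists because `IteratorResult` expects an iterator rather than a bare list; the user-facing path is iterating a `lazy="dynamic"` collection on a parent not yet flushed. A rough sketch, assuming the 2.0 `DynamicMapped` API; model names are illustrative:

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        DynamicMapped,
        Mapped,
        mapped_column,
        relationship,
    )


    class Base(DeclarativeBase):
        pass


    class Parent(Base):
        __tablename__ = "parent"
        id: Mapped[int] = mapped_column(primary_key=True)
        children: DynamicMapped["Child"] = relationship()


    class Child(Base):
        __tablename__ = "child"
        id: Mapped[int] = mapped_column(primary_key=True)
        parent_id: Mapped[int] = mapped_column(ForeignKey("parent.id"))


    # with no Session involved yet, iterating the dynamic collection
    # yields the pending items from collection history via the
    # IteratorResult constructed above
    p = Parent()
    p.children.append(Child())
    pending = list(p.children)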
@@ -1,5 +1,5 @@
# orm/evaluator.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -28,6 +28,7 @@ from .. import exc
from .. import inspect
from ..sql import and_
from ..sql import operators
from ..sql.sqltypes import Concatenable
from ..sql.sqltypes import Integer
from ..sql.sqltypes import Numeric
from ..util import warn_deprecated
@@ -311,6 +312,16 @@ class _EvaluatorCompiler:
    def visit_concat_op_binary_op(
        self, operator, eval_left, eval_right, clause
    ):

        if not issubclass(
            clause.left.type._type_affinity, Concatenable
        ) or not issubclass(clause.right.type._type_affinity, Concatenable):
            raise UnevaluatableError(
                f"Cannot evaluate concatenate operator "
                f'"{operator.__name__}" for '
                f"datatypes {clause.left.type}, {clause.right.type}"
            )

        return self._straight_evaluate(
            lambda a, b: a + b, eval_left, eval_right, clause
        )
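
[Reviewer note] The new `visit_concat_op_binary_op` above lets the "evaluate" synchronization strategy apply string concatenation in Python against objects already loaded in the Session. A hedged usage sketch; the model is minimal and illustrative:

    from sqlalchemy import String, update
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "user_account"

        id: Mapped[int] = mapped_column(primary_key=True)
        name: Mapped[str] = mapped_column(String(30))


    def add_suffix(session: Session) -> None:
        # both sides must be Concatenable (e.g. String); otherwise the
        # evaluator raises UnevaluatableError internally and a different
        # synchronize_session strategy is needed
        session.execute(
            update(User)
            .values(name=User.name + "_archived")
            .execution_options(synchronize_session="evaluate")
        )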
@@ -1,5 +1,5 @@
# orm/events.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -207,10 +207,12 @@ class InstanceEvents(event.Events[ClassManager[Any]]):

        from sqlalchemy import event


        def my_load_listener(target, context):
            print("on load!")

        event.listen(SomeClass, 'load', my_load_listener)

        event.listen(SomeClass, "load", my_load_listener)

    Available targets include:

@@ -466,8 +468,7 @@ class InstanceEvents(event.Events[ClassManager[Any]]):
        the existing loading context is maintained for the object after the
        event is called::

            @event.listens_for(
                SomeClass, "load", restore_load_context=True)
            @event.listens_for(SomeClass, "load", restore_load_context=True)
            def on_load(instance, context):
                instance.some_unloaded_attribute

@@ -502,7 +503,7 @@ class InstanceEvents(event.Events[ClassManager[Any]]):

            :meth:`.SessionEvents.loaded_as_persistent`

        """
        """  # noqa: E501

    def refresh(
        self, target: _O, context: QueryContext, attrs: Optional[Iterable[str]]
@@ -749,6 +750,7 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):

        from sqlalchemy import event


        def my_before_insert_listener(mapper, connection, target):
            # execute a stored procedure upon INSERT,
            # apply the value to the row to be inserted
@@ -756,10 +758,10 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):
                text("select my_special_function(%d)" % target.special_number)
            ).scalar()


        # associate the listener function with SomeClass,
        # to execute during the "before_insert" hook
        event.listen(
            SomeClass, 'before_insert', my_before_insert_listener)
        event.listen(SomeClass, "before_insert", my_before_insert_listener)

    Available targets include:

@@ -925,9 +927,10 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):

            Base = declarative_base()


            @event.listens_for(Base, "instrument_class", propagate=True)
            def on_new_class(mapper, cls_):
                " ... "
                "..."

        :param mapper: the :class:`_orm.Mapper` which is the target
        of this event.
@@ -983,7 +986,7 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):
        symbol which indicates to the :func:`.configure_mappers` call that this
        particular mapper (or hierarchy of mappers, if ``propagate=True`` is
        used) should be skipped in the current configuration run. When one or
        more mappers are skipped, the he "new mappers" flag will remain set,
        more mappers are skipped, the "new mappers" flag will remain set,
        meaning the :func:`.configure_mappers` function will continue to be
        called when mappers are used, to continue to try to configure all
        available mappers.

@@ -992,7 +995,7 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):
        :meth:`.MapperEvents.before_configured`,
        :meth:`.MapperEvents.after_configured`, and
        :meth:`.MapperEvents.mapper_configured`, the
        :meth;`.MapperEvents.before_mapper_configured` event provides for a
        :meth:`.MapperEvents.before_mapper_configured` event provides for a
        meaningful return value when it is registered with the ``retval=True``
        parameter.

@@ -1006,13 +1009,16 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):

            DontConfigureBase = declarative_base()


            @event.listens_for(
                DontConfigureBase,
                "before_mapper_configured", retval=True, propagate=True)
                "before_mapper_configured",
                retval=True,
                propagate=True,
            )
            def dont_configure(mapper, cls):
                return EXT_SKIP


        .. seealso::

            :meth:`.MapperEvents.before_configured`
@@ -1094,9 +1100,9 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):

            from sqlalchemy.orm import Mapper


            @event.listens_for(Mapper, "before_configured")
            def go():
                ...
            def go(): ...

        Contrast this event to :meth:`.MapperEvents.after_configured`,
        which is invoked after the series of mappers has been configured,
@@ -1114,10 +1120,9 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):

            from sqlalchemy.orm import mapper

            @event.listens_for(mapper, "before_configured", once=True)
            def go():
                ...

            @event.listens_for(mapper, "before_configured", once=True)
            def go(): ...

        .. seealso::

@@ -1154,9 +1159,9 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):

            from sqlalchemy.orm import Mapper


            @event.listens_for(Mapper, "after_configured")
            def go():
                # ...
            def go(): ...

        Theoretically this event is called once per
        application, but is actually called any time new mappers
@@ -1168,9 +1173,9 @@ class MapperEvents(event.Events[mapperlib.Mapper[Any]]):

            from sqlalchemy.orm import mapper


            @event.listens_for(mapper, "after_configured", once=True)
            def go():
                # ...
            def go(): ...

        .. seealso::

@@ -1557,9 +1562,11 @@ class SessionEvents(event.Events[Session]):
        from sqlalchemy import event
        from sqlalchemy.orm import sessionmaker


        def my_before_commit(session):
            print("before commit!")


        Session = sessionmaker()

        event.listen(Session, "before_commit", my_before_commit)
@@ -1779,7 +1786,7 @@ class SessionEvents(event.Events[Session]):
            @event.listens_for(session, "after_transaction_create")
            def after_transaction_create(session, transaction):
                if transaction.parent is None:
                    # work with top-level transaction
                    ...  # work with top-level transaction

        To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
        :attr:`.SessionTransaction.nested` attribute::
@@ -1787,8 +1794,7 @@ class SessionEvents(event.Events[Session]):
            @event.listens_for(session, "after_transaction_create")
            def after_transaction_create(session, transaction):
                if transaction.nested:
                    # work with SAVEPOINT transaction

                    ...  # work with SAVEPOINT transaction

        .. seealso::

@@ -1820,7 +1826,7 @@ class SessionEvents(event.Events[Session]):
            @event.listens_for(session, "after_transaction_create")
            def after_transaction_end(session, transaction):
                if transaction.parent is None:
                    # work with top-level transaction
                    ...  # work with top-level transaction

        To detect if the :class:`.SessionTransaction` is a SAVEPOINT, use the
        :attr:`.SessionTransaction.nested` attribute::
@@ -1828,8 +1834,7 @@ class SessionEvents(event.Events[Session]):
            @event.listens_for(session, "after_transaction_create")
            def after_transaction_end(session, transaction):
                if transaction.nested:
                    # work with SAVEPOINT transaction

                    ...  # work with SAVEPOINT transaction

        .. seealso::

@@ -1939,7 +1944,7 @@ class SessionEvents(event.Events[Session]):
            @event.listens_for(Session, "after_soft_rollback")
            def do_something(session, previous_transaction):
                if session.is_active:
                    session.execute("select * from some_table")
                    session.execute(text("select * from some_table"))

        :param session: The target :class:`.Session`.
        :param previous_transaction: The :class:`.SessionTransaction`
@@ -2455,11 +2460,11 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]):

        from sqlalchemy import event

        @event.listens_for(MyClass.collection, 'append', propagate=True)

        @event.listens_for(MyClass.collection, "append", propagate=True)
        def my_append_listener(target, value, initiator):
            print("received append event for target: %s" % target)


    Listeners have the option to return a possibly modified version of the
    value, when the :paramref:`.AttributeEvents.retval` flag is passed to
    :func:`.event.listen` or :func:`.event.listens_for`, such as below,
@@ -2468,11 +2473,12 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]):
        def validate_phone(target, value, oldvalue, initiator):
            "Strip non-numeric characters from a phone number"

            return re.sub(r'\D', '', value)
            return re.sub(r"\D", "", value)


        # setup listener on UserContact.phone attribute, instructing
        # it to use the return value
        listen(UserContact.phone, 'set', validate_phone, retval=True)
        listen(UserContact.phone, "set", validate_phone, retval=True)

    A validation function like the above can also raise an exception
    such as :exc:`ValueError` to halt the operation.
@@ -2482,7 +2488,7 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]):
    as when using mapper inheritance patterns::


        @event.listens_for(MySuperClass.attr, 'set', propagate=True)
        @event.listens_for(MySuperClass.attr, "set", propagate=True)
        def receive_set(target, value, initiator):
            print("value set: %s" % target)

@@ -2715,10 +2721,12 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]):

            from sqlalchemy.orm.attributes import OP_BULK_REPLACE


            @event.listens_for(SomeObject.collection, "bulk_replace")
            def process_collection(target, values, initiator):
                values[:] = [_make_value(value) for value in values]


            @event.listens_for(SomeObject.collection, "append", retval=True)
            def process_collection(target, value, initiator):
                # make sure bulk_replace didn't already do it
@@ -2866,16 +2874,18 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]):

            SOME_CONSTANT = 3.1415926


            class MyClass(Base):
                # ...

                some_attribute = Column(Numeric, default=SOME_CONSTANT)


            @event.listens_for(
                MyClass.some_attribute, "init_scalar",
                retval=True, propagate=True)
                MyClass.some_attribute, "init_scalar", retval=True, propagate=True
            )
            def _init_some_attribute(target, dict_, value):
                dict_['some_attribute'] = SOME_CONSTANT
                dict_["some_attribute"] = SOME_CONSTANT
                return SOME_CONSTANT

        Above, we initialize the attribute ``MyClass.some_attribute`` to the
@@ -2911,9 +2921,10 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]):

            SOME_CONSTANT = 3.1415926


            @event.listens_for(
                MyClass.some_attribute, "init_scalar",
                retval=True, propagate=True)
                MyClass.some_attribute, "init_scalar", retval=True, propagate=True
            )
            def _init_some_attribute(target, dict_, value):
                # will also fire off attribute set events
                target.some_attribute = SOME_CONSTANT
@@ -2950,7 +2961,7 @@ class AttributeEvents(event.Events[QueryableAttribute[Any]]):
            :ref:`examples_instrumentation` - see the
            ``active_column_defaults.py`` example.

        """
        """  # noqa: E501

    def init_collection(
        self,
@@ -3088,8 +3099,8 @@ class QueryEvents(event.Events[Query[Any]]):
        @event.listens_for(Query, "before_compile", retval=True)
        def no_deleted(query):
            for desc in query.column_descriptions:
                if desc['type'] is User:
                    entity = desc['entity']
                if desc["type"] is User:
                    entity = desc["entity"]
                    query = query.filter(entity.deleted == False)
            return query

@@ -3105,12 +3116,11 @@ class QueryEvents(event.Events[Query[Any]]):
    re-establish the query being cached, apply the event adding the
    ``bake_ok`` flag::

        @event.listens_for(
            Query, "before_compile", retval=True, bake_ok=True)
        @event.listens_for(Query, "before_compile", retval=True, bake_ok=True)
        def my_event(query):
            for desc in query.column_descriptions:
                if desc['type'] is User:
                    entity = desc['entity']
                if desc["type"] is User:
                    entity = desc["entity"]
                    query = query.filter(entity.deleted == False)
            return query

@@ -3131,7 +3141,7 @@ class QueryEvents(event.Events[Query[Any]]):

        :ref:`baked_with_before_compile`

    """
    """  # noqa: E501

    def before_compile_update(
        self, query: Query[Any], update_context: BulkUpdate
@@ -3151,11 +3161,13 @@ class QueryEvents(event.Events[Query[Any]]):
        @event.listens_for(Query, "before_compile_update", retval=True)
        def no_deleted(query, update_context):
            for desc in query.column_descriptions:
                if desc['type'] is User:
                    entity = desc['entity']
                if desc["type"] is User:
                    entity = desc["entity"]
                    query = query.filter(entity.deleted == False)

            update_context.values['timestamp'] = datetime.utcnow()
            update_context.values["timestamp"] = datetime.datetime.now(
                datetime.UTC
            )
            return query

    The ``.values`` dictionary of the "update context" object can also
@@ -3183,7 +3195,7 @@ class QueryEvents(event.Events[Query[Any]]):
        :meth:`.QueryEvents.before_compile_delete`


    """
    """  # noqa: E501

    def before_compile_delete(
        self, query: Query[Any], delete_context: BulkDelete
@@ -3202,8 +3214,8 @@ class QueryEvents(event.Events[Query[Any]]):
        @event.listens_for(Query, "before_compile_delete", retval=True)
        def no_deleted(query, delete_context):
            for desc in query.column_descriptions:
                if desc['type'] is User:
                    entity = desc['entity']
                if desc["type"] is User:
                    entity = desc["entity"]
                    query = query.filter(entity.deleted == False)
            return query
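
[Reviewer note] Besides quoting style, one substantive edit above swaps the deprecated `datetime.utcnow()` for a timezone-aware timestamp in the `before_compile_update` example. As a standalone check (requires Python 3.11+ for `datetime.UTC`):

    import datetime

    # deprecated since Python 3.12; returns a naive datetime
    naive = datetime.datetime.utcnow()

    # preferred replacement used in the revised example; datetime.UTC
    # is an alias for datetime.timezone.utc
    aware = datetime.datetime.now(datetime.UTC)

    assert aware.tzinfo is datetime.timezone.utc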
@@ -1,5 +1,5 @@
# orm/exc.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -16,6 +16,7 @@ from typing import Type
from typing import TYPE_CHECKING
from typing import TypeVar

from .util import _mapper_property_as_plain_name
from .. import exc as sa_exc
from .. import util
from ..exc import MultipleResultsFound  # noqa
@@ -191,8 +192,8 @@ class LoaderStrategyException(sa_exc.InvalidRequestError):
            % (
                util.clsname_as_plain_name(actual_strategy_type),
                requesting_property,
                util.clsname_as_plain_name(applied_to_property_type),
                util.clsname_as_plain_name(applies_to),
                _mapper_property_as_plain_name(applied_to_property_type),
                _mapper_property_as_plain_name(applies_to),
            ),
        )
@@ -1,5 +1,5 @@
# orm/identity.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# orm/instrumentation.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# orm/interfaces.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -149,13 +149,17 @@ class ORMColumnDescription(TypedDict):
class _IntrospectsAnnotations:
    __slots__ = ()

    @classmethod
    def _mapper_property_name(cls) -> str:
        return cls.__name__

    def found_in_pep593_annotated(self) -> Any:
        """return a copy of this object to use in declarative when the
        object is found inside of an Annotated object."""

        raise NotImplementedError(
            f"Use of the {self.__class__} construct inside of an "
            f"Annotated object is not yet supported."
            f"Use of the {self._mapper_property_name()!r} "
            "construct inside of an Annotated object is not yet supported."
        )

    def declarative_scan(
@@ -181,7 +185,8 @@ class _IntrospectsAnnotations:
        raise sa_exc.ArgumentError(
            f"Python typing annotation is required for attribute "
            f'"{cls.__name__}.{key}" when primary argument(s) for '
            f'"{self.__class__.__name__}" construct are None or not present'
            f'"{self._mapper_property_name()}" '
            "construct are None or not present"
        )


@@ -201,6 +206,7 @@ class _AttributeOptions(NamedTuple):
    dataclasses_default_factory: Union[_NoArg, Callable[[], Any]]
    dataclasses_compare: Union[_NoArg, bool]
    dataclasses_kw_only: Union[_NoArg, bool]
    dataclasses_hash: Union[_NoArg, bool, None]

    def _as_dataclass_field(self, key: str) -> Any:
        """Return a ``dataclasses.Field`` object given these arguments."""
@@ -218,6 +224,8 @@ class _AttributeOptions(NamedTuple):
            kw["compare"] = self.dataclasses_compare
        if self.dataclasses_kw_only is not _NoArg.NO_ARG:
            kw["kw_only"] = self.dataclasses_kw_only
        if self.dataclasses_hash is not _NoArg.NO_ARG:
            kw["hash"] = self.dataclasses_hash

        if "default" in kw and callable(kw["default"]):
            # callable defaults are ambiguous. deprecate them in favour of
@@ -297,6 +305,7 @@ _DEFAULT_ATTRIBUTE_OPTIONS = _AttributeOptions(
    _NoArg.NO_ARG,
    _NoArg.NO_ARG,
    _NoArg.NO_ARG,
    _NoArg.NO_ARG,
)

_DEFAULT_READONLY_ATTRIBUTE_OPTIONS = _AttributeOptions(
@@ -306,6 +315,7 @@ _DEFAULT_READONLY_ATTRIBUTE_OPTIONS = _AttributeOptions(
    _NoArg.NO_ARG,
    _NoArg.NO_ARG,
    _NoArg.NO_ARG,
    _NoArg.NO_ARG,
)


@@ -675,27 +685,37 @@ class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators):

        # definition of custom PropComparator subclasses

        from sqlalchemy.orm.properties import \
            ColumnProperty,\
            Composite,\
            Relationship
        from sqlalchemy.orm.properties import (
            ColumnProperty,
            Composite,
            Relationship,
        )


        class MyColumnComparator(ColumnProperty.Comparator):
            def __eq__(self, other):
                return self.__clause_element__() == other


        class MyRelationshipComparator(Relationship.Comparator):
            def any(self, expression):
                "define the 'any' operation"
                # ...


        class MyCompositeComparator(Composite.Comparator):
            def __gt__(self, other):
                "redefine the 'greater than' operation"

                return sql.and_(*[a>b for a, b in
                                  zip(self.__clause_element__().clauses,
                                      other.__composite_values__())])
                return sql.and_(
                    *[
                        a > b
                        for a, b in zip(
                            self.__clause_element__().clauses,
                            other.__composite_values__(),
                        )
                    ]
                )


        # application of custom PropComparator subclasses
@@ -703,17 +723,22 @@ class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators):
        from sqlalchemy.orm import column_property, relationship, composite
        from sqlalchemy import Column, String

        class SomeMappedClass(Base):
            some_column = column_property(Column("some_column", String),
                comparator_factory=MyColumnComparator)

            some_relationship = relationship(SomeOtherClass,
                comparator_factory=MyRelationshipComparator)
        class SomeMappedClass(Base):
            some_column = column_property(
                Column("some_column", String),
                comparator_factory=MyColumnComparator,
            )

            some_relationship = relationship(
                SomeOtherClass, comparator_factory=MyRelationshipComparator
            )

            some_composite = composite(
                Column("a", String), Column("b", String),
                comparator_factory=MyCompositeComparator
            )
                Column("a", String),
                Column("b", String),
                comparator_factory=MyCompositeComparator,
            )

        Note that for column-level operator redefinition, it's usually
        simpler to define the operators at the Core level, using the
@@ -855,8 +880,9 @@ class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators):

        e.g.::

            query.join(Company.employees.of_type(Engineer)).\
                filter(Engineer.name=='foo')
            query.join(Company.employees.of_type(Engineer)).filter(
                Engineer.name == "foo"
            )

        :param \class_: a class or mapper indicating that criterion will be
        against this specific subclass.
@@ -882,11 +908,11 @@ class PropComparator(SQLORMOperations[_T_co], Generic[_T_co], ColumnOperators):


            stmt = select(User).join(
                User.addresses.and_(Address.email_address != 'foo')
                User.addresses.and_(Address.email_address != "foo")
            )

            stmt = select(User).options(
                joinedload(User.addresses.and_(Address.email_address != 'foo'))
                joinedload(User.addresses.and_(Address.email_address != "foo"))
            )

        .. versionadded:: 1.4
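
[Reviewer note] The new `dataclasses_hash` slot threaded through `_AttributeOptions` above backs the newly added `hash` parameter on `mapped_column()` and related constructs. A sketch with ORM Annotated Dataclasses; treat the exact release gating as an assumption of this note:

    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        MappedAsDataclass,
        mapped_column,
    )


    class Base(MappedAsDataclass, DeclarativeBase, unsafe_hash=True):
        pass


    class Account(Base):
        __tablename__ = "account"

        # participates in the generated __hash__
        id: Mapped[int] = mapped_column(primary_key=True, hash=True)
        # excluded, as with dataclasses.field(hash=False)
        note: Mapped[str] = mapped_column(default="", hash=False)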
@@ -1,5 +1,5 @@
# orm/loading.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1010,21 +1010,38 @@ def _instance_processor(
                # loading does not apply
                assert only_load_props is None

                callable_ = _load_subclass_via_in(
                    context,
                    path,
                    selectin_load_via,
                    _polymorphic_from,
                    option_entities,
                )
                PostLoad.callable_for_path(
                    context,
                    load_path,
                    selectin_load_via.mapper,
                    selectin_load_via,
                    callable_,
                    selectin_load_via,
                )
                if selectin_load_via.is_mapper:
                    _load_supers = []
                    _endmost_mapper = selectin_load_via
                    while (
                        _endmost_mapper
                        and _endmost_mapper is not _polymorphic_from
                    ):
                        _load_supers.append(_endmost_mapper)
                        _endmost_mapper = _endmost_mapper.inherits
                else:
                    _load_supers = [selectin_load_via]

                for _selectinload_entity in _load_supers:
                    if PostLoad.path_exists(
                        context, load_path, _selectinload_entity
                    ):
                        continue
                    callable_ = _load_subclass_via_in(
                        context,
                        path,
                        _selectinload_entity,
                        _polymorphic_from,
                        option_entities,
                    )
                    PostLoad.callable_for_path(
                        context,
                        load_path,
                        _selectinload_entity.mapper,
                        _selectinload_entity,
                        callable_,
                        _selectinload_entity,
                    )

    post_load = PostLoad.for_context(context, load_path, only_load_props)
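
[Reviewer note] The `_load_supers` walk above extends selectin polymorphic loading to every intermediate mapper between the loaded entity and the polymorphic base. In mapping terms, an illustrative three-level hierarchy:

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


    class Base(DeclarativeBase):
        pass


    class Employee(Base):
        __tablename__ = "employee"
        id: Mapped[int] = mapped_column(primary_key=True)
        type: Mapped[str]
        __mapper_args__ = {
            "polymorphic_on": "type",
            "polymorphic_identity": "employee",
        }


    class Manager(Employee):
        __tablename__ = "manager"
        id: Mapped[int] = mapped_column(
            ForeignKey("employee.id"), primary_key=True
        )
        __mapper_args__ = {
            "polymorphic_identity": "manager",
            "polymorphic_load": "selectin",
        }


    class Director(Manager):
        __tablename__ = "director"
        id: Mapped[int] = mapped_column(
            ForeignKey("manager.id"), primary_key=True
        )
        __mapper_args__ = {
            "polymorphic_identity": "director",
            "polymorphic_load": "selectin",
        }

    # loading Director rows can now schedule a post-load for each
    # inherited mapper in the chain, not only the endmost subclass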
@@ -1,5 +1,5 @@
# orm/mapped_collection.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -29,6 +29,8 @@ from .. import util
from ..sql import coercions
from ..sql import expression
from ..sql import roles
from ..util.langhelpers import Missing
from ..util.langhelpers import MissingOr
from ..util.typing import Literal

if TYPE_CHECKING:
@@ -40,8 +42,6 @@ if TYPE_CHECKING:
_KT = TypeVar("_KT", bound=Any)
_VT = TypeVar("_VT", bound=Any)

_F = TypeVar("_F", bound=Callable[[Any], Any])


class _PlainColumnGetter(Generic[_KT]):
    """Plain column getter, stores collection of Column objects
@@ -70,7 +70,7 @@ class _PlainColumnGetter(Generic[_KT]):
    def _cols(self, mapper: Mapper[_KT]) -> Sequence[ColumnElement[_KT]]:
        return self.cols

    def __call__(self, value: _KT) -> Union[_KT, Tuple[_KT, ...]]:
    def __call__(self, value: _KT) -> MissingOr[Union[_KT, Tuple[_KT, ...]]]:
        state = base.instance_state(value)
        m = base._state_mapper(state)

@@ -83,7 +83,7 @@ class _PlainColumnGetter(Generic[_KT]):
        else:
            obj = key[0]
            if obj is None:
                return _UNMAPPED_AMBIGUOUS_NONE
                return Missing
            else:
                return obj

@@ -198,9 +198,6 @@ def column_keyed_dict(
    )


_UNMAPPED_AMBIGUOUS_NONE = object()


class _AttrGetter:
    __slots__ = ("attr_name", "getter")

@@ -217,9 +214,9 @@ class _AttrGetter:
            dict_ = state.dict
            obj = dict_.get(self.attr_name, base.NO_VALUE)
            if obj is None:
                return _UNMAPPED_AMBIGUOUS_NONE
                return Missing
        else:
            return _UNMAPPED_AMBIGUOUS_NONE
            return Missing

        return obj

@@ -277,7 +274,7 @@ def attribute_keyed_dict(


def keyfunc_mapping(
    keyfunc: _F,
    keyfunc: Callable[[Any], Any],
    *,
    ignore_unpopulated_attribute: bool = False,
) -> Type[KeyFuncDict[_KT, Any]]:
@@ -353,7 +350,7 @@ class KeyFuncDict(Dict[_KT, _VT]):

    def __init__(
        self,
        keyfunc: _F,
        keyfunc: Callable[[Any], Any],
        *dict_args: Any,
        ignore_unpopulated_attribute: bool = False,
    ) -> None:
@@ -377,7 +374,7 @@ class KeyFuncDict(Dict[_KT, _VT]):
    @classmethod
    def _unreduce(
        cls,
        keyfunc: _F,
        keyfunc: Callable[[Any], Any],
        values: Dict[_KT, _KT],
        adapter: Optional[CollectionAdapter] = None,
    ) -> "KeyFuncDict[_KT, _KT]":
@@ -464,7 +461,7 @@ class KeyFuncDict(Dict[_KT, _VT]):
                )
            else:
                return
        elif key is _UNMAPPED_AMBIGUOUS_NONE:
        elif key is Missing:
            if not self.ignore_unpopulated_attribute:
                self._raise_for_unpopulated(
                    value, _sa_initiator, warn_only=True
@@ -492,7 +489,7 @@ class KeyFuncDict(Dict[_KT, _VT]):
                    value, _sa_initiator, warn_only=False
                )
                return
        elif key is _UNMAPPED_AMBIGUOUS_NONE:
        elif key is Missing:
            if not self.ignore_unpopulated_attribute:
                self._raise_for_unpopulated(
                    value, _sa_initiator, warn_only=True
@@ -514,7 +511,7 @@ class KeyFuncDict(Dict[_KT, _VT]):


def _mapped_collection_cls(
    keyfunc: _F, ignore_unpopulated_attribute: bool
    keyfunc: Callable[[Any], Any], ignore_unpopulated_attribute: bool
) -> Type[KeyFuncDict[_KT, _KT]]:
    class _MKeyfuncMapped(KeyFuncDict[_KT, _KT]):
        def __init__(self, *dict_args: Any) -> None:
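
[Reviewer note] `Missing` above is the shared langhelpers sentinel replacing the module-private `_UNMAPPED_AMBIGUOUS_NONE`; from the user side its effect surfaces through `ignore_unpopulated_attribute`. A minimal sketch:

    from typing import Optional

    from sqlalchemy import ForeignKey
    from sqlalchemy.orm import (
        DeclarativeBase,
        Mapped,
        attribute_keyed_dict,
        mapped_column,
        relationship,
    )


    class Base(DeclarativeBase):
        pass


    class Node(Base):
        __tablename__ = "node"
        id: Mapped[int] = mapped_column(primary_key=True)
        items: Mapped[dict[str, "Item"]] = relationship(
            collection_class=attribute_keyed_dict(
                "name", ignore_unpopulated_attribute=True
            )
        )


    class Item(Base):
        __tablename__ = "item"
        id: Mapped[int] = mapped_column(primary_key=True)
        node_id: Mapped[int] = mapped_column(ForeignKey("node.id"))
        name: Mapped[Optional[str]]

    # an Item whose "name" is unpopulated resolves its key to the
    # Missing sentinel and is skipped instead of raising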
@@ -1,5 +1,5 @@
# orm/mapper.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -296,6 +296,17 @@ class Mapper(
        particular primary key value. A "partial primary key" can occur if
        one has mapped to an OUTER JOIN, for example.

        The :paramref:`.orm.Mapper.allow_partial_pks` parameter also
        indicates to the ORM relationship lazy loader, when loading a
        many-to-one related object, if a composite primary key that has
        partial NULL values should result in an attempt to load from the
        database, or if a load attempt is not necessary.

        .. versionadded:: 2.0.36 :paramref:`.orm.Mapper.allow_partial_pks`
           is consulted by the relationship lazy loader strategy, such that
           when set to False, a SELECT for a composite primary key that
           has partial NULL values will not be emitted.

    :param batch: Defaults to ``True``, indicating that save operations
        of multiple entities can be batched together for efficiency.
        Setting to False indicates
@@ -318,7 +329,7 @@ class Mapper(

        class User(Base):
            __table__ = user_table
            __mapper_args__ = {'column_prefix':'_'}
            __mapper_args__ = {"column_prefix": "_"}

    The above mapping will assign the ``user_id``, ``user_name``, and
    ``password`` columns to attributes named ``_user_id``,
@@ -442,7 +453,7 @@ class Mapper(
        mapping of the class to an alternate selectable, for loading
        only.

    .. seealso::
        .. seealso::

            :ref:`relationship_aliased_class` - the new pattern that removes
            the need for the :paramref:`_orm.Mapper.non_primary` flag.
@@ -534,14 +545,14 @@ class Mapper(
        base-most mapped :class:`.Table`::

            class Employee(Base):
                __tablename__ = 'employee'
                __tablename__ = "employee"

                id: Mapped[int] = mapped_column(primary_key=True)
                discriminator: Mapped[str] = mapped_column(String(50))

                __mapper_args__ = {
                    "polymorphic_on":discriminator,
                    "polymorphic_identity":"employee"
                    "polymorphic_on": discriminator,
                    "polymorphic_identity": "employee",
                }

        It may also be specified
@@ -550,17 +561,18 @@ class Mapper(
        approach::

            class Employee(Base):
                __tablename__ = 'employee'
                __tablename__ = "employee"

                id: Mapped[int] = mapped_column(primary_key=True)
                discriminator: Mapped[str] = mapped_column(String(50))

                __mapper_args__ = {
                    "polymorphic_on":case(
                    "polymorphic_on": case(
                        (discriminator == "EN", "engineer"),
                        (discriminator == "MA", "manager"),
                        else_="employee"),
                    "polymorphic_identity":"employee"
                        else_="employee",
                    ),
                    "polymorphic_identity": "employee",
                }

        It may also refer to any attribute using its string name,
@@ -568,14 +580,14 @@ class Mapper(
        configurations::

            class Employee(Base):
                __tablename__ = 'employee'
                __tablename__ = "employee"

                id: Mapped[int] = mapped_column(primary_key=True)
                discriminator: Mapped[str]

                __mapper_args__ = {
                    "polymorphic_on": "discriminator",
                    "polymorphic_identity": "employee"
                    "polymorphic_identity": "employee",
                }

        When setting ``polymorphic_on`` to reference an
@@ -592,6 +604,7 @@ class Mapper(
            from sqlalchemy import event
            from sqlalchemy.orm import object_mapper


            @event.listens_for(Employee, "init", propagate=True)
            def set_identity(instance, *arg, **kw):
                mapper = object_mapper(instance)
@@ -3248,14 +3261,9 @@ class Mapper(
        The resulting structure is a dictionary of columns mapped
        to lists of equivalent columns, e.g.::

            {
                tablea.col1:
                    {tableb.col1, tablec.col1},
                tablea.col2:
                    {tabled.col2}
            }
            {tablea.col1: {tableb.col1, tablec.col1}, tablea.col2: {tabled.col2}}

        """
        """  # noqa: E501
        result: _EquivalentColumnMap = {}

        def visit_binary(binary):
@@ -3728,14 +3736,15 @@ class Mapper(

        given::

            class A:
                ...
            class A: ...


            class B(A):
                __mapper_args__ = {"polymorphic_load": "selectin"}

            class C(B):
                ...

            class C(B): ...


            class D(B):
                __mapper_args__ = {"polymorphic_load": "selectin"}
@@ -3805,6 +3814,7 @@ class Mapper(
        this subclass as a SELECT with IN.

        """

        strategy_options = util.preloaded.orm_strategy_options

        assert self.inherits
@@ -3828,7 +3838,7 @@ class Mapper(
                classes_to_include.add(m)
            m = m.inherits

        for prop in self.attrs:
        for prop in self.column_attrs + self.relationships:
            # skip prop keys that are not instrumented on the mapped class.
            # this is primarily the "_sa_polymorphic_on" property that gets
            # created for an ad-hoc polymorphic_on SQL expression, issue #8704
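
[Reviewer note] The expanded `allow_partial_pks` documentation above translates to the following configuration sketch; the OUTER JOIN mapping that actually produces partial keys is implied rather than shown:

    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


    class Base(DeclarativeBase):
        pass


    class Assoc(Base):
        __tablename__ = "assoc"

        a_id: Mapped[int] = mapped_column(primary_key=True)
        b_id: Mapped[int] = mapped_column(primary_key=True)

        # with False, a row whose composite primary key is partially
        # NULL is not considered present; as of 2.0.36 the relationship
        # lazy loader also skips emitting a SELECT for such a key
        __mapper_args__ = {"allow_partial_pks": False}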
@@ -1,5 +1,5 @@
# orm/path_registry.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -35,7 +35,7 @@ from ..sql.cache_key import HasCacheKey

if TYPE_CHECKING:
    from ._typing import _InternalEntityType
    from .interfaces import MapperProperty
    from .interfaces import StrategizedProperty
    from .mapper import Mapper
    from .relationships import RelationshipProperty
    from .util import AliasedInsp
@@ -57,13 +57,13 @@ else:
    _SerializedPath = List[Any]
    _StrPathToken = str
_PathElementType = Union[
    _StrPathToken, "_InternalEntityType[Any]", "MapperProperty[Any]"
    _StrPathToken, "_InternalEntityType[Any]", "StrategizedProperty[Any]"
]

# the representation is in fact
# a tuple with alternating:
# [_InternalEntityType[Any], Union[str, MapperProperty[Any]],
#  _InternalEntityType[Any], Union[str, MapperProperty[Any]], ...]
# [_InternalEntityType[Any], Union[str, StrategizedProperty[Any]],
#  _InternalEntityType[Any], Union[str, StrategizedProperty[Any]], ...]
# this might someday be a tuple of 2-tuples instead, but paths can be
# chopped at odd intervals as well so this is less flexible
_PathRepresentation = Tuple[_PathElementType, ...]
@@ -71,7 +71,7 @@ _PathRepresentation = Tuple[_PathElementType, ...]
# NOTE: these names are weird since the array is 0-indexed,
# the "_Odd" entries are at 0, 2, 4, etc
_OddPathRepresentation = Sequence["_InternalEntityType[Any]"]
_EvenPathRepresentation = Sequence[Union["MapperProperty[Any]", str]]
_EvenPathRepresentation = Sequence[Union["StrategizedProperty[Any]", str]]


log = logging.getLogger(__name__)
@@ -197,7 +197,9 @@ class PathRegistry(HasCacheKey):
    ) -> AbstractEntityRegistry: ...

    @overload
    def __getitem__(self, entity: MapperProperty[Any]) -> PropRegistry: ...
    def __getitem__(
        self, entity: StrategizedProperty[Any]
    ) -> PropRegistry: ...

    def __getitem__(
        self,
@@ -206,7 +208,7 @@ class PathRegistry(HasCacheKey):
            int,
            slice,
            _InternalEntityType[Any],
            MapperProperty[Any],
            StrategizedProperty[Any],
        ],
    ) -> Union[
        TokenRegistry,
@@ -225,7 +227,7 @@ class PathRegistry(HasCacheKey):
    def pairs(
        self,
    ) -> Iterator[
        Tuple[_InternalEntityType[Any], Union[str, MapperProperty[Any]]]
        Tuple[_InternalEntityType[Any], Union[str, StrategizedProperty[Any]]]
    ]:
        odd_path = cast(_OddPathRepresentation, self.path)
        even_path = cast(_EvenPathRepresentation, odd_path)
@@ -531,15 +533,16 @@ class PropRegistry(PathRegistry):
    inherit_cache = True
    is_property = True

    prop: MapperProperty[Any]
    prop: StrategizedProperty[Any]
    mapper: Optional[Mapper[Any]]
    entity: Optional[_InternalEntityType[Any]]

    def __init__(
        self, parent: AbstractEntityRegistry, prop: MapperProperty[Any]
        self, parent: AbstractEntityRegistry, prop: StrategizedProperty[Any]
    ):

        # restate this path in terms of the
        # given MapperProperty's parent.
        # given StrategizedProperty's parent.
        insp = cast("_InternalEntityType[Any]", parent[-1])
        natural_parent: AbstractEntityRegistry = parent

@@ -563,7 +566,7 @@ class PropRegistry(PathRegistry):
        # entities are used.
        #
        # here we are trying to distinguish between a path that starts
        # on a the with_polymorhpic entity vs. one that starts on a
        # on a with_polymorphic entity vs. one that starts on a
        # normal entity that introduces a with_polymorphic() in the
        # middle using of_type():
        #
@@ -1,5 +1,5 @@
# orm/persistence.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -1,5 +1,5 @@
# orm/properties.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -43,7 +43,6 @@ from .interfaces import PropComparator
from .interfaces import StrategizedProperty
from .relationships import RelationshipProperty
from .util import de_stringify_annotation
from .util import de_stringify_union_elements
from .. import exc as sa_exc
from .. import ForeignKey
from .. import log
@@ -55,12 +54,12 @@ from ..sql.schema import Column
from ..sql.schema import SchemaConst
from ..sql.type_api import TypeEngine
from ..util.typing import de_optionalize_union_types
from ..util.typing import get_args
from ..util.typing import includes_none
from ..util.typing import is_fwd_ref
from ..util.typing import is_optional_union
from ..util.typing import is_pep593
from ..util.typing import is_union
from ..util.typing import is_pep695
from ..util.typing import Self
from ..util.typing import typing_get_args

if TYPE_CHECKING:
    from ._typing import _IdentityKeyType
@@ -279,8 +278,8 @@ class ColumnProperty(

            name = Column(String(64))
            extension = Column(String(8))
            filename = column_property(name + '.' + extension)
            path = column_property('C:/' + filename.expression)
            filename = column_property(name + "." + extension)
            path = column_property("C:/" + filename.expression)

        .. seealso::

@@ -688,7 +687,7 @@ class MappedColumn(
            supercls_mapper = class_mapper(decl_scan.inherits, False)

            colname = column.name if column.name is not None else key
            column = self.column = supercls_mapper.local_table.c.get(  # type: ignore  # noqa: E501
            column = self.column = supercls_mapper.local_table.c.get(  # type: ignore[assignment]  # noqa: E501
                colname, column
            )

@@ -737,47 +736,44 @@ class MappedColumn(
    ) -> None:
        sqltype = self.column.type

        if isinstance(argument, str) or is_fwd_ref(
            argument, check_generic=True
        if is_fwd_ref(
            argument, check_generic=True, check_for_plain_string=True
        ):
            assert originating_module is not None
            argument = de_stringify_annotation(
                cls, argument, originating_module, include_generic=True
            )

        if is_union(argument):
            assert originating_module is not None
            argument = de_stringify_union_elements(
                cls, argument, originating_module
            )

        nullable = is_optional_union(argument)
        nullable = includes_none(argument)

        if not self._has_nullable:
            self.column.nullable = nullable

        our_type = de_optionalize_union_types(argument)

        use_args_from = None
        find_mapped_in: Tuple[Any, ...] = ()
        our_type_is_pep593 = False
        raw_pep_593_type = None

        if is_pep593(our_type):
            our_type_is_pep593 = True

            pep_593_components = typing_get_args(our_type)
            pep_593_components = get_args(our_type)
            raw_pep_593_type = pep_593_components[0]
            if is_optional_union(raw_pep_593_type):
            if nullable:
                raw_pep_593_type = de_optionalize_union_types(raw_pep_593_type)
            find_mapped_in = pep_593_components[1:]
        elif is_pep695(argument) and is_pep593(argument.__value__):
            # do not support nested annotation inside unions ets
            find_mapped_in = get_args(argument.__value__)[1:]

                nullable = True
                if not self._has_nullable:
                    self.column.nullable = nullable
            for elem in pep_593_components[1:]:
                if isinstance(elem, MappedColumn):
                    use_args_from = elem
                    break
        use_args_from: Optional[MappedColumn[Any]]
        for elem in find_mapped_in:
            if isinstance(elem, MappedColumn):
                use_args_from = elem
                break
        else:
            our_type_is_pep593 = False
            raw_pep_593_type = None
            use_args_from = None

        if use_args_from is not None:
            if (
@@ -851,6 +847,7 @@ class MappedColumn(
        if sqltype._isnull and not self.column.foreign_keys:
            new_sqltype = None

        checks: List[Any]
        if our_type_is_pep593:
            checks = [our_type, raw_pep_593_type]
        else:
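
[Reviewer note] The `find_mapped_in` handling above is what allows a `mapped_column()` to ride along inside `Annotated[...]`, now including an `Annotated` hidden behind a PEP 695 alias. A minimal sketch of the established `Annotated` form:

    from typing import Annotated

    from sqlalchemy import String
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    # a reusable annotated type carrying column configuration
    str50 = Annotated[str, mapped_column(String(50))]


    class Base(DeclarativeBase):
        pass


    class User(Base):
        __tablename__ = "user_account"

        id: Mapped[int] = mapped_column(primary_key=True)
        # the MappedColumn found inside Annotated supplies String(50)
        name: Mapped[str50]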
@@ -1,5 +1,5 @@
|
||||
# orm/query.py
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
@@ -672,41 +672,38 @@ class Query(
|
||||
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
|
||||
class Part(Base):
|
||||
__tablename__ = 'part'
|
||||
__tablename__ = "part"
|
||||
part = Column(String, primary_key=True)
|
||||
sub_part = Column(String, primary_key=True)
|
||||
quantity = Column(Integer)
|
||||
|
||||
included_parts = session.query(
|
||||
Part.sub_part,
|
||||
Part.part,
|
||||
Part.quantity).\
|
||||
filter(Part.part=="our part").\
|
||||
cte(name="included_parts", recursive=True)
|
||||
|
||||
included_parts = (
|
||||
session.query(Part.sub_part, Part.part, Part.quantity)
|
||||
.filter(Part.part == "our part")
|
||||
.cte(name="included_parts", recursive=True)
|
||||
)
|
||||
|
||||
incl_alias = aliased(included_parts, name="pr")
|
||||
parts_alias = aliased(Part, name="p")
|
||||
included_parts = included_parts.union_all(
|
||||
session.query(
|
||||
parts_alias.sub_part,
|
||||
parts_alias.part,
|
||||
parts_alias.quantity).\
|
||||
filter(parts_alias.part==incl_alias.c.sub_part)
|
||||
)
|
||||
parts_alias.sub_part, parts_alias.part, parts_alias.quantity
|
||||
).filter(parts_alias.part == incl_alias.c.sub_part)
|
||||
)
|
||||
|
||||
q = session.query(
|
||||
included_parts.c.sub_part,
|
||||
func.sum(included_parts.c.quantity).
|
||||
label('total_quantity')
|
||||
).\
|
||||
group_by(included_parts.c.sub_part)
|
||||
included_parts.c.sub_part,
|
||||
func.sum(included_parts.c.quantity).label("total_quantity"),
|
||||
).group_by(included_parts.c.sub_part)
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_sql.Select.cte` - v2 equivalent method.
|
||||
|
||||
"""
|
||||
""" # noqa: E501
|
||||
return (
|
||||
self.enable_eagerloads(False)
|
||||
._get_select_statement_only()
|
||||
@@ -731,7 +728,7 @@ class Query(
|
||||
)
|
||||
|
||||
@overload
|
||||
def as_scalar(
|
||||
def as_scalar( # type: ignore[overload-overlap]
|
||||
self: Query[Tuple[_MAYBE_ENTITY]],
|
||||
) -> ScalarSelect[_MAYBE_ENTITY]: ...
|
||||
|
||||
@@ -941,9 +938,7 @@ class Query(
|
||||
:attr:`_query.Query.statement` using :meth:`.Session.execute`::
|
||||
|
||||
result = session.execute(
|
||||
query
|
||||
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
|
||||
.statement
|
||||
query.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).statement
|
||||
)
|
||||
|
||||
.. versionadded:: 1.4
|
||||
@@ -1052,8 +1047,7 @@ class Query(
|
||||
|
||||
some_object = session.query(VersionedFoo).get((5, 10))
|
||||
|
||||
some_object = session.query(VersionedFoo).get(
|
||||
{"id": 5, "version_id": 10})
|
||||
some_object = session.query(VersionedFoo).get({"id": 5, "version_id": 10})
|
||||
|
||||
:meth:`_query.Query.get` is special in that it provides direct
|
||||
access to the identity map of the owning :class:`.Session`.
@@ -1119,7 +1113,7 @@ class Query(

:return: The object instance, or ``None``.

"""
""" # noqa: E501
self._no_criterion_assertion("get", order_by=False, distinct=False)

# we still implement _get_impl() so that baked query can override
@@ -1563,19 +1557,22 @@ class Query(

# Users, filtered on some arbitrary criterion
# and then ordered by related email address
q = session.query(User).\
join(User.address).\
filter(User.name.like('%ed%')).\
order_by(Address.email)
q = (
session.query(User)
.join(User.address)
.filter(User.name.like("%ed%"))
.order_by(Address.email)
)

# given *only* User.id==5, Address.email, and 'q', what
# would the *next* User in the result be ?
subq = q.with_entities(Address.email).\
order_by(None).\
filter(User.id==5).\
subquery()
q = q.join((subq, subq.c.email < Address.email)).\
limit(1)
subq = (
q.with_entities(Address.email)
.order_by(None)
.filter(User.id == 5)
.subquery()
)
q = q.join((subq, subq.c.email < Address.email)).limit(1)

.. seealso::

@@ -1671,9 +1668,11 @@ class Query(
def filter_something(criterion):
def transform(q):
return q.filter(criterion)

return transform

q = q.with_transformation(filter_something(x==5))

q = q.with_transformation(filter_something(x == 5))

This allows ad-hoc recipes to be created for :class:`_query.Query`
objects.
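A hedged sketch of such a recipe, composing two transformation
functions over one query (``order_by_name``, ``MyClass`` and ``x`` are
illustrative names, not part of the original text)::

    def order_by_name(q):
        return q.order_by(MyClass.name)

    q = q.with_transformation(filter_something(x == 5)).with_transformation(
        order_by_name
    )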
@@ -1710,6 +1709,7 @@ class Query(
schema_translate_map: Optional[SchemaTranslateMapType] = ...,
populate_existing: bool = False,
autoflush: bool = False,
preserve_rowcount: bool = False,
**opt: Any,
) -> Self: ...

@@ -1789,9 +1789,15 @@ class Query(

E.g.::

q = sess.query(User).populate_existing().with_for_update(nowait=True, of=User)
q = (
sess.query(User)
.populate_existing()
.with_for_update(nowait=True, of=User)
)

The above query on a PostgreSQL backend will render like::
The above query on a PostgreSQL backend will render like:

.. sourcecode:: sql

SELECT users.id AS users_id FROM users FOR UPDATE OF users NOWAIT

@@ -1869,14 +1875,13 @@ class Query(

e.g.::

session.query(MyClass).filter(MyClass.name == 'some name')
session.query(MyClass).filter(MyClass.name == "some name")

Multiple criteria may be specified as comma separated; the effect
is that they will be joined together using the :func:`.and_`
function::

session.query(MyClass).\
filter(MyClass.name == 'some name', MyClass.id > 5)
session.query(MyClass).filter(MyClass.name == "some name", MyClass.id > 5)
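As a short equivalence sketch (added for clarity), the multiple-criteria
form above is the same as passing an explicit :func:`.and_` construct::

    from sqlalchemy import and_

    session.query(MyClass).filter(
        and_(MyClass.name == "some name", MyClass.id > 5)
    )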

The criterion is any SQL expression object applicable to the
WHERE clause of a select. String expressions are coerced
@@ -1889,7 +1894,7 @@ class Query(

:meth:`_sql.Select.where` - v2 equivalent method.

"""
""" # noqa: E501
for crit in list(criterion):
crit = coercions.expect(
roles.WhereHavingRole, crit, apply_propagate_attrs=self
@@ -1957,14 +1962,13 @@ class Query(

e.g.::

session.query(MyClass).filter_by(name = 'some name')
session.query(MyClass).filter_by(name="some name")

Multiple criteria may be specified as comma separated; the effect
is that they will be joined together using the :func:`.and_`
function::

session.query(MyClass).\
filter_by(name = 'some name', id = 5)
session.query(MyClass).filter_by(name="some name", id=5)

The keyword expressions are extracted from the primary
entity of the query, or the last entity that was the
@@ -2091,10 +2095,12 @@ class Query(
HAVING criterion makes it possible to use filters on aggregate
functions like COUNT, SUM, AVG, MAX, and MIN, e.g.::

q = session.query(User.id).\
join(User.addresses).\
group_by(User.id).\
having(func.count(Address.id) > 2)
q = (
session.query(User.id)
.join(User.addresses)
.group_by(User.id)
.having(func.count(Address.id) > 2)
)

.. seealso::

@@ -2118,8 +2124,8 @@ class Query(

e.g.::

q1 = sess.query(SomeClass).filter(SomeClass.foo=='bar')
q2 = sess.query(SomeClass).filter(SomeClass.bar=='foo')
q1 = sess.query(SomeClass).filter(SomeClass.foo == "bar")
q2 = sess.query(SomeClass).filter(SomeClass.bar == "foo")

q3 = q1.union(q2)

@@ -2128,7 +2134,9 @@ class Query(

x.union(y).union(z).all()

will nest on each ``union()``, and produces::
will nest on each ``union()``, and produces:

.. sourcecode:: sql

SELECT * FROM (SELECT * FROM (SELECT * FROM X UNION
SELECT * FROM y) UNION SELECT * FROM Z)
@@ -2137,7 +2145,9 @@ class Query(

x.union(y, z).all()

produces::
produces:

.. sourcecode:: sql

SELECT * FROM (SELECT * FROM X UNION SELECT * FROM y UNION
SELECT * FROM Z)
@@ -2249,7 +2259,9 @@ class Query(
q = session.query(User).join(User.addresses)

Where above, the call to :meth:`_query.Query.join` along
``User.addresses`` will result in SQL approximately equivalent to::
``User.addresses`` will result in SQL approximately equivalent to:

.. sourcecode:: sql

SELECT user.id, user.name
FROM user JOIN address ON user.id = address.user_id
@@ -2262,10 +2274,12 @@ class Query(
calls may be used. The relationship-bound attribute implies both
the left and right side of the join at once::

q = session.query(User).\
join(User.orders).\
join(Order.items).\
join(Item.keywords)
q = (
session.query(User)
.join(User.orders)
.join(Order.items)
.join(Item.keywords)
)

.. note:: as seen in the above example, **the order in which each
call to the join() method occurs is important**. Query would not,
@@ -2304,7 +2318,7 @@ class Query(
as the ON clause to be passed explicitly. An example that includes
a SQL expression as the ON clause is as follows::

q = session.query(User).join(Address, User.id==Address.user_id)
q = session.query(User).join(Address, User.id == Address.user_id)

The above form may also use a relationship-bound attribute as the
ON clause as well::
@@ -2319,11 +2333,13 @@ class Query(
a1 = aliased(Address)
a2 = aliased(Address)

q = session.query(User).\
join(a1, User.addresses).\
join(a2, User.addresses).\
filter(a1.email_address=='ed@foo.com').\
filter(a2.email_address=='ed@bar.com')
q = (
session.query(User)
.join(a1, User.addresses)
.join(a2, User.addresses)
.filter(a1.email_address == "ed@foo.com")
.filter(a2.email_address == "ed@bar.com")
)

The relationship-bound calling form can also specify a target entity
using the :meth:`_orm.PropComparator.of_type` method; a query
@@ -2332,11 +2348,13 @@ class Query(
a1 = aliased(Address)
a2 = aliased(Address)

q = session.query(User).\
join(User.addresses.of_type(a1)).\
join(User.addresses.of_type(a2)).\
filter(a1.email_address == 'ed@foo.com').\
filter(a2.email_address == 'ed@bar.com')
q = (
session.query(User)
.join(User.addresses.of_type(a1))
.join(User.addresses.of_type(a2))
.filter(a1.email_address == "ed@foo.com")
.filter(a2.email_address == "ed@bar.com")
)

**Augmenting Built-in ON Clauses**

@@ -2347,7 +2365,7 @@ class Query(
with the default criteria using AND::

q = session.query(User).join(
User.addresses.and_(Address.email_address != 'foo@bar.com')
User.addresses.and_(Address.email_address != "foo@bar.com")
)

.. versionadded:: 1.4
@@ -2360,29 +2378,28 @@ class Query(
appropriate ``.subquery()`` method in order to make a subquery
out of a query::

subq = session.query(Address).\
filter(Address.email_address == 'ed@foo.com').\
subquery()


q = session.query(User).join(
subq, User.id == subq.c.user_id
subq = (
session.query(Address)
.filter(Address.email_address == "ed@foo.com")
.subquery()
)


q = session.query(User).join(subq, User.id == subq.c.user_id)

Joining to a subquery in terms of a specific relationship and/or
target entity may be achieved by linking the subquery to the
entity using :func:`_orm.aliased`::

subq = session.query(Address).\
filter(Address.email_address == 'ed@foo.com').\
subquery()
subq = (
session.query(Address)
.filter(Address.email_address == "ed@foo.com")
.subquery()
)

address_subq = aliased(Address, subq)

q = session.query(User).join(
User.addresses.of_type(address_subq)
)

q = session.query(User).join(User.addresses.of_type(address_subq))

**Controlling what to Join From**

@@ -2390,11 +2407,16 @@ class Query(
:class:`_query.Query` is not in line with what we want to join from,
the :meth:`_query.Query.select_from` method may be used::

q = session.query(Address).select_from(User).\
join(User.addresses).\
filter(User.name == 'ed')
q = (
session.query(Address)
.select_from(User)
.join(User.addresses)
.filter(User.name == "ed")
)

Which will produce SQL similar to::
Which will produce SQL similar to:

.. sourcecode:: sql

SELECT address.* FROM user
JOIN address ON user.id=address.user_id
@@ -2498,11 +2520,16 @@ class Query(

A typical example::

q = session.query(Address).select_from(User).\
join(User.addresses).\
filter(User.name == 'ed')
q = (
session.query(Address)
.select_from(User)
.join(User.addresses)
.filter(User.name == "ed")
)

Which produces SQL equivalent to::
Which produces SQL equivalent to:

.. sourcecode:: sql

SELECT address.* FROM user
JOIN address ON user.id=address.user_id
@@ -2865,7 +2892,7 @@ class Query(

Format is a list of dictionaries::

user_alias = aliased(User, name='user2')
user_alias = aliased(User, name="user2")
q = sess.query(User, User.id, user_alias)

# this expression:
@@ -2874,26 +2901,26 @@ class Query(
# would return:
[
{
'name':'User',
'type':User,
'aliased':False,
'expr':User,
'entity': User
"name": "User",
"type": User,
"aliased": False,
"expr": User,
"entity": User,
},
{
'name':'id',
'type':Integer(),
'aliased':False,
'expr':User.id,
'entity': User
"name": "id",
"type": Integer(),
"aliased": False,
"expr": User.id,
"entity": User,
},
{
'name':'user2',
'type':User,
'aliased':True,
'expr':user_alias,
'entity': user_alias
}
"name": "user2",
"type": User,
"aliased": True,
"expr": user_alias,
"entity": user_alias,
},
]
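A short hedged usage sketch, iterating the structure described above::

    for desc in q.column_descriptions:
        print(desc["name"], desc["type"], desc["aliased"])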

.. seealso::

@@ -2938,6 +2965,7 @@ class Query(
context = QueryContext(
compile_state,
compile_state.statement,
compile_state.statement,
self._params,
self.session,
self.load_options,
@@ -3001,10 +3029,12 @@ class Query(

e.g.::

q = session.query(User).filter(User.name == 'fred')
q = session.query(User).filter(User.name == "fred")
session.query(q.exists())

Producing SQL similar to::
Producing SQL similar to:

.. sourcecode:: sql

SELECT EXISTS (
SELECT 1 FROM users WHERE users.name = :name_1
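A hedged usage sketch: the EXISTS construct can be selected and
evaluated to a boolean scalar::

    user_exists = session.query(q.exists()).scalar()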
@@ -3053,7 +3083,9 @@ class Query(
r"""Return a count of rows that the SQL formed by this :class:`Query`
would return.

This generates the SQL for this Query as follows::
This generates the SQL for this Query as follows:

.. sourcecode:: sql

SELECT count(1) AS count_1 FROM (
SELECT <rest of query follows...>
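A minimal usage sketch (assuming a ``User`` mapping)::

    num_eds = session.query(User).filter(User.name.like("%ed%")).count()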
@@ -3093,8 +3125,7 @@ class Query(

# return count of user "id" grouped
# by "name"
session.query(func.count(User.id)).\
group_by(User.name)
session.query(func.count(User.id)).group_by(User.name)

from sqlalchemy import distinct

@@ -3112,7 +3143,9 @@ class Query(
)

def delete(
self, synchronize_session: SynchronizeSessionArgument = "auto"
self,
synchronize_session: SynchronizeSessionArgument = "auto",
delete_args: Optional[Dict[Any, Any]] = None,
) -> int:
r"""Perform a DELETE with an arbitrary WHERE clause.

@@ -3120,11 +3153,11 @@ class Query(

E.g.::

sess.query(User).filter(User.age == 25).\
delete(synchronize_session=False)
sess.query(User).filter(User.age == 25).delete(synchronize_session=False)

sess.query(User).filter(User.age == 25).\
delete(synchronize_session='evaluate')
sess.query(User).filter(User.age == 25).delete(
synchronize_session="evaluate"
)

.. warning::

@@ -3137,6 +3170,13 @@ class Query(
:ref:`orm_expression_update_delete` for a discussion of these
strategies.

:param delete_args: Optional dictionary, if present will be passed
to the underlying :func:`_expression.delete` construct as the ``**kw``
for the object. May be used to pass dialect-specific arguments such
as ``mysql_limit``.

.. versionadded:: 2.0.37
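A hedged sketch of the new parameter, passing a dialect-specific
argument (``mysql_limit`` here is illustrative and assumes the MySQL
dialect)::

    sess.query(User).filter(User.age == 25).delete(
        synchronize_session=False,
        delete_args={"mysql_limit": 10},
    )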

:return: the count of rows matched as returned by the database's
"row count" feature.

@@ -3144,9 +3184,9 @@ class Query(

:ref:`orm_expression_update_delete`

"""
""" # noqa: E501

bulk_del = BulkDelete(self)
bulk_del = BulkDelete(self, delete_args)
if self.dispatch.before_compile_delete:
for fn in self.dispatch.before_compile_delete:
new_query = fn(bulk_del.query, bulk_del)
@@ -3156,6 +3196,10 @@ class Query(
self = bulk_del.query

delete_ = sql.delete(*self._raw_columns)  # type: ignore

if delete_args:
delete_ = delete_.with_dialect_options(**delete_args)

delete_._where_criteria = self._where_criteria
result: CursorResult[Any] = self.session.execute(
delete_,
@@ -3182,11 +3226,13 @@ class Query(

E.g.::

sess.query(User).filter(User.age == 25).\
update({User.age: User.age - 10}, synchronize_session=False)
sess.query(User).filter(User.age == 25).update(
{User.age: User.age - 10}, synchronize_session=False
)

sess.query(User).filter(User.age == 25).\
update({"age": User.age - 10}, synchronize_session='evaluate')
sess.query(User).filter(User.age == 25).update(
{"age": User.age - 10}, synchronize_session="evaluate"
)

.. warning::

@@ -3209,9 +3255,8 @@ class Query(
strategies.

:param update_args: Optional dictionary, if present will be passed
to the underlying :func:`_expression.update`
construct as the ``**kw`` for
the object. May be used to pass dialect-specific arguments such
to the underlying :func:`_expression.update` construct as the ``**kw``
for the object. May be used to pass dialect-specific arguments such
as ``mysql_limit``, as well as other special arguments such as
:paramref:`~sqlalchemy.sql.expression.update.preserve_parameter_order`.
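A hedged sketch mirroring the delete example above (``mysql_limit`` is
illustrative)::

    sess.query(User).filter(User.age == 25).update(
        {"age": User.age - 10},
        synchronize_session=False,
        update_args={"mysql_limit": 1},
    )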

@@ -3297,6 +3342,7 @@ class Query(
context = QueryContext(
compile_state,
compile_state.statement,
compile_state.statement,
self._params,
self.session,
self.load_options,
@@ -3385,6 +3431,14 @@ class BulkUpdate(BulkUD):
class BulkDelete(BulkUD):
"""BulkUD which handles DELETEs."""

def __init__(
self,
query: Query[Any],
delete_kwargs: Optional[Dict[Any, Any]],
):
super().__init__(query)
self.delete_kwargs = delete_kwargs


class RowReturningQuery(Query[Row[_TP]]):
if TYPE_CHECKING:

@@ -1,5 +1,5 @@
# orm/relationships.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -19,6 +19,7 @@ import collections
from collections import abc
import dataclasses
import inspect as _py_inspect
import itertools
import re
import typing
from typing import Any
@@ -26,6 +27,7 @@ from typing import Callable
from typing import cast
from typing import Collection
from typing import Dict
from typing import FrozenSet
from typing import Generic
from typing import Iterable
from typing import Iterator
@@ -707,12 +709,16 @@ class RelationshipProperty(
def __eq__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
"""Implement the ``==`` operator.

In a many-to-one context, such as::
In a many-to-one context, such as:

.. sourcecode:: text

MyClass.some_prop == <some object>

this will typically produce a
clause such as::
clause such as:

.. sourcecode:: text

mytable.related_id == <some id>

@@ -875,11 +881,12 @@ class RelationshipProperty(
An expression like::

session.query(MyClass).filter(
MyClass.somereference.any(SomeRelated.x==2)
MyClass.somereference.any(SomeRelated.x == 2)
)

Will produce a query like:

Will produce a query like::
.. sourcecode:: sql

SELECT * FROM my_table WHERE
EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
@@ -893,11 +900,11 @@ class RelationshipProperty(
:meth:`~.Relationship.Comparator.any` is particularly
useful for testing for empty collections::

session.query(MyClass).filter(
~MyClass.somereference.any()
)
session.query(MyClass).filter(~MyClass.somereference.any())

will produce::
will produce:

.. sourcecode:: sql

SELECT * FROM my_table WHERE
NOT (EXISTS (SELECT 1 FROM related WHERE
@@ -928,11 +935,12 @@ class RelationshipProperty(
An expression like::

session.query(MyClass).filter(
MyClass.somereference.has(SomeRelated.x==2)
MyClass.somereference.has(SomeRelated.x == 2)
)

Will produce a query like:

Will produce a query like::
.. sourcecode:: sql

SELECT * FROM my_table WHERE
EXISTS (SELECT 1 FROM related WHERE
@@ -971,7 +979,9 @@ class RelationshipProperty(

MyClass.contains(other)

Produces a clause like::
Produces a clause like:

.. sourcecode:: sql

mytable.id == <some id>

@@ -991,7 +1001,9 @@ class RelationshipProperty(

query(MyClass).filter(MyClass.contains(other))

Produces a query like::
Produces a query like:

.. sourcecode:: sql

SELECT * FROM my_table, my_association_table AS
my_association_table_1 WHERE
@@ -1087,11 +1099,15 @@ class RelationshipProperty(
def __ne__(self, other: Any) -> ColumnElement[bool]:  # type: ignore[override]  # noqa: E501
"""Implement the ``!=`` operator.

In a many-to-one context, such as::
In a many-to-one context, such as:

.. sourcecode:: text

MyClass.some_prop != <some object>

This will typically produce a clause such as::
This will typically produce a clause such as:

.. sourcecode:: sql

mytable.related_id != <some id>

@@ -1753,19 +1769,17 @@ class RelationshipProperty(
argument = extracted_mapped_annotation
assert originating_module is not None

is_write_only = mapped_container is not None and issubclass(
mapped_container, WriteOnlyMapped
)
if is_write_only:
self.lazy = "write_only"
self.strategy_key = (("lazy", self.lazy),)

is_dynamic = mapped_container is not None and issubclass(
mapped_container, DynamicMapped
)
if is_dynamic:
self.lazy = "dynamic"
self.strategy_key = (("lazy", self.lazy),)
if mapped_container is not None:
is_write_only = issubclass(mapped_container, WriteOnlyMapped)
is_dynamic = issubclass(mapped_container, DynamicMapped)
if is_write_only:
self.lazy = "write_only"
self.strategy_key = (("lazy", self.lazy),)
elif is_dynamic:
self.lazy = "dynamic"
self.strategy_key = (("lazy", self.lazy),)
else:
is_write_only = is_dynamic = False

argument = de_optionalize_union_types(argument)

@@ -3235,6 +3249,15 @@ class JoinCondition:
if annotation_set.issubset(col._annotations)
}

@util.memoized_property
def _secondary_lineage_set(self) -> FrozenSet[ColumnElement[Any]]:
if self.secondary is not None:
return frozenset(
itertools.chain(*[c.proxy_set for c in self.secondary.c])
)
else:
return util.EMPTY_SET

def join_targets(
self,
source_selectable: Optional[FromClause],
@@ -3285,23 +3308,25 @@ class JoinCondition:

if extra_criteria:

def mark_unrelated_columns_as_ok_to_adapt(
def mark_exclude_cols(
elem: SupportsAnnotations, annotations: _AnnotationDict
) -> SupportsAnnotations:
"""note unrelated columns in the "extra criteria" as OK
to adapt, even though they are not part of our "local"
or "remote" side.
"""note whether unrelated columns in the "extra criteria"
should be adapted or not, even though they are not part of
our "local" or "remote" side.

see #9779 for this case
see #9779 for this case, as well as #11010 for a follow up

"""

parentmapper_for_element = elem._annotations.get(
"parentmapper", None
)

if (
parentmapper_for_element is not self.prop.parent
and parentmapper_for_element is not self.prop.mapper
and elem not in self._secondary_lineage_set
):
return _safe_annotate(elem, annotations)
else:
@@ -3310,8 +3335,8 @@ class JoinCondition:
extra_criteria = tuple(
_deep_annotate(
elem,
{"ok_to_adapt_in_join_condition": True},
annotate_callable=mark_unrelated_columns_as_ok_to_adapt,
{"should_not_adapt": True},
annotate_callable=mark_exclude_cols,
)
for elem in extra_criteria
)
@@ -3325,14 +3350,16 @@ class JoinCondition:
if secondary is not None:
secondary = secondary._anonymous_fromclause(flat=True)
primary_aliasizer = ClauseAdapter(
secondary, exclude_fn=_ColInAnnotations("local")
secondary,
exclude_fn=_local_col_exclude,
)
secondary_aliasizer = ClauseAdapter(
dest_selectable, equivalents=self.child_equivalents
).chain(primary_aliasizer)
if source_selectable is not None:
primary_aliasizer = ClauseAdapter(
secondary, exclude_fn=_ColInAnnotations("local")
secondary,
exclude_fn=_local_col_exclude,
).chain(
ClauseAdapter(
source_selectable,
@@ -3344,14 +3371,14 @@ class JoinCondition:
else:
primary_aliasizer = ClauseAdapter(
dest_selectable,
exclude_fn=_ColInAnnotations("local"),
exclude_fn=_local_col_exclude,
equivalents=self.child_equivalents,
)
if source_selectable is not None:
primary_aliasizer.chain(
ClauseAdapter(
source_selectable,
exclude_fn=_ColInAnnotations("remote"),
exclude_fn=_remote_col_exclude,
equivalents=self.parent_equivalents,
)
)
@@ -3430,25 +3457,29 @@ class JoinCondition:


class _ColInAnnotations:
"""Serializable object that tests for a name in c._annotations."""
"""Serializable object that tests for names in c._annotations.

__slots__ = ("name",)
TODO: does this need to be serializable anymore? can we find what the
use case was for that?

def __init__(self, name: str):
self.name = name
"""

__slots__ = ("names",)

def __init__(self, *names: str):
self.names = frozenset(names)

def __call__(self, c: ClauseElement) -> bool:
return (
self.name in c._annotations
or "ok_to_adapt_in_join_condition" in c._annotations
)
return bool(self.names.intersection(c._annotations))


class Relationship(  # type: ignore
_local_col_exclude = _ColInAnnotations("local", "should_not_adapt")
_remote_col_exclude = _ColInAnnotations("remote", "should_not_adapt")
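# A hedged illustration of the predicate semantics above (private API,
# shown only as a sketch, not part of the change): a column annotated
# as "local", or explicitly marked "should_not_adapt", is excluded
# from adaptation, e.g.:
#
#     col = some_table.c.id._annotate({"local": True})
#     assert _local_col_exclude(col)
#     assert not _remote_col_exclude(some_table.c.id)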


class Relationship(
RelationshipProperty[_T],
_DeclarativeMapped[_T],
WriteOnlyMapped[_T],  # not compatible with Mapped[_T]
DynamicMapped[_T],  # not compatible with Mapped[_T]
):
"""Describes an object property that holds a single item or list
of items that correspond to a related database table.
@@ -3466,3 +3497,18 @@ class Relationship(  # type: ignore

inherit_cache = True
""":meta private:"""


class _RelationshipDeclared(  # type: ignore[misc]
Relationship[_T],
WriteOnlyMapped[_T],  # not compatible with Mapped[_T]
DynamicMapped[_T],  # not compatible with Mapped[_T]
):
"""Relationship subclass used implicitly for declarative mapping."""

inherit_cache = True
""":meta private:"""

@classmethod
def _mapper_property_name(cls) -> str:
return "Relationship"

@@ -1,5 +1,5 @@
# orm/scoping.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -280,11 +280,13 @@ class scoped_session(Generic[_S]):

Session = scoped_session(sessionmaker())


class MyClass:
query: QueryPropertyDescriptor = Session.query_property()


# after mappers are defined
result = MyClass.query.filter(MyClass.name=='foo').all()
result = MyClass.query.filter(MyClass.name == "foo").all()

Produces instances of the session's configured query class by
default. To override and use a custom implementation, provide
@@ -730,9 +732,8 @@ class scoped_session(Generic[_S]):
E.g.::

from sqlalchemy import select
result = session.execute(
select(User).where(User.id == 5)
)

result = session.execute(select(User).where(User.id == 5))

The API contract of :meth:`_orm.Session.execute` is similar to that
of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version
@@ -962,10 +963,7 @@ class scoped_session(Generic[_S]):

some_object = session.get(VersionedFoo, (5, 10))

some_object = session.get(
VersionedFoo,
{"id": 5, "version_id": 10}
)
some_object = session.get(VersionedFoo, {"id": 5, "version_id": 10})

.. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved
from the now legacy :meth:`_orm.Query.get` method.
@@ -1228,7 +1226,7 @@ class scoped_session(Generic[_S]):

This method retrieves the history for each instrumented
attribute on the instance and performs a comparison of the current
value to its previously committed value, if any.
value to its previously flushed or committed value, if any.

It is in effect a more expensive and accurate
version of checking for the given instance in the

@@ -1,5 +1,5 @@
# orm/session.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -575,22 +575,67 @@ class ORMExecuteState(util.MemoizedSlots):

@property
def is_select(self) -> bool:
"""return True if this is a SELECT operation."""
"""return True if this is a SELECT operation.

.. versionchanged:: 2.0.30 - the attribute is also True for a
:meth:`_sql.Select.from_statement` construct that is itself against
a :class:`_sql.Select` construct, such as
``select(Entity).from_statement(select(..))``

"""
return self.statement.is_select

@property
def is_from_statement(self) -> bool:
"""return True if this operation is a
:meth:`_sql.Select.from_statement` operation.

This is independent from :attr:`_orm.ORMExecuteState.is_select`, as a
``select().from_statement()`` construct can be used with
INSERT/UPDATE/DELETE RETURNING types of statements as well.
:attr:`_orm.ORMExecuteState.is_select` will only be set if the
:meth:`_sql.Select.from_statement` is itself against a
:class:`_sql.Select` construct.

.. versionadded:: 2.0.30

"""
return self.statement.is_from_statement
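# A hedged sketch of how these flags are typically consumed, via the
# Session-level "do_orm_execute" event hook (the handler name is
# illustrative):
#
#     from sqlalchemy import event
#
#     @event.listens_for(Session, "do_orm_execute")
#     def _audit(orm_execute_state):
#         if orm_execute_state.is_select and not orm_execute_state.is_from_statement:
#             ...  # act on plain SELECT statements only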

@property
def is_insert(self) -> bool:
"""return True if this is an INSERT operation."""
"""return True if this is an INSERT operation.

.. versionchanged:: 2.0.30 - the attribute is also True for a
:meth:`_sql.Select.from_statement` construct that is itself against
a :class:`_sql.Insert` construct, such as
``select(Entity).from_statement(insert(..))``

"""
return self.statement.is_dml and self.statement.is_insert

@property
def is_update(self) -> bool:
"""return True if this is an UPDATE operation."""
"""return True if this is an UPDATE operation.

.. versionchanged:: 2.0.30 - the attribute is also True for a
:meth:`_sql.Select.from_statement` construct that is itself against
a :class:`_sql.Update` construct, such as
``select(Entity).from_statement(update(..))``

"""
return self.statement.is_dml and self.statement.is_update

@property
def is_delete(self) -> bool:
"""return True if this is a DELETE operation."""
"""return True if this is a DELETE operation.

.. versionchanged:: 2.0.30 - the attribute is also True for a
:meth:`_sql.Select.from_statement` construct that is itself against
a :class:`_sql.Delete` construct, such as
``select(Entity).from_statement(delete(..))``

"""
return self.statement.is_dml and self.statement.is_delete

@property
@@ -1166,6 +1211,17 @@ class SessionTransaction(_StateChange, TransactionalContext):
else:
join_transaction_mode = "rollback_only"

if local_connect:
util.warn(
"The engine provided as bind produced a "
"connection that is already in a transaction. "
"This is usually caused by a core event, "
"such as 'engine_connect', that has left a "
"transaction open. The effective join "
"transaction mode used by this session is "
f"{join_transaction_mode!r}. To silence this "
"warning, do not leave transactions open"
)
if join_transaction_mode in (
"control_fully",
"rollback_only",
@@ -1513,12 +1569,16 @@ class Session(_SessionClassMethods, EventTarget):
operation. The complete heuristics for resolution are
described at :meth:`.Session.get_bind`. Usage looks like::

Session = sessionmaker(binds={
SomeMappedClass: create_engine('postgresql+psycopg2://engine1'),
SomeDeclarativeBase: create_engine('postgresql+psycopg2://engine2'),
some_mapper: create_engine('postgresql+psycopg2://engine3'),
some_table: create_engine('postgresql+psycopg2://engine4'),
})
Session = sessionmaker(
binds={
SomeMappedClass: create_engine("postgresql+psycopg2://engine1"),
SomeDeclarativeBase: create_engine(
"postgresql+psycopg2://engine2"
),
some_mapper: create_engine("postgresql+psycopg2://engine3"),
some_table: create_engine("postgresql+psycopg2://engine4"),
}
)

.. seealso::

@@ -1713,7 +1773,7 @@ class Session(_SessionClassMethods, EventTarget):

# the idea is that at some point NO_ARG will warn that in the future
# the default will switch to close_resets_only=False.
if close_resets_only or close_resets_only is _NoArg.NO_ARG:
if close_resets_only in (True, _NoArg.NO_ARG):
self._close_state = _SessionCloseState.CLOSE_IS_RESET
else:
self._close_state = _SessionCloseState.ACTIVE
@@ -2260,9 +2320,8 @@ class Session(_SessionClassMethods, EventTarget):
E.g.::

from sqlalchemy import select
result = session.execute(
select(User).where(User.id == 5)
)

result = session.execute(select(User).where(User.id == 5))

The API contract of :meth:`_orm.Session.execute` is similar to that
of :meth:`_engine.Connection.execute`, the :term:`2.0 style` version
@@ -2914,7 +2973,7 @@ class Session(_SessionClassMethods, EventTarget):

e.g.::

obj = session._identity_lookup(inspect(SomeClass), (1, ))
obj = session._identity_lookup(inspect(SomeClass), (1,))

:param mapper: mapper in use
:param primary_key_identity: the primary key we are searching for, as
@@ -2985,7 +3044,8 @@ class Session(_SessionClassMethods, EventTarget):
@util.langhelpers.tag_method_for_warnings(
"This warning originated from the Session 'autoflush' process, "
"which was invoked automatically in response to a user-initiated "
"operation.",
"operation. Consider using the ``no_autoflush`` context manager if this "
"warning happened while initializing objects.",
sa_exc.SAWarning,
)
def _autoflush(self) -> None:
@@ -3541,10 +3601,7 @@ class Session(_SessionClassMethods, EventTarget):

some_object = session.get(VersionedFoo, (5, 10))

some_object = session.get(
VersionedFoo,
{"id": 5, "version_id": 10}
)
some_object = session.get(VersionedFoo, {"id": 5, "version_id": 10})

.. versionadded:: 1.4 Added :meth:`_orm.Session.get`, which is moved
from the now legacy :meth:`_orm.Query.get` method.
@@ -3633,7 +3690,7 @@ class Session(_SessionClassMethods, EventTarget):

:return: The object instance, or ``None``.

"""
""" # noqa: E501
return self._get_impl(
entity,
ident,
@@ -4529,11 +4586,11 @@ class Session(_SessionClassMethods, EventTarget):
self._bulk_save_mappings(
mapper,
states,
isupdate,
True,
return_defaults,
update_changed_only,
False,
isupdate=isupdate,
isstates=True,
return_defaults=return_defaults,
update_changed_only=update_changed_only,
render_nulls=False,
)

def bulk_insert_mappings(
@@ -4612,11 +4669,11 @@ class Session(_SessionClassMethods, EventTarget):
self._bulk_save_mappings(
mapper,
mappings,
False,
False,
return_defaults,
False,
render_nulls,
isupdate=False,
isstates=False,
return_defaults=return_defaults,
update_changed_only=False,
render_nulls=render_nulls,
)

def bulk_update_mappings(
@@ -4658,13 +4715,20 @@ class Session(_SessionClassMethods, EventTarget):

"""
self._bulk_save_mappings(
mapper, mappings, True, False, False, False, False
mapper,
mappings,
isupdate=True,
isstates=False,
return_defaults=False,
update_changed_only=False,
render_nulls=False,
)

def _bulk_save_mappings(
self,
mapper: Mapper[_O],
mappings: Union[Iterable[InstanceState[_O]], Iterable[Dict[str, Any]]],
*,
isupdate: bool,
isstates: bool,
return_defaults: bool,
@@ -4681,17 +4745,17 @@ class Session(_SessionClassMethods, EventTarget):
mapper,
mappings,
transaction,
isstates,
update_changed_only,
isstates=isstates,
update_changed_only=update_changed_only,
)
else:
bulk_persistence._bulk_insert(
mapper,
mappings,
transaction,
isstates,
return_defaults,
render_nulls,
isstates=isstates,
return_defaults=return_defaults,
render_nulls=render_nulls,
)
transaction.commit()

@@ -4709,7 +4773,7 @@ class Session(_SessionClassMethods, EventTarget):

This method retrieves the history for each instrumented
attribute on the instance and performs a comparison of the current
value to its previously committed value, if any.
value to its previously flushed or committed value, if any.

It is in effect a more expensive and accurate
version of checking for the given instance in the
@@ -4879,7 +4943,7 @@ class sessionmaker(_SessionClassMethods, Generic[_S]):

# an Engine, which the Session will use for connection
# resources
engine = create_engine('postgresql+psycopg2://scott:tiger@localhost/')
engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/")

Session = sessionmaker(engine)

@@ -4932,7 +4996,7 @@ class sessionmaker(_SessionClassMethods, Generic[_S]):

with engine.connect() as connection:
with Session(bind=connection) as session:
# work with session
... # work with session

The class also includes a method :meth:`_orm.sessionmaker.configure`, which
can be used to specify additional keyword arguments to the factory, which
@@ -4947,7 +5011,7 @@ class sessionmaker(_SessionClassMethods, Generic[_S]):

# ... later, when an engine URL is read from a configuration
# file or other events allow the engine to be created
engine = create_engine('sqlite:///foo.db')
engine = create_engine("sqlite:///foo.db")
Session.configure(bind=engine)

sess = Session()
@@ -5085,7 +5149,7 @@ class sessionmaker(_SessionClassMethods, Generic[_S]):

Session = sessionmaker()

Session.configure(bind=create_engine('sqlite://'))
Session.configure(bind=create_engine("sqlite://"))
"""
self.kw.update(new_kw)

@@ -1,5 +1,5 @@
# orm/state.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -98,7 +98,7 @@ class _InstallLoaderCallableProto(Protocol[_O]):

@inspection._self_inspects
class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
"""tracks state information at the instance level.
"""Tracks state information at the instance level.

The :class:`.InstanceState` is a key object used by the
SQLAlchemy ORM in order to track the state of an object;
@@ -148,7 +148,14 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
committed_state: Dict[str, Any]

modified: bool = False
"""When ``True`` the object was modified."""
expired: bool = False
"""When ``True`` the object is :term:`expired`.

.. seealso::

:ref:`session_expire`
"""
_deleted: bool = False
_load_pending: bool = False
_orphaned_outside_of_session: bool = False
@@ -169,11 +176,12 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):

expired_attributes: Set[str]
"""The set of keys which are 'expired' to be loaded by
the manager's deferred scalar loader, assuming no pending
changes.
the manager's deferred scalar loader, assuming no pending
changes.

see also the ``unmodified`` collection which is intersected
against this set when a refresh operation occurs."""
See also the ``unmodified`` collection which is intersected
against this set when a refresh operation occurs.
"""

callables: Dict[str, Callable[[InstanceState[_O], PassiveFlag], Any]]
"""A namespace where a per-state loader callable can be associated.
@@ -228,7 +236,6 @@ class InstanceState(interfaces.InspectionAttrInfo, Generic[_O]):
def pending(self) -> bool:
"""Return ``True`` if the object is :term:`pending`.


.. seealso::

:ref:`session_object_states`

@@ -1,5 +1,5 @@
# orm/state_changes.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
# orm/strategies.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -16,8 +16,10 @@ import collections
import itertools
from typing import Any
from typing import Dict
from typing import Optional
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union

from . import attributes
from . import exc as orm_exc
@@ -45,7 +47,7 @@ from .interfaces import StrategizedProperty
from .session import _state_session
from .state import InstanceState
from .strategy_options import Load
from .util import _none_set
from .util import _none_only_set
from .util import AliasedClass
from .. import event
from .. import exc as sa_exc
@@ -57,8 +59,10 @@ from ..sql import util as sql_util
from ..sql import visitors
from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from ..sql.selectable import Select
from ..util.typing import Literal

if TYPE_CHECKING:
from .mapper import Mapper
from .relationships import RelationshipProperty
from ..sql.elements import ColumnElement

@@ -932,8 +936,15 @@ class LazyLoader(
elif LoaderCallableStatus.NEVER_SET in primary_key_identity:
return LoaderCallableStatus.NEVER_SET

if _none_set.issuperset(primary_key_identity):
return None
# test for None alone in primary_key_identity based on
# allow_partial_pks preference. PASSIVE_NO_RESULT and NEVER_SET
# have already been tested above
if not self.mapper.allow_partial_pks:
if _none_only_set.intersection(primary_key_identity):
return None
else:
if _none_only_set.issuperset(primary_key_identity):
return None
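# illustration (editorial note, not part of the change itself): with a
# composite key such as (5, None), allow_partial_pks=False treats the
# load as "no related row" because at least one component is None,
# while the default of True only short-circuits when the key is
# entirely None, e.g. (None, None)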

if (
self.key in state.dict
@@ -1373,12 +1384,16 @@ class ImmediateLoader(PostLoader):
adapter,
populators,
):
if not context.compile_state.compile_options._enable_eagerloads:
return

(
effective_path,
run_loader,
execution_options,
recursion_depth,
) = self._setup_for_recursion(context, path, loadopt, self.join_depth)

if not run_loader:
# this will not emit SQL and will only emit for a many-to-one
# "use get" load. the "_RELATED" part means it may return
@@ -1957,6 +1972,18 @@ class SubqueryLoader(PostLoader):
adapter,
populators,
):
if (
loadopt
and context.compile_state.statement is not None
and context.compile_state.statement.is_dml
):
util.warn_deprecated(
"The subqueryload loader option is not compatible with DML "
"statements such as INSERT, UPDATE. Only SELECT may be used. "
"This warning will become an exception in a future release.",
"2.0",
)

if context.refresh_state:
return self._immediateload_create_row_processor(
context,
@@ -2122,6 +2149,17 @@ class JoinedLoader(AbstractRelationshipLoader):

if not compile_state.compile_options._enable_eagerloads:
return
elif (
loadopt
and compile_state.statement is not None
and compile_state.statement.is_dml
):
util.warn_deprecated(
"The joinedload loader option is not compatible with DML "
"statements such as INSERT, UPDATE. Only SELECT may be used. "
"This warning will become an exception in a future release.",
"2.0",
)
elif self.uselist:
compile_state.multi_row_eager_loaders = True

@@ -2506,13 +2544,13 @@ class JoinedLoader(AbstractRelationshipLoader):
or query_entity.entity_zero.represents_outer_join
or (chained_from_outerjoin and isinstance(towrap, sql.Join)),
_left_memo=self.parent,
_right_memo=self.mapper,
_right_memo=path[self.mapper],
_extra_criteria=extra_join_criteria,
)
else:
# all other cases are innerjoin=='nested' approach
eagerjoin = self._splice_nested_inner_join(
path, towrap, clauses, onclause, extra_join_criteria
path, path[-2], towrap, clauses, onclause, extra_join_criteria
)

compile_state.eager_joins[query_entity_key] = eagerjoin
@@ -2546,93 +2584,177 @@ class JoinedLoader(AbstractRelationshipLoader):
)

def _splice_nested_inner_join(
self, path, join_obj, clauses, onclause, extra_criteria, splicing=False
self,
path,
entity_we_want_to_splice_onto,
join_obj,
clauses,
onclause,
extra_criteria,
entity_inside_join_structure: Union[
Mapper, None, Literal[False]
] = False,
detected_existing_path: Optional[path_registry.PathRegistry] = None,
):
# recursive fn to splice a nested join into an existing one.
# splicing=False means this is the outermost call, and it
# should return a value. splicing=<from object> is the recursive
# form, where it can return None to indicate the end of the recursion
# entity_inside_join_structure=False means this is the outermost call,
# and it should return a value. entity_inside_join_structure=<mapper>
# indicates we've descended into a join and are looking at a FROM
# clause representing this mapper; if this is not
# entity_we_want_to_splice_onto then return None to end the recursive
# branch

if splicing is False:
# first call is always handed a join object
# from the outside
assert entity_we_want_to_splice_onto is path[-2]

if entity_inside_join_structure is False:
assert isinstance(join_obj, orm_util._ORMJoin)
elif isinstance(join_obj, sql.selectable.FromGrouping):

if isinstance(join_obj, sql.selectable.FromGrouping):
# FromGrouping - continue descending into the structure
return self._splice_nested_inner_join(
path,
entity_we_want_to_splice_onto,
join_obj.element,
clauses,
onclause,
extra_criteria,
splicing,
entity_inside_join_structure,
)
elif not isinstance(join_obj, orm_util._ORMJoin):
if path[-2].isa(splicing):
return orm_util._ORMJoin(
join_obj,
clauses.aliased_insp,
onclause,
isouter=False,
_left_memo=splicing,
_right_memo=path[-1].mapper,
_extra_criteria=extra_criteria,
)
else:
return None
elif isinstance(join_obj, orm_util._ORMJoin):
# _ORMJoin - continue descending into the structure

target_join = self._splice_nested_inner_join(
path,
join_obj.right,
clauses,
onclause,
extra_criteria,
join_obj._right_memo,
)
if target_join is None:
right_splice = False
join_right_path = join_obj._right_memo

# see if right side of join is viable
target_join = self._splice_nested_inner_join(
path,
join_obj.left,
entity_we_want_to_splice_onto,
join_obj.right,
clauses,
onclause,
extra_criteria,
join_obj._left_memo,
entity_inside_join_structure=(
join_right_path[-1].mapper
if join_right_path is not None
else None
),
)
if target_join is None:
# should only return None when recursively called,
# e.g. splicing refers to a from obj
assert (
splicing is not False
), "assertion failed attempting to produce joined eager loads"
return None
else:
right_splice = True

if right_splice:
# for a right splice, attempt to flatten out
# a JOIN b JOIN c JOIN .. to avoid needless
# parenthesis nesting
if not join_obj.isouter and not target_join.isouter:
eagerjoin = join_obj._splice_into_center(target_join)
if target_join is not None:
# for a right splice, attempt to flatten out
# a JOIN b JOIN c JOIN .. to avoid needless
# parenthesis nesting
if not join_obj.isouter and not target_join.isouter:
eagerjoin = join_obj._splice_into_center(target_join)
else:
eagerjoin = orm_util._ORMJoin(
join_obj.left,
target_join,
join_obj.onclause,
isouter=join_obj.isouter,
_left_memo=join_obj._left_memo,
)

eagerjoin._target_adapter = target_join._target_adapter
return eagerjoin

else:
eagerjoin = orm_util._ORMJoin(
# see if left side of join is viable
target_join = self._splice_nested_inner_join(
path,
entity_we_want_to_splice_onto,
join_obj.left,
target_join,
join_obj.onclause,
isouter=join_obj.isouter,
_left_memo=join_obj._left_memo,
clauses,
onclause,
extra_criteria,
entity_inside_join_structure=join_obj._left_memo,
detected_existing_path=join_right_path,
)
else:
eagerjoin = orm_util._ORMJoin(
target_join,
join_obj.right,
join_obj.onclause,
isouter=join_obj.isouter,
_right_memo=join_obj._right_memo,
)

eagerjoin._target_adapter = target_join._target_adapter
return eagerjoin
if target_join is not None:
eagerjoin = orm_util._ORMJoin(
target_join,
join_obj.right,
join_obj.onclause,
isouter=join_obj.isouter,
_right_memo=join_obj._right_memo,
)
eagerjoin._target_adapter = target_join._target_adapter
return eagerjoin

# neither side viable, return None, or fail if this was the top
# most call
if entity_inside_join_structure is False:
assert (
False
), "assertion failed attempting to produce joined eager loads"
return None

# reached an endpoint (e.g. a table that's mapped, or an alias of that
# table). determine if we can use this endpoint to splice onto

# is this the entity we want to splice onto in the first place?
if not entity_we_want_to_splice_onto.isa(entity_inside_join_structure):
return None

# path check. if we know the path by which this join endpoint got
# here, let's look at the path we are satisfying and see if we're
# in the wrong place. This is specifically for when our entity may
# appear more than once in the path, issue #11449
# updated in issue #11965.
if detected_existing_path and len(detected_existing_path) > 2:
|
||||
# this assertion is currently based on how this call is made,
|
||||
# where given a join_obj, the call will have these parameters as
|
||||
# entity_inside_join_structure=join_obj._left_memo
|
||||
# and entity_inside_join_structure=join_obj._right_memo.mapper
|
||||
assert detected_existing_path[-3] is entity_inside_join_structure
|
||||
|
||||
# from that, see if the path we are targeting matches the
|
||||
# "existing" path of this join all the way up to the midpoint
|
||||
# of this join object (e.g. the relationship).
|
||||
# if not, then this is not our target
|
||||
#
|
||||
# a test condition where this test is false looks like:
|
||||
#
|
||||
# desired splice: Node->kind->Kind
|
||||
# path of desired splice: NodeGroup->nodes->Node->kind
|
||||
# path we've located: NodeGroup->nodes->Node->common_node->Node
|
||||
#
|
||||
# above, because we want to splice kind->Kind onto
|
||||
# NodeGroup->nodes->Node, this is not our path because it actually
|
||||
# goes more steps than we want into self-referential
|
||||
# ->common_node->Node
|
||||
#
|
||||
# a test condition where this test is true looks like:
|
||||
#
|
||||
# desired splice: B->c2s->C2
|
||||
# path of desired splice: A->bs->B->c2s
|
||||
# path we've located: A->bs->B->c1s->C1
|
||||
#
|
||||
# above, we want to splice c2s->C2 onto B, and the located path
|
||||
# shows that the join ends with B->c1s->C1. so we will
|
||||
# add another join onto that, which would create a "branch" that
|
||||
# we might represent in a pseudopath as:
|
||||
#
|
||||
# B->c1s->C1
|
||||
# ->c2s->C2
|
||||
#
|
||||
# i.e. A JOIN B ON <bs> JOIN C1 ON <c1s>
|
||||
# JOIN C2 ON <c2s>
|
||||
#
|
||||
|
||||
if detected_existing_path[0:-2] != path.path[0:-1]:
|
||||
return None
|
||||
|
||||
return orm_util._ORMJoin(
|
||||
join_obj,
|
||||
clauses.aliased_insp,
|
||||
onclause,
|
||||
isouter=False,
|
||||
_left_memo=entity_inside_join_structure,
|
||||
_right_memo=path[path[-1].mapper],
|
||||
_extra_criteria=extra_criteria,
|
||||
)

def _create_eager_adapter(self, context, result, adapter, path, loadopt):
compile_state = context.compile_state
@@ -2681,6 +2803,10 @@ class JoinedLoader(AbstractRelationshipLoader):
adapter,
populators,
):

if not context.compile_state.compile_options._enable_eagerloads:
return

if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
@@ -2960,6 +3086,9 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
if not run_loader:
return

if not context.compile_state.compile_options._enable_eagerloads:
return

if not self.parent.class_manager[self.key].impl.supports_population:
raise sa_exc.InvalidRequestError(
"'%s' does not support object "
@@ -3117,7 +3246,7 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):
orig_query = context.compile_state.select_statement

# the actual statement that was requested is this one:
# context_query = context.query
# context_query = context.user_passed_query
#
# that's not the cached one, however. So while it is of the identical
# structure, if it has entities like AliasedInsp, which we get from
@@ -3141,11 +3270,11 @@ class SelectInLoader(PostLoader, util.MemoizedSlots):

effective_path = path[self.parent_property]

if orig_query is context.query:
if orig_query is context.user_passed_query:
new_options = orig_query._with_options
else:
cached_options = orig_query._with_options
uncached_options = context.query._with_options
uncached_options = context.user_passed_query._with_options

# propagate compile state options from the original query,
# updating their "extra_criteria" as necessary.

@@ -1,5 +1,5 @@
# orm/strategy_options.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -98,6 +98,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
attr: _AttrType,
alias: Optional[_FromClauseArgument] = None,
_is_chain: bool = False,
_propagate_to_loaders: bool = False,
) -> Self:
r"""Indicate that the given attribute should be eagerly loaded from
columns stated manually in the query.
@@ -108,9 +109,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
The option is used in conjunction with an explicit join that loads
the desired rows, i.e.::

sess.query(Order).\
join(Order.user).\
options(contains_eager(Order.user))
sess.query(Order).join(Order.user).options(contains_eager(Order.user))

The above query would join from the ``Order`` entity to its related
``User`` entity, and the returned ``Order`` objects would have the
@@ -121,11 +120,9 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
:ref:`orm_queryguide_populate_existing` execution option assuming the
primary collection of parent objects may already have been loaded::

sess.query(User).\
join(User.addresses).\
filter(Address.email_address.like('%@aol.com')).\
options(contains_eager(User.addresses)).\
populate_existing()
sess.query(User).join(User.addresses).filter(
Address.email_address.like("%@aol.com")
).options(contains_eager(User.addresses)).populate_existing()

See the section :ref:`contains_eager` for complete usage details.
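
As an illustration only, the same option also accepts an aliased target
via ``of_type()``; a minimal sketch, assuming the usual ``User`` /
``Address`` demo mappings::

    aa = aliased(Address)
    stmt = (
        select(User)
        .join(User.addresses.of_type(aa))
        .options(contains_eager(User.addresses.of_type(aa)))
    )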

@@ -160,7 +157,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
cloned = self._set_relationship_strategy(
attr,
{"lazy": "joined"},
propagate_to_loaders=False,
propagate_to_loaders=_propagate_to_loaders,
opts={"eager_from_alias": coerced_alias},
_reconcile_to_other=True if _is_chain else None,
)
@@ -191,10 +188,18 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
the lead entity can be
specifically referred to using the :class:`_orm.Load` constructor::

stmt = select(User, Address).join(User.addresses).options(
Load(User).load_only(User.name, User.fullname),
Load(Address).load_only(Address.email_address)
)
stmt = (
select(User, Address)
.join(User.addresses)
.options(
Load(User).load_only(User.name, User.fullname),
Load(Address).load_only(Address.email_address),
)
)

When used together with the
:ref:`populate_existing <orm_queryguide_populate_existing>`
execution option only the attributes listed will be refreshed.
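
A minimal runnable sketch, assuming a ``User`` mapping with ``name``
and ``fullname`` columns::

    from sqlalchemy import select
    from sqlalchemy.orm import load_only

    stmt = select(User).options(load_only(User.name, User.fullname))
    # other column attributes on User are deferred and load on access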

:param \*attrs: Attributes to be loaded, all others will be deferred.

@@ -247,28 +252,25 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
examples::

# joined-load the "orders" collection on "User"
query(User).options(joinedload(User.orders))
select(User).options(joinedload(User.orders))

# joined-load Order.items and then Item.keywords
query(Order).options(
joinedload(Order.items).joinedload(Item.keywords))
select(Order).options(joinedload(Order.items).joinedload(Item.keywords))

# lazily load Order.items, but when Items are loaded,
# joined-load the keywords collection
query(Order).options(
lazyload(Order.items).joinedload(Item.keywords))
select(Order).options(lazyload(Order.items).joinedload(Item.keywords))

:param innerjoin: if ``True``, indicates that the joined eager load
should use an inner join instead of the default of left outer join::

query(Order).options(joinedload(Order.user, innerjoin=True))
select(Order).options(joinedload(Order.user, innerjoin=True))

In order to chain multiple eager joins together where some may be
OUTER and others INNER, right-nested joins are used to link them::

query(A).options(
joinedload(A.bs, innerjoin=False).
joinedload(B.cs, innerjoin=True)
select(A).options(
joinedload(A.bs, innerjoin=False).joinedload(B.cs, innerjoin=True)
)

The above query, linking A.bs via "outer" join and B.cs via "inner"
@@ -283,10 +285,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
will render as LEFT OUTER JOIN. For example, supposing ``A.bs``
is an outerjoin::

query(A).options(
joinedload(A.bs).
joinedload(B.cs, innerjoin="unnested")
)
select(A).options(joinedload(A.bs).joinedload(B.cs, innerjoin="unnested"))

The above join will render as "a LEFT OUTER JOIN b LEFT OUTER JOIN c",
rather than as "a LEFT OUTER JOIN (b JOIN c)".
@@ -316,7 +315,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):

:ref:`joined_eager_loading`

"""
""" # noqa: E501
loader = self._set_relationship_strategy(
attr,
{"lazy": "joined"},
@@ -338,17 +337,16 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
examples::

# subquery-load the "orders" collection on "User"
query(User).options(subqueryload(User.orders))
select(User).options(subqueryload(User.orders))

# subquery-load Order.items and then Item.keywords
query(Order).options(
subqueryload(Order.items).subqueryload(Item.keywords))
select(Order).options(
subqueryload(Order.items).subqueryload(Item.keywords)
)

# lazily load Order.items, but when Items are loaded,
# subquery-load the keywords collection
query(Order).options(
lazyload(Order.items).subqueryload(Item.keywords))

select(Order).options(lazyload(Order.items).subqueryload(Item.keywords))

.. seealso::

@@ -373,16 +371,16 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
examples::

# selectin-load the "orders" collection on "User"
query(User).options(selectinload(User.orders))
select(User).options(selectinload(User.orders))

# selectin-load Order.items and then Item.keywords
query(Order).options(
selectinload(Order.items).selectinload(Item.keywords))
select(Order).options(
selectinload(Order.items).selectinload(Item.keywords)
)

# lazily load Order.items, but when Items are loaded,
# selectin-load the keywords collection
query(Order).options(
lazyload(Order.items).selectinload(Item.keywords))
select(Order).options(lazyload(Order.items).selectinload(Item.keywords))

:param recursion_depth: optional int; when set to a positive integer
in conjunction with a self-referential relationship,
@@ -493,10 +491,10 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
:func:`_orm.noload` applies to :func:`_orm.relationship` attributes
only.

.. note:: Setting this loading strategy as the default strategy
for a relationship using the :paramref:`.orm.relationship.lazy`
parameter may cause issues with flushes, such if a delete operation
needs to load related objects and instead ``None`` was returned.
.. legacy:: The :func:`_orm.noload` option is **legacy**. As it
forces collections to be empty, which invariably leads to
non-intuitive and difficult to predict results. There are no
legitimate uses for this option in modern SQLAlchemy.

.. seealso::

@@ -558,17 +556,20 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
element of an element::

session.query(MyClass).options(
defaultload(MyClass.someattribute).
joinedload(MyOtherClass.someotherattribute)
defaultload(MyClass.someattribute).joinedload(
MyOtherClass.someotherattribute
)
)

:func:`.defaultload` is also useful for setting column-level options on
a related class, namely that of :func:`.defer` and :func:`.undefer`::

session.query(MyClass).options(
defaultload(MyClass.someattribute).
defer("some_column").
undefer("some_other_column")
session.scalars(
select(MyClass).options(
defaultload(MyClass.someattribute)
.defer("some_column")
.undefer("some_other_column")
)
)

.. seealso::
@@ -592,8 +593,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
from sqlalchemy.orm import defer

session.query(MyClass).options(
defer(MyClass.attribute_one),
defer(MyClass.attribute_two)
defer(MyClass.attribute_one), defer(MyClass.attribute_two)
)

To specify a deferred load of an attribute on a related class,
@@ -609,11 +609,11 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
at once using :meth:`_orm.Load.options`::


session.query(MyClass).options(
select(MyClass).options(
defaultload(MyClass.someattr).options(
defer(RelatedClass.some_column),
defer(RelatedClass.some_other_column),
defer(RelatedClass.another_column)
defer(RelatedClass.another_column),
)
)

@@ -659,12 +659,10 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
)

# undefer all columns specific to a single class using Load + *
session.query(MyClass, MyOtherClass).options(
Load(MyClass).undefer("*"))
session.query(MyClass, MyOtherClass).options(Load(MyClass).undefer("*"))

# undefer a column on a related object
session.query(MyClass).options(
defaultload(MyClass.items).undefer(MyClass.text))
select(MyClass).options(defaultload(MyClass.items).undefer(MyClass.text))

:param key: Attribute to be undeferred.

@@ -677,7 +675,7 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):

:func:`_orm.undefer_group`

"""
""" # noqa: E501
return self._set_column_strategy(
(key,), {"deferred": False, "instrument": True}
)
@@ -697,8 +695,9 @@ class _AbstractLoad(traversals.GenerativeOnTraversal, LoaderOption):
spelled out using relationship loader options, such as
:func:`_orm.defaultload`::

session.query(MyClass).options(
defaultload("someattr").undefer_group("large_attrs"))
select(MyClass).options(
defaultload("someattr").undefer_group("large_attrs")
)
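
As an illustration only, a sketch of how such a group might be declared
on the mapping in the first place (``MyClass`` and its columns here are
assumptions)::

    class MyClass(Base):
        __tablename__ = "my_table"

        id: Mapped[int] = mapped_column(primary_key=True)
        blob1: Mapped[bytes] = mapped_column(
            deferred=True, deferred_group="large_attrs"
        )
        blob2: Mapped[bytes] = mapped_column(
            deferred=True, deferred_group="large_attrs"
        )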

.. seealso::

@@ -1034,6 +1033,8 @@ class Load(_AbstractLoad):
def _adapt_cached_option_to_uncached_option(
self, context: QueryContext, uncached_opt: ORMOption
) -> ORMOption:
if uncached_opt is self:
return self
return self._adjust_for_extra_criteria(context)

def _prepend_path(self, path: PathRegistry) -> Load:
@@ -1049,48 +1050,52 @@ class Load(_AbstractLoad):
returning a new instance of this ``Load`` object.

"""
orig_query = context.compile_state.select_statement

orig_cache_key: Optional[CacheKey] = None
replacement_cache_key: Optional[CacheKey] = None
found_crit = False

def process(opt: _LoadElement) -> _LoadElement:
nonlocal orig_cache_key, replacement_cache_key, found_crit

found_crit = True

if orig_cache_key is None or replacement_cache_key is None:
orig_cache_key = orig_query._generate_cache_key()
replacement_cache_key = context.query._generate_cache_key()

if replacement_cache_key is not None:
assert orig_cache_key is not None

opt._extra_criteria = tuple(
replacement_cache_key._apply_params_to_element(
orig_cache_key, crit
)
for crit in opt._extra_criteria
)

return opt

# avoid generating cache keys for the queries if we don't
# actually have any extra_criteria options, which is the
# common case
new_context = tuple(
process(value._clone()) if value._extra_criteria else value
for value in self.context
)

if found_crit:
cloned = self._clone()
cloned.context = new_context
return cloned
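# avoid generating cache keys for the queries if we don't
# actually have any extra_criteria options, which is the
# common case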
for value in self.context:
if value._extra_criteria:
break
else:
return self

replacement_cache_key = context.user_passed_query._generate_cache_key()

if replacement_cache_key is None:
return self

orig_query = context.compile_state.select_statement
orig_cache_key = orig_query._generate_cache_key()
assert orig_cache_key is not None

def process(
opt: _LoadElement,
replacement_cache_key: CacheKey,
orig_cache_key: CacheKey,
) -> _LoadElement:
cloned_opt = opt._clone()

cloned_opt._extra_criteria = tuple(
replacement_cache_key._apply_params_to_element(
orig_cache_key, crit
)
for crit in cloned_opt._extra_criteria
)

return cloned_opt

cloned = self._clone()
cloned.context = tuple(
(
process(value, replacement_cache_key, orig_cache_key)
if value._extra_criteria
else value
)
for value in self.context
)
return cloned

def _reconcile_query_entities_with_us(self, mapper_entities, raiseerr):
"""called at process time to allow adjustment of the root
entity inside of _LoadElement objects.
@@ -1121,7 +1126,20 @@ class Load(_AbstractLoad):
mapper_entities, raiseerr
)

# if the context has a current path, this is a lazy load
has_current_path = bool(compile_state.compile_options._current_path)

for loader in self.context:
# issue #11292
# historically, propagate_to_loaders was only considered at
# object loading time, whether or not to carry along options
# onto an object's loaded state where it would be used by lazyload.
# however, the defaultload() option needs to propagate in case
# its sub-options propagate_to_loaders, but its sub-options
# that dont propagate should not be applied for lazy loaders.
# so we check again
if has_current_path and not loader.propagate_to_loaders:
continue
loader.process_compile_state(
self,
compile_state,
@@ -1179,13 +1197,11 @@ class Load(_AbstractLoad):

query = session.query(Author)
query = query.options(
joinedload(Author.book).options(
load_only(Book.summary, Book.excerpt),
joinedload(Book.citations).options(
joinedload(Citation.author)
)
)
)
joinedload(Author.book).options(
load_only(Book.summary, Book.excerpt),
joinedload(Book.citations).options(joinedload(Citation.author)),
)
)

:param \*opts: A series of loader option objects (ultimately
:class:`_orm.Load` objects) which should be applied to the path
@@ -1629,13 +1645,17 @@ class _LoadElement(
loads, and adjusts the given path to be relative to the
current_path.

E.g. given a loader path and current path::
E.g. given a loader path and current path:

.. sourcecode:: text

lp: User -> orders -> Order -> items -> Item -> keywords -> Keyword

cp: User -> orders -> Order -> items

The adjusted path would be::
The adjusted path would be:

.. sourcecode:: text

Item -> keywords -> Keyword

@@ -2116,11 +2136,11 @@ class _TokenStrategyLoad(_LoadElement):

e.g.::

raiseload('*')
Load(User).lazyload('*')
defer('*')
raiseload("*")
Load(User).lazyload("*")
defer("*")
load_only(User.name, User.email) # will create a defer('*')
joinedload(User.addresses).raiseload('*')
joinedload(User.addresses).raiseload("*")

"""

@@ -1,5 +1,5 @@
# orm/sync.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
# orm/unitofwork.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under

@@ -1,5 +1,5 @@
# orm/util.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
@@ -42,6 +42,7 @@ from ._typing import prop_is_relationship
from .base import _class_to_mapper as _class_to_mapper
from .base import _MappedAnnotationBase
from .base import _never_set as _never_set # noqa: F401
from .base import _none_only_set as _none_only_set # noqa: F401
from .base import _none_set as _none_set # noqa: F401
from .base import attribute_str as attribute_str # noqa: F401
from .base import class_mapper as class_mapper
@@ -85,14 +86,12 @@ from ..sql.elements import KeyedColumnElement
from ..sql.selectable import FromClause
from ..util.langhelpers import MemoizedSlots
from ..util.typing import de_stringify_annotation as _de_stringify_annotation
from ..util.typing import (
de_stringify_union_elements as _de_stringify_union_elements,
)
from ..util.typing import eval_name_only as _eval_name_only
from ..util.typing import fixup_container_fwd_refs
from ..util.typing import get_origin
from ..util.typing import is_origin_of_cls
from ..util.typing import Literal
from ..util.typing import Protocol
from ..util.typing import typing_get_origin

if typing.TYPE_CHECKING:
from ._typing import _EntityType
@@ -121,7 +120,6 @@ if typing.TYPE_CHECKING:
from ..sql.selectable import Selectable
from ..sql.visitors import anon_map
from ..util.typing import _AnnotationScanType
from ..util.typing import ArgsTypeProcotol

_T = TypeVar("_T", bound=Any)

@@ -138,7 +136,6 @@ all_cascades = frozenset(
)
)


_de_stringify_partial = functools.partial(
functools.partial,
locals_=util.immutabledict(
@@ -171,23 +168,6 @@ de_stringify_annotation = cast(
)


class _DeStringifyUnionElements(Protocol):
def __call__(
self,
cls: Type[Any],
annotation: ArgsTypeProcotol,
originating_module: str,
*,
str_cleanup_fn: Optional[Callable[[str, str], str]] = None,
) -> Type[Any]: ...


de_stringify_union_elements = cast(
_DeStringifyUnionElements,
_de_stringify_partial(_de_stringify_union_elements),
)


class _EvalNameOnly(Protocol):
def __call__(self, name: str, module_name: str) -> Any: ...

@@ -247,7 +227,7 @@ class CascadeOptions(FrozenSet[str]):
values.clear()
values.discard("all")

self = super().__new__(cls, values) # type: ignore
self = super().__new__(cls, values)
self.save_update = "save-update" in values
self.delete = "delete" in values
self.refresh_expire = "refresh-expire" in values
@@ -473,9 +453,7 @@ def identity_key(

E.g.::

>>> row = engine.execute(\
text("select * from table where a=1 and b=2")\
).first()
>>> row = engine.execute(text("select * from table where a=1 and b=2")).first()
>>> identity_key(MyClass, row=row)
(<class '__main__.MyClass'>, (1, 2), None)

@@ -486,7 +464,7 @@ def identity_key(

.. versionadded:: 1.2 added identity_token

"""
""" # noqa: E501
if class_ is not None:
mapper = class_mapper(class_)
if row is None:
@@ -664,9 +642,9 @@ class AliasedClass(

# find all pairs of users with the same name
user_alias = aliased(User)
session.query(User, user_alias).\
join((user_alias, User.id > user_alias.id)).\
filter(User.name == user_alias.name)
session.query(User, user_alias).join(
(user_alias, User.id > user_alias.id)
).filter(User.name == user_alias.name)
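
An equivalent sketch in 2.0 style, assuming the same ``User`` demo
mapping::

    user_alias = aliased(User)
    stmt = (
        select(User, user_alias)
        .join(user_alias, User.id > user_alias.id)
        .where(User.name == user_alias.name)
    )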

:class:`.AliasedClass` is also capable of mapping an existing mapped
class to an entirely new selectable, provided this selectable is column-
@@ -690,6 +668,7 @@ class AliasedClass(
using :func:`_sa.inspect`::

from sqlalchemy import inspect

my_alias = aliased(MyClass)
insp = inspect(my_alias)

@@ -1067,6 +1046,7 @@ class AliasedInsp(
aliased: bool = False,
innerjoin: bool = False,
adapt_on_names: bool = False,
name: Optional[str] = None,
_use_mapper_path: bool = False,
) -> AliasedClass[_O]:
primary_mapper = _class_to_mapper(base)
@@ -1087,6 +1067,7 @@ class AliasedInsp(
return AliasedClass(
base,
selectable,
name=name,
with_polymorphic_mappers=mappers,
adapt_on_names=adapt_on_names,
with_polymorphic_discriminator=polymorphic_on,
@@ -1378,7 +1359,10 @@ class LoaderCriteriaOption(CriteriaOption):
def __init__(
self,
entity_or_base: _EntityType[Any],
where_criteria: _ColumnExpressionArgument[bool],
where_criteria: Union[
_ColumnExpressionArgument[bool],
Callable[[Any], _ColumnExpressionArgument[bool]],
],
loader_only: bool = False,
include_aliases: bool = False,
propagate_to_loaders: bool = True,
@@ -1537,7 +1521,7 @@ GenericAlias = type(List[Any])
def _inspect_generic_alias(
class_: Type[_O],
) -> Optional[Mapper[_O]]:
origin = cast("Type[_O]", typing_get_origin(class_))
origin = cast("Type[_O]", get_origin(class_))
return _inspect_mc(origin)


@@ -1594,8 +1578,7 @@ class Bundle(

bn = Bundle("mybundle", MyClass.x, MyClass.y)

for row in session.query(bn).filter(
bn.c.x == 5).filter(bn.c.y == 4):
for row in session.query(bn).filter(bn.c.x == 5).filter(bn.c.y == 4):
print(row.mybundle.x, row.mybundle.y)

:param name: name of the bundle.
@@ -1604,7 +1587,7 @@ class Bundle(
can be returned as a "single entity" outside of any enclosing tuple
in the same manner as a mapped entity.

"""
""" # noqa: E501
self.name = self._label = name
coerced_exprs = [
coercions.expect(
@@ -1659,24 +1642,24 @@ class Bundle(

Nesting of bundles is also supported::

b1 = Bundle("b1",
Bundle('b2', MyClass.a, MyClass.b),
Bundle('b3', MyClass.x, MyClass.y)
)
b1 = Bundle(
"b1",
Bundle("b2", MyClass.a, MyClass.b),
Bundle("b3", MyClass.x, MyClass.y),
)

q = sess.query(b1).filter(
b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)
q = sess.query(b1).filter(b1.c.b2.c.a == 5).filter(b1.c.b3.c.y == 9)

.. seealso::

:attr:`.Bundle.c`

"""
""" # noqa: E501

c: ReadOnlyColumnCollection[str, KeyedColumnElement[Any]]
"""An alias for :attr:`.Bundle.columns`."""

def _clone(self):
def _clone(self, **kw):
cloned = self.__class__.__new__(self.__class__)
cloned.__dict__.update(self.__dict__)
return cloned
@@ -1737,25 +1720,24 @@ class Bundle(

from sqlalchemy.orm import Bundle


class DictBundle(Bundle):
def create_row_processor(self, query, procs, labels):
'Override create_row_processor to return values as
dictionaries'
"Override create_row_processor to return values as dictionaries"

def proc(row):
return dict(
zip(labels, (proc(row) for proc in procs))
)
return dict(zip(labels, (proc(row) for proc in procs)))

return proc

A result from the above :class:`_orm.Bundle` will return dictionary
values::

bn = DictBundle('mybundle', MyClass.data1, MyClass.data2)
for row in session.execute(select(bn)).where(bn.c.data1 == 'd1'):
print(row.mybundle['data1'], row.mybundle['data2'])
bn = DictBundle("mybundle", MyClass.data1, MyClass.data2)
for row in session.execute(select(bn)).where(bn.c.data1 == "d1"):
print(row.mybundle["data1"], row.mybundle["data2"])

"""
""" # noqa: E501
keyed_tuple = result_tuple(labels, [() for l in labels])

def proc(row: Row[Any]) -> Any:
@@ -1938,7 +1920,7 @@ class _ORMJoin(expression.Join):
self.onclause,
isouter=self.isouter,
_left_memo=self._left_memo,
_right_memo=other._left_memo,
_right_memo=other._left_memo._path_registry,
)

return _ORMJoin(
@@ -1981,7 +1963,6 @@ def with_parent(

stmt = select(Address).where(with_parent(some_user, User.addresses))


The SQL rendered is the same as that rendered when a lazy loader
would fire off from the given parent on that attribute, meaning
that the appropriate state is taken from the parent object in
@@ -1994,9 +1975,7 @@ def with_parent(

a1 = aliased(Address)
a2 = aliased(Address)
stmt = select(a1, a2).where(
with_parent(u1, User.addresses.of_type(a2))
)
stmt = select(a1, a2).where(with_parent(u1, User.addresses.of_type(a2)))

The above use is equivalent to using the
:func:`_orm.with_parent.from_entity` argument::
@@ -2021,7 +2000,7 @@ def with_parent(

.. versionadded:: 1.2

"""
""" # noqa: E501
prop_t: RelationshipProperty[Any]

if isinstance(prop, str):
@@ -2115,14 +2094,13 @@ def _entity_corresponds_to_use_path_impl(
someoption(A).someoption(C.d) # -> fn(A, C) -> False

a1 = aliased(A)
someoption(a1).someoption(A.b) # -> fn(a1, A) -> False
someoption(a1).someoption(a1.b) # -> fn(a1, a1) -> True
someoption(a1).someoption(A.b) # -> fn(a1, A) -> False
someoption(a1).someoption(a1.b) # -> fn(a1, a1) -> True

wp = with_polymorphic(A, [A1, A2])
someoption(wp).someoption(A1.foo) # -> fn(wp, A1) -> False
someoption(wp).someoption(wp.A1.foo) # -> fn(wp, wp.A1) -> True


"""
if insp_is_aliased_class(given):
return (
@@ -2149,7 +2127,7 @@ def _entity_isa(given: _InternalEntityType[Any], mapper: Mapper[Any]) -> bool:
mapper
)
elif given.with_polymorphic_mappers:
return mapper in given.with_polymorphic_mappers
return mapper in given.with_polymorphic_mappers or given.isa(mapper)
else:
return given.isa(mapper)

@@ -2231,7 +2209,7 @@ def _cleanup_mapped_str_annotation(

inner: Optional[Match[str]]

mm = re.match(r"^(.+?)\[(.+)\]$", annotation)
mm = re.match(r"^([^ \|]+?)\[(.+)\]$", annotation)

if not mm:
return annotation
@@ -2271,7 +2249,7 @@ def _cleanup_mapped_str_annotation(
while True:
stack.append(real_symbol if mm is inner else inner.group(1))
g2 = inner.group(2)
inner = re.match(r"^(.+?)\[(.+)\]$", g2)
inner = re.match(r"^([^ \|]+?)\[(.+)\]$", g2)
if inner is None:
stack.append(g2)
break
@@ -2293,8 +2271,10 @@ def _cleanup_mapped_str_annotation(
# ['Mapped', "'Optional[Dict[str, str]]'"]
not re.match(r"""^["'].*["']$""", stack[-1])
# avoid further generics like Dict[] such as
# ['Mapped', 'dict[str, str] | None']
and not re.match(r".*\[.*\]", stack[-1])
# ['Mapped', 'dict[str, str] | None'],
# ['Mapped', 'list[int] | list[str]'],
# ['Mapped', 'Union[list[int], list[str]]'],
and not re.search(r"[\[\]]", stack[-1])
):
stripchars = "\"' "
stack[-1] = ", ".join(
@@ -2316,7 +2296,7 @@ def _extract_mapped_subtype(
is_dataclass_field: bool,
expect_mapped: bool = True,
raiseerr: bool = True,
) -> Optional[Tuple[Union[type, str], Optional[type]]]:
) -> Optional[Tuple[Union[_AnnotationScanType, str], Optional[type]]]:
"""given an annotation, figure out if it's ``Mapped[something]`` and if
so, return the ``something`` part.

@@ -2334,6 +2314,11 @@ def _extract_mapped_subtype(
return None

try:
# destringify the "outside" of the annotation. note we are not
# adding include_generic so it will *not* dig into generic contents,
# which will remain as ForwardRef or plain str under future annotations
# mode. The full destringify happens later when mapped_column goes
# to do a full lookup in the registry type_annotations_map.
annotated = de_stringify_annotation(
cls,
raw_annotation,
@@ -2402,4 +2387,16 @@ def _extract_mapped_subtype(
"Expected sub-type for Mapped[] annotation"
)

return annotated.__args__[0], annotated.__origin__
return (
# fix dict/list/set args to be ForwardRef, see #11814
fixup_container_fwd_refs(annotated.__args__[0]),
annotated.__origin__,
)


def _mapper_property_as_plain_name(prop: Type[Any]) -> str:
if hasattr(prop, "_mapper_property_name"):
name = prop._mapper_property_name()
else:
name = None
return util.clsname_as_plain_name(prop, name)

@@ -1,5 +1,5 @@
# orm/writeonly.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under