connect(dsn, user='', password='',
clear_auto_commit=1, errorhandler=None)
- This method translates the values in the provided uri
+ This method translates the values in the provided URI
into args and kwargs needed to instantiate an mxODBC Connection.
The arg 'errorhandler' is not used by SQLAlchemy and will
.. seealso::
- :class:`_types.JSON` - main documenation for the generic
+ :class:`_types.JSON` - main documentation for the generic
cross-platform JSON datatype.
The :class:`_mssql.JSON` type supports persistence of JSON values
mysql_key_block_size="1024"
)
-When supporing :ref:`mysql_mariadb_only_mode` mode, similar keys against
+When supporting :ref:`mysql_mariadb_only_mode` mode, similar keys against
the "mariadb" prefix must be included as well. The values can of course
vary independently so that different settings on MySQL vs. MariaDB may
be maintained::
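    # a sketch, assuming a hypothetical table; both prefixes carry the
    # same option so that either dialect will apply it
    Table(
        "some_table",
        metadata,
        Column("id", Integer, primary_key=True),
        mysql_key_block_size="1024",
        mariadb_key_block_size="1024",
    )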
-------------------
Server-side cursor support is available for the mysqlclient, PyMySQL,
-maridbconnector dialects and may also be available in others. This makes use
+mariadbconnector dialects and may also be available in others. This makes use
of either the "buffered=True/False" flag if available or by using a class such
as ``MySQLdb.cursors.SSCursor`` or ``pymysql.cursors.SSCursor`` internally.
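A minimal sketch of enabling this per-connection via the ``stream_results``
execution option (URL and table name hypothetical)::

    from sqlalchemy import create_engine, text

    engine = create_engine("mysql+pymysql://scott:tiger@localhost/test")
    with engine.connect() as conn:
        # rows are fetched incrementally from the server-side cursor
        result = conn.execution_options(stream_results=True).execute(
            text("SELECT * FROM big_table")
        )
        for row in result:
            ...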
.. seealso::
- :class:`_types.JSON` - main documenation for the generic
+ :class:`_types.JSON` - main documentation for the generic
cross-platform JSON datatype.
The :class:`.mysql.JSON` type supports persistence of JSON values
# NOTE: at the moment, tests are running mariadbconnector
# against both mariadb and mysql backends. if we want this to be
- # limited, do the decisionmaking here to reject a "mysql+mariadbconnector"
+ # limited, do the decision-making here to reject a "mysql+mariadbconnector"
# URL. Optionally also re-enable the module level
# MySQLDialect_mariadbconnector.is_mysql flag as well, which must include
# a unit and/or functional test.
# CONSTRAINT `CONSTRAINT_1` CHECK (`x` > 5)'
# testing on MariaDB 10.2 shows that the CHECK constraint
# is returned on a line by itself, so to match without worrying
- # about parenthesis in the expresion we go to the end of the line
+ # about parentheses in the expression we go to the end of the line
self._re_ck_constraint = _re_compile(
r" "
r"CONSTRAINT +"
def _parse_identity_options(self, identity_options, default_on_nul):
# identity_options is a string that starts with 'ALWAYS,' or
- # 'BY DEFAULT,' and contues with
+ # 'BY DEFAULT,' and continues with
# START WITH: 1, INCREMENT BY: 1, MAX_VALUE: 123, MIN_VALUE: 1,
# CYCLE_FLAG: N, CACHE_SIZE: 1, ORDER_FLAG: N, SCALE_FLAG: N,
# EXTEND_FLAG: N, SESSION_FLAG: N, KEEP_VALUE: N
:paramref:`_sa.create_engine.connect_args` parameter also accepts all
cx_Oracle DBAPI connect arguments.
-To pass arguments directly to ``.connect()`` wihtout using the query
+To pass arguments directly to ``.connect()`` without using the query
string, use the :paramref:`_sa.create_engine.connect_args` dictionary.
Any cx_Oracle parameter value and/or constant may be passed, such as::
engine = create_engine("oracle+cx_oracle://dsn", coerce_to_decimal=False)
The ``coerce_to_decimal`` flag only impacts the results of plain string
-SQL staements that are not otherwise associated with a :class:`.Numeric`
+SQL statements that are not otherwise associated with a :class:`.Numeric`
SQLAlchemy type (or a subclass of such).
.. versionchanged:: 1.2 The numeric handling system for cx_Oracle has been
object is referenced in a SQL statement. As detailed in the next section
:ref:`postgresql_schema_reflection`, SQLAlchemy is generally organized around
the concept of keeping this variable at its default value of ``public``,
-however, in order to have it set to any arbirary name or names when connections
+however, in order to have it set to any arbitrary name or names when connections
are used automatically, the "SET SESSION search_path" command may be invoked
for all connections in a pool using the following event handler, as discussed
at :ref:`schema_set_default_connections`::
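    from sqlalchemy import event

    # a sketch along the lines of the documented recipe; "engine" and
    # "schema_name" are assumed to be defined elsewhere
    @event.listens_for(engine, "connect", insert=True)
    def set_search_path(dbapi_connection, connection_record):
        existing_autocommit = dbapi_connection.autocommit
        dbapi_connection.autocommit = True
        cursor = dbapi_connection.cursor()
        cursor.execute("SET SESSION search_path='%s'" % schema_name)
        cursor.close()
        dbapi_connection.autocommit = existing_autocommit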
The reason the recipe is complicated by use of the ``.autocommit`` DBAPI
attribute is so that when the ``SET SESSION search_path`` directive is invoked,
-it is invoked outside of the scope of any tranasction and therefore will not
+it is invoked outside of the scope of any transaction and therefore will not
be reverted when the DBAPI connection has a rollback.
.. seealso::
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Similar to using ENUM, prior to SQLAlchemy 1.3.17, for an ARRAY of JSON/JSONB
-we need to render the appropriate CAST. Current psycopg2 drivers accomodate
+we need to render the appropriate CAST. Current psycopg2 drivers accommodate
the result set correctly without any special steps.
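For reference, a sketch of the explicit CAST that older versions required
(values hypothetical)::

    from sqlalchemy import cast
    from sqlalchemy.dialects.postgresql import ARRAY, JSONB, array

    # render CAST(ARRAY[...] AS JSONB[]) so the driver returns the
    # correct result type
    expr = cast(array([{"a": 1}, {"b": 2}]), ARRAY(JSONB))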
.. versionchanged:: 1.3.17 The combination of JSON/JSONB and ARRAY is now
def fetch_clause(self, select, **kw):
# pg requires parens for non literal clauses. It's also required for
- # bind parameters if a ::type casts is used by the driver (asyncpg),
+ # bind parameters if a ::type cast is used by the driver (asyncpg),
- # so it's easies to just always add it
+ # so it's easiest to just always add it
text = ""
if select._offset_clause is not None:
text += "\n OFFSET (%s) ROWS" % self.process(
class ExcludeConstraint(ColumnCollectionConstraint):
"""A table-level EXCLUDE constraint.
- Defines an EXCLUDE constraint as described in the `postgres
+ Defines an EXCLUDE constraint as described in the `PostgreSQL
documentation`__.
__ http://www.postgresql.org/docs/9.0/static/sql-createtable.html#SQL-CREATETABLE-EXCLUDE
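    A short usage sketch, assuming a hypothetical booking table::

        from sqlalchemy import Column, Integer, MetaData, Table
        from sqlalchemy.dialects.postgresql import TSRANGE, ExcludeConstraint

        metadata = MetaData()
        room_booking = Table(
            "room_booking",
            metadata,
            Column("room", Integer, primary_key=True),
            Column("during", TSRANGE),
            # no two rows may have the same room with overlapping ranges
            ExcludeConstraint(("room", "="), ("during", "&&")),
        )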
.. seealso::
- :class:`_types.JSON` - main documenation for the generic
+ :class:`_types.JSON` - main documentation for the generic
cross-platform JSON datatype.
The operators provided by the PostgreSQL version of :class:`_types.JSON`
.. _psycopg2_multi_host:
-Specfiying multiple fallback hosts
-------------------------------------
+Specifying multiple fallback hosts
+----------------------------------
psycopg2 supports multiple connection points in the connection string.
When the ``host`` parameter is used multiple times in the query section of
class RangeOperators(object):
"""
This mixin provides functionality for the Range Operators
- listed in Table 9-44 of the `postgres documentation`__ for Range
+ listed in Table 9-44 of the `PostgreSQL documentation`__ for Range
Functions and Operators. It is used by all the range types
provided in the ``postgres`` dialect and can likely be used for
any range types you create yourself.
__ http://www.postgresql.org/docs/devel/static/functions-range.html
No extra support is provided for the Range Functions listed in
- Table 9-45 of the postgres documentation. For these, the normal
+ Table 9-45 of the PostgreSQL documentation. For these, the normal
:func:`~sqlalchemy.sql.expression.func` object should be used.
"""
def __add__(self, other):
"""Range expression. Returns the union of the two ranges.
Will raise an exception if the resulting range is not
- contigous.
+ contiguous.
"""
return self.expr.op("+")(other)
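    A brief usage sketch (table and values hypothetical)::

        from sqlalchemy import Column, MetaData, Table, select
        from sqlalchemy.dialects.postgresql import INT4RANGE

        metadata = MetaData()
        seats = Table("seats", metadata, Column("block", INT4RANGE))

        # renders "seats.block && %(block_1)s" via the range && operator
        stmt = select(seats).where(seats.c.block.overlaps("[1,5)"))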
return colspec
def _resolve_type_affinity(self, type_):
- """Return a data type from a reflected column, using affinity tules.
+ """Return a data type from a reflected column, using affinity rules.
SQLite's goal for universal compatibility introduces some complexity
during reflection, as a column's defined type might not actually be a
listed in http://www.sqlite.org/datatype3.html section 2.1.
This method allows SQLAlchemy to support that algorithm, while still
- providing access to smarter reflection utilities by regcognizing
+ providing access to smarter reflection utilities by recognizing
column definitions that SQLite only supports through affinity (like
DATE and DOUBLE).
referred_pk = self.get_pk_constraint(
connection, rtbl, schema=schema, **kw
)
- # note that if table doesnt exist, we still get back a record,
+ # note that if table doesn't exist, we still get back a record,
# just it has no columns in it
referred_columns = referred_pk["constrained_columns"]
else:
.. seealso::
- :class:`_types.JSON` - main documenation for the generic
+ :class:`_types.JSON` - main documentation for the generic
cross-platform JSON datatype.
The :class:`_sqlite.JSON` type supports persistence of JSON values
that additional driver-level arguments can be passed including options such as
"read only". The Python sqlite3 driver supports this mode under modern Python
3 versions. The SQLAlchemy pysqlite driver supports this mode of use by
-specifing "uri=true" in the URL query string. The SQLite-level "URI" is kept
+specifying "uri=true" in the URL query string. The SQLite-level "URI" is kept
as the "database" portion of the SQLAlchemy url (that is, following a slash)::
e = create_engine("sqlite:///file:path/to/database?mode=ro&uri=true")
The :class:`_engine.URL` object is now immutable, so a
:class:`_engine.CreateEnginePlugin` that needs to alter the
- :class:`_engine.URL` object should impliement the
+ :class:`_engine.URL` object should implement the
:meth:`_engine.CreateEnginePlugin.update_url` method.
:param kwargs: The keyword arguments passed to
# type: () -> Any
"""Return exactly one scalar result or raise an exception.
- This is equvalent to calling :meth:`.Result.scalars` and then
+ This is equivalent to calling :meth:`.Result.scalars` and then
:meth:`.Result.one`.
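        A minimal illustration of the equivalence::

            value = result.scalar_one()
            # behaves the same as
            value = result.scalars().one()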
.. seealso::
# type: () -> Optional[Any]
"""Return exactly one or no scalar result.
- This is equvalent to calling :meth:`.Result.scalars` and then
+ This is equivalent to calling :meth:`.Result.scalars` and then
:meth:`.Result.one_or_none`.
.. seealso::
:meth:`_engine.Result.freeze` method of any :class:`_engine.Result`
object.
- A new iterable :class:`.Result` object is generatged from a fixed
+ A new iterable :class:`.Result` object is generated from a fixed
set of data each time the :class:`.FrozenResult` is invoked as
a callable::
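        # a sketch: each call on the FrozenResult produces a fresh Result
        # over the same stored rows
        frozen = result.freeze()

        r1 = frozen()
        r2 = frozen()  # iterates independently of r1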
self._unique_filter_state = results[0]._unique_filter_state
self._yield_per = results[0]._yield_per
- # going to try someting w/ this in next rev
+ # going to try something w/ this in next rev
self._source_supports_scalars = results[0]._source_supports_scalars
self._attributes = self._attributes.merge_with(
# type: () -> Any
"""Return exactly one scalar result or raise an exception.
- This is equvalent to calling :meth:`_asyncio.AsyncResult.scalars` and
+ This is equivalent to calling :meth:`_asyncio.AsyncResult.scalars` and
then :meth:`_asyncio.AsyncResult.one`.
.. seealso::
# type: () -> Optional[Any]
"""Return exactly one or no scalar result.
- This is equvalent to calling :meth:`_asyncio.AsyncResult.scalars` and
+ This is equivalent to calling :meth:`_asyncio.AsyncResult.scalars` and
then :meth:`_asyncio.AsyncResult.one_or_none`.
.. seealso::
),
)
- # contoversy! do we resolve it here? or leave
+ # controversy! do we resolve it here? or leave
# it deferred? I think doing it here is necessary
# so the connection does not leak.
rel.secondary = rel.secondary()
Base = declarative_base()
- Is equvialent to::
+ Is equivalent to::
from sqlalchemy.orm import registry
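        # remainder of the documented equivalence (a sketch):
        mapper_registry = registry()
        Base = mapper_registry.generate_base()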
"""Map a class declaratively.
In this form of mapping, the class is scanned for mapping information,
- including for columns to be associaed with a table, and/or an
+ including for columns to be associated with a table, and/or an
actual table object.
Returns the :class:`_orm.Mapper` object.
# in the case of inheritance, particularly concrete and abstract
# concrete inheritance, the class manager might have some keys
# of attributes on the superclass that we didn't actually map.
- # These could be mapped as "concrete, dont load" or could be completely
+ # These could be mapped as "concrete, don't load" or could be completely
# excluded from the mapping and we know nothing about them. Filter them
# here to prevent them from coming through.
if attribute_names:
legacy_is_orphan=False,
_compiled_cache_size=100,
):
- r"""Direct consructor for a new :class:`_orm.Mapper` object.
+ r"""Direct constructor for a new :class:`_orm.Mapper` object.
The :func:`_orm.mapper` function is normally invoked through the
use of the :class:`_orm.registry` object through either the
and eval_condition(state.obj())
and (
update_options._refresh_identity_token is None
- # TODO: coverage for the case where horiziontal sharding
+ # TODO: coverage for the case where horizontal sharding
# invokes an update() or delete() given an explicit identity
# token up front
or state.identity_token
:meth:`_query.Query.with_session`
method.
For a full walkthrough of :class:`_query.Query` usage, see the
:ref:`ormtutorial_toplevel`.
"""
rows (which are most).
As of SQLAlchemy 1.4, the :meth:`_orm.Query.yield_per` method is
- equvalent to using the ``yield_per`` execution option at the ORM level.
- See the section :ref:`orm_queryguide_yield_per` for further background
- on this option.
+ equivalent to using the ``yield_per`` execution option at the ORM
+ level. See the section :ref:`orm_queryguide_yield_per` for further
+ background on this option.
"""
self.load_options += {"_yield_per": count}
"""Return a Query with a specific 'autoflush' setting.
As of SQLAlchemy 1.4, the :meth:`_orm.Query.autoflush` method
- is equvalent to using the ``autoflush`` execution option at the
+ is equivalent to using the ``autoflush`` execution option at the
ORM level. See the section :ref:`orm_queryguide_autoflush` for
further background on this option.
as they are loaded, or reused from the current :class:`.Session`.
As of SQLAlchemy 1.4, the :meth:`_orm.Query.populate_existing` method
- is equvalent to using the ``populate_existing`` execution option at the
- ORM level. See the section :ref:`orm_queryguide_populate_existing` for
- further background on this option.
+ is equivalent to using the ``populate_existing`` execution option at
+ the ORM level. See the section :ref:`orm_queryguide_populate_existing`
+ for further background on this option.
"""
self.load_options += {"_populate_existing": True}
**automatic aliasing** to the entities inside the subquery, when
they are referenced on the outside. Above, if we continue to
refer to the ``User`` entity without any additional aliasing applied
- to it, those references wil be in terms of the subquery::
+ to it, those references will be in terms of the subquery::
q = session.query(User).filter(User.name.like('e%')).\
limit(5).from_self().\
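                    filter(User.name.like('q%'))
                    # hypothetical continuation: the criteria above is applied
                    # against the subquery's auto-aliased User columns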
# this enables clause adaptation for non-ORM
# expressions.
# legacy. see test/orm/test_froms.py for various
- # "oldstyle" tests that rely on this and the correspoinding
+ # "oldstyle" tests that rely on this and the corresponding
# "newtyle" that do not.
self._compile_options += {"_orm_only_from_obj_alias": False}
class _ColInAnnotations(object):
- """Seralizable object that tests for a name in c._annotations."""
+ """Serializable object that tests for a name in c._annotations."""
__slots__ = ("name",)
Handlers will very likely not want to add any options to queries
when such an operation is occurring, as loader options are already
- capable of being propigated to relationship loaders and should
+ capable of being propagated to relationship loaders and should
be already present.
.. seealso::
:param autocommit:
Defaults to ``False``. When ``True``, the
:class:`.Session` does not automatically begin transactions for
- individual statement exections, will acquire connections from the
+ individual statement executions, will acquire connections from the
engine on an as-needed basis, releasing to the connection pool
after each statement. Flushes will begin and commit (or possibly
rollback) their own transaction if no transaction is present.
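          A minimal sketch of this legacy mode (engine assumed)::

              from sqlalchemy.orm import Session

              session = Session(engine, autocommit=True)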
"""Annotate the given ClauseElement and copy its internals so that
internal objects refer to the new annotated object.
- Basically used to apply a "dont traverse" annotation to a
+ Basically used to apply a "don't traverse" annotation to a
selectable, without digging throughout the whole
structure wasting time.
"""
class InPlaceGenerative(HasMemoized):
"""Provide a method-chaining pattern in conjunction with the
- @_generative decorator taht mutates in place."""
+ @_generative decorator that mutates in place."""
def _generate(self):
skip = self._memoized_keys
d = other._state_dict()
# only support a merge with another object of our class
- # and which does not have attrs that we dont. otherwise
+ # and which does not have attrs that we don't. otherwise
# we risk having state that might not be part of our cache
# key strategy
expect that they are to be invoked in an "executemany" style,
which may impact how the statement will be expected to return the
values of defaults and autoincrement / sequences and similar.
- Depending on the backend and driver in use, support for retreiving
+ Depending on the backend and driver in use, support for retrieving
these values may be disabled which means SQL expressions may
be rendered inline, RETURNING may not be rendered, etc.
return self.quote(schema)
def quote(self, ident, force=None):
- """Conditionally quote an identfier.
+ """Conditionally quote an identifier.
The identifier is quoted if it is a reserved word, contains
quote-necessary characters, or is an instance of
# if we have to invoke a server-side function, we need
# to pre-execute it. or if this is a straight
# autoincrement column and the dialect supports it
- # we can use curosr.lastrowid.
+ # we can use cursor.lastrowid.
_append_param_insert_pk_no_returning(
compiler, stmt, c, values, kw
)
or
# column has no default on it, but dialect can run the
- # "autoincrement" mechanism explictly, e.g. PostrgreSQL
+ # "autoincrement" mechanism explicitly, e.g. PostgreSQL
# SERIAL we know the sequence name
(
c.default is None
lcc = 1
else:
against = operator
- # techincally this would be len(convert_clauses) + 1
+ # technically this would be len(convert_clauses) + 1
# however this only needs to indicate "greater than one"
lcc = 2
convert_clauses.append(clause)
)
def self_group(self, against=None):
- # Tuple is parenthsized by definition.
+ # Tuple is parenthesized by definition.
return self
# additionally, each PyWrapper will log that it did in fact
# create a parameter, otherwise, it's some kind of Python
# object in the closure and we want to track that, to make
- # sure it doesn't change to somehting else, or if it does,
+ # sure it doesn't change to something else, or if it does,
# that we create a different tracked function with that
# variable.
self.expr = lambda_element._invoke_user_fn(tracker_instrumented_fn)
In this calling form, the expression renders an "empty set"
expression. These expressions are tailored to individual backends
- and are generaly trying to get an empty SELECT statement as a
+ and are generally trying to get an empty SELECT statement as a
subquery. Such as on SQLite, the expression is::
WHERE col IN (SELECT 1 FROM (SELECT 1) WHERE 1!=1)
class ReturnsRows(roles.ReturnsRowsRole, ClauseElement):
- """The basemost class for Core constructs that have some concept of
+ """The base-most class for Core constructs that have some concept of
columns that can represent rows.
While the SELECT statement and TABLE are the primary things we think
this is used to "ping" a derived selectable to add a new column
to its .c. collection when a Column has been added to one of the
- Table objects it ultimtely derives from.
+ Table objects it ultimately derives from.
If the given selectable hasn't populated its .c. collection yet,
it should at least pass on the message to the contained selectables,
.. versionadded:: 1.4 - The :func:`_sql.select` function now accepts
column arguments positionally. The top-level :func:`_sql.select`
function will automatically use the 1.x or 2.x style API based on
- the incoming argumnents; using :func:`_future.select` from the
+ the incoming arguments; using :func:`_future.select` from the
``sqlalchemy.future`` module will enforce that only the 2.x style
constructor is used.
stmt = select(user_table).join(address_table, user_table.c.id == address_table.c.user_id)
- The above statement generages SQL similar to::
+ The above statement generates SQL similar to::
SELECT user.id, user.name FROM user JOIN address ON user.id = address.user_id
user_table, address_table, user_table.c.id == address_table.c.user_id
)
- The above statement generages SQL similar to::
+ The above statement generates SQL similar to::
SELECT user.id, user.name, address.id, address.email, address.user_id
FROM user JOIN address ON user.id = address.user_id
.. versionchanged:: 1.4 the :meth:`_sql.Select.with_only_columns`
method accepts the list of column expressions positionally;
- passing the expressions as a list is deprecateed.
+ passing the expressions as a list is deprecated.
"""
# when use_labels is on:
# in all cases == if we see the same label name, use _label_anon_label
- # for subsequent occurences of that label
+ # for subsequent occurrences of that label
#
# anon_for_dupe_key == if we see the same column object multiple
# times under a particular name, whether it's the _label name or the
class _repr_params(_repr_base):
"""Provide a string view of bound parameters.
- Truncates display to a given numnber of 'multi' parameter sets,
+ Truncates display to a given number of 'multi' parameter sets,
as well as long values to a given number of characters.
"""
):
# we are a SELECT statement and not derived from an alias of a
# table (which nonetheless may be a table our SELECT derives
- # from), so return the alias to prevent futher traversal
+ # from), so return the alias to prevent further traversal
# or
# we are an alias of a table and we are not derived from an
# alias of a table (which nonetheless may be the same table
@property
def no_sequences(self):
- """the oppopsite of "sequences", DB does not support sequences at
+ """the opposite of "sequences", DB does not support sequences at
all."""
return exclusions.NotPredicate(self.sequences)
def await_only(awaitable: Coroutine) -> Any:
"""Awaits an async function in a sync method.
- The sync method must be insice a :func:`greenlet_spawn` context.
+ The sync method must be inside a :func:`greenlet_spawn` context.
:func:`await_` calls cannot be nested.
:param awaitable: The coroutine to call.
def await_fallback(awaitable: Coroutine) -> Any:
"""Awaits an async function in a sync method.
- The sync method must be insice a :func:`greenlet_spawn` context.
+ The sync method must be inside a :func:`greenlet_spawn` context.
:func:`await_` calls cannot be nested.
:param awaitable: The coroutine to call.
Instead of introducing all the object-creation overhead and having
to reinvent from scratch, just copy their compatibility routine.
- Utimately we would need to rewrite our "decorator" routine completely
+ Ultimately we would need to rewrite our "decorator" routine completely
which is not really worth it right now, until all Python 2.x support
is dropped.
)
elif spec[1]:
- # im not sure what this is
+ # I'm not sure what this is
self_arg = "%s[0]" % spec[1]
apply_pos_proxied = apply_pos
indent = " " * len(m.group(1)) + " "
# but if the next line has text, use that line's
- # indentntation
+ # indentation
if doclines:
m2 = re.match(r"(\s+)\S", doclines[0])
if m2:
# Delay creation of the queue until it is first used, to avoid
# binding it to a possibly wrong event loop.
# By delaying the creation of the pool we accommodate the common
- # usage pattern of instanciating the engine at module level, where a
+ # usage pattern of instantiating the engine at module level, where a
- # different event loop is in present compared to when the application
+ # different event loop is present compared to when the application
# is actually run.