# pool.py - Connection pooling for SQLAlchemy
-# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer
+# mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
def invalidate(self, e=None):
if e is not None:
- self.__pool.logger.info("Invalidate connection %r (reason: %s:%s)",
- self.connection, e.__class__.__name__, e)
+ self.__pool.logger.info(
+ "Invalidate connection %r (reason: %s:%s)",
+ self.connection, e.__class__.__name__, e)
else:
- self.__pool.logger.info("Invalidate connection %r", self.connection)
+ self.__pool.logger.info(
+ "Invalidate connection %r", self.connection)
self.__close()
self.connection = None
l.connect(self.connection, self)
elif self.__pool._recycle > -1 and \
time.time() - self.starttime > self.__pool._recycle:
- self.__pool.logger.info("Connection %r exceeded timeout; recycling",
- self.connection)
+ self.__pool.logger.info(
+ "Connection %r exceeded timeout; recycling",
+ self.connection)
self.__close()
self.connection = self.__connect()
self.info.clear()
except (SystemExit, KeyboardInterrupt):
raise
except Exception, e:
- self.__pool.logger.debug("Connection %r threw an error on close: %s",
- self.connection, e)
+ self.__pool.logger.debug(
+ "Connection %r threw an error on close: %s",
+ self.connection, e)
def __connect(self):
try:
def _finalize_fairy(connection, connection_record, pool, ref=None):
_refs.discard(connection_record)
- if ref is not None and (connection_record.fairy is not ref or isinstance(pool, AssertionPool)):
+ if ref is not None and \
+ (connection_record.fairy is not ref or
+ isinstance(pool, AssertionPool)):
return
if connection is not None:
_refs = set()
class _ConnectionFairy(object):
- """Proxies a DB-API connection and provides return-on-dereference support."""
+ """Proxies a DB-API connection and provides return-on-dereference
+ support."""
__slots__ = '_pool', '__counter', 'connection', \
'_connection_record', '__weakref__', '_detached_info'
try:
rec = self._connection_record = pool.get()
conn = self.connection = self._connection_record.get_connection()
- rec.fairy = weakref.ref(self, lambda ref:_finalize_fairy(conn, rec, pool, ref))
+ rec.fairy = weakref.ref(
+ self,
+ lambda ref:_finalize_fairy(conn, rec, pool, ref)
+ )
_refs.add(rec)
except:
- self.connection = None # helps with endless __getattr__ loops later on
+ # helps with endless __getattr__ loops later on
+ self.connection = None
self._connection_record = None
raise
self._pool.logger.debug("Connection %r checked out from pool" %
self._all_conns.pop()
def status(self):
- return "SingletonThreadPool id:%d size: %d" % (id(self), len(self._all_conns))
+ return "SingletonThreadPool id:%d size: %d" % \
+ (id(self), len(self._all_conns))
def do_return_conn(self, conn):
pass
self._overflow = 0 - pool_size
self._max_overflow = max_overflow
self._timeout = timeout
- self._overflow_lock = self._max_overflow > -1 and threading.Lock() or None
+ self._overflow_lock = self._max_overflow > -1 and \
+ threading.Lock() or None
def recreate(self):
self.logger.info("Pool recreating")
return QueuePool(self._creator, pool_size=self._pool.maxsize,
- max_overflow=self._max_overflow, timeout=self._timeout,
+ max_overflow=self._max_overflow,
+ timeout=self._timeout,
recycle=self._recycle, echo=self.echo,
logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal, listeners=self.listeners)
+ use_threadlocal=self._use_threadlocal,
+ listeners=self.listeners)
def do_return_conn(self, conn):
try:
def do_get(self):
try:
- wait = self._max_overflow > -1 and self._overflow >= self._max_overflow
+ wait = self._max_overflow > -1 and \
+ self._overflow >= self._max_overflow
return self._pool.get(wait, self._timeout)
except sqla_queue.Empty:
- if self._max_overflow > -1 and self._overflow >= self._max_overflow:
+ if self._max_overflow > -1 and \
+ self._overflow >= self._max_overflow:
if not wait:
return self.do_get()
else:
raise exc.TimeoutError(
- "QueuePool limit of size %d overflow %d reached, "
- "connection timed out, timeout %d" %
- (self.size(), self.overflow(), self._timeout))
+ "QueuePool limit of size %d overflow %d reached, "
+ "connection timed out, timeout %d" %
+ (self.size(), self.overflow(), self._timeout))
if self._overflow_lock is not None:
self._overflow_lock.acquire()
- if self._max_overflow > -1 and self._overflow >= self._max_overflow:
+ if self._max_overflow > -1 and \
+ self._overflow >= self._max_overflow:
if self._overflow_lock is not None:
self._overflow_lock.release()
return self.do_get()
return self.connection
class AssertionPool(Pool):
- """A Pool that allows at most one checked out connection at any given time.
+ """A Pool that allows at most one checked out connection at any given
+ time.
This will raise an exception if more than one connection is checked out
at a time. Useful for debugging code that is using more connections
self._create_pool_mutex.acquire()
try:
if key not in self.pools:
- pool = self.poolclass(lambda: self.module.connect(*args, **kw), **self.kw)
+ pool = self.poolclass(lambda:
+ self.module.connect(*args, **kw), **self.kw)
self.pools[key] = pool
return pool
else:
# schema.py
-# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer
+# mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
created and dropped, or is otherwise part of such an entity. Examples include
tables, columns, sequences, and indexes.
-All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as defined
-in this module they are intended to be agnostic of any vendor-specific
+All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as
+defined in this module they are intended to be agnostic of any vendor-specific
constructs.
A collection of entities are grouped into a unit called
-:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of schema
-elements, and can also be associated with an actual database connection such
-that operations involving the contained elements can contact the database as
-needed.
+:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of
+schema elements, and can also be associated with an actual database connection
+such that operations involving the contained elements can contact the database
+as needed.
Two of the elements here also build upon their "syntactic" counterparts, which
are defined in :class:`~sqlalchemy.sql.expression.`, specifically
-:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`. Since these objects
-are part of the SQL expression language, they are usable as components in SQL
-expressions.
+:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`.
+Since these objects are part of the SQL expression language, they are usable
+as components in SQL expressions.
"""
import re, inspect
:class:`~sqlalchemy.engine.base.Connectable`.
:param \*args: Additional positional arguments are used primarily
- to add the list of :class:`Column` objects contained within this table.
- Similar to the style of a CREATE TABLE statement, other :class:`SchemaItem`
- constructs may be added here, including :class:`PrimaryKeyConstraint`,
- and :class:`ForeignKeyConstraint`.
+ to add the list of :class:`Column` objects contained within this
+ table. Similar to the style of a CREATE TABLE statement, other
+ :class:`SchemaItem` constructs may be added here, including
+ :class:`PrimaryKeyConstraint`, and :class:`ForeignKeyConstraint`.
- :param autoload: Defaults to False: the Columns for this table should be reflected
- from the database. Usually there will be no Column objects in the
- constructor if this property is set.
+ :param autoload: Defaults to False: the Columns for this table should
+ be reflected from the database. Usually there will be no Column
+ objects in the constructor if this property is set.
- :param autoload_with: If autoload==True, this is an optional Engine or Connection
- instance to be used for the table reflection. If ``None``, the
- underlying MetaData's bound connectable will be used.
+ :param autoload_with: If autoload==True, this is an optional Engine
+ or Connection instance to be used for the table reflection. If
+ ``None``, the underlying MetaData's bound connectable will be used.
:param implicit_returning: True by default - indicates that
RETURNING can be used by default to fetch newly inserted primary key
values, for backends which support this. Note that
create_engine() also provides an implicit_returning flag.
- :param include_columns: A list of strings indicating a subset of columns to be loaded via
- the ``autoload`` operation; table columns who aren't present in
- this list will not be represented on the resulting ``Table``
- object. Defaults to ``None`` which indicates all columns should
- be reflected.
+ :param include_columns: A list of strings indicating a subset of
+ columns to be loaded via the ``autoload`` operation; table columns who
+ aren't present in this list will not be represented on the resulting
+ ``Table`` object. Defaults to ``None`` which indicates all columns
+ should be reflected.
- :param info: A dictionary which defaults to ``{}``. A space to store application
- specific data. This must be a dictionary.
+ :param info: A dictionary which defaults to ``{}``. A space to store
+ application specific data. This must be a dictionary.
:param mustexist: When ``True``, indicates that this Table must already
be present in the given :class:`MetaData`` collection.
:param quote_schema: same as 'quote' but applies to the schema identifier.
- :param schema: The *schema name* for this table, which is required if the table
- resides in a schema other than the default selected schema for the
- engine's database connection. Defaults to ``None``.
+ :param schema: The *schema name* for this table, which is required if
+ the table resides in a schema other than the default selected schema
+ for the engine's database connection. Defaults to ``None``.
:param useexisting: When ``True``, indicates that if this Table is already
present in the given :class:`MetaData`, apply further arguments within
- the constructor to the existing :class:`Table`. If this flag is not
- set, an error is raised when the parameters of an existing :class:`Table`
- are overwritten.
+ the constructor to the existing :class:`Table`. If this flag is not
+ set, an error is raised when the parameters of an existing
+ :class:`Table` are overwritten.
"""
__visit_name__ = 'table'
- ddl_events = ('before-create', 'after-create', 'before-drop', 'after-drop')
+ ddl_events = ('before-create', 'after-create',
+ 'before-drop', 'after-drop')
def __new__(cls, *args, **kw):
if not args:
if key in metadata.tables:
if not useexisting and bool(args):
raise exc.InvalidRequestError(
- "Table '%s' is already defined for this MetaData instance. "
- "Specify 'useexisting=True' to redefine options and "
- "columns on an existing Table object." % key)
+ "Table '%s' is already defined for this MetaData "
+ "instance. Specify 'useexisting=True' to redefine "
+ "options and columns on an existing Table object." % key)
table = metadata.tables[key]
table._init_existing(*args, **kw)
return table
# circular foreign keys
if autoload:
if autoload_with:
- autoload_with.reflecttable(self, include_columns=include_columns)
+ autoload_with.reflecttable(self,
+ include_columns=include_columns)
else:
- _bind_or_error(metadata, msg="No engine is bound to this Table's MetaData. "
- "Pass an engine to the Table via "
- "autoload_with=<someengine>, "
- "or associate the MetaData with an engine via "
- "metadata.bind=<someengine>").\
- reflecttable(self, include_columns=include_columns)
+ _bind_or_error(metadata,
+ msg="No engine is bound to this Table's MetaData. "
+ "Pass an engine to the Table via "
+ "autoload_with=<someengine>, "
+ "or associate the MetaData with an engine via "
+ "metadata.bind=<someengine>").\
+ reflecttable(self, include_columns=include_columns)
# initialize all the column, etc. objects. done after reflection to
# allow user-overrides
def _extra_kwargs(self, **kwargs):
# validate remaining kwargs that they all specify DB prefixes
if len([k for k in kwargs
- if not re.match(r'^(?:%s)_' % '|'.join(dialects.__all__), k)]):
+ if not re.match(
+ r'^(?:%s)_' %
+ '|'.join(dialects.__all__), k
+ )
+ ]):
raise TypeError(
"Invalid argument(s) for Table: %r" % kwargs.keys())
self.kwargs.update(kwargs)
created or dropped, either directly before or after the DDL is issued
to the database. The listener may modify the Table, but may not abort
the event itself.
-
- Arguments are:
-
- event
+
+ :param event:
One of ``Table.ddl_events``; e.g. 'before-create', 'after-create',
'before-drop' or 'after-drop'.
- listener
+ :param listener:
A callable, invoked with three positional arguments:
- event
+ :event:
The event currently being handled
- target
+
+ :target:
The ``Table`` object being created or dropped
- bind
+
+ :bind:
The ``Connection`` bueing used for DDL execution.
Listeners are added to the Table's ``ddl_listeners`` attribute.
+
"""
if event not in self.ddl_events:
metadata.tables[_get_table_key(self.name, self.schema)] = self
self.metadata = metadata
- def get_children(self, column_collections=True, schema_visitor=False, **kwargs):
+ def get_children(self, column_collections=True,
+ schema_visitor=False, **kw):
if not schema_visitor:
return expression.TableClause.get_children(
- self, column_collections=column_collections, **kwargs)
+ self, column_collections=column_collections, **kw)
else:
if column_collections:
return list(self.columns)
if bind is None:
bind = _bind_or_error(self)
- return bind.run_callable(bind.dialect.has_table, self.name, schema=self.schema)
+ return bind.run_callable(bind.dialect.has_table,
+ self.name, schema=self.schema)
def create(self, bind=None, checkfirst=False):
"""Issue a ``CREATE`` statement for this table.
def tometadata(self, metadata, schema=RETAIN_SCHEMA):
- """Return a copy of this ``Table`` associated with a different ``MetaData``."""
+ """Return a copy of this ``Table`` associated with a different
+ ``MetaData``."""
try:
if schema is RETAIN_SCHEMA:
return self.description
def references(self, column):
- """Return True if this Column references the given column via foreign key."""
+ """Return True if this Column references the given column via foreign
+ key."""
+
for fk in self.foreign_keys:
if fk.references(column.table):
return True
[repr(self.name)] + [repr(self.type)] +
[repr(x) for x in self.foreign_keys if x is not None] +
[repr(x) for x in self.constraints] +
- [(self.table is not None and "table=<%s>" % self.table.description or "")] +
+ [(self.table is not None and "table=<%s>" %
+ self.table.description or "")] +
["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])
def _set_parent(self, table):
elif self.unique:
if isinstance(self.unique, basestring):
raise exc.ArgumentError(
- "The 'unique' keyword argument on Column is boolean only. "
- "To create unique constraints or indexes with a specific "
- "name, append an explicit UniqueConstraint to the Table's "
- "list of elements, or create an explicit Index object "
- "external to the Table.")
+ "The 'unique' keyword argument on Column is boolean "
+ "only. To create unique constraints or indexes with a "
+ "specific name, append an explicit UniqueConstraint to "
+ "the Table's list of elements, or create an explicit "
+ "Index object external to the Table.")
table.append_constraint(UniqueConstraint(self.key))
for fn in self._table_events:
def get_children(self, schema_visitor=False, **kwargs):
if schema_visitor:
- return [x for x in (self.default, self.onupdate) if x is not None] + \
+ return [x for x in (self.default, self.onupdate)
+ if x is not None] + \
list(self.foreign_keys) + list(self.constraints)
else:
return expression.ColumnClause.get_children(self, **kwargs)
def _get_colspec(self, schema=None):
if schema:
- return schema + "." + self.column.table.name + "." + self.column.key
+ return schema + "." + self.column.table.name + \
+ "." + self.column.key
elif isinstance(self._colspec, basestring):
return self._colspec
elif hasattr(self._colspec, '__clause_element__'):
def references(self, table):
"""Return True if the given table is referenced by this ForeignKey."""
+
return table.corresponding_column(self.column) is not None
def get_referent(self, table):
"""Return the column in the given table referenced by this ForeignKey.
- Returns None if this ``ForeignKey`` does not reference the given table.
+ Returns None if this ``ForeignKey`` does not reference the given
+ table.
"""
if hasattr(self, 'parent'):
if self.parent is column:
return
- raise exc.InvalidRequestError("This ForeignKey already has a parent !")
+ raise exc.InvalidRequestError(
+ "This ForeignKey already has a parent !")
self.parent = column
self.parent.foreign_keys.add(self)
self.parent._on_table_attach(self._set_table)
@util.memoized_property
def is_scalar(self):
- return not self.is_callable and not self.is_clause_element and not self.is_sequence
+ return not self.is_callable and \
+ not self.is_clause_element and \
+ not self.is_sequence
def _maybe_wrap_callable(self, fn):
"""Backward compat: Wrap callables that don't accept a context."""
self.arg = arg
def __repr__(self):
- return "DefaultClause(%r, for_update=%r)" % (self.arg, self.for_update)
+ return "DefaultClause(%r, for_update=%r)" % \
+ (self.arg, self.for_update)
class PassiveDefault(DefaultClause):
def __init__(self, *arg, **kw):
- util.warn_deprecated("PassiveDefault is deprecated. Use DefaultClause.")
+ util.warn_deprecated("PassiveDefault is deprecated. "
+ "Use DefaultClause.")
DefaultClause.__init__(self, *arg, **kw)
class Constraint(SchemaItem):
_create_rule=None):
"""Create a SQL constraint.
- name
+ :param name:
Optional, the in-database name of this ``Constraint``.
- deferrable
+ :param deferrable:
Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
issuing DDL for this constraint.
- initially
+ :param initially:
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
- _create_rule
+ :param _create_rule:
a callable which is passed the DDLCompiler object during
compilation. Returns True or False to signal inline generation of
this Constraint.
return self.parent
except AttributeError:
pass
- raise exc.InvalidRequestError("This constraint is not bound to a table. Did you mean to call table.add_constraint(constraint) ?")
+ raise exc.InvalidRequestError(
+ "This constraint is not bound to a table. Did you "
+ "mean to call table.add_constraint(constraint) ?")
def _set_parent(self, parent):
self.parent = parent
def __init__(self, *columns, **kw):
"""
- \*columns
+ :param \*columns:
A sequence of column names or Column objects.
- name
+ :param name:
Optional, the in-database name of this constraint.
- deferrable
+ :param deferrable:
Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
issuing DDL for this constraint.
- initially
+ :param initially:
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
"""
super(ColumnCollectionConstraint, self).__init__(**kw)
self.columns = expression.ColumnCollection()
- self._pending_colargs = [_to_schema_column_or_string(c) for c in columns]
+ self._pending_colargs = [_to_schema_column_or_string(c)
+ for c in columns]
if self._pending_colargs and \
isinstance(self._pending_colargs[0], Column) and \
self._pending_colargs[0].table is not None:
initially=None, table=None, _create_rule=None):
"""Construct a CHECK constraint.
- sqltext
+ :param sqltext:
A string containing the constraint definition, which will be used
verbatim, or a SQL expression construct.
- name
+ :param name:
Optional, the in-database name of the constraint.
- deferrable
+ :param deferrable:
Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
issuing DDL for this constraint.
- initially
+ :param initially:
Optional string. If set, emit INITIALLY <value> when issuing DDL
for this constraint.
"""
- super(CheckConstraint, self).__init__(name, deferrable, initially, _create_rule)
+ super(CheckConstraint, self).\
+ __init__(name, deferrable, initially, _create_rule)
self.sqltext = expression._literal_as_text(sqltext)
if table is not None:
self._set_parent(table)
are mutually dependent on each other.
"""
- super(ForeignKeyConstraint, self).__init__(name, deferrable, initially)
+ super(ForeignKeyConstraint, self).\
+ __init__(name, deferrable, initially)
self.onupdate = onupdate
self.ondelete = ondelete
if self.use_alter:
def supports_alter(ddl, event, schema_item, bind, **kw):
- return table in set(kw['tables']) and bind.dialect.supports_alter
- AddConstraint(self, on=supports_alter).execute_at('after-create', table.metadata)
- DropConstraint(self, on=supports_alter).execute_at('before-drop', table.metadata)
+ return table in set(kw['tables']) and \
+ bind.dialect.supports_alter
+
+ AddConstraint(self, on=supports_alter).\
+ execute_at('after-create', table.metadata)
+ DropConstraint(self, on=supports_alter).\
+ execute_at('before-drop', table.metadata)
def copy(self, **kw):
return ForeignKeyConstraint(
def __init__(self, name, *columns, **kwargs):
"""Construct an index object.
- Arguments are:
-
- name
+ :param name:
The name of the index
- \*columns
+ :param \*columns:
Columns to include in the index. All columns must belong to the same
table.
- \**kwargs
- Keyword arguments include:
-
- unique
+ :param unique:
Defaults to False: create a unique index.
- postgresql_where
- Defaults to None: create a partial index when using PostgreSQL
+ :param \**kw:
+ Other keyword arguments may be interpreted by specific dialects.
+
"""
self.name = name
# all columns muse be from same table
raise exc.ArgumentError(
"All index columns must be from same table. "
- "%s is from %s not %s" % (column, column.table, self.table))
+ "%s is from %s not %s" %
+ (column, column.table, self.table))
self.columns.add(column)
def _set_parent(self, table):
bind.drop(self)
def __repr__(self):
- return 'Index("%s", %s%s)' % (self.name,
- ', '.join(repr(c) for c in self.columns),
- (self.unique and ', unique=True') or '')
+ return 'Index("%s", %s%s)' % (
+ self.name,
+ ', '.join(repr(c) for c in self.columns),
+ (self.unique and ', unique=True') or '')
class MetaData(SchemaItem):
"""A collection of Tables and their associated schema constructs.
__visit_name__ = 'metadata'
- ddl_events = ('before-create', 'after-create', 'before-drop', 'after-drop')
+ ddl_events = ('before-create', 'after-create',
+ 'before-drop', 'after-drop')
def __init__(self, bind=None, reflect=False):
"""Create a new MetaData object.
- bind
+ :param bind:
An Engine or Connection to bind to. May also be a string or URL
instance, these are passed to create_engine() and this MetaData will
be bound to the resulting engine.
- reflect
+ :param reflect:
Optional, automatically load all tables from the bound database.
Defaults to False. ``bind`` is required when this option is set.
For finer control over loaded tables, use the ``reflect`` method of
if reflect:
if not bind:
raise exc.ArgumentError(
- "A bind must be supplied in conjunction with reflect=True")
+ "A bind must be supplied in conjunction "
+ "with reflect=True")
self.reflect()
def __repr__(self):
added to the database, however no special action is taken if a table
in this ``MetaData`` no longer exists in the database.
- bind
- A :class:`~sqlalchemy.engine.base.Connectable` used to access the database; if None, uses the
- existing bind on this ``MetaData``, if any.
+ :param bind:
+ A :class:`~sqlalchemy.engine.base.Connectable` used to access the
+ database; if None, uses the existing bind on this ``MetaData``, if
+ any.
- schema
+ :param schema:
Optional, query and reflect tables from an alterate schema.
- only
+ :param only:
Optional. Load only a sub-set of available named tables. May be
specified as a sequence of names or a callable.
s = schema and (" schema '%s'" % schema) or ''
raise exc.InvalidRequestError(
'Could not reflect: requested table(s) not available '
- 'in %s%s: (%s)' % (bind.engine.url, s, ', '.join(missing)))
+ 'in %s%s: (%s)' %
+ (bind.engine.url, s, ', '.join(missing)))
load = [name for name in only if name not in current]
for name in load:
involved in DDL creates or drops, and will be invoked either before
all Table-related actions or after.
- Arguments are:
-
- event
+ :param event:
One of ``MetaData.ddl_events``; 'before-create', 'after-create',
'before-drop' or 'after-drop'.
- listener
+
+ :param listener:
A callable, invoked with three positional arguments:
- event
+ :event:
The event currently being handled
- target
+
+ :target:
The ``MetaData`` object being operated upon
- bind
+
+ :bind:
The ``Connection`` bueing used for DDL execution.
Listeners are added to the MetaData's ``ddl_listeners`` attribute.
Conditional by default, will not attempt to recreate tables already
present in the target database.
- bind
- A :class:`~sqlalchemy.engine.base.Connectable` used to access the database; if None, uses the
- existing bind on this ``MetaData``, if any.
+ :param bind:
+ A :class:`~sqlalchemy.engine.base.Connectable` used to access the
+ database; if None, uses the existing bind on this ``MetaData``, if
+ any.
- tables
+ :param tables:
Optional list of ``Table`` objects, which is a subset of the total
tables in the ``MetaData`` (others are ignored).
- checkfirst
+ :param checkfirst:
Defaults to True, don't issue CREATEs for tables already present
in the target database.
Conditional by default, will not attempt to drop tables not present in
the target database.
- bind
- A :class:`~sqlalchemy.engine.base.Connectable` used to access the database; if None, uses
- the existing bind on this ``MetaData``, if any.
+ :param bind:
+ A :class:`~sqlalchemy.engine.base.Connectable` used to access the
+ database; if None, uses the existing bind on this ``MetaData``, if
+ any.
- tables
+ :param tables:
Optional list of ``Table`` objects, which is a subset of the
total tables in the ``MetaData`` (others are ignored).
- checkfirst
- Defaults to True, only issue DROPs for tables confirmed to be present
- in the target database.
+ :param checkfirst:
+ Defaults to True, only issue DROPs for tables confirmed to be
+ present in the target database.
"""
if bind is None:
"""Execute this DDL immediately.
Executes the DDL statement in isolation using the supplied
- :class:`~sqlalchemy.engine.base.Connectable` or :class:`~sqlalchemy.engine.base.Connectable` assigned to the ``.bind`` property,
- if not supplied. If the DDL has a conditional ``on`` criteria, it
- will be invoked with None as the event.
+ :class:`~sqlalchemy.engine.base.Connectable` or
+ :class:`~sqlalchemy.engine.base.Connectable` assigned to the ``.bind``
+ property, if not supplied. If the DDL has a conditional ``on``
+ criteria, it will be invoked with None as the event.
- bind
- Optional, an ``Engine`` or ``Connection``. If not supplied, a
- valid :class:`~sqlalchemy.engine.base.Connectable` must be present in the ``.bind`` property.
+ :param bind:
+ Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
+ :class:`~sqlalchemy.engine.base.Connectable` must be present in the
+ ``.bind`` property.
- target
+ :param target:
Optional, defaults to None. The target SchemaItem for the
execute call. Will be passed to the ``on`` callable if any,
and may also provide string expansion data for the
statement. See ``execute_at`` for more information.
+
"""
if bind is None:
if self._should_execute(None, target, bind):
return bind.execute(self.against(target))
else:
- bind.engine.logger.info("DDL execution skipped, criteria not met.")
+ bind.engine.logger.info(
+ "DDL execution skipped, criteria not met.")
def execute_at(self, event, target):
"""Link execution of this DDL to the DDL lifecycle of a SchemaItem.
- Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance, executing
- it when that schema item is created or dropped. The DDL statement
- will be executed using the same Connection and transactional context
- as the Table create/drop itself. The ``.bind`` property of this
- statement is ignored.
+ Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
+ executing it when that schema item is created or dropped. The DDL
+ statement will be executed using the same Connection and transactional
+ context as the Table create/drop itself. The ``.bind`` property of
+ this statement is ignored.
- event
+ :param event:
One of the events defined in the schema item's ``.ddl_events``;
e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
- target
+ :param target:
The Table or MetaData instance for which this DDLElement will
be associated with.
A DDLElement instance can be linked to any number of schema items.
``execute_at`` builds on the ``append_ddl_listener`` interface of
- MetaDta and Table objects.
+ :class:`MetaData` and :class:`Table` objects.
Caveat: Creating or dropping a Table in isolation will also trigger
any DDL set to ``execute_at`` that Table's MetaData. This may change
def _check_ddl_on(self, on):
if (on is not None and
- (not isinstance(on, (basestring, tuple, list, set)) and not util.callable(on))):
+ (not isinstance(on, (basestring, tuple, list, set)) and
+ not util.callable(on))):
raise exc.ArgumentError(
- "Expected the name of a database dialect, a tuple of names, or a callable for "
+ "Expected the name of a database dialect, a tuple "
+ "of names, or a callable for "
"'on' criteria, got type '%s'." % type(on).__name__)
def _should_execute(self, event, target, bind, **kw):
return s
def _compiler(self, dialect, **kw):
- """Return a compiler appropriate for this ClauseElement, given a Dialect."""
+ """Return a compiler appropriate for this ClauseElement, given a
+ Dialect."""
return dialect.ddl_compiler(dialect, self, **kw)
def __init__(self, statement, on=None, context=None, bind=None):
"""Create a DDL statement.
- statement
+ :param statement:
A string or unicode string to be executed. Statements will be
processed with Python's string formatting operator. See the
``context`` argument and the ``execute_at`` method.
SQL bind parameters are not available in DDL statements.
- on
+ :param on:
Optional filtering criteria. May be a string, tuple or a callable
predicate. If a string, it will be compared to the name of the
executing database dialect::
If a callable, it will be invoked with four positional arguments
as well as optional keyword arguments:
- ddl
+ :ddl:
This DDL element.
- event
+ :event:
The name of the event that has triggered this DDL, such as
'after-create' Will be None if the DDL is executed explicitly.
- target
+ :target:
The ``Table`` or ``MetaData`` object which is the target of
this event. May be None if the DDL is executed explicitly.
- connection
+ :connection:
The ``Connection`` being used for DDL execution
- \**kw
- Keyword arguments which may be sent include:
- tables - a list of Table objects which are to be created/
- dropped within a MetaData.create_all() or drop_all() method
- call.
+ :tables:
+ Optional keyword argument - a list of Table objects which are to
+      be created/dropped within a MetaData.create_all() or drop_all()
+ method call.
+
If the callable returns a true value, the DDL statement will be
executed.
- context
+ :param context:
Optional dictionary, defaults to None. These values will be
available for use in string substitutions on the DDL statement.
- bind
- Optional. A :class:`~sqlalchemy.engine.base.Connectable`, used by default when ``execute()``
- is invoked without a bind argument.
+ :param bind:
+ Optional. A :class:`~sqlalchemy.engine.base.Connectable`, used by
+ default when ``execute()`` is invoked without a bind argument.
"""
return element
class _CreateDropBase(DDLElement):
- """Base class for DDL constucts that represent CREATE and DROP or equivalents.
+    """Base class for DDL constructs that represent CREATE and DROP or
+ equivalents.
The common theme of _CreateDropBase is a single
``element`` attribute which refers to the element
def __init__(self, element, *args, **kw):
super(AddConstraint, self).__init__(element, *args, **kw)
- element._create_rule = util.portable_instancemethod(self._create_rule_disable)
+ element._create_rule = util.portable_instancemethod(
+ self._create_rule_disable)
class DropConstraint(_CreateDropBase):
"""Represent an ALTER TABLE DROP CONSTRAINT statement."""
def __init__(self, element, cascade=False, **kw):
self.cascade = cascade
super(DropConstraint, self).__init__(element, **kw)
- element._create_rule = util.portable_instancemethod(self._create_rule_disable)
+ element._create_rule = util.portable_instancemethod(
+ self._create_rule_disable)
def _bind_or_error(schemaitem, msg=None):
bind = schemaitem.bind
bindable = "this %s's .metadata.bind" % name
if msg is None:
- msg = ('The %s is not bound to an Engine or Connection. '
- 'Execution can not proceed without a database to execute '
- 'against. Either execute with an explicit connection or '
- 'assign %s to enable implicit execution.') % (item, bindable)
+ msg = "The %s is not bound to an Engine or Connection. "\
+ "Execution can not proceed without a database to execute "\
+ "against. Either execute with an explicit connection or "\
+ "assign %s to enable implicit execution." % \
+ (item, bindable)
raise exc.UnboundExecutionError(msg)
return bind