RETAIN_SCHEMA = util.symbol('retain_schema')
+
class SchemaItem(events.SchemaEventTarget, visitors.Visitable):
"""Base class for items that define a database schema."""
"""
return {}
+
def _get_table_key(name, schema):
if schema is None:
return name
else:
return schema + "." + name
+
def _validate_dialect_kwargs(kwargs, name):
# validate remaining kwargs that they all specify DB prefixes
if len([k for k in kwargs
inspection._self_inspects(SchemaItem)
+
class Table(SchemaItem, expression.TableClause):
"""Represent a table in a database.
Column('value', String(50))
)
- The :class:`.Table` object constructs a unique instance of itself based on its
- name and optional schema name within the given :class:`.MetaData` object.
- Calling the :class:`.Table`
+ The :class:`.Table` object constructs a unique instance of itself based
+ on its name and optional schema name within the given
+ :class:`.MetaData` object. Calling the :class:`.Table`
constructor with the same name and same :class:`.MetaData` argument
a second time will return the *same* :class:`.Table` object - in this way
the :class:`.Table` constructor acts as a registry function.
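    A second call with the same arguments returns the original object
    (a minimal sketch; names are illustrative)::

        from sqlalchemy import MetaData, Table, Column, Integer

        metadata = MetaData()
        t1 = Table('user', metadata,
                   Column('id', Integer, primary_key=True))
        t2 = Table('user', metadata)
        assert t1 is t2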
or Connection instance to be used for the table reflection. If
``None``, the underlying MetaData's bound connectable will be used.
- :param extend_existing: When ``True``, indicates that if this :class:`.Table` is already
- present in the given :class:`.MetaData`, apply further arguments within
- the constructor to the existing :class:`.Table`.
+ :param extend_existing: When ``True``, indicates that if this
+ :class:`.Table` is already present in the given :class:`.MetaData`,
+ apply further arguments within the constructor to the existing
+ :class:`.Table`.
If ``extend_existing`` or ``keep_existing`` are not set, an error is
raised if additional table modifiers are specified when
keep_existing = kw.pop('keep_existing', False)
extend_existing = kw.pop('extend_existing', False)
if 'useexisting' in kw:
- util.warn_deprecated("useexisting is deprecated. Use extend_existing.")
+ msg = "useexisting is deprecated. Use extend_existing."
+ util.warn_deprecated(msg)
if extend_existing:
- raise exc.ArgumentError("useexisting is synonymous "
- "with extend_existing.")
+ msg = "useexisting is synonymous with extend_existing."
+ raise exc.ArgumentError(msg)
extend_existing = kw.pop('useexisting', False)
if keep_existing and extend_existing:
- raise exc.ArgumentError("keep_existing and extend_existing "
- "are mutually exclusive.")
+ msg = "keep_existing and extend_existing are mutually exclusive."
+ raise exc.ArgumentError(msg)
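        # For illustration, passing both flags triggers the error above:
        #
        #     Table('user', metadata,
        #           keep_existing=True, extend_existing=True)
        #     # exc.ArgumentError: keep_existing and extend_existing
        #     # are mutually exclusive.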
mustexist = kw.pop('mustexist', False)
key = _get_table_key(name, schema)
self.schema = kwargs.pop('schema', None)
if self.schema is None:
self.schema = metadata.schema
- self.quote_schema = kwargs.pop('quote_schema', metadata.quote_schema)
+ self.quote_schema = kwargs.pop(
+ 'quote_schema', metadata.quote_schema)
else:
self.quote_schema = kwargs.pop('quote_schema', None)
# allow user-overrides
self._init_items(*args)
- def _autoload(self, metadata, autoload_with, include_columns, exclude_columns=()):
+ def _autoload(self, metadata, autoload_with, include_columns,
+ exclude_columns=()):
if self.primary_key.columns:
PrimaryKeyConstraint(*[
c for c in self.primary_key.columns
@property
def _sorted_constraints(self):
- """Return the set of constraints as a list, sorted by creation order."""
+ """Return the set of constraints as a list, sorted by creation
+ order.
- return sorted(self.constraints, key=lambda c:c._creation_order)
+ """
+ return sorted(self.constraints, key=lambda c: c._creation_order)
def _init_existing(self, *args, **kwargs):
autoload = kwargs.pop('autoload', False)
if include_columns is not None:
for c in self.c:
if c.name not in include_columns:
- self._columns.remove(c)
+ self._columns.remove(c)
for key in ('quote', 'quote_schema'):
if key in kwargs:
exclude_columns = [c.name for c in self.c]
else:
exclude_columns = ()
- self._autoload(self.metadata, autoload_with, include_columns, exclude_columns)
+ self._autoload(
+ self.metadata, autoload_with, include_columns, exclude_columns)
self._extra_kwargs(**kwargs)
self._init_items(*args)
column._set_parent_with_dispatch(self)
def append_constraint(self, constraint):
- """Append a :class:`~.schema.Constraint` to this :class:`~.schema.Table`.
+ """Append a :class:`~.schema.Constraint` to this
+ :class:`~.schema.Table`.
This has the effect of the constraint being included in any
future CREATE TABLE statement, assuming specific DDL creation
- events have not been associated with the given :class:`~.schema.Constraint`
- object.
+ events have not been associated with the given
+ :class:`~.schema.Constraint` object.
Note that this does **not** produce the constraint within the
relational database automatically, for a table that already exists
in the database. To add a constraint to an
existing relational database table, the SQL ALTER command must
- be used. SQLAlchemy also provides the :class:`.AddConstraint` construct
- which can produce this SQL when invoked as an executable clause.
+ be used. SQLAlchemy also provides the
+ :class:`.AddConstraint` construct which can produce this SQL when
+ invoked as an executable clause.
"""
self,
checkfirst=checkfirst)
-
def drop(self, bind=None, checkfirst=False):
"""Issue a ``DROP`` statement for this
:class:`.Table`, using the given :class:`.Connectable`
self,
checkfirst=checkfirst)
-
def tometadata(self, metadata, schema=RETAIN_SCHEMA):
"""Return a copy of this :class:`.Table` associated with a different
:class:`.MetaData`.
table.dispatch._update(self.dispatch)
return table
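    # A usage sketch for tometadata() (names illustrative): copy a
    # table into a second MetaData collection.
    #
    #     meta2 = MetaData()
    #     user_copy = user_table.tometadata(meta2)
    #     assert user_copy.metadata is meta2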
+
class Column(SchemaItem, expression.ColumnClause):
"""Represents a column in a database table."""
.. versionchanged:: 0.7.4
``autoincrement`` accepts a special value ``'ignore_fk'``
- to indicate that autoincrementing status regardless of foreign key
- references. This applies to certain composite foreign key
+ to indicate autoincrementing status regardless of foreign key
+ references. This applies to certain composite foreign key
setups, such as the one demonstrated in the ORM documentation
at :ref:`post_update`.
``True`` in which case the rendered name of the column is used.
.. versionadded:: 0.7.4
- Note that if the schema name is not included, and the underlying
- :class:`.MetaData` has a "schema", that value will be used.
+ Note that if the schema name is not included, and the
+ underlying :class:`.MetaData` has a "schema", that value will
+ be used.
:param name: Optional string. An in-database name for the key if
``constraint`` is not provided.
# markers.
self.constraint = _constraint
-
self.use_alter = use_alter
self.name = name
self.onupdate = onupdate
return fk
def _get_colspec(self, schema=None):
- """Return a string based 'column specification' for this :class:`.ForeignKey`.
+ """Return a string based 'column specification' for this
+ :class:`.ForeignKey`.
This is usually the equivalent of the string-based "tablename.colname"
argument first passed to the object's constructor.
target_fullname = property(_get_colspec)
def references(self, table):
- """Return True if the given :class:`.Table` is referenced by this :class:`.ForeignKey`."""
+ """Return True if the given :class:`.Table` is referenced by this
+ :class:`.ForeignKey`."""
return table.corresponding_column(self.column) is not None
@util.memoized_property
def column(self):
- """Return the target :class:`.Column` referenced by this :class:`.ForeignKey`.
+ """Return the target :class:`.Column` referenced by this
+ :class:`.ForeignKey`.
If this :class:`.ForeignKey` was created using a
string-based target column specification, this
self.constraint._set_parent_with_dispatch(table)
table.foreign_keys.add(self)
+
class _NotAColumnExpr(object):
def _not_a_column_expr(self):
raise exc.InvalidRequestError(
__clause_element__ = self_group = lambda self: self._not_a_column_expr()
_from_objects = property(lambda self: self._not_a_column_expr())
+
class DefaultGenerator(_NotAColumnExpr, SchemaItem):
"""Base class for column *default* values."""
def __repr__(self):
return "ColumnDefault(%r)" % self.arg
+
class Sequence(DefaultGenerator):
"""Represents a named database sequence.
:param metadata: optional :class:`.MetaData` object which will be
associated with this :class:`.Sequence`. A :class:`.Sequence`
that is associated with a :class:`.MetaData` gains access to the
- ``bind`` of that :class:`.MetaData`, meaning the :meth:`.Sequence.create`
- and :meth:`.Sequence.drop` methods will make usage of that engine
- automatically.
+ ``bind`` of that :class:`.MetaData`, meaning the
+ :meth:`.Sequence.create` and :meth:`.Sequence.drop` methods will
+ make use of that engine automatically.
.. versionchanged:: 0.7
Additionally, the appropriate CREATE SEQUENCE/
inspection._self_inspects(FetchedValue)
+
class DefaultClause(FetchedValue):
"""A DDL-specified DEFAULT column value.
return "DefaultClause(%r, for_update=%r)" % \
(self.arg, self.for_update)
+
class PassiveDefault(DefaultClause):
"""A DDL-specified DEFAULT column value.
def __init__(self, *arg, **kw):
DefaultClause.__init__(self, *arg, **kw)
+
class Constraint(SchemaItem):
"""A table-level SQL constraint."""
def copy(self, **kw):
raise NotImplementedError()
+
class ColumnCollectionMixin(object):
def __init__(self, *columns):
self.columns = expression.ColumnCollection()
col = table.c[col]
self.columns.add(col)
+
class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
"""A constraint that proxies a ColumnCollection."""
self._set_parent_with_dispatch(
tables.pop())
-
def __visit_name__(self):
if isinstance(self.parent, Table):
return "check_constraint"
c.dispatch._update(self.dispatch)
return c
+
class ForeignKeyConstraint(Constraint):
"""A table-level FOREIGN KEY constraint.
columns[0].table is not None:
self._set_parent_with_dispatch(columns[0].table)
-
@property
def _col_description(self):
return ", ".join(self._elements)
col = table.c[col]
except KeyError:
raise exc.ArgumentError(
- "Can't create ForeignKeyConstraint "
- "on table '%s': no column "
- "named '%s' is present." % (table.description, col))
+ "Can't create ForeignKeyConstraint "
+ "on table '%s': no column "
+ "named '%s' is present." % (table.description, col))
if not hasattr(fk, 'parent') or \
fk.parent is not col:
return table in set(kw['tables']) and \
bind.dialect.supports_alter
- event.listen(table.metadata, "after_create", AddConstraint(self, on=supports_alter))
- event.listen(table.metadata, "before_drop", DropConstraint(self, on=supports_alter))
-
+ event.listen(table.metadata, "after_create",
+ AddConstraint(self, on=supports_alter))
+ event.listen(table.metadata, "before_drop",
+ DropConstraint(self, on=supports_alter))
def copy(self, **kw):
fkc = ForeignKeyConstraint(
fkc.dispatch._update(self.dispatch)
return fkc
+
class PrimaryKeyConstraint(ColumnCollectionConstraint):
"""A table-level PRIMARY KEY constraint.
def _replace(self, col):
self.columns.replace(col)
+
class UniqueConstraint(ColumnCollectionConstraint):
"""A table-level UNIQUE constraint.
__visit_name__ = 'unique_constraint'
+
class Index(ColumnCollectionMixin, SchemaItem):
"""A table-level INDEX.
(self.unique and ["unique=True"] or [])
))
+
class MetaData(SchemaItem):
- """A collection of :class:`.Table` objects and their associated schema constructs.
+ """A collection of :class:`.Table` objects and their associated schema
+ constructs.
Holds a collection of :class:`.Table` objects as well as
an optional binding to an :class:`.Engine` or
in the collection and their columns may participate in implicit SQL
execution.
- The :class:`.Table` objects themselves are stored in the ``metadata.tables``
- dictionary.
+ The :class:`.Table` objects themselves are stored in the
+ ``metadata.tables`` dictionary.
The ``bind`` property may be assigned to dynamically. A common pattern is
to start unbound and then bind later when an engine is available::
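        from sqlalchemy import create_engine, MetaData

        metadata = MetaData()
        # ... define tables against ``metadata`` here ...

        # later, once an engine exists (a sketch; the URL is
        # illustrative):
        metadata.bind = create_engine('sqlite://')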
__visit_name__ = 'metadata'
- def __init__(self, bind=None, reflect=False, schema=None, quote_schema=None):
+ def __init__(self, bind=None, reflect=False, schema=None,
+ quote_schema=None):
"""Create a new MetaData object.
:param bind:
Please use the :meth:`.MetaData.reflect` method.
:param schema:
- The default schema to use for the :class:`.Table`, :class:`.Sequence`, and other
- objects associated with this :class:`.MetaData`.
- Defaults to ``None``.
+ The default schema to use for the :class:`.Table`,
+ :class:`.Sequence`, and other objects associated with this
+ :class:`.MetaData`. Defaults to ``None``.
:param quote_schema:
- Sets the ``quote_schema`` flag for those :class:`.Table`, :class:`.Sequence`,
- and other objects which make usage of the local ``schema`` name.
+ Sets the ``quote_schema`` flag for those :class:`.Table`,
+ :class:`.Sequence`, and other objects which make use of the
+ local ``schema`` name.
.. versionadded:: 0.7.4
``schema`` and ``quote_schema`` parameters.
if t.schema is not None])
def __getstate__(self):
- return {'tables': self.tables, 'schema':self.schema,
- 'quote_schema':self.quote_schema,
- 'schemas':self._schemas,
- 'sequences':self._sequences}
+ return {'tables': self.tables,
+ 'schema': self.schema,
+ 'quote_schema': self.quote_schema,
+ 'schemas': self._schemas,
+ 'sequences': self._sequences}
def __setstate__(self, state):
self.tables = state['tables']
checkfirst=checkfirst,
tables=tables)
+
class ThreadLocalMetaData(MetaData):
"""A MetaData variant that presents a different ``bind`` in every thread.
if hasattr(e, 'dispose'):
e.dispose()
+
class SchemaVisitor(visitors.ClauseVisitor):
"""Define the visiting for ``SchemaItem`` objects."""
return dialect.ddl_compiler(dialect, self, **kw)
+
class DDLElement(expression.Executable, _DDLCompiles):
"""Base class for DDL expression constructs.
"""
_execution_options = expression.Executable.\
- _execution_options.union({'autocommit':True})
+ _execution_options.union({'autocommit': True})
target = None
on = None
This DDL element.
:target:
- The :class:`.Table` or :class:`.MetaData` object which is the target of
- this event. May be None if the DDL is executed explicitly.
+ The :class:`.Table` or :class:`.MetaData` object which is the
+ target of this event. May be None if the DDL is executed
+ explicitly.
:bind:
The :class:`.Connection` being used for DDL execution
def bind(self):
if self._bind:
return self._bind
+
def _set_bind(self, bind):
self._bind = bind
bind = property(bind, _set_bind)
Specifies literal SQL DDL to be executed by the database. DDL objects
function as DDL event listeners, and can be subscribed to those events
- listed in :class:`.DDLEvents`, using either :class:`.Table` or :class:`.MetaData`
- objects as targets. Basic templating support allows a single DDL instance
- to handle repetitive tasks for multiple tables.
+ listed in :class:`.DDLEvents`, using either :class:`.Table` or
+ :class:`.MetaData` objects as targets. Basic templating support allows
+ a single DDL instance to handle repetitive tasks for multiple tables.
Examples::
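        # a sketch of DDL used as an event listener; the index DDL and
        # names are illustrative, and a ``metadata`` collection is
        # assumed to exist:
        from sqlalchemy import event, DDL, Table, Column, Integer

        tbl = Table('users', metadata, Column('uid', Integer))
        event.listen(tbl, 'after_create',
                     DDL('CREATE INDEX users_uid_idx ON users (uid)'))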
self.on = on
self._bind = bind
-
def __repr__(self):
return '<%s@%s; %s>' % (
type(self).__name__, id(self),
for key in ('on', 'context')
if getattr(self, key)]))
+
def _to_schema_column(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, Column):
- raise exc.ArgumentError("schema.Column object expected")
- return element
+ if hasattr(element, '__clause_element__'):
+ element = element.__clause_element__()
+ if not isinstance(element, Column):
+ raise exc.ArgumentError("schema.Column object expected")
+ return element
+
def _to_schema_column_or_string(element):
if hasattr(element, '__clause_element__'):
element = element.__clause_element__()
if not isinstance(element, (basestring, expression.ColumnElement)):
- raise exc.ArgumentError("Element %r is not a string name or column element" % element)
+ msg = "Element %r is not a string name or column element"
+ raise exc.ArgumentError(msg % element)
return element
+
class _CreateDropBase(DDLElement):
"""Base class for DDL constucts that represent CREATE and DROP or
equivalents.
"""
return False
+
class CreateSchema(_CreateDropBase):
"""Represent a CREATE SCHEMA statement.
self.quote = quote
super(CreateSchema, self).__init__(name, **kw)
+
class DropSchema(_CreateDropBase):
"""Represent a DROP SCHEMA statement.
"""Create a new :class:`.DropSchema` construct."""
self.quote = quote
- self.cascade=cascade
+ self.cascade = cascade
super(DropSchema, self).__init__(name, **kw)
"""
__visit_name__ = "drop_view"
+
class CreateColumn(_DDLCompiles):
"""Represent a :class:`.Column` as rendered in a CREATE TABLE statement,
via the :class:`.CreateTable` construct.
def __init__(self, element):
self.element = element
+
class DropTable(_CreateDropBase):
"""Represent a DROP TABLE statement."""
__visit_name__ = "drop_table"
+
class CreateSequence(_CreateDropBase):
"""Represent a CREATE SEQUENCE statement."""
__visit_name__ = "create_sequence"
+
class DropSequence(_CreateDropBase):
"""Represent a DROP SEQUENCE statement."""
__visit_name__ = "drop_sequence"
+
class CreateIndex(_CreateDropBase):
"""Represent a CREATE INDEX statement."""
__visit_name__ = "create_index"
+
class DropIndex(_CreateDropBase):
"""Represent a DROP INDEX statement."""
__visit_name__ = "drop_index"
+
class AddConstraint(_CreateDropBase):
"""Represent an ALTER TABLE ADD CONSTRAINT statement."""
element._create_rule = util.portable_instancemethod(
self._create_rule_disable)
+
class DropConstraint(_CreateDropBase):
"""Represent an ALTER TABLE DROP CONSTRAINT statement."""
element._create_rule = util.portable_instancemethod(
self._create_rule_disable)
+
def _bind_or_error(schemaitem, msg=None):
bind = schemaitem.bind
if not bind:
(item, bindable)
raise exc.UnboundExecutionError(msg)
return bind
-
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""defines genericized SQL types, each represented by a subclass of
-:class:`~sqlalchemy.types.AbstractType`. Dialects define further subclasses of these
-types.
+:class:`~sqlalchemy.types.AbstractType`. Dialects define further subclasses
+of these types.
For more information see the SQLAlchemy documentation on types.
__all__ = ['TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR', 'TEXT', 'Text',
'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
- 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT', 'SMALLINT',
- 'INTEGER', 'DATE', 'TIME', 'String', 'Integer', 'SmallInteger',
- 'BigInteger', 'Numeric', 'Float', 'DateTime', 'Date', 'Time',
- 'LargeBinary', 'Binary', 'Boolean', 'Unicode', 'Concatenable',
- 'UnicodeText', 'PickleType', 'Interval', 'Enum']
+ 'CLOB', 'BLOB', 'BINARY', 'VARBINARY', 'BOOLEAN', 'BIGINT',
+ 'SMALLINT', 'INTEGER', 'DATE', 'TIME', 'String', 'Integer',
+ 'SmallInteger', 'BigInteger', 'Numeric', 'Float', 'DateTime',
+ 'Date', 'Time', 'LargeBinary', 'Binary', 'Boolean', 'Unicode',
+ 'Concatenable', 'UnicodeText', 'PickleType', 'Interval', 'Enum']
import datetime as dt
import codecs
if util.jython:
import array
+
class AbstractType(Visitable):
"""Base for all types - not needed except for backwards
compatibility."""
+
class TypeEngine(AbstractType):
"""Base for built-in types."""
parameter within the statement. It is used for special data types
that require literals being wrapped in some special database function
in order to coerce an application-level value into a database-specific
- format. It is the SQL analogue of the :meth:`.TypeEngine.bind_processor`
- method.
+ format. It is the SQL analogue of the
+ :meth:`.TypeEngine.bind_processor` method.
The method is evaluated at statement compile time, as opposed
to statement construction time.
The construction of :meth:`.TypeEngine.with_variant` is always
from the "fallback" type to that which is dialect specific.
The returned type is an instance of :class:`.Variant`, which
- itself provides a :meth:`~sqlalchemy.types.Variant.with_variant` that can
- be called repeatedly.
+ itself provides a :meth:`~sqlalchemy.types.Variant.with_variant`
+ that can be called repeatedly.
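        For example (a sketch; the MySQL variant shown is
        illustrative)::

            from sqlalchemy.types import String
            from sqlalchemy.dialects import mysql

            string_type = String(255).with_variant(
                mysql.VARCHAR(255, charset='utf8'), 'mysql')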
:param type_: a :class:`.TypeEngine` that will be selected
as a variant from the originating type, when a dialect
return self.__class__
def dialect_impl(self, dialect):
- """Return a dialect-specific implementation for this :class:`.TypeEngine`."""
+ """Return a dialect-specific implementation for this
+ :class:`.TypeEngine`.
+ """
try:
return dialect._type_memos[self]['impl']
except KeyError:
def __repr__(self):
return util.generic_repr(self)
+
def _reconstitute_comparator(expression):
return expression.comparator
+
class UserDefinedType(TypeEngine):
"""Base for user defined types.
Default behavior for :class:`.UserDefinedType` is the
same as that of :class:`.TypeDecorator`; by default it returns
``self``, assuming the compared value should be coerced into
- the same type as this one. See :meth:`.TypeDecorator.coerce_compared_value`
- for more detail.
+ the same type as this one. See
+ :meth:`.TypeDecorator.coerce_compared_value` for more detail.
.. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value`
now returns ``self`` by default, rather than falling onto the
- more fundamental behavior of :meth:`.TypeEngine.coerce_compared_value`.
+ more fundamental behavior of
+ :meth:`.TypeEngine.coerce_compared_value`.
"""
the :class:`.TypeEngine` type represented by ``self.impl``.
Makes use of :meth:`dialect_impl` but also traverses
into wrapped :class:`.TypeDecorator` instances.
- Behavior can be customized here by overriding :meth:`load_dialect_impl`.
+ Behavior can be customized here by overriding
+ :meth:`load_dialect_impl`.
"""
adapted = dialect.type_descriptor(self)
return self.impl.bind_processor(dialect)
def result_processor(self, dialect, coltype):
- """Provide a result value processing function for the given :class:`.Dialect`.
+ """Provide a result value processing function for the given
+ :class:`.Dialect`.
This is the method that fulfills the :class:`.TypeEngine`
contract for result value conversion. :class:`.TypeDecorator`
return instance
def get_dbapi_type(self, dbapi):
- """Return the DBAPI type object represented by this :class:`.TypeDecorator`.
+ """Return the DBAPI type object represented by this
+ :class:`.TypeDecorator`.
By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
underlying "impl".
"""Construct a new :class:`.Variant`.
:param base: the base 'fallback' type
- :param mapping: dictionary of string dialect names to :class:`.TypeEngine`
- instances.
+ :param mapping: dictionary of string dialect names to
+ :class:`.TypeEngine` instances.
"""
self.impl = base
else:
return typeobj
+
def adapt_type(typeobj, colspecs):
if isinstance(typeobj, type):
typeobj = typeobj()
NullTypeEngine = NullType
+
class Concatenable(object):
"""A mixin that marks a type as supporting 'concatenation',
typically strings."""
class Comparator(TypeEngine.Comparator):
_blank_dict = util.immutabledict()
+
def _adapt_expression(self, op, other_comparator):
othertype = other_comparator.type._type_affinity
return op, \
get(othertype, NULLTYPE)
comparator_factory = Comparator
+
class String(Concatenable, TypeEngine):
"""The base for all string and character types.
else:
encoder = codecs.getencoder(dialect.encoding)
warn_on_bytestring = self._warn_on_bytestring
+
def process(value):
if isinstance(value, unicode):
return encoder(value, self.unicode_error)[0]
def get_dbapi_type(self, dbapi):
return dbapi.STRING
+
class Text(String):
"""A variably sized string type.
"""
__visit_name__ = 'text'
+
class Unicode(String):
"""A variable length Unicode string type.
kwargs.setdefault('_warn_on_bytestring', True)
super(Unicode, self).__init__(length=length, **kwargs)
+
class UnicodeText(Text):
"""An unbounded-length Unicode string type.
# TODO: need a dictionary object that will
# handle operators generically here, this is incomplete
return {
- operators.add:{
- Date:Date,
- Integer:self.__class__,
- Numeric:Numeric,
+ operators.add: {
+ Date: Date,
+ Integer: self.__class__,
+ Numeric: Numeric,
},
- operators.mul:{
- Interval:Interval,
- Integer:self.__class__,
- Numeric:Numeric,
+ operators.mul: {
+ Interval: Interval,
+ Integer: self.__class__,
+ Numeric: Numeric,
},
# Py2K
- operators.div:{
- Integer:self.__class__,
- Numeric:Numeric,
+ operators.div: {
+ Integer: self.__class__,
+ Numeric: Numeric,
},
# end Py2K
- operators.truediv:{
- Integer:self.__class__,
- Numeric:Numeric,
+ operators.truediv: {
+ Integer: self.__class__,
+ Numeric: Numeric,
},
- operators.sub:{
- Integer:self.__class__,
- Numeric:Numeric,
+ operators.sub: {
+ Integer: self.__class__,
+ Numeric: Numeric,
},
}
+
class SmallInteger(Integer):
"""A type for smaller ``int`` integers.
@util.memoized_property
def _expression_adaptations(self):
return {
- operators.mul:{
- Interval:Interval,
- Numeric:self.__class__,
- Integer:self.__class__,
+ operators.mul: {
+ Interval: Interval,
+ Numeric: self.__class__,
+ Integer: self.__class__,
},
# Py2K
- operators.div:{
- Numeric:self.__class__,
- Integer:self.__class__,
+ operators.div: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
},
# end Py2K
- operators.truediv:{
- Numeric:self.__class__,
- Integer:self.__class__,
+ operators.truediv: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
},
- operators.add:{
- Numeric:self.__class__,
- Integer:self.__class__,
+ operators.add: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
},
- operators.sub:{
- Numeric:self.__class__,
- Integer:self.__class__,
+ operators.sub: {
+ Numeric: self.__class__,
+ Integer: self.__class__,
}
}
+
class Float(Numeric):
"""A type for ``float`` numbers.
:param \**kwargs: deprecated. Additional arguments here are ignored
by the default :class:`.Float` type. For database specific
floats that support additional arguments, see that dialect's
- documentation for details, such as :class:`sqlalchemy.dialects.mysql.FLOAT`.
+ documentation for details, such as
+ :class:`sqlalchemy.dialects.mysql.FLOAT`.
"""
self.precision = precision
@util.memoized_property
def _expression_adaptations(self):
return {
- operators.mul:{
- Interval:Interval,
- Numeric:self.__class__,
+ operators.mul: {
+ Interval: Interval,
+ Numeric: self.__class__,
},
# Py2K
- operators.div:{
- Numeric:self.__class__,
+ operators.div: {
+ Numeric: self.__class__,
},
# end Py2K
- operators.truediv:{
- Numeric:self.__class__,
+ operators.truediv: {
+ Numeric: self.__class__,
},
- operators.add:{
- Numeric:self.__class__,
+ operators.add: {
+ Numeric: self.__class__,
},
- operators.sub:{
- Numeric:self.__class__,
+ operators.sub: {
+ Numeric: self.__class__,
}
}
@util.memoized_property
def _expression_adaptations(self):
return {
- operators.add:{
- Interval:self.__class__,
+ operators.add: {
+ Interval: self.__class__,
},
- operators.sub:{
- Interval:self.__class__,
- DateTime:Interval,
+ operators.sub: {
+ Interval: self.__class__,
+ DateTime: Interval,
},
}
-class Date(_DateAffinity,TypeEngine):
+class Date(_DateAffinity, TypeEngine):
"""A type for ``datetime.date()`` objects."""
__visit_name__ = 'date'
@util.memoized_property
def _expression_adaptations(self):
return {
- operators.add:{
- Integer:self.__class__,
- Interval:DateTime,
- Time:DateTime,
+ operators.add: {
+ Integer: self.__class__,
+ Interval: DateTime,
+ Time: DateTime,
},
- operators.sub:{
+ operators.sub: {
# date - integer = date
- Integer:self.__class__,
+ Integer: self.__class__,
# date - date = integer.
- Date:Integer,
+ Date: Integer,
- Interval:DateTime,
+ Interval: DateTime,
# date - datetime = interval,
# this one is not in the PG docs
# but works
- DateTime:Interval,
+ DateTime: Interval,
},
}
-class Time(_DateAffinity,TypeEngine):
+class Time(_DateAffinity, TypeEngine):
"""A type for ``datetime.time()`` objects."""
__visit_name__ = 'time'
@util.memoized_property
def _expression_adaptations(self):
return {
- operators.add:{
- Date:DateTime,
- Interval:self.__class__
+ operators.add: {
+ Date: DateTime,
+ Interval: self.__class__
},
- operators.sub:{
- Time:Interval,
- Interval:self.__class__,
+ operators.sub: {
+ Time: Interval,
+ Interval: self.__class__,
},
}
# here, though pg8000 does to indicate "bytea"
def bind_processor(self, dialect):
DBAPIBinary = dialect.dbapi.Binary
+
def process(value):
x = self
if value is not None:
def get_dbapi_type(self, dbapi):
return dbapi.BINARY
+
class LargeBinary(_Binary):
"""A type for large binary byte data.
"""
_Binary.__init__(self, length=length)
+
class Binary(LargeBinary):
"""Deprecated. Renamed to LargeBinary."""
'LargeBinary.')
LargeBinary.__init__(self, *arg, **kw)
+
class SchemaType(events.SchemaEventTarget):
"""Mark a type as possibly requiring schema-level DDL for usage.
constraints, triggers, and other rules.
:class:`.SchemaType` classes can also be targets for the
- :meth:`.DDLEvents.before_parent_attach` and :meth:`.DDLEvents.after_parent_attach`
- events, where the events fire off surrounding the association of
- the type object with a parent :class:`.Column`.
+ :meth:`.DDLEvents.before_parent_attach` and
+ :meth:`.DDLEvents.after_parent_attach` events, where the events fire off
+ surrounding the association of the type object with a parent
+ :class:`.Column`.
"""
if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
t._on_metadata_drop(target, bind, **kw)
+
class Enum(String, SchemaType):
"""Generic Enum Type.
if self.native_enum:
SchemaType._set_table(self, column, table)
-
e = schema.CheckConstraint(
column.in_(self.enums),
name=self.name,
else:
return super(Enum, self).adapt(impltype, **kw)
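# A minimal usage sketch (names illustrative): on backends without a
# native ENUM type, Enum renders VARCHAR plus a CHECK constraint.
#
#     Table('flower', metadata,
#           Column('colour', Enum('red', 'blue', name='colour_enum')))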
+
class PickleType(TypeDecorator):
"""Holds Python objects, which are serialized using pickle.
else:
return processors.int_to_boolean
+
class Interval(_DateAffinity, TypeDecorator):
"""A type for ``datetime.timedelta()`` objects.
@util.memoized_property
def _expression_adaptations(self):
return {
- operators.add:{
- Date:DateTime,
- Interval:self.__class__,
- DateTime:DateTime,
- Time:Time,
+ operators.add: {
+ Date: DateTime,
+ Interval: self.__class__,
+ DateTime: DateTime,
+ Time: Time,
},
- operators.sub:{
- Interval:self.__class__
+ operators.sub: {
+ Interval: self.__class__
},
- operators.mul:{
- Numeric:self.__class__
+ operators.mul: {
+ Numeric: self.__class__
},
operators.truediv: {
- Numeric:self.__class__
+ Numeric: self.__class__
},
# Py2K
operators.div: {
- Numeric:self.__class__
+ Numeric: self.__class__
}
# end Py2K
}
__visit_name__ = 'REAL'
+
class FLOAT(Float):
"""The SQL FLOAT type."""
__visit_name__ = 'FLOAT'
+
class NUMERIC(Numeric):
"""The SQL NUMERIC type."""
__visit_name__ = 'BIGINT'
+
class TIMESTAMP(DateTime):
"""The SQL TIMESTAMP type."""
def get_dbapi_type(self, dbapi):
return dbapi.TIMESTAMP
+
class DATETIME(DateTime):
"""The SQL DATETIME type."""
__visit_name__ = 'TIME'
+
class TEXT(Text):
"""The SQL TEXT type."""
__visit_name__ = 'TEXT'
+
class CLOB(Text):
"""The CLOB type.
__visit_name__ = 'CLOB'
+
class VARCHAR(String):
"""The SQL VARCHAR type."""
__visit_name__ = 'VARCHAR'
+
class NVARCHAR(Unicode):
"""The SQL NVARCHAR type."""
__visit_name__ = 'NVARCHAR'
+
class CHAR(String):
"""The SQL CHAR type."""
__visit_name__ = 'BLOB'
+
class BINARY(_Binary):
"""The SQL BINARY type."""
__visit_name__ = 'BINARY'
+
class VARBINARY(_Binary):
"""The SQL VARBINARY type."""
_type_map = {
str: String(),
# Py3K
- #bytes : LargeBinary(),
+ #bytes: LargeBinary(),
# Py2K
- unicode : Unicode(),
+ unicode: Unicode(),
# end Py2K
- int : Integer(),
- float : Numeric(),
+ int: Integer(),
+ float: Numeric(),
bool: BOOLEANTYPE,
- decimal.Decimal : Numeric(),
- dt.date : Date(),
- dt.datetime : DateTime(),
- dt.time : Time(),
- dt.timedelta : Interval(),
+ decimal.Decimal: Numeric(),
+ dt.date: Date(),
+ dt.datetime: DateTime(),
+ dt.time: Time(),
+ dt.timedelta: Interval(),
NoneType: NULLTYPE
}
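# A usage sketch of the mapping above (run outside this module):
# literal() consults _type_map to assign a plain Python value a type.
#
#     from sqlalchemy import literal
#     literal(5).type                      # -> Integer()
#     literal(dt.date(2012, 1, 1)).type    # -> Date()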
-