--- /dev/null
+.. change::
+ :tags: bug, sql
+ :tickets: 4088
+
+ Internal refinements to the :class:`.Enum`, :class:`.Interval`, and
+ :class:`.Boolean` types, which now extend a common mixin
+ :class:`.Emulated` indicating a type that provides Python-side
+ emulation of a DB-native type, switching out to the DB-native type
+ when a supporting backend is in use. The PostgreSQL :class:`.INTERVAL`
+ type, when used directly, now includes the correct type coercion rules
+ for SQL expressions, the same rules that apply to
+ :class:`.sqltypes.Interval` (for example, adding a date to an interval
+ yields a datetime).
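
As a quick illustration of the coercion rule described above (a sketch, not part of the patch; it mirrors the test_interval_coercion test added further down), a directly-used PostgreSQL INTERVAL now participates in date arithmetic the same way the generic Interval does:

    from sqlalchemy import types
    from sqlalchemy.sql import column
    from sqlalchemy.dialects import postgresql

    # adding a date to a native INTERVAL column now yields a datetime-typed
    # expression; _type_affinity is internal, used here only to mirror the test
    expr = column('duration', postgresql.INTERVAL) + column('start', types.Date)
    assert expr.type._type_affinity is types.DateTime
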
from .types import _StringType
from ... import exc, sql, util
-from ... import types as sqltypes
+from ...sql import sqltypes
class _EnumeratedValues(_StringType):
return strip_values
-class ENUM(sqltypes.Enum, _EnumeratedValues):
+class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _EnumeratedValues):
"""MySQL ENUM type."""
__visit_name__ = 'ENUM'
+ native_enum = True
+
def __init__(self, *enums, **kw):
"""Construct an ENUM.
"""
kw.pop('strict', None)
- validate_strings = kw.pop("validate_strings", False)
- sqltypes.Enum.__init__(
- self, validate_strings=validate_strings, *enums)
- kw.pop('metadata', None)
- kw.pop('schema', None)
- kw.pop('name', None)
- kw.pop('quote', None)
- kw.pop('native_enum', None)
- kw.pop('inherit_schema', None)
- kw.pop('_create_events', None)
+ self._enum_init(enums, kw)
_StringType.__init__(self, length=self.length, **kw)
+ @classmethod
+ def adapt_emulated_to_native(cls, impl, **kw):
+ """Produce a MySQL native :class:`.mysql.ENUM` from plain
+ :class:`.Enum`.
+
+ """
+ kw.setdefault("validate_strings", impl.validate_strings)
+ return cls(**kw)
+
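
A usage sketch, not part of the patch: with the generic Enum now an Emulated type, adapting it against the MySQL dialect reaches this classmethod, and the native ENUM comes back with the emulated type's settings carried over:

    from sqlalchemy import Enum
    from sqlalchemy.dialects import mysql

    generic = Enum('small', 'medium', 'large', validate_strings=True)

    # native_enum defaults to True, so adapt() takes the native path
    native = generic.adapt(mysql.ENUM)
    assert isinstance(native, mysql.ENUM)
    assert native.validate_strings
    assert native.enums == ['small', 'medium', 'large']
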
def _setup_for_values(self, values, objects, kw):
values, length = self._init_values(values, kw)
- return sqltypes.Enum._setup_for_values(self, values, objects, kw)
+ return super(ENUM, self)._setup_for_values(values, objects, kw)
def _object_value_for_elem(self, elem):
# mysql sends back a blank string for any value that
return util.generic_repr(
self, to_inspect=[ENUM, _StringType, sqltypes.Enum])
- def adapt(self, cls, **kw):
- return sqltypes.Enum.adapt(self, cls, **kw)
-
class SET(_EnumeratedValues):
"""MySQL SET type."""
from ... import sql, schema, exc, util
from ...engine import default, reflection
from ...sql import compiler, expression
-from ... import types as sqltypes
+from ...sql import sqltypes
try:
from uuid import UUID as _python_UUID
self.precision = precision
-class INTERVAL(sqltypes.TypeEngine):
+class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval):
"""PostgreSQL INTERVAL type.
"""
__visit_name__ = 'INTERVAL'
+ native = True
def __init__(self, precision=None, fields=None):
"""Construct an INTERVAL.
self.fields = fields
@classmethod
- def _adapt_from_generic_interval(cls, interval):
+ def adapt_emulated_to_native(cls, interval, **kw):
return INTERVAL(precision=interval.second_precision)
@property
__visit_name__ = 'TSVECTOR'
-class ENUM(sqltypes.Enum):
+class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum):
"""PostgreSQL ENUM type.
"""
+ native_enum = True
+
def __init__(self, *enums, **kw):
"""Construct an :class:`~.postgresql.ENUM`.
self.create_type = kw.pop("create_type", True)
super(ENUM, self).__init__(*enums, **kw)
+ @classmethod
+ def adapt_emulated_to_native(cls, impl, **kw):
+ """Produce a Postgresql native :class:`.postgresql.ENUM` from plain
+ :class:`.Enum`.
+
+ """
+ kw.setdefault("validate_strings", impl.validate_strings)
+ kw.setdefault('name', impl.name)
+ kw.setdefault('schema', impl.schema)
+ kw.setdefault('inherit_schema', impl.inherit_schema)
+ kw.setdefault('metadata', impl.metadata)
+ kw.setdefault('_create_events', False)
+ return cls(**kw)
+
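
A sketch of the effect, not part of the patch (the expanded test_adapt near the end of this diff asserts the same): since the PostgreSQL ENUM is a schema-level type, this hook also carries the name, schema and MetaData of the generic Enum onto the native type:

    from sqlalchemy import Enum, MetaData
    from sqlalchemy.dialects import postgresql

    metadata = MetaData()
    generic = Enum('draft', 'published', name='doc_status',
                   schema='app', metadata=metadata)

    native = generic.adapt(postgresql.ENUM)
    assert isinstance(native, postgresql.ENUM)
    assert (native.name, native.schema) == ('doc_status', 'app')
    assert native.metadata is metadata
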
def create(self, bind=None, checkfirst=True):
"""Emit ``CREATE TYPE`` for this
:class:`~.postgresql.ENUM`.
class _PGEnum(ENUM):
def result_processor(self, dialect, coltype):
- if self.native_enum and util.py2k and self.convert_unicode is True:
+ if util.py2k and self.convert_unicode is True:
# we can't easily use PG's extensions here because
# the OID is on the fly, and we need to give it a python
# function anyway - not really worth it.
import json
from . import elements
-from .type_api import TypeEngine, TypeDecorator, to_instance, Variant
+from .type_api import TypeEngine, TypeDecorator, to_instance, Variant, \
+ Emulated, NativeForEmulated
from .elements import quoted_name, TypeCoerce as type_coerce, _defer_name, \
Slice, _literal_as_binds
from .. import exc, util, processors
return variant_mapping['_default'] is self
-class Enum(String, SchemaType):
-
+class Enum(Emulated, String, SchemaType):
"""Generic Enum Type.
The :class:`.Enum` type provides a set of possible string values
.. seealso::
- :class:`~.postgresql.ENUM` - PostgreSQL-specific type,
+ :class:`.postgresql.ENUM` - PostgreSQL-specific type,
which has additional functionality.
- """
+ :class:`.mysql.ENUM` - MySQL-specific type
+ """
__visit_name__ = 'enum'
def __init__(self, *enums, **kw):
.. versionadded:: 1.1.0b2
"""
+ self._enum_init(enums, kw)
+
+ @property
+ def _enums_argument(self):
+ if self.enum_class is not None:
+ return [self.enum_class]
+ else:
+ return self.enums
+
+ def _enum_init(self, enums, kw):
+ """internal init for :class:`.Enum` and subclasses.
+
+ friendly init helper used by subclasses to remove
+ all the Enum-specific keyword arguments from kw. Allows all
+ other arguments in kw to pass through.
+
+ """
+ self.native_enum = kw.pop('native_enum', True)
+ self.create_constraint = kw.pop('create_constraint', True)
values, objects = self._parse_into_values(enums, kw)
self._setup_for_values(values, objects, kw)
- self.native_enum = kw.pop('native_enum', True)
convert_unicode = kw.pop('convert_unicode', None)
- self.create_constraint = kw.pop('create_constraint', True)
self.validate_strings = kw.pop('validate_strings', False)
if convert_unicode is None:
length = 0
self._valid_lookup[None] = self._object_lookup[None] = None
- String.__init__(self,
- length=length,
- convert_unicode=convert_unicode,
- )
- SchemaType.__init__(self, **kw)
+ super(Enum, self).__init__(
+ length=length,
+ convert_unicode=convert_unicode,
+ )
+
+ if self.enum_class:
+ kw.setdefault('name', self.enum_class.__name__.lower())
+ SchemaType.__init__(
+ self,
+ name=kw.pop('name', None),
+ schema=kw.pop('schema', None),
+ metadata=kw.pop('metadata', None),
+ inherit_schema=kw.pop('inherit_schema', False),
+ quote=kw.pop('quote', None),
+ _create_events=kw.pop('_create_events', True)
+ )
def _parse_into_values(self, enums, kw):
+ if not enums and '_enums' in kw:
+ enums = kw.pop('_enums')
+
if len(enums) == 1 and hasattr(enums[0], '__members__'):
self.enum_class = enums[0]
values = list(self.enum_class.__members__)
objects = [self.enum_class.__members__[k] for k in values]
- kw.setdefault('name', self.enum_class.__name__.lower())
-
return values, objects
else:
self.enum_class = None
[(value, value) for value in self._valid_lookup.values()]
)
+ @property
+ def native(self):
+ return self.native_enum
+
def _db_value_for_elem(self, elem):
try:
return self._valid_lookup[elem]
'"%s" is not among the defined enum values' % elem)
def __repr__(self):
- return util.generic_repr(self,
- additional_kw=[('native_enum', True)],
- to_inspect=[Enum, SchemaType],
- )
+ return util.generic_repr(
+ self,
+ additional_kw=[('native_enum', True)],
+ to_inspect=[Enum, SchemaType],
+ )
+
+ def adapt_to_emulated(self, impltype, **kw):
+ kw.setdefault("convert_unicode", self.convert_unicode)
+ kw.setdefault("validate_strings", self.validate_strings)
+ kw.setdefault('name', self.name)
+ kw.setdefault('schema', self.schema)
+ kw.setdefault('inherit_schema', self.inherit_schema)
+ kw.setdefault('metadata', self.metadata)
+ kw.setdefault('_create_events', False)
+ kw.setdefault('native_enum', self.native_enum)
+ assert '_enums' in kw
+ return impltype(**kw)
+
+ def adapt(self, impltype, **kw):
+ kw['_enums'] = self._enums_argument
+ return super(Enum, self).adapt(impltype, **kw)
def _should_create_constraint(self, compiler, **kw):
if not self._is_impl_for_variant(compiler.dialect, kw):
@util.dependencies("sqlalchemy.sql.schema")
def _set_table(self, schema, column, table):
- if self.native_enum:
- SchemaType._set_table(self, column, table)
+ SchemaType._set_table(self, column, table)
if not self.create_constraint:
return
)
assert e.table is table
- def copy(self, **kw):
- return SchemaType.copy(self, **kw)
-
- def adapt(self, impltype, **kw):
- schema = kw.pop('schema', self.schema)
- metadata = kw.pop('metadata', self.metadata)
- _create_events = kw.pop('_create_events', False)
- if issubclass(impltype, Enum):
- if self.enum_class is not None:
- args = [self.enum_class]
- else:
- args = self.enums
- return impltype(name=self.name,
- schema=schema,
- metadata=metadata,
- convert_unicode=self.convert_unicode,
- native_enum=self.native_enum,
- inherit_schema=self.inherit_schema,
- validate_strings=self.validate_strings,
- _create_events=_create_events,
- *args,
- **kw)
- else:
- # TODO: why would we be here?
- return super(Enum, self).adapt(impltype, **kw)
-
def literal_processor(self, dialect):
- parent_processor = super(Enum, self).literal_processor(dialect)
+ parent_processor = super(
+ Enum, self).literal_processor(dialect)
def process(value):
value = self._db_value_for_elem(value)
return process
+ def copy(self, **kw):
+ return SchemaType.copy(self, **kw)
+
@property
def python_type(self):
if self.enum_class:
return x == y
-class Boolean(TypeEngine, SchemaType):
+class Boolean(Emulated, TypeEngine, SchemaType):
"""A bool datatype.
"""
__visit_name__ = 'boolean'
+ native = True
def __init__(
self, create_constraint=True, name=None, _create_events=True):
return processors.int_to_boolean
-class Interval(_LookupExpressionAdapter, TypeDecorator):
+class _AbstractInterval(_LookupExpressionAdapter, TypeEngine):
+ @util.memoized_property
+ def _expression_adaptations(self):
+ # Based on http://www.postgresql.org/docs/current/\
+ # static/functions-datetime.html.
+
+ return {
+ operators.add: {
+ Date: DateTime,
+ Interval: self.__class__,
+ DateTime: DateTime,
+ Time: Time,
+ },
+ operators.sub: {
+ Interval: self.__class__
+ },
+ operators.mul: {
+ Numeric: self.__class__
+ },
+ operators.truediv: {
+ Numeric: self.__class__
+ },
+ operators.div: {
+ Numeric: self.__class__
+ }
+ }
+
+ @property
+ def _type_affinity(self):
+ return Interval
+
+ def coerce_compared_value(self, op, value):
+ """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
+ return self.impl.coerce_compared_value(op, value)
+
+
+class Interval(Emulated, _AbstractInterval, TypeDecorator):
"""A type for ``datetime.timedelta()`` objects.
self.second_precision = second_precision
self.day_precision = day_precision
- def adapt(self, cls, **kw):
- if self.native and hasattr(cls, '_adapt_from_generic_interval'):
- return cls._adapt_from_generic_interval(self, **kw)
- else:
- return self.__class__(
- native=self.native,
- second_precision=self.second_precision,
- day_precision=self.day_precision,
- **kw)
-
@property
def python_type(self):
return dt.timedelta
+ def adapt_to_emulated(self, impltype, **kw):
+ return _AbstractInterval.adapt(self, impltype, **kw)
+
def bind_processor(self, dialect):
impl_processor = self.impl.bind_processor(dialect)
epoch = self.epoch
return value - epoch
return process
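
For reference, a minimal sketch of the epoch arithmetic performed by the emulated processors above, assuming Interval.epoch remains the fixed datetime.utcfromtimestamp(0) value (this snippet is illustrative, not code from the patch):

    import datetime as dt
    from sqlalchemy import Interval

    delta = dt.timedelta(days=3, hours=2)

    stored = Interval.epoch + delta          # what the bind processor sends to the DB
    roundtripped = stored - Interval.epoch   # what the result processor hands back
    assert roundtripped == delta
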
- @util.memoized_property
- def _expression_adaptations(self):
- # Based on http://www.postgresql.org/docs/current/\
- # static/functions-datetime.html.
-
- return {
- operators.add: {
- Date: DateTime,
- Interval: self.__class__,
- DateTime: DateTime,
- Time: Time,
- },
- operators.sub: {
- Interval: self.__class__
- },
- operators.mul: {
- Numeric: self.__class__
- },
- operators.truediv: {
- Numeric: self.__class__
- },
- operators.div: {
- Numeric: self.__class__
- }
- }
-
- @property
- def _type_affinity(self):
- return Interval
-
- def coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
-
- return self.impl.coerce_compared_value(op, value)
-
class JSON(Indexable, TypeEngine):
"""Represent a SQL JSON type.
return self
+class Emulated(object):
+ """Mixin for base types that emulate the behavior of a DB-native type.
+
+ An :class:`.Emulated` type will use an available database type
+ in conjunction with Python-side routines and/or database constraints
+ in order to approximate the behavior of a database type that is provided
+ natively by some backends. When a native-providing backend is in
+ use, the native version of the type is used. This native version
+ should include the :class:`.NativeForEmulated` mixin to allow it to be
+ distinguished from :class:`.Emulated`.
+
+ Current examples of :class:`.Emulated` are: :class:`.Interval`,
+ :class:`.Enum`, :class:`.Boolean`.
+
+ .. versionadded:: 1.2.0b3
+
+ """
+
+ def adapt_to_emulated(self, impltype, **kw):
+ """Given an impl class, adapt this type to the impl assuming "emulated".
+
+ The impl should also be an "emulated" version of this type,
+ most likely the same class as this type itself.
+
+ e.g.: sqltypes.Enum adapts to the Enum class.
+
+ """
+ return super(Emulated, self).adapt(impltype, **kw)
+
+ def adapt(self, impltype, **kw):
+ if hasattr(impltype, "adapt_emulated_to_native"):
+
+ if self.native:
+ # native support requested, dialect gave us a native
+ # implementor, pass control over to it
+ return impltype.adapt_emulated_to_native(self, **kw)
+ else:
+ # impltype adapts to native, and we are not native,
+ # so reject the impltype in favor of "us"
+ impltype = self.__class__
+
+ if issubclass(impltype, self.__class__):
+ return self.adapt_to_emulated(impltype, **kw)
+ else:
+ return super(Emulated, self).adapt(impltype, **kw)
+
+
+class NativeForEmulated(object):
+ """Indicates DB-native types supported by an :class:`.Emulated` type.
+
+ .. versionadded:: 1.2.0b3
+
+ """
+
+ @classmethod
+ def adapt_emulated_to_native(cls, impl, **kw):
+ """Given an impl, adapt this type's class to the impl assuming "native".
+
+ The impl will be an :class:`.Emulated` class but not a
+ :class:`.NativeForEmulated`.
+
+ e.g.: postgresql.ENUM produces a type given an Enum instance.
+
+ """
+ return cls(**kw)
+
+
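
Taken together, a short sketch (not part of the patch) of how the adapt() dispatch above plays out against the Enum changes earlier in this diff:

    from sqlalchemy import Enum
    from sqlalchemy.dialects import postgresql

    # native requested and the dialect supplies a NativeForEmulated class:
    # adapt_emulated_to_native() takes over and a native type is returned
    assert isinstance(Enum('a', 'b').adapt(postgresql.ENUM), postgresql.ENUM)

    # native support declined: the native impl is rejected in favor of the
    # emulated class itself, even though the dialect offered postgresql.ENUM
    non_native = Enum('a', 'b', native_enum=False).adapt(postgresql.ENUM)
    assert not isinstance(non_native, postgresql.ENUM)
    assert not non_native.native_enum
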
class TypeDecorator(SchemaEventTarget, TypeEngine):
"""Allows the creation of types which add additional functionality
to an existing type.
will cause the index value ``'foo'`` to be JSON encoded.
"""
-
__visit_name__ = "type_decorator"
def __init__(self, *args, **kwargs):
return util.generic_repr(self, to_inspect=self.impl)
+
+
class Variant(TypeDecorator):
"""A wrapping type that selects among a variety of
implementations based on dialect in use.
exc.DBAPIError, enum_table.insert().execute,
e1=None, e2=None, e3=None, e4=None)
+ assert enum_table.c.e2generic.type.validate_strings
assert_raises(
exc.StatementError,
enum_table.insert().execute,
result = connection.execute(s).first()
eq_(result[0], datetime.timedelta(40))
+ def test_interval_coercion(self):
+ expr = column('bar', postgresql.INTERVAL) + column('foo', types.Date)
+ eq_(expr.type._type_affinity, types.DateTime)
+
+ expr = column('bar', postgresql.INTERVAL) * \
+ column('foo', types.Numeric)
+ eq_(expr.type._type_affinity, types.Interval)
+ assert isinstance(expr.type, postgresql.INTERVAL)
+
class SpecialTypesTest(fixtures.TestBase, ComparesTables, AssertsCompiledSQL):
is_true(y_copy.type._create_events)
+ # for Postgresql, this will emit CREATE TYPE
+ m.dispatch.before_create(t1, testing.db)
+ try:
+ eq_(t1.c.y.type.evt_targets, (t1, ))
+ finally:
+ # do the drop so that Postgresql emits DROP TYPE
+ m.dispatch.after_drop(t1, testing.db)
+
+ def test_enum_nonnative_column_copy_transfers_events(self):
+ m = MetaData()
+
+ type_ = self.WrapEnum('a', 'b', 'c', name='foo', native_enum=False)
+ y = Column('y', type_)
+ y_copy = y.copy()
+ t1 = Table('x', m, y_copy)
+
+ is_true(y_copy.type._create_events)
+
m.dispatch.before_create(t1, testing.db)
eq_(t1.c.y.type.evt_targets, (t1, ))
"sqlalchemy" in subcl.__module__:
yield True, subcl, [typ]
+ from sqlalchemy.sql import sqltypes
for is_down_adaption, typ, target_adaptions in adaptions():
if typ in (types.TypeDecorator, types.TypeEngine, types.Variant):
continue
else:
t1 = typ()
for cls in target_adaptions:
- if not issubclass(typ, types.Enum) and \
- issubclass(cls, types.Enum):
+ if (
+ (is_down_adaption and
+ issubclass(typ, sqltypes.Emulated)) or
+ (not is_down_adaption and
+ issubclass(cls, sqltypes.Emulated))
+ ):
continue
+
if cls.__module__.startswith("test"):
continue
t2, t1 = t1, t2
for k in t1.__dict__:
- if k in ('impl', '_is_oracle_number', '_create_events'):
+ if k in (
+ 'impl', '_is_oracle_number',
+ '_create_events', 'create_constraint',
+ 'inherit_schema', 'schema', 'metadata',
+ 'name', ):
continue
# assert each value was copied, or that
# the adapted type has a more specific
def test_adapt(self):
from sqlalchemy.dialects.postgresql import ENUM
e1 = Enum('one', 'two', 'three', native_enum=False)
- eq_(e1.adapt(ENUM).native_enum, False)
+
+ false_adapt = e1.adapt(ENUM)
+ eq_(false_adapt.native_enum, False)
+ assert not isinstance(false_adapt, ENUM)
+
e1 = Enum('one', 'two', 'three', native_enum=True)
- eq_(e1.adapt(ENUM).native_enum, True)
- e1 = Enum('one', 'two', 'three', name='foo', schema='bar')
+ true_adapt = e1.adapt(ENUM)
+ eq_(true_adapt.native_enum, True)
+ assert isinstance(true_adapt, ENUM)
+
+ e1 = Enum('one', 'two', 'three', name='foo',
+ schema='bar', metadata=MetaData())
eq_(e1.adapt(ENUM).name, 'foo')
eq_(e1.adapt(ENUM).schema, 'bar')
+ is_(e1.adapt(ENUM).metadata, e1.metadata)
+ eq_(e1.adapt(Enum).name, 'foo')
+ eq_(e1.adapt(Enum).schema, 'bar')
+ is_(e1.adapt(Enum).metadata, e1.metadata)
e1 = Enum(self.SomeEnum)
eq_(e1.adapt(ENUM).name, 'someenum')
eq_(e1.adapt(ENUM).enums, ['one', 'two', 'three'])
assert expr.right.type._type_affinity is MyFoobarType
def test_date_coercion(self):
- from sqlalchemy.sql import column
-
expr = column('bar', types.NULLTYPE) - column('foo', types.TIMESTAMP)
eq_(expr.type._type_affinity, types.NullType)
expr = func.current_date() - column('foo', types.TIMESTAMP)
eq_(expr.type._type_affinity, types.Interval)
+ def test_interval_coercion(self):
+ expr = column('bar', types.Interval) + column('foo', types.Date)
+ eq_(expr.type._type_affinity, types.DateTime)
+
+ expr = column('bar', types.Interval) * column('foo', types.Numeric)
+ eq_(expr.type._type_affinity, types.Interval)
+
def test_numerics_coercion(self):
for op in (operator.add, operator.mul, operator.truediv, operator.sub):
assert adapted.native is False
eq_(str(adapted), "DATETIME")
- @testing.fails_on(
- "postgresql+zxjdbc",
- "Not yet known how to pass values of the INTERVAL type")
- @testing.fails_on(
- "oracle+zxjdbc",
- "Not yet known how to pass values of the INTERVAL type")
def test_roundtrip(self):
small_delta = datetime.timedelta(days=15, seconds=5874)
delta = datetime.timedelta(414)
eq_(row['native_interval_args'], delta)
eq_(row['non_native_interval'], delta)
- @testing.fails_on(
- "oracle+zxjdbc",
- "Not yet known how to pass values of the INTERVAL type")
def test_null(self):
interval_table.insert().execute(
            id=1, native_interval=None, non_native_interval=None)