- the "op()" function is now treated as an "operation", rather than a "comparison".
the difference is, an operation produces a BinaryExpression from which further operations
can occur whereas comparison produces the more restrictive BooleanExpression
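  As an illustration (a minimal sketch, not part of this changeset; the table and
  column names are hypothetical, and it assumes the usual Table/MetaData setup):

      from sqlalchemy import Table, Column, Integer, MetaData

      meta = MetaData()
      t = Table('t', meta, Column('value', Integer))

      # op() now yields an expression to which further operations can be applied
      expr = t.c.value.op('&')(4).op('+')(1)

      # a comparison yields the more restrictive BooleanExpression
      cond = (t.c.value == 4)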
-- postgres
- - postgres no longer uses client-side cursors, uses more efficient server side
- cursors via apparently undocumented psycopg2 behavior recently discovered on the
- mailing list. disable it via create_engine('postgres://', client_side_cursors=True)
+ - type system slightly modified so that a dialect can provide its own dialect-specific
+ subclass of a TypeDecorator (this is what makes the mssql NVARCHAR/Unicode tweak
+ below possible)
+- mssql:
+ - added an NVarchar type (produces NVARCHAR), also MSUnicode which provides Unicode
+ translation for the NVarchar regardless of the dialect's convert_unicode setting.
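  A minimal sketch of the effect (mirroring the new test added further below; the
  column name is arbitrary):

      import sqlalchemy.databases.mssql as mssql
      from sqlalchemy import Column, Unicode

      dialect = mssql.MSSQLDialect()
      col = Column('somecol', Unicode(length=10))
      dialect_type = col.type.dialect_impl(dialect)
      assert isinstance(dialect_type, mssql.MSUnicode)
      assert dialect_type.get_col_spec() == 'NVARCHAR(10)'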
+- postgres:
+ - postgres has an optional "server_side_cursors=True" flag which will utilize
+ server-side cursors. These are appropriate for fetching only partial results
+ and are necessary for working with very large unbounded result sets.
+ While we'd like this to be the default behavior, different environments seem
+ to produce different results and the causes have not been isolated, so we are
+ leaving the feature off by default for now (a usage sketch follows the postgres
+ items below). Uses an apparently undocumented psycopg2 behavior recently
+ discovered on the psycopg mailing list.
- added "BIGSERIAL" support for postgres table with PGBigInteger/autoincrement
- fixes to postgres reflection to better handle when schema names are present;
thanks to jason (at) ncsmags.com [ticket:402]
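  A usage sketch for the server_side_cursors flag described above (the connection
  URL and credentials are hypothetical):

      from sqlalchemy import create_engine

      # off by default for now; enable explicitly when working with very large,
      # unbounded result sets
      engine = create_engine('postgres://scott:tiger@localhost/test',
                             server_side_cursors=True)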
-- mysql
+- mysql:
- mysql is inconsistent with what kinds of quotes it uses in foreign keys during a
SHOW CREATE TABLE; reflection updated to accommodate all three styles [ticket:420]
- firebird:
which will create a new annotated file ./lib/sqlalchemy/sql.py,cover . Pretty cool !
+TIPS
+----
+When running the tests against postgres, the database gets slower and slower with each
+run. This seems to be related to the constant creation/dropping of tables. Running a
+"VACUUM FULL" on the database will speed it up again.
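  If you prefer to do this from Python rather than psql, a minimal psycopg2 sketch
  (the database name "test" is an assumption; VACUUM cannot run inside a transaction,
  so autocommit must be enabled first):

      import psycopg2

      conn = psycopg2.connect(database='test')
      conn.set_isolation_level(0)            # ISOLATION_LEVEL_AUTOCOMMIT
      conn.cursor().execute("VACUUM FULL")
      conn.close()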
class MSString(sqltypes.String):
def get_col_spec(self):
return "VARCHAR(%(length)s)" % {'length' : self.length}
-class MSUnicode(sqltypes.Unicode):
+class MSNVarchar(MSString):
+ """NVARCHAR string, does unicode conversion if dialect.convert_encoding is true"""
def get_col_spec(self):
return "NVARCHAR(%(length)s)" % {'length' : self.length}
+class MSUnicode(sqltypes.Unicode):
+ """Unicode subclass, does unicode conversion in all cases, uses NVARCHAR impl"""
+ impl = MSNVarchar
class MSChar(sqltypes.CHAR):
def get_col_spec(self):
return "CHAR(%(length)s)" % {'length' : self.length}
self.HASIDENT = False
-
-
class MSSQLDialect(ansisql.ANSIDialect):
def __init__(self, module=None, auto_identity_insert=False, **params):
self.module = module or dbmodule
class MSSQLSchemaGenerator(ansisql.ANSISchemaGenerator):
def get_column_specification(self, column, **kwargs):
colspec = self.preparer.format_column(column) + " " + column.type.engine_impl(self.engine).get_col_spec()
-
+
# install a IDENTITY Sequence if we have an implicit IDENTITY column
if column.primary_key and column.autoincrement and isinstance(column.type, sqltypes.Integer) and not column.foreign_key:
if column.default is None or (isinstance(column.default, schema.Sequence) and column.default.optional):
#TODO: determine MSSQL's case folding rules
return value
-if dbmodule.__name__ == 'adodbapi':
+if dbmodule and dbmodule.__name__ == 'adodbapi':
dialect = MSSQLDialect
else:
dialect = PyMSSQLDialect
self._last_inserted_ids = [v for v in row]
class PGDialect(ansisql.ANSIDialect):
- def __init__(self, module=None, use_oids=False, use_information_schema=False, client_side_cursors=False, **params):
+ def __init__(self, module=None, use_oids=False, use_information_schema=False, server_side_cursors=False, **params):
self.use_oids = use_oids
- self.client_side_cursors = client_side_cursors
+ self.server_side_cursors = server_side_cursors
if module is None:
#if psycopg is None:
# raise exceptions.ArgumentError("Couldnt locate psycopg1 or psycopg2: specify postgres module argument")
return ([], opts)
def create_cursor(self, connection):
- if self.client_side_cursors:
- return connection.cursor()
- else:
+ if self.server_side_cursors:
# use server-side cursors:
# http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
return connection.cursor('x')
-
+ else:
+ return connection.cursor()
def create_execution_context(self):
return PGExecutionContext(self)
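  At the driver level, the create_cursor() change above relies on psycopg2's named
  cursors; a standalone sketch of that behavior (DSN and table name are hypothetical):

      import psycopg2

      conn = psycopg2.connect(database='test')
      cur = conn.cursor('x')                 # named cursor -> server-side cursor
      cur.execute("select * from bigtable")
      rows = cur.fetchmany(100)              # rows are fetched from the server on demand
      cur.close()
      conn.close()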
try:
return self.impl_dict[dialect]
except:
+ # see if the dialect has an adaptation of the TypeDecorator itself
+ adapted_decorator = dialect.type_descriptor(self)
+ if adapted_decorator is not self:
+ result = adapted_decorator.dialect_impl(dialect)
+ self.impl_dict[dialect] = result
+ return result
typedesc = dialect.type_descriptor(self.impl)
tt = self.copy()
if not isinstance(tt, self.__class__):
except KeyError:
pass
else:
- # couldnt adapt...raise exception ?
+ # couldnt adapt - so just return the type itself
+ # (it may be a user-defined type)
return typeobj
# if we adapted the given generic type to a database-specific type,
# but it turns out the originally given "generic" type
assert (t1.impl.length == 20)
assert isinstance(t2.impl, TEXT)
assert t2.impl.length is None
-
+
+
+ def testdialecttypedecorators(self):
+ """test that a a Dialect can provide a dialect-specific subclass of a TypeDecorator subclass."""
+ import sqlalchemy.databases.mssql as mssql
+ dialect = mssql.MSSQLDialect()
+ # resolve the type twice to ensure the caching step works too
+ col = Column('', Unicode(length=10))
+ for x in range(2):
+ dialect_type = col.type.dialect_impl(dialect)
+ assert isinstance(dialect_type, mssql.MSUnicode)
+ assert dialect_type.get_col_spec() == 'NVARCHAR(10)'
+ assert isinstance(dialect_type.impl, mssql.MSString)
+
class OverrideTest(PersistTest):
"""tests user-defined types, including a full type as well as a TypeDecorator"""
self.assert_(isinstance(x['plain_data'], unicode) and x['plain_data'] == unicodedata)
finally:
db.engine.dialect.convert_unicode = prev_unicode
+
class BinaryTest(AssertMixin):