- sql
- RowProxy objects can be used in place of dictionary arguments
sent to connection.execute() and friends. [ticket:935]
-
+
+- dialect
+ - Added a new description_encoding attribute on the dialect
+ that is used to decode column names when processing result
+ metadata. This usually defaults to utf-8 (see the example
+ below).
+
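A minimal sketch of how the new setting can be supplied, assuming (as with other dialect-level options) that create_engine() forwards unrecognized keyword arguments to the dialect constructor; the URL and encodings here are illustrative only:

    # Illustrative only: description_encoding is handed through to the dialect.
    from sqlalchemy import create_engine

    engine = create_engine(
        "mssql://scott:tiger@some_dsn",     # hypothetical connection URL
        description_encoding="latin-1",     # decode cursor.description names as latin-1
    )

    # Passing None disables decoding of column names altogether.
    raw = create_engine("mssql://scott:tiger@some_dsn",
                        description_encoding=None)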
- mssql
- Added in a new MSGenericBinary type. This maps to the Binary
type so it can implement the specialized behavior of treating
return object.__new__(cls, *args, **kwargs)
def __init__(self,
- auto_identity_insert=True, query_timeout=None, text_as_varchar=False,
- use_scope_identity=False, has_window_funcs=False, max_identifier_length=None,
- schema_name="dbo", **opts):
+ auto_identity_insert=True, query_timeout=None,
+ text_as_varchar=False, use_scope_identity=False,
+ has_window_funcs=False, max_identifier_length=None,
+ schema_name="dbo", description_encoding='utf-8', **opts):
self.auto_identity_insert = bool(auto_identity_insert)
self.query_timeout = int(query_timeout or 0)
self.schema_name = schema_name
self.use_scope_identity = bool(use_scope_identity)
self.has_window_funcs = bool(has_window_funcs)
self.max_identifier_length = int(max_identifier_length or 0) or 128
- super(MSSQLDialect, self).__init__(**opts)
+ super(MSSQLDialect, self).__init__(description_encoding=description_encoding, **opts)
@classmethod
def dbapi(cls, module_name=None):
supports_unicode_statements = supports_unicode
execution_ctx_cls = MSSQLExecutionContext_pyodbc
- def __init__(self, **params):
- super(MSSQLDialect_pyodbc, self).__init__(**params)
+ def __init__(self, description_encoding='latin-1', **params):
+ super(MSSQLDialect_pyodbc, self).__init__(description_encoding=description_encoding, **params)
# FIXME: scope_identity sniff should look at server version, not the ODBC driver
# whether use_scope_identity will work depends on the version of pyodbc
try:
dialect.schemagenerator = MSSQLSchemaGenerator
dialect.schemadropper = MSSQLSchemaDropper
dialect.preparer = MSSQLIdentifierPreparer
+
supports_default_values
Indicates if the construct ``INSERT INTO tablename DEFAULT VALUES`` is supported
+
+ description_encoding
+ encoding used to decode column names in result-set metadata
+ (cursor descriptions). If set to ``None``, no decoding is
+ performed. This usually defaults to 'utf-8'.
"""
def create_connect_args(self, url):
typemap = self.dialect.dbapi_type_map
for i, item in enumerate(metadata):
- colname = item[0].decode(self.dialect.encoding)
+ colname = item[0]
+ if self.dialect.description_encoding:
+ colname = colname.decode(self.dialect.description_encoding)
if '.' in colname:
# sqlite will in some circumstances prepend table name to colnames, so strip
supports_default_values = False
supports_empty_insert = True
- def __init__(self, convert_unicode=False, assert_unicode=False, encoding='utf-8', paramstyle=None, dbapi=None, label_length=None, **kwargs):
+ def __init__(self, convert_unicode=False, assert_unicode=False,
+ encoding='utf-8', paramstyle=None, dbapi=None,
+ label_length=None, description_encoding='utf-8', **kwargs):
self.convert_unicode = convert_unicode
self.assert_unicode = assert_unicode
self.encoding = encoding
if label_length and label_length > self.max_identifier_length:
raise exc.ArgumentError("Label length of %d is greater than this dialect's maximum identifier length of %d" % (label_length, self.max_identifier_length))
self.label_length = label_length
+ self.description_encoding = description_encoding
def type_descriptor(self, typeobj):
"""Provide a database-specific ``TypeEngine`` object, given
self.postfetch_cols = self.compiled.postfetch
self.prefetch_cols = self.compiled.prefetch
-DefaultDialect.execution_ctx_cls = DefaultExecutionContext
\ No newline at end of file
+DefaultDialect.execution_ctx_cls = DefaultExecutionContext
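For reference, the decode applied to result metadata reduces to the following when taken in isolation; the byte string and encoding are made-up examples, using Python 2 string semantics as in the surrounding code:

    # Standalone illustration of the column-name decode; not part of the patch.
    description_encoding = 'latin-1'
    raw_colname = 'caf\xe9'          # byte string as returned by the DBAPI
    if description_encoding:
        colname = raw_colname.decode(description_encoding)   # -> u'caf\xe9'
    else:
        colname = raw_colname        # description_encoding=None leaves it untouched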