git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
Added a new description_encoding attribute on the dialect.
author    Michael Trier <mtrier@gmail.com>
          Tue, 30 Dec 2008 06:39:33 +0000 (06:39 +0000)
committer Michael Trier <mtrier@gmail.com>
          Tue, 30 Dec 2008 06:39:33 +0000 (06:39 +0000)
This is used for encoding the column name when processing the metadata. This
usually defaults to utf-8.
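
As a quick illustration of the behavior this commit adds (a standalone sketch, not SQLAlchemy code; FakeDialect and column_name are made-up names), the column name taken from a cursor.description entry is decoded with the dialect's description_encoding when one is set, and passed through unchanged when it is None:

class FakeDialect(object):
    # DefaultDialect now defaults this to 'utf-8'; the pyodbc MSSQL dialect
    # overrides it with 'latin-1', and None disables decoding entirely.
    description_encoding = 'utf-8'

def column_name(item, dialect):
    # 'item' is one entry of cursor.description; item[0] is the raw name.
    colname = item[0]
    if dialect.description_encoding:
        colname = colname.decode(dialect.description_encoding)
    return colname

entry = (b'caf\xc3\xa9', None, None, None, None, None, None)
assert column_name(entry, FakeDialect()) == u'caf\xe9'

FakeDialect.description_encoding = None
assert column_name((u'caf\xe9',) + (None,) * 6, FakeDialect()) == u'caf\xe9'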

CHANGES
lib/sqlalchemy/databases/mssql.py
lib/sqlalchemy/engine/base.py
lib/sqlalchemy/engine/default.py

diff --git a/CHANGES b/CHANGES
index 6d34b8e081cd2c1394fec70819287a1f59ddcbc6..71cab4c5754530145122879a1239795996e97d13 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -41,7 +41,12 @@ CHANGES
 - sql
     - RowProxy objects can be used in place of dictionary arguments 
       sent to connection.execute() and friends.  [ticket:935]
-      
+
+- dialect
+    - Added a new description_encoding attribute on the dialect
+      that is used for encoding the column name when processing
+      the metadata. This usually defaults to utf-8.
+
 - mssql
     - Added in a new MSGenericBinary type. This maps to the Binary
       type so it can implement the specialized behavior of treating
diff --git a/lib/sqlalchemy/databases/mssql.py b/lib/sqlalchemy/databases/mssql.py
index 95be55e7f6473c6af1ca813262bcddc48b9f131d..ad9ba847abf745cd2f89caae0f483d27b8e68e73 100644 (file)
--- a/lib/sqlalchemy/databases/mssql.py
+++ b/lib/sqlalchemy/databases/mssql.py
@@ -922,9 +922,10 @@ class MSSQLDialect(default.DefaultDialect):
             return object.__new__(cls, *args, **kwargs)
 
     def __init__(self,
-                 auto_identity_insert=True, query_timeout=None, text_as_varchar=False,
-                 use_scope_identity=False,  has_window_funcs=False, max_identifier_length=None,
-                 schema_name="dbo", **opts):
+                 auto_identity_insert=True, query_timeout=None,
+                 text_as_varchar=False, use_scope_identity=False,
+                 has_window_funcs=False, max_identifier_length=None,
+                 schema_name="dbo", description_encoding='utf-8', **opts):
         self.auto_identity_insert = bool(auto_identity_insert)
         self.query_timeout = int(query_timeout or 0)
         self.schema_name = schema_name
@@ -934,7 +935,7 @@ class MSSQLDialect(default.DefaultDialect):
         self.use_scope_identity = bool(use_scope_identity)
         self.has_window_funcs =  bool(has_window_funcs)
         self.max_identifier_length = int(max_identifier_length or 0) or 128
-        super(MSSQLDialect, self).__init__(**opts)
+        super(MSSQLDialect, self).__init__(description_encoding=description_encoding, **opts)
 
     @classmethod
     def dbapi(cls, module_name=None):
@@ -1240,8 +1241,8 @@ class MSSQLDialect_pyodbc(MSSQLDialect):
     supports_unicode_statements = supports_unicode
     execution_ctx_cls = MSSQLExecutionContext_pyodbc
 
-    def __init__(self, **params):
-        super(MSSQLDialect_pyodbc, self).__init__(**params)
+    def __init__(self, description_encoding='latin-1', **params):
+        super(MSSQLDialect_pyodbc, self).__init__(description_encoding=description_encoding, **params)
         # FIXME: scope_identity sniff should look at server version, not the ODBC driver
         # whether use_scope_identity will work depends on the version of pyodbc
         try:
@@ -1569,3 +1570,4 @@ dialect.statement_compiler = MSSQLCompiler
 dialect.schemagenerator = MSSQLSchemaGenerator
 dialect.schemadropper = MSSQLSchemaDropper
 dialect.preparer = MSSQLIdentifierPreparer
+
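
The hunks above thread the new argument through each __init__. The same pattern in a toy sketch (the Toy* classes are illustrative, not the real dialects): the base dialect defaults description_encoding to 'utf-8', the pyodbc-flavored subclass swaps in 'latin-1', and an explicit keyword argument still wins.

class ToyDefaultDialect(object):
    # mirrors the DefaultDialect change below: 'utf-8' unless told otherwise
    def __init__(self, description_encoding='utf-8', **kwargs):
        self.description_encoding = description_encoding

class ToyMSSQLDialect(ToyDefaultDialect):
    def __init__(self, description_encoding='utf-8', **opts):
        super(ToyMSSQLDialect, self).__init__(
            description_encoding=description_encoding, **opts)

class ToyMSSQLDialect_pyodbc(ToyMSSQLDialect):
    # mirrors the 'latin-1' default chosen for the pyodbc dialect above
    def __init__(self, description_encoding='latin-1', **params):
        super(ToyMSSQLDialect_pyodbc, self).__init__(
            description_encoding=description_encoding, **params)

assert ToyDefaultDialect().description_encoding == 'utf-8'
assert ToyMSSQLDialect_pyodbc().description_encoding == 'latin-1'
assert ToyMSSQLDialect_pyodbc(description_encoding=None).description_encoding is None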
diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py
index 24fd1cc6e551e7d6d691646db18dc8bc425912e1..c547c0e54dd65bd7d22c79d60a5e3d8d66e47a46 100644 (file)
--- a/lib/sqlalchemy/engine/base.py
+++ b/lib/sqlalchemy/engine/base.py
@@ -107,6 +107,11 @@ class Dialect(object):
 
     supports_default_values
       Indicates if the construct ``INSERT INTO tablename DEFAULT VALUES`` is supported
+
+    description_encoding
+      type of encoding to use for unicode when working with metadata
+      descriptions. If set to ``None`` no encoding will be done.
+      This usually defaults to 'utf-8'.
     """
 
     def create_connect_args(self, url):
@@ -1441,7 +1446,9 @@ class ResultProxy(object):
         typemap = self.dialect.dbapi_type_map
 
         for i, item in enumerate(metadata):
-            colname = item[0].decode(self.dialect.encoding)
+            colname = item[0]
+            if self.dialect.description_encoding:
+                colname = colname.decode(self.dialect.description_encoding)
 
             if '.' in colname:
                 # sqlite will in some circumstances prepend table name to colnames, so strip
diff --git a/lib/sqlalchemy/engine/default.py b/lib/sqlalchemy/engine/default.py
index 682ab526c6ebd10268436e38259f6242354ac64b..8a44bac74fdba1a891135b6f9a4ed2fc64cf6448 100644 (file)
--- a/lib/sqlalchemy/engine/default.py
+++ b/lib/sqlalchemy/engine/default.py
@@ -40,7 +40,9 @@ class DefaultDialect(base.Dialect):
     supports_default_values = False 
     supports_empty_insert = True
 
-    def __init__(self, convert_unicode=False, assert_unicode=False, encoding='utf-8', paramstyle=None, dbapi=None, label_length=None, **kwargs):
+    def __init__(self, convert_unicode=False, assert_unicode=False,
+                 encoding='utf-8', paramstyle=None, dbapi=None, 
+                 label_length=None, description_encoding='utf-8', **kwargs):
         self.convert_unicode = convert_unicode
         self.assert_unicode = assert_unicode
         self.encoding = encoding
@@ -58,6 +60,7 @@ class DefaultDialect(base.Dialect):
         if label_length and label_length > self.max_identifier_length:
             raise exc.ArgumentError("Label length of %d is greater than this dialect's maximum identifier length of %d" % (label_length, self.max_identifier_length))
         self.label_length = label_length
+        self.description_encoding = description_encoding
 
     def type_descriptor(self, typeobj):
         """Provide a database-specific ``TypeEngine`` object, given
@@ -368,4 +371,4 @@ class DefaultExecutionContext(base.ExecutionContext):
             self.postfetch_cols = self.compiled.postfetch
             self.prefetch_cols = self.compiled.prefetch
 
-DefaultDialect.execution_ctx_cls = DefaultExecutionContext
\ No newline at end of file
+DefaultDialect.execution_ctx_cls = DefaultExecutionContext
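
Because description_encoding is an ordinary dialect __init__ argument, it should be reachable the same way other dialect arguments such as encoding are, i.e. through create_engine(). The snippet below is an assumed usage sketch with placeholder URLs, not something exercised by this commit.

from sqlalchemy import create_engine

# force a specific encoding for cursor.description column names
engine = create_engine('mssql://user:pass@dsn', description_encoding='latin-1')

# disable decoding entirely, e.g. if the DBAPI already hands back unicode names
engine = create_engine('mssql://user:pass@dsn', description_encoding=None)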