result, also generates procs that are not used in most cases. Simplify the approach
by passing the type to _exec_default(), to be used if needed by _execute_scalar(),
and look for the proc only on table._autoincrement_column in post_insert().
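
The shape of the change, as a rough standalone sketch (the class and attribute
names below are illustrative, not SQLAlchemy's, apart from
_cached_result_processor(), which is the method the patch calls on the column's
type): the execution context no longer zips prefetch columns against pre-built
processor lists; it hands the column's type down, and only the path that
actually pre-executes a default looks up a result processor.

    # illustration only -- a toy context mirroring the pattern in this patch
    class ToyExecutionContext(object):
        def __init__(self, dialect, cursor):
            self.dialect = dialect
            self.cursor = cursor

        def get_insert_default(self, column):
            # no per-statement proc list is built; just pass the type along
            if column.default is None:
                return None
            return self._exec_default(column.default, column.type)

        def _exec_default(self, default, type_):
            if default.is_sequence:
                # next_value_sql is a made-up attribute standing in for the
                # dialect-specific "next value" statement
                return self._execute_scalar(default.next_value_sql, type_)
            return default.arg

        def _execute_scalar(self, stmt, type_):
            self.cursor.execute(stmt)
            raw = self.cursor.fetchone()[0]
            # the processor is resolved only here, i.e. only when a default
            # really had to be pre-executed against the database
            proc = type_._cached_result_processor(
                self.dialect, self.cursor.description[0][1])
            if proc:
                return proc(raw)
            return raw
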
class FBExecutionContext(default.DefaultExecutionContext):
- def fire_sequence(self, seq, proc):
+ def fire_sequence(self, seq, type_):
"""Get the next value from the sequence using ``gen_id()``."""
return self._execute_scalar(
"SELECT gen_id(%s, 1) FROM rdb$database" %
self.dialect.identifier_preparer.format_sequence(seq),
- proc
+ type_
)
class OracleExecutionContext(default.DefaultExecutionContext):
- def fire_sequence(self, seq, proc):
+ def fire_sequence(self, seq, type_):
return int(self._execute_scalar("SELECT " +
self.dialect.identifier_preparer.format_sequence(seq) +
- ".nextval FROM DUAL"), proc)
+ ".nextval FROM DUAL"), type_)
class OracleDialect(default.DefaultDialect):
name = 'oracle'
class DropEnumType(schema._CreateDropBase):
    __visit_name__ = "drop_enum_type"
class PGExecutionContext(default.DefaultExecutionContext):
- def fire_sequence(self, seq, proc):
+ def fire_sequence(self, seq, type_):
if not seq.optional:
return self._execute_scalar(("select nextval('%s')" % \
- self.dialect.identifier_preparer.format_sequence(seq)), proc)
+ self.dialect.identifier_preparer.format_sequence(seq)), type_)
else:
return None
- def get_insert_default(self, column, proc):
+ def get_insert_default(self, column):
if column.primary_key:
if (isinstance(column.server_default, schema.DefaultClause) and
column.server_default.arg is not None):
# pre-execute passive defaults on primary key columns
return self._execute_scalar("select %s" %
- column.server_default.arg, proc)
+ column.server_default.arg, column.type)
elif column is column.table._autoincrement_column \
        and (column.default is None or
             (column.default.is_sequence and
              column.default.optional)):
    exc = "select nextval('\"%s_%s_seq\"')" % \
        (column.table.name, column.name)
- return self._execute_scalar(exc, proc)
+ return self._execute_scalar(exc, column.type)
- return super(PGExecutionContext, self).get_insert_default(column, proc)
+ return super(PGExecutionContext, self).get_insert_default(column)
class PGDialect(default.DefaultDialect):
name = 'postgresql'
else:
return autocommit
- def _execute_scalar(self, stmt, proc):
+ def _execute_scalar(self, stmt, type_):
"""Execute a string statement on the current cursor, returning a
scalar result.
conn._cursor_execute(self.cursor, stmt, default_params)
r = self.cursor.fetchone()[0]
- if proc:
- return proc(r)
- else:
- return r
+ if type_ is not None:
+ # apply type post processors to the result
+ proc = type_._cached_result_processor(
+ self.dialect,
+ self.cursor.description[0][1]
+ )
+ if proc:
+ return proc(r)
+ return r
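
For context, self.cursor.description[0][1] is the DBAPI type code (per PEP 249)
of the single column being selected, and the callable returned by
_cached_result_processor() is the same kind of processor the public
TypeEngine.result_processor() hook produces. A small hedged illustration of
such a processor at work (a dialect may legitimately return None here when the
driver already hands back the right Python type):

    # illustration only, not part of the patch
    import decimal
    from sqlalchemy.types import Numeric
    from sqlalchemy.dialects import sqlite

    proc = Numeric(asdecimal=True).result_processor(sqlite.dialect(), None)
    if proc is not None:
        # a raw float from the driver is coerced to Decimal -- the same
        # conversion the new test_numeric_default test below depends on
        assert isinstance(proc(0.0), decimal.Decimal)
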
@property
def connection(self):
table = self.compiled.statement.table
lastrowid = self.get_lastrowid()
+
+ autoinc_col = table._autoincrement_column
+ if autoinc_col is not None:
+ # apply type post processors to the lastrowid
+ proc = autoinc_col.type._cached_result_processor(self.dialect, None)
+ if proc is not None:
+ lastrowid = proc(lastrowid)
+
self.inserted_primary_key = [
- c is table._autoincrement_column and (
- proc and proc(lastrowid)
- or lastrowid
- ) or v
- for c, v, proc in zip(
+ c is autoinc_col and lastrowid or v
+ for c, v in zip(
table.primary_key,
- self.inserted_primary_key,
- self.compiled._pk_processors)
+ self.inserted_primary_key)
]
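
A note on the spliced list above: "c is autoinc_col and lastrowid or v" is the
pre-Python 2.5 conditional idiom for "lastrowid if c is autoinc_col else v",
and it relies on the (already processed) lastrowid being truthy. A toy
illustration with stand-in column objects (nothing below is SQLAlchemy):

    # illustration only
    class Col(object):
        pass

    id_col, other_col = Col(), Col()
    autoinc_col = id_col

    primary_key = [id_col, other_col]
    inserted_primary_key = [None, 7]   # placeholder for the autoincrement column
    lastrowid = 42                     # already run through the type's processor

    inserted_primary_key = [
        c is autoinc_col and lastrowid or v
        for c, v in zip(primary_key, inserted_primary_key)
    ]
    assert inserted_primary_key == [42, 7]
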
def _fetch_implicit_returning(self, resultproxy):
self.root_connection._handle_dbapi_exception(e, None, None, None, self)
raise
- def _exec_default(self, default, proc):
+ def _exec_default(self, default, type_):
if default.is_sequence:
- return self.fire_sequence(default, proc)
+ return self.fire_sequence(default, type_)
elif default.is_callable:
return default.arg(self)
elif default.is_clause_element:
else:
return default.arg
- def get_insert_default(self, column, proc):
+ def get_insert_default(self, column):
if column.default is None:
return None
else:
- return self._exec_default(column.default, proc)
+ return self._exec_default(column.default, column.type)
- def get_update_default(self, column, proc):
+ def get_update_default(self, column):
if column.onupdate is None:
return None
else:
- return self._exec_default(column.onupdate, proc)
+ return self._exec_default(column.onupdate, column.type)
def __process_defaults(self):
"""Generate default values for compiled insert/update statements,
for param in self.compiled_parameters:
self.current_parameters = param
- for c, proc in zip(self.prefetch_cols,
- self.compiled._prefetch_processors):
+ for c in self.prefetch_cols:
if c in scalar_defaults:
val = scalar_defaults[c]
elif self.isinsert:
- val = self.get_insert_default(c, proc)
+ val = self.get_insert_default(c)
else:
- val = self.get_update_default(c, proc)
+ val = self.get_update_default(c)
if val is not None:
param[c.key] = val
del self.current_parameters
self.current_parameters = compiled_parameters = \
self.compiled_parameters[0]
- for c, proc in zip(self.compiled.prefetch,
- self.compiled._prefetch_processors):
+ for c in self.compiled.prefetch:
if self.isinsert:
- val = self.get_insert_default(c, proc)
+ val = self.get_insert_default(c)
else:
- val = self.get_update_default(c, proc)
+ val = self.get_update_default(c)
if val is not None:
compiled_parameters[c.key] = val
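
For orientation (illustrative, not part of the patch), the branches of
_exec_default() above map onto the ways a Column default can be declared;
roughly:

    import datetime
    from sqlalchemy import (MetaData, Table, Column, Integer, DateTime,
                            Sequence, func)

    metadata = MetaData()
    example = Table('example', metadata,
        # default.is_sequence -> fire_sequence(default, column.type)
        Column('id', Integer, Sequence('example_id_seq'), primary_key=True),
        # default.is_callable -> default.arg(self)
        Column('created', DateTime, default=datetime.datetime.utcnow),
        # default.is_clause_element -> handled as a SQL expression
        Column('updated', DateTime, default=func.now()),
        # plain scalar -> default.arg is returned as-is
        Column('counter', Integer, default=0),
    )
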
if value is not None
)
- @util.memoized_property
- def _pk_processors(self):
- return [
- col.type._cached_result_processor(self.dialect, None)
- for col in self.statement.table.primary_key
- ]
-
- @util.memoized_property
- def _prefetch_processors(self):
- return [
- col.type._cached_result_processor(self.dialect, None)
- for col in self.prefetch
- ]
-
def is_subquery(self):
return len(self.stack) > 1
metadata.drop_all()
class NumericInterpretationTest(TestBase):
-
+ __only_on__ = 'postgresql'
def test_numeric_codes(self):
from sqlalchemy.dialects.postgresql import pg8000, psycopg2, base
val = proc(val)
assert val in (23.7, decimal.Decimal("23.7"))
+ @testing.provide_metadata
+ def test_numeric_default(self):
+ t = Table('t', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('nd', Numeric(asdecimal=True), default=0),
+ Column('nf', Numeric(asdecimal=False), default=0),
+ Column('fd', Float(asdecimal=True), default=0),
+ Column('ff', Float(asdecimal=False), default=0),
+ )
+ metadata.create_all()
+ r = t.insert().execute()
+
+ row = t.select().execute().first()
+ assert isinstance(row[1], decimal.Decimal)
+ assert isinstance(row[2], float)
+ assert isinstance(row[3], decimal.Decimal)
+ assert isinstance(row[4], float)
+ eq_(
+ row,
+ (1, decimal.Decimal("0"), 0, decimal.Decimal("0"), 0)
+ )
+
class InsertTest(TestBase, AssertsExecutionResults):
__only_on__ = 'postgresql'