required for firebird, not a bad idea for others [ticket:408]
- Firebird fix to autoload multifield foreign keys [ticket:409]
- Firebird NUMERIC type properly handles a type without precision [ticket:409]
+- oracle:
+ - *slight* support for binary, but still need to figure out how to insert reasonably large
+ values (over 4K). requires auto_setinputsizes=True sent to create_engine(), rows must
+ be fully fetched individually, etc.
- orm:
- poked the first hole in the can of worms: saying query.select_by(somerelationname=someinstance)
will create the join of the primary key columns represented by "somerelationname"'s mapper to the
return dbapi.BINARY
def get_col_spec(self):
    # DDL type name emitted for this column type in CREATE TABLE.
    # NOTE(review): returns "BLOB"; the enclosing class header is outside
    # this chunk -- presumably an Oracle binary type, confirm in full file.
    return "BLOB"
-class OracleLongBinary(sqltypes.Binary):
- def get_dbapi_type(self, dbapi):
- return dbapi.LONG_BINARY
- def get_col_spec(self):
- return "LONGBLOB"
+ def convert_bind_param(self, value, dialect):
+ # this is RAWTOHEX
+ return ''.join(["%.2X" % ord(c) for c in value])
+ def convert_result_value(self, value, dialect):
+ if value is None:
+ return None
+ else:
+ return value.read()
+
class OracleBoolean(sqltypes.Boolean):
    """Boolean type for the Oracle dialect.

    Oracle has no native boolean column type, so boolean columns are
    declared as SMALLINT in generated DDL.
    """
    def get_col_spec(self):
        # DDL type name emitted for Boolean columns in CREATE TABLE.
        return "SMALLINT"
def setUpAll(self):
global binary_table
binary_table = Table('binary_table', db,
- Column('primary_id', Integer, primary_key=True),
+ Column('primary_id', Integer, Sequence('binary_id_seq', optional=True), primary_key=True),
Column('data', Binary),
Column('data_slice', Binary(100)),
Column('misc', String(30)),
def tearDownAll(self):
    # Drop the module-level binary_table created by setUpAll above.
    binary_table.drop()
- @testbase.unsupported('oracle')
def testbinary(self):
testobj1 = pickleable.Foo('im foo 1')
testobj2 = pickleable.Foo('im foo 2')
- stream1 =self.load_stream('binary_data_one.dat')
- stream2 =self.load_stream('binary_data_two.dat')
+ if db.name == 'oracle':
+ stream1 =self.load_stream('binary_data_one.dat', len=2000)
+ stream2 =self.load_stream('binary_data_two.dat', len=2000)
+ else:
+ stream1 =self.load_stream('binary_data_one.dat')
+ stream2 =self.load_stream('binary_data_two.dat')
binary_table.insert().execute(primary_id=1, misc='binary_data_one.dat', data=stream1, data_slice=stream1[0:100], pickled=testobj1)
binary_table.insert().execute(primary_id=2, misc='binary_data_two.dat', data=stream2, data_slice=stream2[0:99], pickled=testobj2)
- l = binary_table.select().execute().fetchall()
+ if db.name == 'oracle':
+ res = binary_table.select().execute()
+ l = []
+ row = res.fetchone()
+ l.append(dict([(k, row[k]) for k in row.keys()]))
+ row = res.fetchone()
+ l.append(dict([(k, row[k]) for k in row.keys()]))
+ else:
+ l = binary_table.select().execute().fetchall()
print len(stream1), len(l[0]['data']), len(l[0]['data_slice'])
self.assert_(list(stream1) == list(l[0]['data']))
self.assert_(list(stream1[0:100]) == list(l[0]['data_slice']))
self.assert_(testobj1 == l[0]['pickled'])
self.assert_(testobj2 == l[1]['pickled'])
- def load_stream(self, name):
+ def load_stream(self, name, len=12579):
f = os.path.join(os.path.dirname(testbase.__file__), name)
# put a number less than the typical MySQL default BLOB size
- return file(f).read(12579)
+ return file(f).read(len)
class DateTest(AssertMixin):
def setUpAll(self):