builder.config.html_context['site_base'] = builder.config['site_base']
self.lookup = TemplateLookup(directories=builder.config.templates_path,
- #format_exceptions=True,
+ #format_exceptions=True,
imports=[
"from builder import util"
]
# RTD layout
if rtd:
- # add variables if not present, such
+ # add variables if not present, such
# as during a local test with the READTHEDOCS variable set
if 'MEDIA_URL' not in context:
context['MEDIA_URL'] = "http://media.readthedocs.org/"
'sqlpopup':[
(
r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))',
- bygroups(using(PythonConsoleLexer), Token.Sql.Popup),
+ bygroups(using(PythonConsoleLexer), Token.Sql.Popup),
"#pop"
)
],
'opensqlpopup':[
(
r'.*?(?:{stop}\n*|$)',
- Token.Sql,
+ Token.Sql,
"#pop"
)
]
'sqlpopup':[
(
r'(.*?\n)((?:PRAGMA|BEGIN|SELECT|INSERT|DELETE|ROLLBACK|COMMIT|ALTER|UPDATE|CREATE|DROP|PRAGMA|DESCRIBE).*?(?:{stop}\n?|$))',
- bygroups(using(PythonLexer), Token.Sql.Popup),
+ bygroups(using(PythonLexer), Token.Sql.Popup),
"#pop"
)
],
'opensqlpopup':[
(
r'.*?(?:{stop}\n*|$)',
- Token.Sql,
+ Token.Sql,
"#pop"
)
]
site_base = "http://www.sqlalchemy.org"
-# arbitrary number recognized by builders.py, incrementing this
+# arbitrary number recognized by builders.py, incrementing this
# will force a rebuild
build_number = 3
rootlogger.addHandler(handler)
-def teststring(s, name, globs=None, verbose=None, report=True,
- optionflags=0, extraglobs=None, raise_on_error=False,
+def teststring(s, name, globs=None, verbose=None, report=True,
+ optionflags=0, extraglobs=None, raise_on_error=False,
parser=doctest.DocTestParser()):
from doctest import DebugRunner, DocTestRunner, master
This directory includes the following examples:
-* basic_association.py - illustrate a many-to-many relationship between an
+* basic_association.py - illustrate a many-to-many relationship between an
"Order" and a collection of "Item" objects, associating a purchase price
with each via an association object called "OrderItem"
* proxied_association.py - same example as basic_association, adding in
The association object pattern is a form of many-to-many which
associates additional data with each association between parent/child.
-The example illustrates an "order", referencing a collection
+The example illustrates an "order", referencing a collection
of "items", with a particular price paid associated with each "item".
"""
# query the order, print items
order = session.query(Order).filter_by(customer_name='john smith').one()
- print [(order_item.item.description, order_item.price)
+ print [(order_item.item.description, order_item.price)
for order_item in order.order_items]
# print customers who bought 'MySQL Crowbar' on sale
key = Column(String)
values = association_proxy("elements", "value")
- """Bridge the association from 'elements' over to the
+ """Bridge the association from 'elements' over to the
'value' element of C."""
def __init__(self, key, values=None):
"""
Illustrates how to embed Beaker cache functionality within
the Query object, allowing full cache control as well as the
-ability to pull "lazy loaded" attributes from long term cache
+ability to pull "lazy loaded" attributes from long term cache
as well.
In this demo, the following techniques are illustrated:
* Using custom subclasses of Query
-* Basic technique of circumventing Query to pull from a
+* Basic technique of circumventing Query to pull from a
custom cache source instead of the database.
* Rudimental caching with Beaker, using "regions" which allow
global control over a fixed set of configurations.
-* Using custom MapperOption objects to configure options on
- a Query, including the ability to invoke the options
+* Using custom MapperOption objects to configure options on
+ a Query, including the ability to invoke the options
deep within an object graph when lazy loads occur.
E.g.::
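    # a hedged sketch of the usage pattern (not verbatim from this example):
    # Session, Person and FromCache are the objects set up by this example's
    # environment.py, caching_query.py and model modules
    from caching_query import FromCache

    # load Person objects, consulting the "default" cache region under the
    # "all_people" namespace before falling through to the database
    people = Session.query(Person).\
                options(FromCache("default", "all_people")).all()

    # re-creating the same Query and calling invalidate() removes the
    # corresponding cache entry
    Session.query(Person).\
        options(FromCache("default", "all_people")).invalidate()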
Listing of files:
environment.py - Establish the Session, the Beaker cache
- manager, data / cache file paths, and configurations,
+ manager, data / cache file paths, and configurations,
bootstrap fixture data if necessary.
- caching_query.py - Represent functions and classes
+ caching_query.py - Represent functions and classes
which allow the usage of Beaker caching with SQLAlchemy.
Introduces a query option called FromCache.
"""advanced.py
-Illustrate usage of Query combined with the FromCache option,
+Illustrate usage of Query combined with the FromCache option,
including front-end loading, cache invalidation, namespace techniques
and collection caching.
start/end are integers, range is then
"person <start>" - "person <end>".
- The cache option we set up is called "name_range", indicating
+ The cache option we set up is called "name_range", indicating
a range of names for the Person class.
The `Person.addresses` collections are also cached. It's basically
another level of tuning here, as that particular cache option
- can be transparently replaced with joinedload(Person.addresses).
+ can be transparently replaced with joinedload(Person.addresses).
The effect is that each Person and his/her Address collection
is cached either together or separately, affecting the kind of
SQL that is emitted for unloaded Person objects as well as the distribution
print "\ntwenty five through forty, invalidate first:\n"
print ", ".join([p.name for p in load_name_range(25, 40, True)])
-# illustrate the address loading from either cache/already
+# illustrate the address loading from either cache/already
# on the Person
print "\n\nPeople plus addresses, two through twelve, addresses possibly from cache"
for p in load_name_range(2, 12):
print p.format_full()
-# illustrate the address loading from either cache/already
+# illustrate the address loading from either cache/already
# on the Person
print "\n\nPeople plus addresses, two through twelve, addresses from cache"
for p in load_name_range(2, 12):
parameters on a Query
* RelationshipCache - a variant of FromCache which is specific
to a query invoked during a lazy load.
- * _params_from_query - extracts value parameters from
+ * _params_from_query - extracts value parameters from
a Query.
The rest of what's here are standard SQLAlchemy and
from sqlalchemy.sql import visitors
class CachingQuery(Query):
- """A Query subclass which optionally loads full results from a Beaker
+ """A Query subclass which optionally loads full results from a Beaker
cache region.
The CachingQuery stores additional state that allows it to consult
a Beaker cache before accessing the database:
- * A "region", which is a cache region argument passed to a
+ * A "region", which is a cache region argument passed to a
Beaker CacheManager, specifies a particular cache configuration
(including backend implementation, expiration times, etc.)
* A "namespace", which is a qualifying name that identifies a
- group of keys within the cache. A query that filters on a name
- might use the name "by_name", a query that filters on a date range
+ group of keys within the cache. A query that filters on a name
+ might use the name "by_name", a query that filters on a date range
to a joined table might use the name "related_date_range".
When the above state is present, a Beaker cache is retrieved.
- The "namespace" name is first concatenated with
- a string composed of the individual entities and columns the Query
+ The "namespace" name is first concatenated with
+ a string composed of the individual entities and columns the Query
requests, i.e. such as ``Query(User.id, User.name)``.
The Beaker cache is then loaded from the cache manager based
on the region and composed namespace. The key within the cache
itself is then constructed against the bind parameters specified
- by this query, which are usually literals defined in the
+ by this query, which are usually literals defined in the
WHERE clause.
The FromCache and RelationshipCache mapper options below represent
return cache, cache_key
def _namespace_from_query(namespace, query):
- # cache namespace - the token handed in by the
+ # cache namespace - the token handed in by the
# option + class we're querying against
namespace = " ".join([namespace] + [str(x) for x in query._entities])
if hasattr(query, '_cache_parameters'):
region, namespace, cache_key = query._cache_parameters
raise ValueError("This query is already configured "
- "for region %r namespace %r" %
+ "for region %r namespace %r" %
(region, namespace)
)
query._cache_parameters = region, namespace, cache_key
be a name uniquely describing the target Query's
lexical structure.
- :param cache_key: optional. A string cache key
+ :param cache_key: optional. A string cache key
that will serve as the key to the query. Use this
if your query has a huge amount of parameters (such
- as when using in_()) which correspond more simply to
+ as when using in_()) which correspond more simply to
some other identifier.
"""
_set_cache_parameters(query, self.region, self.namespace, self.cache_key)
class RelationshipCache(MapperOption):
- """Specifies that a Query as called within a "lazy load"
+ """Specifies that a Query as called within a "lazy load"
should load results from a cache."""
propagate_to_loaders = True
if (cls, key) in self._relationship_options:
relationship_option = self._relationship_options[(cls, key)]
_set_cache_parameters(
- query,
- relationship_option.region,
- relationship_option.namespace,
+ query,
+ relationship_option.region,
+ relationship_option.namespace,
None)
def and_(self, option):
"""environment.py
-Establish data / cache file paths, and configurations,
+Establish data / cache file paths, and configurations,
bootstrap fixture data if necessary.
"""
person = Person(
"person %.2d" % i,
Address(
- street="street %.2d" % i,
+ street="street %.2d" % i,
postal_code=all_post_codes[random.randint(0, len(all_post_codes) - 1)]
)
)
# remove the Session. next query starts from scratch.
Session.remove()
-# load again, using the same FromCache option. now they're cached
+# load again, using the same FromCache option. now they're cached
# under "all_people", no SQL is emitted.
print "loading people....again!"
people = Session.query(Person).options(FromCache("default", "all_people")).all()
-# want to load on some different kind of query ? change the namespace
+# want to load on some different kind of query ? change the namespace
# you send to FromCache
print "loading people two through twelve"
people_two_through_twelve = Session.query(Person).\
# the data is cached under the "namespace" you send to FromCache, *plus*
# the bind parameters of the query. So this query, having
-# different literal parameters under "Person.name.between()" than the
+# different literal parameters under "Person.name.between()" than the
# previous one, issues new SQL...
print "loading people five through fifteen"
people_five_through_fifteen = Session.query(Person).\
# invalidate the cache for the three queries we've done. Recreate
-# each Query, which includes at the very least the same FromCache,
-# same list of objects to be loaded, and the same parameters in the
+# each Query, which includes at the very least the same FromCache,
+# same list of objects to be loaded, and the same parameters in the
# same order, then call invalidate().
print "invalidating everything"
Session.query(Person).options(FromCache("default", "all_people")).invalidate()
import collections
class ScopedSessionNamespace(container.MemoryNamespaceManager):
- """A Beaker cache type which will cache objects locally on
+ """A Beaker cache type which will cache objects locally on
the current session.
When used with the query_cache system, the effect is that the objects
# identity is preserved - person10 is the *same* object that's
# ultimately inside the cache. So it is safe to manipulate
- # the not-queried-for attributes of objects when using such a
- # cache without the need to invalidate - however, any change
- # that would change the results of a cached query, such as
- # inserts, deletes, or modification to attributes that are
+ # the not-queried-for attributes of objects when using such a
+ # cache without the need to invalidate - however, any change
+ # that would change the results of a cached query, such as
+ # inserts, deletes, or modification to attributes that are
# part of query criterion, still require careful invalidation.
from caching_query import _get_cache_parameters
cache, key = _get_cache_parameters(q)
"""Model. We are modeling Person objects with a collection
-of Address objects. Each Address has a PostalCode, which
+of Address objects. Each Address has a PostalCode, which
in turn references a City and then a Country:
Person --(1..n)--> Address
def __str__(self):
return "%s\t"\
"%s, %s\t"\
- "%s" % (self.street, self.city.name,
+ "%s" % (self.street, self.city.name,
self.postal_code.code, self.country.name)
class Person(Base):
"""relationship_caching.py
-Load a set of Person and Address objects, specifying that
-related PostalCode, City, Country objects should be pulled from long
+Load a set of Person and Address objects, specifying that
+related PostalCode, City, Country objects should be pulled from long
term cache.
"""
"""Illustrates customized class instrumentation, using
the :mod:`sqlalchemy.ext.instrumentation` extension package.
-In this example, mapped classes are modified to
+In this example, mapped classes are modified to
store their state in a dictionary attached to an attribute
named "_goofy_dict", instead of using __dict__.
-this example illustrates how to replace SQLAlchemy's class
+This example illustrates how to replace SQLAlchemy's class
descriptors with a user-defined system.
if __name__ == '__main__':
meta = MetaData(create_engine('sqlite://'))
- table1 = Table('table1', meta,
- Column('id', Integer, primary_key=True),
+ table1 = Table('table1', meta,
+ Column('id', Integer, primary_key=True),
Column('name', Text))
- table2 = Table('table2', meta,
- Column('id', Integer, primary_key=True),
- Column('name', Text),
+ table2 = Table('table2', meta,
+ Column('id', Integer, primary_key=True),
+ Column('name', Text),
Column('t1id', Integer, ForeignKey('table1.id')))
meta.create_all()
"""Illustrates how to place a dictionary-like facade on top of a "dynamic" relation, so
-that dictionary operations (assuming simple string keys) can operate upon a large
+that dictionary operations (assuming simple string keys) can operate upon a large
collection without loading the full collection at once.
"""
\ No newline at end of file
represented in a separate table. The nodes are associated in a hierarchy using an adjacency list
structure. A query function is introduced which can search for nodes along any path with a given
structure of attributes, basically a (very narrow) subset of xpath.
-* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but associates each
- DOM row with its owning document row, so that a full document of DOM nodes can be
+* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but associates each
+ DOM row with its owning document row, so that a full document of DOM nodes can be
loaded using O(1) queries - the construction of the "hierarchy" is performed after
the load in a non-recursive fashion and is much more efficient.
session.add(Document(file, doc))
session.commit()
- # locate documents with a certain path/attribute structure
+ # locate documents with a certain path/attribute structure
for document in find_document('/somefile/header/field2[@attr=foo]'):
# dump the XML
print document
"""This script duplicates adjacency_list.py, but optimizes the loading
-of XML nodes to be based on a "flattened" datamodel. Any number of XML documents,
-each of arbitrary complexity, can be loaded in their entirety via a single query
+of XML nodes to be based on a "flattened" datamodel. Any number of XML documents,
+each of arbitrary complexity, can be loaded in their entirety via a single query
which joins on only three tables.
"""
Column('filename', String(30), unique=True),
)
-# stores XML nodes in an adjacency list model. This corresponds to
+# stores XML nodes in an adjacency list model. This corresponds to
# Element and SubElement objects.
elements = Table('elements', meta,
Column('element_id', Integer, primary_key=True),
########################## PART IV - Persistence Mapping #####################
-# Node class. a non-public class which will represent
+# Node class. a non-public class which will represent
# the DB-persisted Element/SubElement object. We cannot create mappers for
-# ElementTree elements directly because they are at the very least not new-style
+# ElementTree elements directly because they are at the very least not new-style
# classes, and also may be backed by native implementations.
# so here we construct an adapter.
class _Node(object):
pass
-# Attribute class. also internal, this will represent the key/value attributes stored for
+# Attribute class. also internal, this will represent the key/value attributes stored for
# a particular Node.
class _Attribute(object):
def __init__(self, name, value):
"""illustrates a quick and dirty way to persist an XML document expressed using ElementTree and pickle.
-This is a trivial example using PickleType to marshal/unmarshal the ElementTree
+This is a trivial example using PickleType to marshal/unmarshal the ElementTree
document into a binary column. Compare to explicit.py which stores the individual components of the ElementTree
structure in distinct rows using two additional mapped entities. Note that the usage of both
styles of persistence are identical, as is the structure of the main Document class.
"""
-Illustrates various methods of associating multiple types of
+Illustrates various methods of associating multiple types of
parents with a particular child object.
-The examples all use the declarative extension along with
+The examples all use the declarative extension along with
declarative mixins. Each one presents the identical use
case at the end - two classes, ``Customer`` and ``Supplier``, both
subclassing the ``HasAddresses`` mixin, which ensures that the
table is used so that traditional foreign key constraints may be used.
This configuration has the advantage that a fixed set of tables
-are used, with no extra-table-per-parent needed. The individual
-Address record can also locate its parent with no need to scan
+are used, with no extra-table-per-parent needed. The individual
+Address record can also locate its parent with no need to scan
amongst many tables.
"""
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
-
+
"""
@declared_attr
def __tablename__(cls):
class AddressAssociation(Base):
"""Associates a collection of Address objects
with a particular parent.
-
+
"""
__tablename__ = "address_association"
@classmethod
def creator(cls, discriminator):
- """Provide a 'creator' function to use with
+ """Provide a 'creator' function to use with
the association proxy."""
return lambda addresses:AddressAssociation(
- addresses=addresses,
+ addresses=addresses,
discriminator=discriminator)
discriminator = Column(String)
return getattr(self, "%s_parent" % self.discriminator)
class Address(Base):
- """The Address class.
-
- This represents all address records in a
+ """The Address class.
+
+ This represents all address records in a
single table.
-
+
"""
- association_id = Column(Integer,
+ association_id = Column(Integer,
ForeignKey("address_association.id")
)
street = Column(String)
city = Column(String)
zip = Column(String)
association = relationship(
- "AddressAssociation",
+ "AddressAssociation",
backref="addresses")
parent = association_proxy("association", "parent")
def __repr__(self):
return "%s(street=%r, city=%r, zip=%r)" % \
- (self.__class__.__name__, self.street,
+ (self.__class__.__name__, self.street,
self.city, self.zip)
class HasAddresses(object):
"""HasAddresses mixin, creates a relationship to
the address_association table for each parent.
-
+
"""
@declared_attr
def address_association_id(cls):
- return Column(Integer,
+ return Column(Integer,
ForeignKey("address_association.id"))
@declared_attr
"address_association", "addresses",
creator=AddressAssociation.creator(discriminator)
)
- return relationship("AddressAssociation",
- backref=backref("%s_parent" % discriminator,
+ return relationship("AddressAssociation",
+ backref=backref("%s_parent" % discriminator,
uselist=False))
session.add_all([
Customer(
- name='customer 1',
+ name='customer 1',
addresses=[
Address(
street='123 anywhere street',
This configuration has the advantage that all Address
rows are in one table, so that the definition of "Address"
-can be maintained in one place. The association table
+can be maintained in one place. The association table
contains the foreign key to Address so that Address
has no dependency on the system.
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
-
+
"""
@declared_attr
def __tablename__(cls):
Base = declarative_base(cls=Base)
class Address(Base):
- """The Address class.
-
- This represents all address records in a
+ """The Address class.
+
+ This represents all address records in a
single table.
-
+
"""
street = Column(String)
city = Column(String)
def __repr__(self):
return "%s(street=%r, city=%r, zip=%r)" % \
- (self.__class__.__name__, self.street,
+ (self.__class__.__name__, self.street,
self.city, self.zip)
class HasAddresses(object):
"""HasAddresses mixin, creates a new address_association
table for each parent.
-
+
"""
@declared_attr
def addresses(cls):
address_association = Table(
"%s_addresses" % cls.__tablename__,
cls.metadata,
- Column("address_id", ForeignKey("address.id"),
+ Column("address_id", ForeignKey("address.id"),
primary_key=True),
- Column("%s_id" % cls.__tablename__,
- ForeignKey("%s.id" % cls.__tablename__),
+ Column("%s_id" % cls.__tablename__,
+ ForeignKey("%s.id" % cls.__tablename__),
primary_key=True),
)
return relationship(Address, secondary=address_association)
session.add_all([
Customer(
- name='customer 1',
+ name='customer 1',
addresses=[
Address(
street='123 anywhere street',
class Base(object):
"""Base class which provides automated table name
and surrogate primary key column.
-
+
"""
@declared_attr
def __tablename__(cls):
Base = declarative_base(cls=Base)
class Address(object):
- """Define columns that will be present in each
+ """Define columns that will be present in each
'Address' table.
-
+
This is a declarative mixin, so additional mapped
attributes beyond simple columns specified here
should be set up using @declared_attr.
-
+
"""
street = Column(String)
city = Column(String)
def __repr__(self):
return "%s(street=%r, city=%r, zip=%r)" % \
- (self.__class__.__name__, self.street,
+ (self.__class__.__name__, self.street,
self.city, self.zip)
class HasAddresses(object):
"""HasAddresses mixin, creates a new Address class
for each parent.
-
+
"""
@declared_attr
def addresses(cls):
"%sAddress" % cls.__name__,
(Address, Base,),
dict(
- __tablename__ = "%s_address" %
+ __tablename__ = "%s_address" %
cls.__tablename__,
- parent_id = Column(Integer,
+ parent_id = Column(Integer,
ForeignKey("%s.id" % cls.__tablename__)),
parent = relationship(cls)
)
session.add_all([
Customer(
- name='customer 1',
+ name='customer 1',
addresses=[
Customer.Address(
street='123 anywhere street',
class Edge(Base):
__tablename__ = 'edge'
- lower_id = Column(Integer,
- ForeignKey('node.node_id'),
+ lower_id = Column(Integer,
+ ForeignKey('node.node_id'),
primary_key=True)
- higher_id = Column(Integer,
- ForeignKey('node.node_id'),
+ higher_id = Column(Integer,
+ ForeignKey('node.node_id'),
primary_key=True)
lower_node = relationship(Node,
- primaryjoin=lower_id==Node.node_id,
+ primaryjoin=lower_id==Node.node_id,
backref='lower_edges')
higher_node = relationship(Node,
- primaryjoin=higher_id==Node.node_id,
+ primaryjoin=higher_id==Node.node_id,
backref='higher_edges')
# here we have lower.node_id <= higher.node_id
metadata = MetaData()
-managers_table = Table('managers', metadata,
+managers_table = Table('managers', metadata,
Column('employee_id', Integer, primary_key=True),
Column('name', String(50)),
Column('manager_data', String(40))
)
-engineers_table = Table('engineers', metadata,
+engineers_table = Table('engineers', metadata,
Column('employee_id', Integer, primary_key=True),
Column('name', String(50)),
Column('engineer_info', String(40))
id = Column(Integer, primary_key=True)
name = Column(String(50))
- employees = relationship("Person",
+ employees = relationship("Person",
backref='company',
cascade='all, delete-orphan')
def __repr__(self):
return "Engineer %s, status %s, engineer_name %s, "\
"primary_language %s" % \
- (self.name, self.status,
+ (self.name, self.status,
self.engineer_name, self.primary_language)
class Manager(Person):
c = Company(name='company1', employees=[
Manager(
- name='pointy haired boss',
+ name='pointy haired boss',
status='AAB',
manager_name='manager1'),
- Engineer(name='dilbert',
+ Engineer(name='dilbert',
status='BBA',
- engineer_name='engineer1',
+ engineer_name='engineer1',
primary_language='java'),
Person(name='joesmith'),
- Engineer(name='wally',
+ Engineer(name='wally',
status='CGG',
- engineer_name='engineer2',
+ engineer_name='engineer2',
primary_language='python'),
- Manager(name='jsmith',
+ Manager(name='jsmith',
status='ABA',
manager_name='manager2')
])
for e in c.employees:
print e
-# query using with_polymorphic.
+# query using with_polymorphic.
eng_manager = with_polymorphic(Person, [Engineer, Manager], aliased=True)
print session.query(eng_manager).\
filter(
eng_manager,
Company.employees
).filter(
- or_(eng_manager.Engineer.engineer_name=='engineer1',
+ or_(eng_manager.Engineer.engineer_name=='engineer1',
eng_manager.Manager.manager_name=='manager2')
).all()
metadata = MetaData()
# a table to store companies
-companies = Table('companies', metadata,
+companies = Table('companies', metadata,
Column('company_id', Integer, primary_key=True),
Column('name', String(50)))
-employees_table = Table('employees', metadata,
+employees_table = Table('employees', metadata,
Column('employee_id', Integer, primary_key=True),
Column('company_id', Integer, ForeignKey('companies.company_id')),
Column('name', String(50)),
def __repr__(self):
return "Engineer %s, status %s, engineer_name %s, "\
"primary_language %s" % \
- (self.name, self.status,
+ (self.name, self.status,
self.engineer_name, self.primary_language)
class Manager(Person):
def __repr__(self):
class Employee(Base):
__tablename__ = 'personnel'
__mapper_args__ = {
- 'extension':NestedSetExtension(),
+ 'extension':NestedSetExtension(),
'batch':False # allows extension to fire for each instance before going to the next.
}
class TextualGisElement(GisElement, expression.Function):
"""Represents a Geometry value as expressed within application code; i.e. in wkt format.
- Extends expression.Function so that the value is interpreted as
+ Extends expression.Function so that the value is interpreted as
GeomFromText(value) in a SQL expression context.
"""
return value
return process
-# other datatypes can be added as needed, which
+# other datatypes can be added as needed, which
# currently only affect DDL statements.
class Point(Geometry):
# DDL integration
class GISDDL(object):
- """A DDL extension which integrates SQLAlchemy table create/drop
+ """A DDL extension which integrates SQLAlchemy table create/drop
methods with PostGis' AddGeometryColumn/DropGeometryColumn functions.
Usage::
class GisAttribute(AttributeExtension):
- """Intercepts 'set' events on a mapped instance attribute and
+ """Intercepts 'set' events on a mapped instance attribute and
converts the incoming value to a GIS expression.
"""
"""
return column_property(
- Column(*args, **kw),
- extension=GisAttribute(),
+ Column(*args, **kw),
+ extension=GisAttribute(),
comparator_factory=GisComparator
)
* a function which can return a list of shard ids which apply to a particular
instance identifier; this is called "id_chooser". If it returns all shard ids,
all shards will be searched.
-* a function which can return a list of shard ids to try, given a particular
- Query ("query_chooser"). If it returns all shard ids, all shards will be
+* a function which can return a list of shard ids to try, given a particular
+ Query ("query_chooser"). If it returns all shard ids, all shards will be
queried and the results joined together.
In this example, four sqlite databases will store information about weather
The construction of generic sharding routines is an ambitious approach
to the issue of organizing instances among multiple databases. For a
-more plain-spoken alternative, the "distinct entity" approach
+more plain-spoken alternative, the "distinct entity" approach
is a simple method of assigning objects to different tables (and potentially
-database nodes) in an explicit way - described on the wiki at
+database nodes) in an explicit way - described on the wiki at
`EntityName <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/EntityName>`_.
"""
"""copy of ComparableEntity and eq_() from test.lib.
This is just to support running the example outside of
-the SQLA testing environment which is no longer part of
+the SQLA testing environment which is no longer part of
SQLAlchemy as of 0.7.
"""
versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})
m = mapper(
- versioned_cls,
- table,
- inherits=super_history_mapper,
+ versioned_cls,
+ table,
+ inherits=super_history_mapper,
polymorphic_on=polymorphic_on,
polymorphic_identity=local_mapper.polymorphic_identity
)
try:
prop = obj_mapper.get_property_by_column(obj_col)
except UnmappedColumnError:
- # in the case of single table inheritance, there may be
+ # in the case of single table inheritance, there may be
# columns on the mapped table intended for the subclass only.
- # the "unmapped" status of the subclass column on the
+ # the "unmapped" status of the subclass column on the
# base class is a feature of the declarative module as of sqla 0.5.2.
continue
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
[
- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+ SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+ BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1)
]
)
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
+ SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+ BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+ SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
]
)
eq_(
sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
[
- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
- BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2),
- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
+ SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
+ BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
+ BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2),
+ SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
]
)
try:
import pkg_resources
except ImportError:
- return do_download()
+ return do_download()
try:
pkg_resources.require("setuptools>="+version); return
except pkg_resources.VersionConflict, e:
return False
def _get_server_version_info(self, connection):
- # eGenix suggests using conn.dbms_version instead
+ # eGenix suggests using conn.dbms_version instead
# of what we're doing here
dbapi_con = connection.connection
version = []
if 'port' in keys and not 'port' in query:
port = ',%d' % int(keys.pop('port'))
- connectors = ["DRIVER={%s}" %
+ connectors = ["DRIVER={%s}" %
keys.pop('driver', self.pyodbc_driver_name),
'Server=%s%s' % (keys.pop('host', ''), port),
'Database=%s' % keys.pop('database', '') ]
connectors.append("Trusted_Connection=Yes")
# if set to 'Yes', the ODBC layer will try to automagically
- # convert textual data from your database encoding to your
- # client encoding. This should obviously be set to 'No' if
- # you query a cp1253 encoded database from a latin1 client...
+ # convert textual data from your database encoding to your
+ # client encoding. This should obviously be set to 'No' if
+ # you query a cp1253 encoded database from a latin1 client...
if 'odbc_autotranslate' in keys:
connectors.append("AutoTranslate=%s" %
keys.pop("odbc_autotranslate"))
if self._user_supports_unicode_binds is not None:
self.supports_unicode_binds = self._user_supports_unicode_binds
else:
- self.supports_unicode_binds = (not self.freetds or
+ self.supports_unicode_binds = (not self.freetds or
self.freetds_driver_version >= '0.91'
) and not self.easysoft
# end Py2K
def _create_jdbc_url(self, url):
"""Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
- url.port is not None
+ url.port is not None
and ':%s' % url.port or '',
url.database)
opts = self._driver_kwargs()
opts.update(url.query)
return [
- [self._create_jdbc_url(url),
- url.username, url.password,
+ [self._create_jdbc_url(url),
+ url.username, url.password,
self.jdbc_driver_name],
opts]
def _auto_fn(name):
"""default dialect importer.
-
+
plugs into the :class:`.PluginLoader`
as a first-hit system.
-
+
"""
if "." in name:
dialect, driver = name.split(".")
.. note::
- The Access dialect is **non-functional as of SQLAlchemy 0.6**,
+ The Access dialect is **non-functional as of SQLAlchemy 0.6**,
pending development efforts to bring it up-to-date.
# self._last_inserted_ids[0] is None:
self.cursor.execute("SELECT @@identity AS lastrowid")
row = self.cursor.fetchone()
- self._last_inserted_ids = [int(row[0])]
+ self._last_inserted_ids = [int(row[0])]
#+ self._last_inserted_ids[1:]
# print "LAST ROW ID", self._last_inserted_ids
colargs = \
{
- 'nullable': not(col.Required or
+ 'nullable': not(col.Required or
col.Attributes & const.dbAutoIncrField),
}
default = col.DefaultValue
if isinstance(thecol.type, AcInteger) and \
not (thecol.default and
isinstance(
- thecol.default.arg,
+ thecol.default.arg,
schema.Sequence
)):
thecol.autoincrement = False
# This is necessary, so we get the latest updates
dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
- names = [t.Name for t in dtbs.TableDefs
+ names = [t.Name for t in dtbs.TableDefs
if t.Name[:4] != "MSys" and t.Name[:4] != "~TMP"]
dtbs.Close()
return names
'length': 'len',
}
def visit_function(self, func):
- """Access function names differ from the ANSI SQL names;
+ """Access function names differ from the ANSI SQL names;
rewrite common ones"""
func.name = self.function_rewrites.get(func.name, func.name)
return super(AccessCompiler, self).visit_function(func)
dialect
__all__ = (
- 'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME',
+ 'SMALLINT', 'BIGINT', 'FLOAT', 'FLOAT', 'DATE', 'TIME',
'TEXT', 'NUMERIC', 'FLOAT', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB',
'dialect'
)
.. note::
The Informix dialect functions on current SQLAlchemy versions
- but is not regularly tested, and may have many issues and
+ but is not regularly tested, and may have many issues and
caveats not currently handled.
"""
c = connection.execute(
"""select t1.constrname as cons_name,
t4.colname as local_column, t7.tabname as remote_table,
- t6.colname as remote_column, t7.owner as remote_owner
+ t6.colname as remote_column, t7.owner as remote_owner
from sysconstraints as t1 , systables as t2 ,
sysindexes as t3 , syscolumns as t4 ,
sysreferences as t5 , syscolumns as t6 , systables as t7 ,
and t3.tabid = t2.tabid and t3.idxname = t1.idxname
and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
- t3.part11, t3.part11, t3.part12, t3.part13, t3.part4, t3.part15, t3.part16)
+ t3.part11, t3.part12, t3.part13, t3.part14, t3.part15, t3.part16)
and t5.constrid = t1.constrid and t8.constrid = t5.primary
and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
# Select the column positions from sysindexes for sysconstraints
data = connection.execute(
- """select t2.*
+ """select t2.*
from systables as t1, sysindexes as t2, sysconstraints as t3
where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
and t2.idxname=t3.idxname and t3.constrtype='P'""",
c = connection.execute(
"""select t1.colname
from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
+ where t2.tabname=? and t1.tabid = t2.tabid and
t1.colno in (%s)""" % place_holder,
table_name, *colpositions
).fetchall()
c = connection.execute(
"""select t1.colname
from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
+ where t2.tabname=? and t1.tabid = t2.tabid and
t1.colno in (%s)""" % place_holder,
table_name, *colnames
).fetchall()
.. note::
- The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**,
+ The MaxDB dialect is **non-functional as of SQLAlchemy 0.6**,
pending development efforts to bring it up-to-date.
Overview
value[20:])])
else:
raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." %
+ "datetimeformat '%s' is not supported." %
dialect.datetimeformat)
return process
if value is None:
return None
else:
- return datetime.date(int(value[0:4]), int(value[4:6]),
+ return datetime.date(int(value[0:4]), int(value[4:6]),
int(value[6:8]))
elif dialect.datetimeformat == 'iso':
def process(value):
if value is None:
return None
else:
- return datetime.date(int(value[0:4]), int(value[5:7]),
+ return datetime.date(int(value[0:4]), int(value[5:7]),
int(value[8:10]))
else:
raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." %
+ "datetimeformat '%s' is not supported." %
dialect.datetimeformat)
return process
if value is None:
return None
else:
- return datetime.time(int(value[0:4]), int(value[4:6]),
+ return datetime.time(int(value[0:4]), int(value[4:6]),
int(value[6:8]))
elif dialect.datetimeformat == 'iso':
def process(value):
int(value[8:10]))
else:
raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." %
+ "datetimeformat '%s' is not supported." %
dialect.datetimeformat)
return process
__all__ = (
- 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR',
+ 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR',
'NCHAR', 'TEXT', 'NTEXT', 'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME',
- 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME',
+ 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME',
'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP',
'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect'
)
\ No newline at end of file
class MSDateTime_adodbapi(MSDateTime):
def result_processor(self, dialect, coltype):
def process(value):
- # adodbapi will return datetimes with empty time
+ # adodbapi will return datetimes with empty time
# values as datetime.date() objects.
# Promote them back to full datetime.datetime()
if type(value) is datetime.date:
connectors = ["Provider=SQLOLEDB"]
if 'port' in keys:
- connectors.append ("Data Source=%s, %s" %
+ connectors.append ("Data Source=%s, %s" %
(keys.get("host"), keys.get("port")))
else:
connectors.append ("Data Source=%s" % keys.get("host"))
mssql+pymssql://<username>:<password>@<freetds_name>
Adding "?charset=utf8" or similar will cause pymssql to return
-strings as Python unicode objects. This can potentially improve
-performance in some scenarios as decoding of strings is
+strings as Python unicode objects. This can potentially improve
+performance in some scenarios as decoding of strings is
handled natively.
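For example, a hedged illustration (the DSN name, credentials and database are placeholders)::

    create_engine("mssql+pymssql://scott:tiger@mydsn/mydatabase?charset=utf8")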
Limitations
dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english
-* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection
+* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection
that would appear like::
DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass
* ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection
string which includes the port
- information using the comma syntax. This will create the following
+ information using the comma syntax. This will create the following
connection string::
DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass
Unicode Binds
^^^^^^^^^^^^^
-The current state of PyODBC on a unix backend with FreeTDS and/or
+The current state of PyODBC on a unix backend with FreeTDS and/or
EasySoft is poor regarding unicode; different OS platforms and versions of UnixODBC
-versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically
+versus IODBC versus FreeTDS/EasySoft versus PyODBC itself dramatically
alter how strings are received. The PyODBC dialect attempts to use all the information
it knows to determine whether or not a Python unicode literal can be
passed directly to the PyODBC driver or not; while SQLAlchemy can encode
bytestrings for certain encodings and requires a Python unicode object,
while the author has observed widespread cases where a Python unicode
is completely misinterpreted by PyODBC, particularly when dealing with
-the information schema tables used in table reflection, and the value
+the information schema tables used in table reflection, and the value
must first be encoded to a bytestring.
It is for this reason that whether or not unicode literals for bound
-parameters be sent to PyODBC can be controlled using the
-``supports_unicode_binds`` parameter to ``create_engine()``. When
-left at its default of ``None``, the PyODBC dialect will use its
+parameters are sent to PyODBC can be controlled using the
+``supports_unicode_binds`` parameter to ``create_engine()``. When
+left at its default of ``None``, the PyODBC dialect will use its
best guess as to whether or not the driver deals with unicode literals
well. When ``False``, unicode literals will be encoded first, and when
``True`` unicode literals will be passed straight through. This is an interim
super(MSExecutionContext_pyodbc, self).pre_exec()
- # don't embed the scope_identity select into an
+ # don't embed the scope_identity select into an
# "INSERT .. DEFAULT VALUES"
if self._select_lastrowid and \
self.dialect.use_scope_identity and \
def post_exec(self):
if self._embedded_scope_identity:
# Fetch the last inserted id from the manipulated statement
- # We may have to skip over a number of result sets with
+ # We may have to skip over a number of result sets with
# no data (due to triggers, etc.)
while True:
try:
- # fetchall() ensures the cursor is consumed
+ # fetchall() ensures the cursor is consumed
# without closing it (FreeTDS particularly)
row = self.cursor.fetchall()[0]
break
def _get_server_version_info(self, connection):
return tuple(
- int(x)
+ int(x)
for x in connection.connection.dbversion.split('.')
)
-------------------
MySQL features an automatic connection close behavior, for connections that have
-been idle for eight hours or more. To circumvent having this issue, use the
+been idle for eight hours or more. To circumvent this issue, use the
``pool_recycle`` option which controls the maximum age of any connection::
engine = create_engine('mysql+mysqldb://...', pool_recycle=3600)
Transaction Isolation Level
---------------------------
-:func:`.create_engine` accepts an ``isolation_level``
-parameter which results in the command ``SET SESSION
-TRANSACTION ISOLATION LEVEL <level>`` being invoked for
+:func:`.create_engine` accepts an ``isolation_level``
+parameter which results in the command ``SET SESSION
+TRANSACTION ISOLATION LEVEL <level>`` being invoked for
every new connection. Valid values for this parameter are
-``READ COMMITTED``, ``READ UNCOMMITTED``,
+``READ COMMITTED``, ``READ UNCOMMITTED``,
``REPEATABLE READ``, and ``SERIALIZABLE``::
engine = create_engine(
- "mysql://scott:tiger@localhost/test",
+ "mysql://scott:tiger@localhost/test",
isolation_level="READ UNCOMMITTED"
)
This is in contradiction to the default setting on most MySQL DBAPI drivers,
which is "number of rows actually modified/deleted". For this reason, the
SQLAlchemy MySQL dialects always set the ``constants.CLIENT.FOUND_ROWS`` flag,
-or whatever is equivalent for the DBAPI in use, on connect, unless the flag value
+or whatever is equivalent for the DBAPI in use, on connect, unless the flag value
is overridden using DBAPI-specific options
(such as ``client_flag`` for the MySQL-Python driver, ``found_rows`` for the
OurSQL driver).
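As a hedged illustration, DBAPI-level arguments can be passed straight through to MySQL-Python via ``connect_args``; the ``client_flag`` value below is purely a placeholder::

    create_engine(
        "mysql+mysqldb://scott:tiger@localhost/test",
        connect_args={"client_flag": 0}
    )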
~~~~~~~~~~~~~
Some MySQL storage engines permit you to specify an index type when creating
-an index or primary key constraint. SQLAlchemy provides this feature via the
+an index or primary key constraint. SQLAlchemy provides this feature via the
``mysql_using`` parameter on :class:`.Index`::
Index('my_index', my_table.c.data, mysql_using='hash')
PrimaryKeyConstraint("data", mysql_using='hash')
The value passed to the keyword argument will be simply passed through to the
-underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index
+underlying CREATE INDEX or PRIMARY KEY clause, so it *must* be a valid index
type for your MySQL storage engine.
More information can be found at:
def get_select_precolumns(self, select):
"""Add special MySQL keywords in place of DISTINCT.
-
- .. note::
-
+
+ .. note::
+
this usage is deprecated. :meth:`.Select.prefix_with`
should be used for special keywords at the start
of a SELECT.
-
+
"""
if isinstance(select._distinct, basestring):
return select._distinct.upper() + " "
if limit is None:
# hardwire the upper limit. Currently
# needed by OurSQL with Python 3
- # (https://bugs.launchpad.net/oursql/+bug/686232),
+ # (https://bugs.launchpad.net/oursql/+bug/686232),
# but also is consistent with the usage of the upper
# bound as part of MySQL's "syntax" for OFFSET with
# no LIMIT
return ' \n LIMIT %s, %s' % (
- self.process(sql.literal(offset)),
+ self.process(sql.literal(offset)),
"18446744073709551615")
else:
return ' \n LIMIT %s, %s' % (
- self.process(sql.literal(offset)),
+ self.process(sql.literal(offset)),
self.process(sql.literal(limit)))
else:
# No offset provided, so just use the limit
return None
def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw):
- return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw)
+ return ', '.join(t._compiler_dispatch(self, asfrom=True, **kw)
for t in [from_table] + list(extra_froms))
- def update_from_clause(self, update_stmt, from_table,
+ def update_from_clause(self, update_stmt, from_table,
extra_froms, from_hints, **kw):
return None
constraint_string += "KEY %s (%s)" % (
self.preparer.quote(
"idx_autoinc_%s" % auto_inc_column.name, None
- ),
+ ),
self.preparer.format_column(auto_inc_column)
)
opts = dict(
(
- k[len(self.dialect.name)+1:].upper(),
+ k[len(self.dialect.name)+1:].upper(),
v
)
for k, v in table.kwargs.items()
arg = "'%s'" % arg.replace("\\", "\\\\").replace("'", "''")
if opt in ('DATA_DIRECTORY', 'INDEX_DIRECTORY',
- 'DEFAULT_CHARACTER_SET', 'CHARACTER_SET',
+ 'DEFAULT_CHARACTER_SET', 'CHARACTER_SET',
'DEFAULT_CHARSET',
'DEFAULT_COLLATE'):
opt = opt.replace('_', ' ')
table = preparer.format_table(index.table)
columns = [preparer.quote(c.name, c.quote) for c in index.columns]
name = preparer.quote(
- self._index_identifier(index.name),
+ self._index_identifier(index.name),
index.quote)
text = "CREATE "
if type_.precision is None:
return self._extend_numeric(type_, "NUMERIC")
elif type_.scale is None:
- return self._extend_numeric(type_,
- "NUMERIC(%(precision)s)" %
+ return self._extend_numeric(type_,
+ "NUMERIC(%(precision)s)" %
{'precision': type_.precision})
else:
- return self._extend_numeric(type_,
- "NUMERIC(%(precision)s, %(scale)s)" %
+ return self._extend_numeric(type_,
+ "NUMERIC(%(precision)s, %(scale)s)" %
{'precision': type_.precision, 'scale' : type_.scale})
def visit_DECIMAL(self, type_):
if type_.precision is None:
return self._extend_numeric(type_, "DECIMAL")
elif type_.scale is None:
- return self._extend_numeric(type_,
- "DECIMAL(%(precision)s)" %
+ return self._extend_numeric(type_,
+ "DECIMAL(%(precision)s)" %
{'precision': type_.precision})
else:
- return self._extend_numeric(type_,
- "DECIMAL(%(precision)s, %(scale)s)" %
+ return self._extend_numeric(type_,
+ "DECIMAL(%(precision)s, %(scale)s)" %
{'precision': type_.precision, 'scale' : type_.scale})
def visit_DOUBLE(self, type_):
if self._mysql_type(type_) and \
type_.scale is not None and \
type_.precision is not None:
- return self._extend_numeric(type_,
+ return self._extend_numeric(type_,
"FLOAT(%s, %s)" % (type_.precision, type_.scale))
elif type_.precision is not None:
return self._extend_numeric(type_, "FLOAT(%s)" % (type_.precision,))
def visit_INTEGER(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "INTEGER(%(display_width)s)" %
+ return self._extend_numeric(type_,
+ "INTEGER(%(display_width)s)" %
{'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "INTEGER")
def visit_BIGINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "BIGINT(%(display_width)s)" %
+ return self._extend_numeric(type_,
+ "BIGINT(%(display_width)s)" %
{'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "BIGINT")
def visit_MEDIUMINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "MEDIUMINT(%(display_width)s)" %
+ return self._extend_numeric(type_,
+ "MEDIUMINT(%(display_width)s)" %
{'display_width': type_.display_width})
else:
return self._extend_numeric(type_, "MEDIUMINT")
def visit_SMALLINT(self, type_):
if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "SMALLINT(%(display_width)s)" %
+ return self._extend_numeric(type_,
+ "SMALLINT(%(display_width)s)" %
{'display_width': type_.display_width}
)
else:
return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length)
else:
raise exc.CompileError(
- "VARCHAR requires a length on dialect %s" %
+ "VARCHAR requires a length on dialect %s" %
self.dialect.name)
def visit_CHAR(self, type_):
return self._extend_string(type_, {'national':True}, "VARCHAR(%(length)s)" % {'length': type_.length})
else:
raise exc.CompileError(
- "NVARCHAR requires a length on dialect %s" %
+ "NVARCHAR requires a length on dialect %s" %
self.dialect.name)
def visit_NCHAR(self, type_):
quote = '"'
super(MySQLIdentifierPreparer, self).__init__(
- dialect,
- initial_quote=quote,
+ dialect,
+ initial_quote=quote,
escape_quote=quote)
def _quote_free_identifiers(self, *ids):
preparer = MySQLIdentifierPreparer
# default SQL compilation settings -
- # these are modified upon initialize(),
+ # these are modified upon initialize(),
# i.e. first connect
_backslash_escapes = True
_server_ansiquotes = False
else:
return None
- _isolation_lookup = set(['SERIALIZABLE',
+ _isolation_lookup = set(['SERIALIZABLE',
'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ'])
def set_isolation_level(self, connection, level):
if level not in self._isolation_lookup:
raise exc.ArgumentError(
"Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
+ "Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
)
cursor = connection.cursor()
return self._extract_error_code(e) in \
(2006, 2013, 2014, 2045, 2055)
elif isinstance(e, self.dbapi.InterfaceError):
- # if underlying connection is closed,
+ # if underlying connection is closed,
# this is the error you get
return "(0, '')" in str(e)
else:
def _parsed_state_or_create(self, connection, table_name, schema=None, **kw):
return self._setup_parser(
- connection,
- table_name,
- schema,
+ connection,
+ table_name,
+ schema,
info_cache=kw.get('info_cache', None)
)
def _tabledef_parser(self):
"""return the MySQLTableDefinitionParser, generate if needed.
- The deferred creation ensures that the dialect has
+ The deferred creation ensures that the dialect has
retrieved server version information first.
"""
``use_unicode=1`` parameter, or the ``charset`` parameter,
is passed as a connection argument.
-Without this setting, many MySQL server installations default to
+Without this setting, many MySQL server installations default to
a ``latin1`` encoding for client connections, which has the effect
-of all data being converted into ``latin1``, even if you have ``utf8``
+of all data being converted into ``latin1``, even if you have ``utf8``
or another character set configured on your tables
and columns. With versions 4.1 and higher, you can change the connection
character set either through server configuration or by including the
``charset`` parameter. The ``charset``
-parameter as received by MySQL-Python also has the side-effect of
+parameter as received by MySQL-Python also has the side-effect of
enabling ``use_unicode=1``::
# set client encoding to utf8; all strings come back as unicode
create_engine('mysql+mysqldb:///mydb?charset=utf8')
-Manually configuring ``use_unicode=0`` will cause MySQL-python to
+Manually configuring ``use_unicode=0`` will cause MySQL-python to
return encoded strings::
# set client encoding to utf8; all strings come back as utf8 str
from sqlalchemy.dialects.mysql.base import (MySQLDialect, MySQLExecutionContext,
MySQLCompiler, MySQLIdentifierPreparer)
from sqlalchemy.connectors.mysqldb import (
- MySQLDBExecutionContext,
- MySQLDBCompiler,
- MySQLDBIdentifierPreparer,
+ MySQLDBExecutionContext,
+ MySQLDBCompiler,
+ MySQLDBIdentifierPreparer,
MySQLDBConnector
)
arg = "'%s'" % arg
connection.execution_options(_oursql_plain_query=True).execute(query % arg)
- # Because mysql is bad, these methods have to be
+ # Because mysql is bad, these methods have to be
# reimplemented to use _PlainQuery. Basically, some queries
- # refuse to return any data if they're run through
+ # refuse to return any data if they're run through
# the parameterized query API, or refuse to be parameterized
# in the first place.
def do_begin_twophase(self, connection, xid):
# Q: why didn't we need all these "plain_query" overrides earlier ?
# am i on a newer/older version of OurSQL ?
def has_table(self, connection, table_name, schema=None):
- return MySQLDialect.has_table(self,
+ return MySQLDialect.has_table(self,
connection.connect().\
execution_options(_oursql_plain_query=True),
table_name, schema)
def initialize(self, connection):
return MySQLDialect.initialize(
- self,
+ self,
connection.execution_options(_oursql_plain_query=True)
)
opts.setdefault('found_rows', True)
ssl = {}
- for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
+ for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
'ssl_capath', 'ssl_cipher']:
if key in opts:
ssl[key[4:]] = opts[key]
Limitations
-----------
-The mysql-pyodbc dialect is subject to unresolved character encoding issues
+The mysql-pyodbc dialect is subject to unresolved character encoding issues
which exist within the current ODBC drivers available.
(see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage
of OurSQL, MySQLdb, or MySQL-connector/Python.
Driver
------
-The Oracle dialect uses the cx_oracle driver, available at
-http://cx-oracle.sourceforge.net/ . The dialect has several behaviors
+The Oracle dialect uses the cx_oracle driver, available at
+http://cx-oracle.sourceforge.net/ . The dialect has several behaviors
which are specifically tailored towards compatibility with this module.
Version 5.0 or greater is **strongly** recommended, as SQLAlchemy makes
-extensive use of the cx_oracle output converters for numeric and
+extensive use of the cx_oracle output converters for numeric and
string conversions.
Connecting
----------
-Connecting with create_engine() uses the standard URL approach of
-``oracle://user:pass@host:port/dbname[?key=value&key=value...]``. If dbname is present, the
-host, port, and dbname tokens are converted to a TNS name using the cx_oracle
+Connecting with create_engine() uses the standard URL approach of
+``oracle://user:pass@host:port/dbname[?key=value&key=value...]``. If dbname is present, the
+host, port, and dbname tokens are converted to a TNS name using the cx_oracle
:func:`makedsn()` function. Otherwise, the host token is taken directly as a TNS name.
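For example (host, port, service name and credentials below are placeholders)::

    from sqlalchemy import create_engine

    # dbname present - host, port and dbname are combined into a TNS name via makedsn()
    engine = create_engine('oracle+cx_oracle://scott:tiger@hostname:1521/orcl')

    # no dbname - the host token is used directly as a TNS name
    engine = create_engine('oracle+cx_oracle://scott:tiger@tnsname')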
Additional arguments which may be specified either as query string arguments on the
Generally, the ``NLS_LANG`` environment variable determines the nature
of the encoding to be used.
-Note that this behavior is disabled when Oracle 8 is detected, as it has been
+Note that this behavior is disabled when Oracle 8 is detected, as it has been
observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8.
LOB Objects
Two Phase Transaction Support
-----------------------------
-Two Phase transactions are implemented using XA transactions. Success has been reported
+Two Phase transactions are implemented using XA transactions. Success has been reported
with this feature but it should be regarded as experimental.
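A minimal sketch of explicit two-phase usage at the connection level, assuming an ``engine`` and a Core table ``some_table`` are already defined::

    # begin an XA transaction, prepare it, then commit
    conn = engine.connect()
    xa = conn.begin_twophase()
    conn.execute(some_table.insert().values(data='x'))
    xa.prepare()
    xa.commit()
    conn.close()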
Precision Numerics
can be disabled by passing the flag ``coerce_to_decimal=False``
to :func:`.create_engine`::
- engine = create_engine("oracle+cx_oracle://dsn",
+ engine = create_engine("oracle+cx_oracle://dsn",
coerce_to_decimal=False)
.. versionadded:: 0.7.6
Add the ``coerce_to_decimal`` flag.
-Another alternative to performance is to use the
-`cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library;
+Another alternative to performance is to use the
+`cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library;
see :class:`.Numeric` for additional notes.
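A sketch of the usual cdecimal substitution, performed before SQLAlchemy or the DBAPI import the standard ``decimal`` module::

    import sys
    import cdecimal

    # make subsequent "import decimal" statements resolve to the faster C implementation
    sys.modules["decimal"] = cdecimal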
The handler attempts to use the "precision" and "scale"
def result_processor(self, dialect, coltype):
# we apply a cx_oracle type handler to all connections
# that converts floating point strings to Decimal().
- # However, in some subquery situations, Oracle doesn't
+ # However, in some subquery situations, Oracle doesn't
# give us enough information to determine int or Decimal.
# It could even be int/Decimal differently on each row,
# regardless of the scale given for the originating type.
else:
return None
else:
- # cx_oracle 4 behavior, will assume
+ # cx_oracle 4 behavior, will assume
# floats
return super(_OracleNumeric, self).\
result_processor(dialect, coltype)
# end Py2K
# we apply a connection output handler that returns
- # unicode in all cases, so the "native_unicode" flag
+ # unicode in all cases, so the "native_unicode" flag
# will be set for the default String.result_processor.
class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR):
getattr(self.compiled, '_quoted_bind_names', None)
if quoted_bind_names:
if not self.dialect.supports_unicode_statements:
- # if DBAPI doesn't accept unicode statements,
+ # if DBAPI doesn't accept unicode statements,
# keys in self.parameters would have been encoded
# here. so convert names in quoted_bind_names
# to encoded as well.
quoted_bind_names = \
dict(
- (fromname.encode(self.dialect.encoding),
- toname.encode(self.dialect.encoding))
- for fromname, toname in
+ (fromname.encode(self.dialect.encoding),
+ toname.encode(self.dialect.encoding))
+ for fromname, toname in
quoted_bind_names.items()
)
for param in self.parameters:
del param[fromname]
if self.dialect.auto_setinputsizes:
- # cx_oracle really has issues when you setinputsizes
+ # cx_oracle really has issues when you setinputsizes
# on String, including that outparams/RETURNING
# breaks for varchars
- self.set_input_sizes(quoted_bind_names,
+ self.set_input_sizes(quoted_bind_names,
exclude_types=self.dialect._cx_oracle_string_types
)
def get_result_proxy(self):
if hasattr(self, 'out_parameters') and self.compiled.returning:
returning_params = dict(
- (k, v.getvalue())
+ (k, v.getvalue())
for k, v in self.out_parameters.items()
)
return ReturningResultProxy(self, returning_params)
impl_type = type.dialect_impl(self.dialect)
dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi)
result_processor = impl_type.\
- result_processor(self.dialect,
+ result_processor(self.dialect,
dbapi_type)
if result_processor is not None:
out_parameters[name] = \
out_parameters[name] = self.out_parameters[name].getvalue()
else:
result.out_parameters = dict(
- (k, v.getvalue())
+ (k, v.getvalue())
for k, v in self.out_parameters.items()
)
class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle):
"""Support WITH_UNICODE in Python 2.xx.
- WITH_UNICODE allows cx_Oracle's Python 3 unicode handling
- behavior under Python 2.x. This mode in some cases disallows
- and in other cases silently passes corrupted data when
- non-Python-unicode strings (a.k.a. plain old Python strings)
- are passed as arguments to connect(), the statement sent to execute(),
+ WITH_UNICODE allows cx_Oracle's Python 3 unicode handling
+ behavior under Python 2.x. This mode in some cases disallows
+ and in other cases silently passes corrupted data when
+ non-Python-unicode strings (a.k.a. plain old Python strings)
+ are passed as arguments to connect(), the statement sent to execute(),
or any of the bind parameter keys or values sent to execute().
- This optional context therefore ensures that all statements are
+ This optional context therefore ensures that all statements are
passed as Python unicode objects.
"""
return ret
def _buffer_rows(self):
- return collections.deque([tuple(self._returning_params["ret_%d" % i]
+ return collections.deque([tuple(self._returning_params["ret_%d" % i]
for i, c in enumerate(self._returning_params))])
class OracleDialect_cx_oracle(OracleDialect):
execute_sequence_format = list
- def __init__(self,
- auto_setinputsizes=True,
- auto_convert_lobs=True,
- threaded=True,
- allow_twophase=True,
+ def __init__(self,
+ auto_setinputsizes=True,
+ auto_convert_lobs=True,
+ threaded=True,
+ allow_twophase=True,
coerce_to_decimal=True,
arraysize=50, **kwargs):
OracleDialect.__init__(self, **kwargs)
self._cx_oracle_string_types = types("STRING", "UNICODE", "NCLOB", "CLOB")
self._cx_oracle_unicode_types = types("UNICODE", "NCLOB")
- self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB")
+ self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB")
self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0)
self.supports_native_decimal = (
- self.cx_oracle_ver >= (5, 0) and
+ self.cx_oracle_ver >= (5, 0) and
coerce_to_decimal
)
self._detect_decimal_char(connection)
def _detect_decimal_char(self, connection):
- """detect if the decimal separator character is not '.', as
+ """detect if the decimal separator character is not '.', as
is the case with European locale settings for NLS_LANG.
cx_oracle itself uses similar logic when it formats Python
- Decimal objects to strings on the bind side (as of 5.0.3),
- as Oracle sends/receives string numerics only in the
+ Decimal objects to strings on the bind side (as of 5.0.3),
+ as Oracle sends/receives string numerics only in the
current locale.
"""
cx_Oracle = self.dbapi
conn = connection.connection
- # override the output_type_handler that's
- # on the cx_oracle connection with a plain
+ # override the output_type_handler that's
+ # on the cx_oracle connection with a plain
# one on the cursor
- def output_type_handler(cursor, name, defaultType,
+ def output_type_handler(cursor, name, defaultType,
size, precision, scale):
return cursor.var(
- cx_Oracle.STRING,
+ cx_Oracle.STRING,
255, arraysize=cursor.arraysize)
cursor = conn.cursor()
return
cx_Oracle = self.dbapi
- def output_type_handler(cursor, name, defaultType,
+ def output_type_handler(cursor, name, defaultType,
size, precision, scale):
# convert all NUMBER with precision + positive scale to Decimal
# this almost allows "native decimal" mode.
defaultType == cx_Oracle.NUMBER and \
precision and scale > 0:
return cursor.var(
- cx_Oracle.STRING,
- 255,
- outconverter=self._to_decimal,
+ cx_Oracle.STRING,
+ 255,
+ outconverter=self._to_decimal,
arraysize=cursor.arraysize)
# if NUMBER with zero precision and 0 or neg scale, this appears
- # to indicate "ambiguous". Use a slower converter that will
- # make a decision based on each value received - the type
+ # to indicate "ambiguous". Use a slower converter that will
+ # make a decision based on each value received - the type
# may change from row to row (!). This kills
# off "native decimal" mode, handlers still needed.
elif self.supports_native_decimal and \
defaultType == cx_Oracle.NUMBER \
and not precision and scale <= 0:
return cursor.var(
- cx_Oracle.STRING,
- 255,
- outconverter=self._detect_decimal,
+ cx_Oracle.STRING,
+ 255,
+ outconverter=self._detect_decimal,
arraysize=cursor.arraysize)
# allow all strings to come back natively as Unicode
elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
def _get_server_version_info(self, connection):
return tuple(
- int(x)
+ int(x)
for x in connection.connection.version.split('.')
)
DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect
__all__ = (
-'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET',
+'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET',
'CIDR', 'UUID', 'BIT', 'MACADDR', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME',
'DATE', 'BYTEA', 'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect'
)
Date and Time Types
-------------------
-SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide
+SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide
out-of-the-box functionality for translating values between Python `datetime` objects
and a SQLite-supported format. SQLAlchemy's own :class:`~sqlalchemy.types.DateTime`
and related types provide date formatting and parsing functionality when SQLite is used.
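For example, a minimal sketch (table and column names are illustrative)::

    from sqlalchemy import Table, Column, Integer, DateTime, MetaData

    metadata = MetaData()
    events = Table('events', metadata,
        Column('id', Integer, primary_key=True),
        # stored as an ISO-formatted string in SQLite, parsed back to datetime on load
        Column('created_at', DateTime)
    )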
This is regardless of the AUTOINCREMENT keyword being present or not.
To specifically render the AUTOINCREMENT keyword on the primary key
-column when rendering DDL, add the flag ``sqlite_autoincrement=True``
+column when rendering DDL, add the flag ``sqlite_autoincrement=True``
to the Table construct::
Table('sometable', metadata,
- Column('id', Integer, primary_key=True),
+ Column('id', Integer, primary_key=True),
sqlite_autoincrement=True)
Transaction Isolation Level
---------------------------
-:func:`.create_engine` accepts an ``isolation_level`` parameter which results in
-the command ``PRAGMA read_uncommitted <level>`` being invoked for every new
-connection. Valid values for this parameter are ``SERIALIZABLE`` and
+:func:`.create_engine` accepts an ``isolation_level`` parameter which results in
+the command ``PRAGMA read_uncommitted <level>`` being invoked for every new
+connection. Valid values for this parameter are ``SERIALIZABLE`` and
``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively.
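For example, a sketch of enabling dirty reads on new connections::

    from sqlalchemy import create_engine

    # each new pysqlite connection will receive "PRAGMA read_uncommitted = 1"
    engine = create_engine('sqlite:///some.db', isolation_level='READ UNCOMMITTED')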
See the section :ref:`pysqlite_serializable` for an important workaround
when using serializable isolation with Pysqlite.
---------------------------------------
Note that SQLite is not designed for a high level of concurrency. The database
-itself, being a file, is locked completely during write operations and within
+itself, being a file, is locked completely during write operations and within
transactions, meaning exactly one connection has exclusive access to the database
during this period - all other connections will be blocked during this time.
The Python DBAPI specification also calls for a connection model that is always
in a transaction; there is no BEGIN method, only commit and rollback. This implies
-that a SQLite DBAPI driver would technically allow only serialized access to a
+that a SQLite DBAPI driver would technically allow only serialized access to a
particular database file at all times. The pysqlite driver attempts to ameliorate this by
deferring the actual BEGIN statement until the first DML (INSERT, UPDATE, or
DELETE) is received within a transaction. While this breaks serializable isolation,
it at least delays the exclusive locking inherent in SQLite's design.
-SQLAlchemy's default mode of usage with the ORM is known
-as "autocommit=False", which means the moment the :class:`.Session` begins to be
+SQLAlchemy's default mode of usage with the ORM is known
+as "autocommit=False", which means the moment the :class:`.Session` begins to be
used, a transaction is begun. As the :class:`.Session` is used, the autoflush
-feature, also on by default, will flush out pending changes to the database
+feature, also on by default, will flush out pending changes to the database
before each query. The effect of this is that a :class:`.Session` used in its
default mode will often emit DML early on, long before the transaction is actually
-committed. This again will have the effect of serializing access to the SQLite
+committed. This again will have the effect of serializing access to the SQLite
database. If highly concurrent reads are desired against the SQLite database,
it is advised that the autoflush feature be disabled, and potentially even
that autocommit be re-enabled, which has the effect of each SQL statement and
flush committing changes immediately.
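A minimal sketch of such a Session configuration, assuming an ``engine`` has already been created and that delaying DML is acceptable for the application::

    from sqlalchemy.orm import sessionmaker

    # defer flushes until commit() or an explicit flush()
    Session = sessionmaker(bind=engine, autoflush=False)

    # or, additionally re-enable autocommit so each flush commits immediately:
    # Session = sessionmaker(bind=engine, autoflush=False, autocommit=True)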
-For more information on SQLite's lack of concurrency by design, please
+For more information on SQLite's lack of concurrency by design, please
see `Situations Where Another RDBMS May Work Better - High Concurrency <http://www.sqlite.org/whentouse.html>`_
near the bottom of the page.
class DATETIME(_DateTimeMixin, sqltypes.DateTime):
"""Represent a Python datetime object in SQLite using a string.
-
+
The default string storage format is::
-
+
"%(year)04d-%(month)02d-%(day)02d %(hour)02d:%(min)02d:%(second)02d.%(microsecond)06d"
-
+
e.g.::
-
+
2011-03-15 12:05:57.10558
-
- The storage format can be customized to some degree using the
+
+ The storage format can be customized to some degree using the
``storage_format`` and ``regexp`` parameters, such as::
-
+
import re
from sqlalchemy.dialects.sqlite import DATETIME
-
+
dt = DATETIME(
storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d",
regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)")
)
-
- :param storage_format: format string which will be applied to the
+
+ :param storage_format: format string which will be applied to the
dict with keys year, month, day, hour, minute, second, and microsecond.
-
- :param regexp: regular expression which will be applied to
+
+ :param regexp: regular expression which will be applied to
incoming result rows. If the regexp contains named groups, the
resulting match dict is applied to the Python datetime() constructor
as keyword arguments. Otherwise, if positional groups are used, the
"""Represent a Python date object in SQLite using a string.
The default string storage format is::
-
+
"%(year)04d-%(month)02d-%(day)02d"
-
+
e.g.::
-
+
2011-03-15
-
- The storage format can be customized to some degree using the
+
+ The storage format can be customized to some degree using the
``storage_format`` and ``regexp`` parameters, such as::
-
+
import re
from sqlalchemy.dialects.sqlite import DATE
storage_format="%(month)02d/%(day)02d/%(year)04d",
regexp=re.compile("(?P<month>\d+)/(?P<day>\d+)/(?P<year>\d+)")
)
-
- :param storage_format: format string which will be applied to the
+
+ :param storage_format: format string which will be applied to the
dict with keys year, month, and day.
-
- :param regexp: regular expression which will be applied to
+
+ :param regexp: regular expression which will be applied to
incoming result rows. If the regexp contains named groups, the
resulting match dict is applied to the Python date() constructor
as keyword arguments. Otherwise, if positional groups are used, the
class TIME(_DateTimeMixin, sqltypes.Time):
"""Represent a Python time object in SQLite using a string.
-
+
The default string storage format is::
-
+
"%(hour)02d:%(minute)02d:%(second)02d.%(microsecond)06d"
-
+
e.g.::
-
+
12:05:57.10558
-
- The storage format can be customized to some degree using the
+
+ The storage format can be customized to some degree using the
``storage_format`` and ``regexp`` parameters, such as::
-
+
import re
from sqlalchemy.dialects.sqlite import TIME
storage_format="%(hour)02d-%(minute)02d-%(second)02d-%(microsecond)06d",
regexp=re.compile("(\d+)-(\d+)-(\d+)-(?:-(\d+))?")
)
-
- :param storage_format: format string which will be applied to the
+
+ :param storage_format: format string which will be applied to the
dict with keys hour, minute, second, and microsecond.
-
- :param regexp: regular expression which will be applied to
+
+ :param regexp: regular expression which will be applied to
incoming result rows. If the regexp contains named groups, the
resulting match dict is applied to the Python time() constructor
as keyword arguments. Otherwise, if positional groups are used, the
issubclass(c.type._type_affinity, sqltypes.Integer) and \
not c.foreign_keys:
return None
-
+
return super(SQLiteDDLCompiler, self).\
visit_primary_key_constraint(constraint)
def _translate_colname(self, colname):
# adjust for dotted column names. SQLite
- # in the case of UNION may store col names as
+ # in the case of UNION may store col names as
# "tablename.colname"
# in cursor.description
if not self._preserve_raw_colnames and "." in colname:
# this flag used by pysqlite dialect, and perhaps others in the
# future, to indicate the driver is handling date/timestamp
- # conversions (and perhaps datetime/time as well on some
+ # conversions (and perhaps datetime/time as well on some
# hypothetical driver ?)
self.native_datetime = native_datetime
except KeyError:
raise exc.ArgumentError(
"Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
+ "Valid isolation levels for %s are %s" %
(level, self.name, ", ".join(self._isolation_lookup))
- )
+ )
cursor = connection.cursor()
cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level)
cursor.close()
res = cursor.fetchone()
if res:
value = res[0]
- else:
+ else:
# http://www.sqlite.org/changes.html#version_3_3_3
- # "Optional READ UNCOMMITTED isolation (instead of the
- # default isolation level of SERIALIZABLE) and
- # table level locking when database connections
+ # "Optional READ UNCOMMITTED isolation (instead of the
+ # default isolation level of SERIALIZABLE) and
+ # table level locking when database connections
# share a common cache.""
# SQLite versions prior to 3.3.0 default to 0
value = 0
pragma = "PRAGMA "
qtable = quote(table_name)
c = _pragma_cursor(
- connection.execute("%stable_info(%s)" %
+ connection.execute("%stable_info(%s)" %
(pragma, qtable)))
found_table = False
columns = []
if row is None:
break
(name, type_, nullable, default, has_default, primary_key) = \
- (row[1], row[2].upper(), not row[3],
+ (row[1], row[2].upper(), not row[3],
row[4], row[4] is not None, row[5])
name = re.sub(r'^\"|\"$', '', name)
match = re.match(r'(\w+)(\(.*?\))?', type_)
def _pragma_cursor(cursor):
- """work around SQLite issue whereby cursor.description
+ """work around SQLite issue whereby cursor.description
is blank when PRAGMA returns no rows."""
if cursor.closed:
Driver
------
-When using Python 2.5 and above, the built in ``sqlite3`` driver is
+When using Python 2.5 and above, the built in ``sqlite3`` driver is
already installed and no additional installation is needed. Otherwise,
the ``pysqlite2`` driver needs to be present. This is the same driver as
``sqlite3``, just with a different name.
The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3``
is loaded. This allows an explicitly installed pysqlite driver to take
-precedence over the built in one. As with all dialects, a specific
-DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control
+precedence over the built in one. As with all dialects, a specific
+DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control
this explicitly::
from sqlite3 import dbapi2 as sqlite
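    # a sketch: "module" selects the DBAPI module explicitly; the file path is illustrative
    e = create_engine('sqlite+pysqlite:///file.db', module=sqlite)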
Compatibility with sqlite3 "native" date and datetime types
-----------------------------------------------------------
-The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and
+The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and
sqlite3.PARSE_COLNAMES options, which have the effect of any column
or expression explicitly cast as "date" or "timestamp" will be converted
-to a Python date or datetime object. The date and datetime types provided
-with the pysqlite dialect are not currently compatible with these options,
-since they render the ISO date/datetime including microseconds, which
+to a Python date or datetime object. The date and datetime types provided
+with the pysqlite dialect are not currently compatible with these options,
+since they render the ISO date/datetime including microseconds, which
pysqlite's driver does not. Additionally, SQLAlchemy does not at
-this time automatically render the "cast" syntax required for the
+this time automatically render the "cast" syntax required for the
freestanding functions "current_timestamp" and "current_date" to return
-datetime/date types natively. Unfortunately, pysqlite
+datetime/date types natively. Unfortunately, pysqlite
does not provide the standard DBAPI types in ``cursor.description``,
-leaving SQLAlchemy with no way to detect these types on the fly
+leaving SQLAlchemy with no way to detect these types on the fly
without expensive per-row type checks.
Keeping in mind that pysqlite's parsing option is not recommended,
-nor should it be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
+nor should it be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
can be forced if one configures "native_datetime=True" on create_engine()::
- engine = create_engine('sqlite://',
+ engine = create_engine('sqlite://',
connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
native_datetime=True
)
Pysqlite's default behavior is to prohibit the usage of a single connection
in more than one thread. This is controlled by the ``check_same_thread``
Pysqlite flag. This default is intended to work with older versions
-of SQLite that did not support multithreaded operation under
+of SQLite that did not support multithreaded operation under
various circumstances. In particular, older SQLite versions
did not allow a ``:memory:`` database to be used in multiple threads
under any circumstances.
* When a ``:memory:`` SQLite database is specified, the dialect by default will use
:class:`.SingletonThreadPool`. This pool maintains a single connection per
thread, so that all access to the engine within the current thread uses the
- same ``:memory:`` database - other threads would access a different
+ same ``:memory:`` database - other threads would access a different
``:memory:`` database.
-* When a file-based database is specified, the dialect will use :class:`.NullPool`
+* When a file-based database is specified, the dialect will use :class:`.NullPool`
as the source of connections. This pool closes and discards connections
which are returned to the pool immediately. SQLite file-based connections
have extremely low overhead, so pooling is not necessary. The scheme also
connect_args={'check_same_thread':False},
poolclass=StaticPool)
-Note that using a ``:memory:`` database in multiple threads requires a recent
+Note that using a ``:memory:`` database in multiple threads requires a recent
version of SQLite.
Using Temporary Tables with SQLite
The pysqlite driver only returns Python ``unicode`` objects in result sets, never
plain strings, and accommodates ``unicode`` objects within bound parameter
-values in all cases. Regardless of the SQLAlchemy string type in use,
-string-based result values will be Python ``unicode`` in Python 2.
+values in all cases. Regardless of the SQLAlchemy string type in use,
+string-based result values will be Python ``unicode`` in Python 2.
The :class:`.Unicode` type should still be used to indicate those columns that
require unicode, however, so that non-``unicode`` values passed inadvertently
will emit a warning. Pysqlite will emit an error if a non-``unicode`` string
state is not begun until the first DML statement, that is INSERT, UPDATE
or DELETE, is emitted. A SELECT statement will not cause transactional
state to begin. While this mode of usage is fine for typical situations
-and has the advantage that the SQLite database file is not prematurely
+and has the advantage that the SQLite database file is not prematurely
locked, it breaks serializable transaction isolation, which requires
that the database file be locked upon any SQL being emitted.
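One workaround, sketched here under the assumption that engine-level connection events are available, is to stop pysqlite from emitting BEGIN itself and to emit it explicitly when a transaction starts::

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite:///some.db')

    @event.listens_for(engine, "connect")
    def do_connect(dbapi_connection, connection_record):
        # disable pysqlite's own implicit BEGIN handling entirely
        dbapi_connection.isolation_level = None

    @event.listens_for(engine, "begin")
    def do_begin(conn):
        # emit BEGIN ourselves so the file is locked as soon as the transaction starts
        conn.execute("BEGIN")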
Unicode Support
---------------
-The pyodbc driver currently supports usage of these Sybase types with
+The pyodbc driver currently supports usage of these Sybase types with
Unicode or multibyte strings::
CHAR
class _SybNumeric_pyodbc(sqltypes.Numeric):
"""Turns Decimals with adjusted() < -6 into floats.
- It's not yet known how to get decimals with many
+ It's not yet known how to get decimals with many
significant digits or very large adjusted() into Sybase
via pyodbc.
def set_ddl_autocommit(self, dbapi_connection, value):
if value:
# call commit() on the Sybase connection directly,
- # to avoid any side effects of calling a Connection
+ # to avoid any side effects of calling a Connection
# transactional method inside of pre_exec()
dbapi_connection.commit()
def _get_server_version_info(self, connection):
vers = connection.scalar("select @@version_number")
- # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
+ # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
# (12, 5, 0, 0)
return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)
def create_engine(*args, **kwargs):
"""Create a new :class:`.Engine` instance.
- The standard calling form is to send the URL as the
- first positional argument, usually a string
+ The standard calling form is to send the URL as the
+ first positional argument, usually a string
that indicates database dialect and connection arguments.
Additional keyword arguments may then follow it which
establish various options on the resulting :class:`.Engine`
The string form of the URL is
``dialect+driver://user:password@host/dbname[?key=value..]``, where
- ``dialect`` is a database name such as ``mysql``, ``oracle``,
- ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
- ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
+ ``dialect`` is a database name such as ``mysql``, ``oracle``,
+ ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
+ ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
- ``**kwargs`` takes a wide variety of options which are routed
- towards their appropriate components. Arguments may be
- specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
+ ``**kwargs`` takes a wide variety of options which are routed
+ towards their appropriate components. Arguments may be
+ specific to the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
:class:`.Pool`. Specific dialects also accept keyword arguments that
are unique to that dialect. Here, we describe the parameters
that are common to most :func:`.create_engine()` usage.
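    For example, a typical call (URL and keyword values here are only illustrative)::

        engine = create_engine('postgresql+psycopg2://scott:tiger@localhost/test',
                               echo=True, pool_size=10)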
:ref:`engines_toplevel`
:ref:`connections_toplevel`
-
+
:param assert_unicode: Deprecated. This flag
sets an engine-wide default value for
- the ``assert_unicode`` flag on the
- :class:`.String` type - see that
+ the ``assert_unicode`` flag on the
+ :class:`.String` type - see that
type for further details.
:param case_sensitive=True: if False, result column names
:param convert_unicode=False: if set to True, sets
the default behavior of ``convert_unicode`` on the
:class:`.String` type to ``True``, regardless
- of a setting of ``False`` on an individual
+ of a setting of ``False`` on an individual
:class:`.String` type, thus causing all :class:`.String`
-based columns
to accommodate Python ``unicode`` objects. This flag
- is useful as an engine-wide setting when using a
+ is useful as an engine-wide setting when using a
DBAPI that does not natively support Python
``unicode`` objects and raises an error when
one is received (such as pyodbc with FreeTDS).
-
- See :class:`.String` for further details on
+
+ See :class:`.String` for further details on
what this flag indicates.
:param creator: a callable which returns a DBAPI connection.
:ref:`dbengine_logging` for information on how to configure logging
directly.
- :param encoding: Defaults to ``utf-8``. This is the string
- encoding used by SQLAlchemy for string encode/decode
- operations which occur within SQLAlchemy, **outside of
- the DBAPI.** Most modern DBAPIs feature some degree of
+ :param encoding: Defaults to ``utf-8``. This is the string
+ encoding used by SQLAlchemy for string encode/decode
+ operations which occur within SQLAlchemy, **outside of
+ the DBAPI.** Most modern DBAPIs feature some degree of
direct support for Python ``unicode`` objects,
that is, what you see in Python 2 as a string of the form
- ``u'some string'``. For those scenarios where the
+ ``u'some string'``. For those scenarios where the
DBAPI is detected as not supporting a Python ``unicode``
- object, this encoding is used to determine the
+ object, this encoding is used to determine the
source/destination encoding. It is **not used**
for those cases where the DBAPI handles unicode
directly.
-
+
To properly configure a system to accommodate Python
- ``unicode`` objects, the DBAPI should be
+ ``unicode`` objects, the DBAPI should be
configured to handle unicode to the greatest
degree as is appropriate - see
the notes on unicode pertaining to the specific
- target database in use at :ref:`dialect_toplevel`.
-
- Areas where string encoding may need to be accommodated
- outside of the DBAPI include zero or more of:
-
- * the values passed to bound parameters, corresponding to
+ target database in use at :ref:`dialect_toplevel`.
+
+ Areas where string encoding may need to be accommodated
+ outside of the DBAPI include zero or more of:
+
+ * the values passed to bound parameters, corresponding to
the :class:`.Unicode` type or the :class:`.String` type
when ``convert_unicode`` is ``True``;
- * the values returned in result set columns corresponding
- to the :class:`.Unicode` type or the :class:`.String`
+ * the values returned in result set columns corresponding
+ to the :class:`.Unicode` type or the :class:`.String`
type when ``convert_unicode`` is ``True``;
- * the string SQL statement passed to the DBAPI's
- ``cursor.execute()`` method;
- * the string names of the keys in the bound parameter
- dictionary passed to the DBAPI's ``cursor.execute()``
+ * the string SQL statement passed to the DBAPI's
+ ``cursor.execute()`` method;
+ * the string names of the keys in the bound parameter
+ dictionary passed to the DBAPI's ``cursor.execute()``
as well as ``cursor.setinputsizes()`` methods;
- * the string column names retrieved from the DBAPI's
+ * the string column names retrieved from the DBAPI's
``cursor.description`` attribute.
-
+
When using Python 3, the DBAPI is required to support
*all* of the above values as Python ``unicode`` objects,
which in Python 3 are just known as ``str``. In Python 2,
:param implicit_returning=True: When ``True``, a RETURNING-
compatible construct, if available, will be used to
fetch newly generated primary key values when a single row
- INSERT statement is emitted with no existing returning()
- clause. This applies to those backends which support RETURNING
- or a compatible construct, including Postgresql, Firebird, Oracle,
+ INSERT statement is emitted with no existing returning()
+ clause. This applies to those backends which support RETURNING
+ or a compatible construct, including Postgresql, Firebird, Oracle,
and Microsoft SQL Server. Set this to ``False`` to disable
the automatic usage of RETURNING.
"_(counter)". If ``None``, the value of
``dialect.max_identifier_length`` is used instead.
- :param listeners: A list of one or more
- :class:`~sqlalchemy.interfaces.PoolListener` objects which will
+ :param listeners: A list of one or more
+ :class:`~sqlalchemy.interfaces.PoolListener` objects which will
receive connection pool events.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
- "sqlalchemy.engine" logger. Defaults to a hexstring of the
+ "sqlalchemy.engine" logger. Defaults to a hexstring of the
object's id.
:param max_overflow=10: the number of connections to allow in
of pool to be used.
:param pool_logging_name: String identifier which will be used within
- the "name" field of logging records generated within the
- "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
+ the "name" field of logging records generated within the
+ "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param pool_size=5: the number of connections to keep open
server configuration as well).
:param pool_reset_on_return='rollback': set the "reset on return"
- behavior of the pool, which is whether ``rollback()``,
+ behavior of the pool, which is whether ``rollback()``,
``commit()``, or nothing is called upon connections
being returned to the pool. See the docstring for
``reset_on_return`` at :class:`.Pool`.
if table.schema:
self.dialect.validate_identifier(table.schema)
return not self.checkfirst or \
- not self.dialect.has_table(self.connection,
+ not self.dialect.has_table(self.connection,
table.name, schema=table.schema)
def _can_create_sequence(self, sequence):
(
not self.checkfirst or
not self.dialect.has_sequence(
- self.connection,
- sequence.name,
+ self.connection,
+ sequence.name,
schema=sequence.schema)
)
)
tables = self.tables
else:
tables = metadata.tables.values()
- collection = [t for t in sql_util.sort_tables(tables)
+ collection = [t for t in sql_util.sort_tables(tables)
if self._can_create_table(t)]
- seq_coll = [s for s in metadata._sequences.values()
+ seq_coll = [s for s in metadata._sequences.values()
if s.column is None and self._can_create_sequence(s)]
metadata.dispatch.before_create(metadata, self.connection,
def visit_sequence(self, sequence, create_ok=False):
if not create_ok and not self._can_create_sequence(sequence):
- return
+ return
self.connection.execute(schema.CreateSequence(sequence))
def visit_index(self, index):
tables = self.tables
else:
tables = metadata.tables.values()
- collection = [t for t in reversed(sql_util.sort_tables(tables))
+ collection = [t for t in reversed(sql_util.sort_tables(tables))
if self._can_drop_table(t)]
- seq_coll = [s for s in metadata._sequences.values()
+ seq_coll = [s for s in metadata._sequences.values()
if s.column is None and self._can_drop_sequence(s)]
metadata.dispatch.before_drop(metadata, self.connection,
self.dialect.validate_identifier(table.name)
if table.schema:
self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or self.dialect.has_table(self.connection,
+ return not self.checkfirst or self.dialect.has_table(self.connection,
table.name, schema=table.schema)
def _can_drop_sequence(self, sequence):
not sequence.optional) and
(not self.checkfirst or
self.dialect.has_sequence(
- self.connection,
- sequence.name,
+ self.connection,
+ sequence.name,
schema=sequence.schema))
)
return dialect.connect(*cargs, **cparams)
except Exception, e:
# Py3K
- #raise exc.DBAPIError.instance(None, None,
+ #raise exc.DBAPIError.instance(None, None,
# e, dialect.dbapi.Error,
# connection_invalidated=
# dialect.is_disconnect(e, None, None)
from sqlalchemy.engine import ddl
ddl.SchemaDropper(self.dialect, self, **kwargs).traverse_single(entity)
- def _run_visitor(self, visitorcallable, element,
- connection=None,
+ def _run_visitor(self, visitorcallable, element,
+ connection=None,
**kwargs):
kwargs['checkfirst'] = False
visitorcallable(self.dialect, self,
"""Provides a thread-local transactional wrapper around the root Engine class.
The ``threadlocal`` module is invoked when using the ``strategy="threadlocal"`` flag
-with :func:`~sqlalchemy.engine.create_engine`. This module is semi-private and is
+with :func:`~sqlalchemy.engine.create_engine`. This module is semi-private and is
invoked automatically when the threadlocal engine strategy is used.
"""
list(const.columns)[0].name
)
event.listen(
- UniqueConstraint,
- "after_parent_attach",
+ UniqueConstraint,
+ "after_parent_attach",
unique_constraint_name)
"""
raise AttributeError("No class with a 'dispatch' member present.")
class _Dispatch(object):
- """Mirror the event listening definitions of an Events class with
+ """Mirror the event listening definitions of an Events class with
listener collections.
- Classes which define a "dispatch" member will return a
- non-instantiated :class:`._Dispatch` subclass when the member
- is accessed at the class level. When the "dispatch" member is
+ Classes which define a "dispatch" member will return a
+ non-instantiated :class:`._Dispatch` subclass when the member
+ is accessed at the class level. When the "dispatch" member is
accessed at the instance level of its owner, an instance
of the :class:`._Dispatch` class is returned.
class defined, by the :func:`._create_dispatcher_class` function.
The original :class:`.Events` classes remain untouched.
This decouples the construction of :class:`.Events` subclasses from
- the implementation used by the event internals, and allows
+ the implementation used by the event internals, and allows
inspecting tools like Sphinx to work in an unsurprising
way against the public API.
return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
class _EventMeta(type):
- """Intercept new Event subclasses and create
+ """Intercept new Event subclasses and create
associated _Dispatch classes."""
def __init__(cls, classname, bases, dict_):
return type.__init__(cls, classname, bases, dict_)
def _create_dispatcher_class(cls, classname, bases, dict_):
- """Create a :class:`._Dispatch` class corresponding to an
+ """Create a :class:`._Dispatch` class corresponding to an
:class:`.Events` class."""
# there's all kinds of ways to do this,
# i.e. make a Dispatch class that shares the '_listen' method
# of the Event class, this is the straight monkeypatch.
dispatch_base = getattr(cls, 'dispatch', _Dispatch)
- cls.dispatch = dispatch_cls = type("%sDispatch" % classname,
+ cls.dispatch = dispatch_cls = type("%sDispatch" % classname,
(dispatch_base, ), {})
dispatch_cls._listen = cls._listen
dispatch_cls._clear = cls._clear
for cls in target.__mro__[1:]:
if cls in self._clslevel:
clslevel.extend([
- fn for fn
- in self._clslevel[cls]
+ fn for fn
+ in self._clslevel[cls]
if fn not in clslevel
])
class _EmptyListener(object):
"""Serves as a class-level interface to the events
- served by a _DispatchDescriptor, when there are no
+ served by a _DispatchDescriptor, when there are no
instance-level events present.
Is replaced by _ListenerCollection when instance-level
"""Return an event collection which can be modified.
For _EmptyListener at the instance level of
- a dispatcher, this generates a new
+ a dispatcher, this generates a new
_ListenerCollection, applies it to the instance,
and returns it.
# I'm not entirely thrilled about the overhead here,
# but this allows class-level listeners to be added
# at any point.
- #
+ #
# In the absence of instance-level listeners,
# we stay with the _EmptyListener object when called
# at the instance level.
existing_listeners = self.listeners
existing_listener_set = set(existing_listeners)
self.propagate.update(other.propagate)
- existing_listeners.extend([l for l
- in other.listeners
+ existing_listeners.extend([l for l
+ in other.listeners
if l not in existing_listener_set
and not only_propagate or l in self.propagate
])
self.propagate.clear()
class dispatcher(object):
- """Descriptor used by target classes to
+ """Descriptor used by target classes to
deliver the _Dispatch class at the class level
and produce new _Dispatch instances for target
instances.
'sqlalchemy'. For class-level logging, the class name is appended.
The "echo" keyword parameter, available on SQLA :class:`.Engine`
-and :class:`.Pool` objects, corresponds to a logger specific to that
+and :class:`.Pool` objects, corresponds to a logger specific to that
instance only.
"""
"""A logger adapter (wrapper) for :class:`.Identified` subclasses.
This allows multiple instances (e.g. Engine or Pool instances)
- to share a logger, but have its verbosity controlled on a
+ to share a logger, but have its verbosity controlled on a
per-instance basis.
The basic functionality is to return a logging level
logger = logging.getLogger(name)
else:
# if a specified echo flag, return an EchoLogger,
- # which checks the flag, overrides normal log
+ # which checks the flag, overrides normal log
# levels by calling logger._log()
logger = InstanceLogger(echoflag, name)
from .. import log, util
from ..sql import operators
from . import (
- attributes, object_session, util as orm_util, strategies,
+ attributes, object_session, util as orm_util, strategies,
object_mapper, exc as orm_exc, collections
)
from .query import Query
else:
return self.query_class(self, state)
- def get_collection(self, state, dict_, user_data=None,
+ def get_collection(self, state, dict_, user_data=None,
passive=attributes.PASSIVE_NO_INITIALIZE):
if not passive & attributes.SQL_OK:
return self._get_collection_history(state,
if self.key not in state.committed_state:
state.committed_state[self.key] = CollectionHistory(self, state)
- state._modified_event(dict_,
+ state._modified_event(dict_,
self,
attributes.NEVER_SET)
return state.committed_state[self.key]
def set(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF,
+ passive=attributes.PASSIVE_OFF,
check_old=None, pop=False):
if initiator and initiator.parent_token is self.parent_token:
return
def get_all_pending(self, state, dict_):
c = self._get_collection_history(state, attributes.PASSIVE_NO_INITIALIZE)
return [
- (attributes.instance_state(x), x)
- for x in
+ (attributes.instance_state(x), x)
+ for x in
c.added_items + c.unchanged_items + c.deleted_items
]
else:
return c
- def append(self, state, dict_, value, initiator,
+ def append(self, state, dict_, value, initiator,
passive=attributes.PASSIVE_OFF):
if initiator is not self:
self.fire_append_event(state, dict_, value, initiator)
- def remove(self, state, dict_, value, initiator,
+ def remove(self, state, dict_, value, initiator,
passive=attributes.PASSIVE_OFF):
if initiator is not self:
self.fire_remove_event(state, dict_, value, initiator)
mapper = object_mapper(instance)
prop = mapper._props[self.attr.key]
self._criterion = prop.compare(
- operators.eq,
- instance,
- value_is_parent=True,
+ operators.eq,
+ instance,
+ value_is_parent=True,
alias_secondary=False)
if self.attr.order_by:
def append(self, item):
self.attr.append(
- attributes.instance_state(self.instance),
+ attributes.instance_state(self.instance),
attributes.instance_dict(self.instance), item, None)
def remove(self, item):
self.attr.remove(
- attributes.instance_state(self.instance),
+ attributes.instance_state(self.instance),
attributes.instance_dict(self.instance), item, None)
pass
_straight_ops = set(getattr(operators, op)
- for op in ('add', 'mul', 'sub',
+ for op in ('add', 'mul', 'sub',
# Py2K
'div',
- # end Py2K
+ # end Py2K
'mod', 'truediv',
'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
return True
else:
raise UnevaluatableError(
- "Cannot evaluate clauselist with operator %s" %
+ "Cannot evaluate clauselist with operator %s" %
clause.operator)
return evaluate
def visit_binary(self, clause):
- eval_left,eval_right = map(self.process,
+ eval_left,eval_right = map(self.process,
[clause.left, clause.right])
operator = clause.operator
if operator is operators.is_:
return operator(eval_left(obj), eval_right(obj))
else:
raise UnevaluatableError(
- "Cannot evaluate %s with operator %s" %
+ "Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
return evaluate
return not value
return evaluate
raise UnevaluatableError(
- "Cannot evaluate %s with operator %s" %
+ "Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
def visit_bindparam(self, clause):
Conditions which cause this to happen include:
* A flush may have attempted to update or delete rows
- and an unexpected number of rows were matched during
- the UPDATE or DELETE statement. Note that when
+ and an unexpected number of rows were matched during
+ the UPDATE or DELETE statement. Note that when
version_id_col is used, rows in UPDATE or DELETE statements
are also matched against the current known version
identifier.
- * A mapped object with version_id_col was refreshed,
+ * A mapped object with version_id_col was refreshed,
and the version number coming back from the database does
not match that of the object itself.
"""An operation cannot complete due to an object being garbage collected."""
class DetachedInstanceError(sa_exc.SQLAlchemyError):
- """An attempt to access unloaded attributes on a
+ """An attempt to access unloaded attributes on a
mapped instance that is detached."""
class UnmappedInstanceError(UnmappedError):
class ObjectDeletedError(sa_exc.InvalidRequestError):
"""A refresh operation failed to retrieve the database
row corresponding to an object's known primary key identity.
-
- A refresh operation proceeds when an expired attribute is
+
+ A refresh operation proceeds when an expired attribute is
accessed on an object, or when :meth:`.Query.get` is
used to retrieve an object which is, upon retrieval, detected
as expired. A SELECT is emitted for the target row
based on primary key; if no row is returned, this
exception is raised.
-
- The true meaning of this exception is simply that
+
+ The true meaning of this exception is simply that
no row exists for the primary key identifier associated
- with a persistent object. The row may have been
+ with a persistent object. The row may have been
deleted, or in some cases the primary key updated
to a new value, outside of the ORM's management of the target
- object.
-
+ object.
+
"""
def __init__(self, state, msg=None):
if not msg:
o = existing_state._is_really_none()
if o is not None:
raise AssertionError("A conflicting state is already "
- "present in the identity map for key %r"
+ "present in the identity map for key %r"
% (key, ))
else:
return
class ScopedSession(object):
"""Provides thread-local management of Sessions.
-
+
Typical invocation is via the :func:`.scoped_session`
function::
-
+
Session = scoped_session(sessionmaker())
The internal registry is accessible,
self.session_factory.configure(**kwargs)
def query_property(self, query_cls=None):
- """return a class property which produces a `Query` object
+ """return a class property which produces a `Query` object
against the class when called.
e.g.::
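        # a sketch of typical usage; "MyClass" and its mapping are illustrative
        Session = scoped_session(sessionmaker())

        class MyClass(object):
            query = Session.query_property()

        # once MyClass is mapped, MyClass.query produces a Query against it
        MyClass.query.filter(MyClass.name == 'foo').all()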
def get(self):
return getattr(self.registry(), name)
return property(get, set)
-for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
+for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map',
'is_active', 'autoflush', 'no_autoflush'):
setattr(ScopedSession, prop, makeprop(prop))
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""private module containing functions used for copying data
+"""private module containing functions used for copying data
between instances based on join conditions.
"""
from . import exc, util as orm_util, attributes
-def populate(source, source_mapper, dest, dest_mapper,
+def populate(source, source_mapper, dest, dest_mapper,
synchronize_pairs, uowcommit, flag_cascaded_pks):
source_dict = source.dict
dest_dict = dest.dict
try:
# inline of source_mapper._get_state_attr_by_column
prop = source_mapper._columntoproperty[l]
- value = source.manager[prop.key].impl.get(source, source_dict,
+ value = source.manager[prop.key].impl.get(source, source_dict,
attributes.PASSIVE_OFF)
except exc.UnmappedColumnError:
_raise_col_to_prop(False, source_mapper, l, dest_mapper, r)
if r.primary_key:
raise AssertionError(
"Dependency rule tried to blank-out primary key "
- "column '%s' on instance '%s'" %
+ "column '%s' on instance '%s'" %
(r, orm_util.state_str(dest))
)
try:
dict_[r.key] = value
def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
- """return true if the source object has changes from an old to a
+ """return true if the source object has changes from an old to a
new value on the given synchronize pairs
"""
prop = source_mapper._columntoproperty[l]
except exc.UnmappedColumnError:
_raise_col_to_prop(False, source_mapper, l, None, r)
- history = uowcommit.get_attribute_history(source, prop.key,
+ history = uowcommit.get_attribute_history(source, prop.key,
attributes.PASSIVE_NO_INITIALIZE)
return bool(history.deleted)
else:
proxies = {}
def manage(module, **params):
- """Return a proxy for a DB-API module that automatically
+ """Return a proxy for a DB-API module that automatically
pools connections.
Given a DB-API 2.0 module and pool management parameters, returns
class Pool(log.Identified):
"""Abstract base class for connection pools."""
- def __init__(self,
- creator, recycle=-1, echo=None,
+ def __init__(self,
+ creator, recycle=-1, echo=None,
use_threadlocal=False,
logging_name=None,
- reset_on_return=True,
+ reset_on_return=True,
listeners=None,
events=None,
_dispatch=None):
replaced with a newly opened connection. Defaults to -1.
:param logging_name: String identifier which will be used within
- the "name" field of logging records generated within the
- "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
+ the "name" field of logging records generated within the
+ "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param echo: If True, connections being pulled and retrieved
:class:`~sqlalchemy.interfaces.PoolListener`-like objects or
dictionaries of callables that receive events when DB-API
connections are created, checked out and checked in to the
- pool. This has been superseded by
+ pool. This has been superseded by
:func:`~sqlalchemy.event.listen`.
"""
self._reset_on_return = reset_commit
else:
raise exc.ArgumentError(
- "Invalid value for 'reset_on_return': %r"
+ "Invalid value for 'reset_on_return': %r"
% reset_on_return)
self.echo = echo
"""Return a new :class:`.Pool`, of the same class as this one
and configured with identical creation arguments.
- This method is used in conjunction with :meth:`dispose`
- to close out an entire :class:`.Pool` and create a new one in
+ This method is used in conjunction with :meth:`dispose`
+ to close out an entire :class:`.Pool` and create a new one in
its place.
"""
This method leaves the possibility of checked-out connections
remaining open, as it only affects connections that are
idle in the pool.
-
+
See also the :meth:`Pool.recreate` method.
"""
def _replace(self):
"""Dispose + recreate this pool.
-
- Subclasses may employ special logic to
+
+ Subclasses may employ special logic to
move threads waiting on this pool to the
new one.
-
+
"""
self.dispose()
return self.recreate()
def connect(self):
"""Return a DBAPI connection from the pool.
- The connection is instrumented such that when its
- ``close()`` method is called, the connection will be returned to
+ The connection is instrumented such that when its
+ ``close()`` method is called, the connection will be returned to
the pool.
"""
if connection_record is not None:
connection_record.fairy = None
if echo:
- pool.logger.debug("Connection %r being returned to pool",
+ pool.logger.debug("Connection %r being returned to pool",
connection)
if connection_record.finalize_callback:
connection_record.finalize_callback(connection)
- del connection_record.finalize_callback
+ del connection_record.finalize_callback
if pool.dispatch.checkin:
pool.dispatch.checkin(connection, connection_record)
pool._return_conn(connection_record)
rec = self._connection_record = pool._do_get()
conn = self.connection = self._connection_record.get_connection()
rec.fairy = weakref.ref(
- self,
+ self,
lambda ref:_finalize_fairy and _finalize_fairy(conn, rec, pool, ref, _echo)
)
_refs.add(rec)
except:
# helps with endless __getattr__ loops later on
- self.connection = None
+ self.connection = None
self._connection_record = None
raise
if self._echo:
attempts = 2
while attempts > 0:
try:
- self._pool.dispatch.checkout(self.connection,
+ self._pool.dispatch.checkout(self.connection,
self._connection_record,
self)
return self
self._close()
def _close(self):
- _finalize_fairy(self.connection, self._connection_record,
+ _finalize_fairy(self.connection, self._connection_record,
self._pool, None, self._echo)
self.connection = None
self._connection_record = None
Options are the same as those of :class:`.Pool`, as well as:
- :param pool_size: The number of threads in which to maintain connections
+ :param pool_size: The number of threads in which to maintain connections
at once. Defaults to five.
:class:`.SingletonThreadPool` is used by the SQLite dialect
def recreate(self):
self.logger.info("Pool recreating")
- return self.__class__(self._creator,
- pool_size=self.size,
- recycle=self._recycle,
- echo=self.echo,
+ return self.__class__(self._creator,
+ pool_size=self.size,
+ recycle=self._recycle,
+ echo=self.echo,
logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
+ use_threadlocal=self._use_threadlocal,
_dispatch=self.dispatch)
def dispose(self):
class QueuePool(Pool):
"""A :class:`.Pool` that imposes a limit on the number of open connections.
- :class:`.QueuePool` is the default pooling implementation used for
+ :class:`.QueuePool` is the default pooling implementation used for
all :class:`.Engine` objects, unless the SQLite dialect is in use.
"""
:meth:`unique_connection` method is provided to bypass the
threadlocal behavior installed into :meth:`connect`.
- :param reset_on_return: Determine steps to take on
- connections as they are returned to the pool.
+ :param reset_on_return: Determine steps to take on
+ connections as they are returned to the pool.
reset_on_return can have any of these values:
* 'rollback' - call rollback() on the connection,
to release locks and transaction resources.
This is the default value. The vast majority
of use cases should leave this value set.
- * True - same as 'rollback', this is here for
+ * True - same as 'rollback', this is here for
backwards compatibility.
* 'commit' - call commit() on the connection,
- to release locks and transaction resources.
+ to release locks and transaction resources.
A commit here may be desirable for databases that
cache query plans if a commit is emitted,
such as Microsoft SQL Server. However, this
that has no transaction support at all,
namely MySQL MyISAM. By not doing anything,
performance can be improved. This
- setting should **never be selected** for a
+ setting should **never be selected** for a
database that supports transactions,
as it will lead to deadlocks and stale
state.
else:
raise exc.TimeoutError(
"QueuePool limit of size %d overflow %d reached, "
- "connection timed out, timeout %d" %
+ "connection timed out, timeout %d" %
(self.size(), self.overflow(), self._timeout))
self._overflow_lock.acquire()
def recreate(self):
self.logger.info("Pool recreating")
- return self.__class__(self._creator, pool_size=self._pool.maxsize,
+ return self.__class__(self._creator, pool_size=self._pool.maxsize,
max_overflow=self._max_overflow,
- timeout=self._timeout,
- recycle=self._recycle, echo=self.echo,
+ timeout=self._timeout,
+ recycle=self._recycle, echo=self.echo,
logging_name=self._orig_logging_name,
use_threadlocal=self._use_threadlocal,
_dispatch=self.dispatch)
def status(self):
return "Pool size: %d Connections in pool: %d "\
"Current Overflow: %d Current Checked out "\
- "connections: %d" % (self.size(),
- self.checkedin(),
- self.overflow(),
+ "connections: %d" % (self.size(),
+ self.checkedin(),
+ self.overflow(),
self.checkedout())
def size(self):
def recreate(self):
self.logger.info("Pool recreating")
- return self.__class__(self._creator,
- recycle=self._recycle,
- echo=self.echo,
+ return self.__class__(self._creator,
+ recycle=self._recycle,
+ echo=self.echo,
logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
+ use_threadlocal=self._use_threadlocal,
_dispatch=self.dispatch)
def dispose(self):
def recreate(self):
self.logger.info("Pool recreating")
- return self.__class__(self._creator, echo=self.echo,
+ return self.__class__(self._creator, echo=self.echo,
logging_name=self._orig_logging_name,
_dispatch=self.dispatch)
try:
if key not in self.pools:
kw.pop('sa_pool_key', None)
- pool = self.poolclass(lambda:
+ pool = self.poolclass(lambda:
self.module.connect(*args, **kw), **self.kw)
self.pools[key] = pool
return pool
return kw['sa_pool_key']
return tuple(
- list(args) +
+ list(args) +
[(k, kw[k]) for k in sorted(kw)]
)
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-"""defines generic type conversion functions, as used in bind and result
+"""defines generic type conversion functions, as used in bind and result
processors.
They all share one common characteristic: None is passed through unchanged.
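# a tiny hypothetical processor illustrating the shared contract stated
# above: None passes through unchanged, anything else is converted.
def to_str_processor(value):
    if value is None:
        return None
    return str(value)

assert to_str_processor(None) is None
assert to_str_processor(5) == '5'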
def to_decimal_processor_factory(target_class, scale=10):
# Note that the scale argument is not taken into account for integer
- # values in the C implementation while it is in the Python one.
- # For example, the Python implementation might return
- # Decimal('5.00000') whereas the C implementation will
+ # values in the C implementation while it is in the Python one.
+ # For example, the Python implementation might return
+ # Decimal('5.00000') whereas the C implementation will
# return Decimal('5'). These are equivalent of course.
return DecimalResultProcessor(target_class, "%%.%df" % scale).process
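# illustrative usage, assuming the factory is called directly from the
# sqlalchemy.processors module; the outputs follow the note above.
import decimal
from sqlalchemy import processors

proc = processors.to_decimal_processor_factory(decimal.Decimal, 5)
proc(5)      # pure-Python: Decimal('5.00000'); C extension: Decimal('5')
proc(None)   # None passes through unchanged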
class next_value(Function):
"""Represent the 'next value', given a :class:`.Sequence`
as its single argument.
-
+
Compiles into the appropriate function on each backend,
or will raise NotImplementedError if used on a backend
that does not provide support for sequences.
-
+
"""
type = sqltypes.Integer()
name = "next_value"
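# hedged sketch: Sequence.next_value() is the public way to produce this
# construct (the sequence name is an assumption for illustration).
from sqlalchemy import Sequence, select

seq = Sequence('user_id_seq')
stmt = select([seq.next_value()])
# renders nextval('user_id_seq') on PostgreSQL, user_id_seq.nextval on
# Oracle; backends without sequence support raise NotImplementedError.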
SQLAlchemy schema and expression constructs rely on a Python-centric
version of the classic "visitor" pattern as the primary way in which
-they apply functionality. The most common use of this pattern
-is statement compilation, where individual expression classes match
-up to rendering methods that produce a string result. Beyond this,
-the visitor system is also used to inspect expressions for various
-information and patterns, as well as for usage in
+they apply functionality. The most common use of this pattern
+is statement compilation, where individual expression classes match
+up to rendering methods that produce a string result. Beyond this,
+the visitor system is also used to inspect expressions for various
+information and patterns, as well as for usage in
some kinds of expression transformation. Other kinds of transformation
use a non-visitor traversal system.
-For many examples of how the visit system is used, see the
+For many examples of how the visit system is used, see the
sqlalchemy.sql.util and the sqlalchemy.sql.compiler modules.
For an introduction to clause adaptation, see
http://techspot.zzzeek.org/2008/01/23/expression-transformations/
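# a minimal sketch of the visit system described above (names are
# assumptions, not part of this changeset): traverse() walks an expression
# and calls the callable registered for each element's __visit_name__.
from sqlalchemy.sql import column, visitors

expr = (column('x') + column('y')) > 5
seen = []
visitors.traverse(expr, {}, {'column': lambda col: seen.append(col.name)})
# seen == ['x', 'y']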
from .. import util
import operator
-__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor',
- 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
+__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor',
+ 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
'iterate_depthfirst', 'traverse_using', 'traverse',
'cloned_traverse', 'replacement_traverse']
class VisitableType(type):
"""Metaclass which assigns a `_compiler_dispatch` method to classes
having a `__visit_name__` attribute.
-
+
The _compiler_dispatch attribute becomes an instance method which
looks approximately like the following::
-
+
def _compiler_dispatch (self, visitor, **kw):
'''Look for an attribute named "visit_" + self.__visit_name__
on the visitor, and call it with the same kw params.'''
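# hedged sketch of the dispatch described above: an object whose class
# defines visit_<name> methods receives elements via _compiler_dispatch.
# NameCollector is a made-up visitor used only for illustration.
from sqlalchemy.sql import column

class NameCollector(object):
    def visit_column(self, element, **kw):
        return element.name

column('q')._compiler_dispatch(NameCollector())   # returns 'q'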
__metaclass__ = VisitableType
class ClauseVisitor(object):
- """Base class for visitor objects which can traverse using
+ """Base class for visitor objects which can traverse using
the traverse() function.
"""
return self
class CloningVisitor(ClauseVisitor):
- """Base class for visitor objects which can traverse using
+ """Base class for visitor objects which can traverse using
the cloned_traverse() function.
"""
return cloned_traverse(obj, self.__traverse_options__, self._visitor_dict)
class ReplacingCloningVisitor(CloningVisitor):
- """Base class for visitor objects which can traverse using
+ """Base class for visitor objects which can traverse using
the replacement_traverse() function.
"""
def replace(self, elem):
"""receive pre-copied elements during a cloning traversal.
- If the method returns a new element, the element is used
- instead of creating a simple copy of the element. Traversal
+ If the method returns a new element, the element is used
+ instead of creating a simple copy of the element. Traversal
will halt on the newly returned element if it is re-encountered.
"""
return None
return traverse_using(iterate_depthfirst(obj, opts), obj, visitors)
def cloned_traverse(obj, opts, visitors):
- """clone the given expression structure, allowing
+ """clone the given expression structure, allowing
modifications by visitors."""
cloned = util.column_dict()
def replacement_traverse(obj, opts, replace):
- """clone the given expression structure, allowing element
+ """clone the given expression structure, allowing element
replacement by a given replacement function."""
cloned = util.column_dict()
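# hedged sketch of replacement_traverse (identifiers are assumptions):
# returning a new element from the replace function swaps it into the
# cloned structure; returning None keeps a plain copy of the element.
from sqlalchemy.sql import column, visitors

x, y, z = column('x'), column('y'), column('z')
expr = (x + y) > 5

def replace(elem):
    return z if elem is x else None

new_expr = visitors.replacement_traverse(expr, {}, replace)
# str(new_expr) now references "z" where "x" appeared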
def abort(self, context):
"""Issue an 'abort', will force any thread waiting on get()
to stop waiting and raise SAAbort.
-
+
"""
self._sqla_abort_context = context
if not self.not_full.acquire(False):
if not output:
raise CircularDependencyError(
"Circular dependency detected.",
- find_cycles(tuples, allitems),
+ find_cycles(tuples, allitems),
_gen_edges(edges)
)
output = set()
- # we'd like to find all nodes that are
+ # we'd like to find all nodes that are
# involved in cycles, so we do the full
# pass through the whole thing for each
# node in the original list.
def _gen_edges(edges):
return set([
- (right, left)
- for left in edges
- for right in edges[left]
+ (right, left)
+ for left in edges
+ for right in edges[left]
])
x = counter[0]
dec = 10
while dec > 0:
- # trying to count in binary here,
+ # trying to count in binary here,
# works enough to trip the test case
if pow(2, dec) < x:
setattr(w1, 'col%d' % dec, counter[0])
pass
mapper(A, table1, properties={
- 'bs':relationship(B, secondary=table3,
+ 'bs':relationship(B, secondary=table3,
backref='as', order_by=table3.c.t1)
})
mapper(B, table2)
def test_profile_2_insert(self):
self.test_baseline_2_insert()
- @profiling.function_call_count(3118, {'2.7':3333,
+ @profiling.function_call_count(3118, {'2.7':3333,
'2.7+cextension':3109, '2.6':3109})
def test_profile_3_properties(self):
self.test_baseline_3_properties()
def test_profile_5_aggregates(self):
self.test_baseline_5_aggregates()
- @profiling.function_call_count(1788, {'2.4': 1118, '3.2':1647,
+ @profiling.function_call_count(1788, {'2.4': 1118, '3.2':1647,
'2.7+cextension':1698})
def test_profile_6_editing(self):
self.test_baseline_6_editing()
- @profiling.function_call_count(2252, {'2.4': 1673,
+ @profiling.function_call_count(2252, {'2.4': 1673,
'2.6':2412,
'2.7':2412,
'3.2':2396,
- '2.7+cextension':2110,
+ '2.7+cextension':2110,
'2.6+cextension': 2252})
def test_profile_7_multiview(self):
self.test_baseline_7_multiview()
eq_(err.cycles, set(['node1', 'node3', 'node2', 'node5',
'node4']))
eq_(err.edges, set([('node3', 'node1'), ('node4', 'node1'),
- ('node2', 'node3'), ('node1', 'node2'),
+ ('node2', 'node3'), ('node1', 'node2'),
('node4','node5'), ('node5', 'node4')]))
def test_raise_on_cycle_two(self):
except exc.CircularDependencyError, err:
eq_(err.cycles, set(['node1', 'node3', 'node2']))
eq_(err.edges, set([('node3', 'node1'), ('node2', 'node3'),
- ('node3', 'node2'), ('node1', 'node2'),
+ ('node3', 'node2'), ('node1', 'node2'),
('node2','node4')]))
def test_raise_on_cycle_three(self):
])
# node6 only became present here once [ticket:2282] was addressed.
eq_(
- topological.find_cycles(tuples, allnodes),
+ topological.find_cycles(tuples, allnodes),
set(['node1','node2', 'node4', 'node6'])
)
def test_find_multiple_cycles_four(self):
tuples = [
- ('node6', 'node2'),
- ('node15', 'node19'),
+ ('node6', 'node2'),
+ ('node15', 'node19'),
('node19', 'node2'), ('node4', 'node10'),
('node15', 'node13'),
- ('node17', 'node11'), ('node1', 'node19'), ('node15', 'node8'),
- ('node6', 'node20'), ('node14', 'node11'), ('node6', 'node14'),
+ ('node17', 'node11'), ('node1', 'node19'), ('node15', 'node8'),
+ ('node6', 'node20'), ('node14', 'node11'), ('node6', 'node14'),
('node11', 'node2'), ('node10', 'node20'), ('node1', 'node11'),
('node20', 'node19'), ('node4', 'node20'), ('node15', 'node20'),
('node9', 'node19'), ('node11', 'node10'), ('node11', 'node19'),
('node13', 'node6'), ('node3', 'node15'), ('node9', 'node11'),
- ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
+ ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
('node8', 'node4'), ('node11', 'node3'), ('node6', 'node1')
]
allnodes = ['node%d' % i for i in xrange(1, 21)]
eq_(
- topological.find_cycles(tuples, allnodes),
- set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
- 'node19', 'node20', 'node8', 'node1', 'node3',
+ topological.find_cycles(tuples, allnodes),
+ set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
+ 'node19', 'node20', 'node8', 'node1', 'node3',
'node2', 'node4', 'node6'])
)
)
assert_raises(
- exc.InvalidRequestError,
+ exc.InvalidRequestError,
event.listen,
listen, "event_one", self.Target
)
from test.lib import fixtures
from test.lib.testing import eq_
-# Py3K
-#StandardError = BaseException
+# Py3K
+#StandardError = BaseException
# Py2K
from exceptions import StandardError, KeyboardInterrupt, SystemExit
# end Py2K
def test_tostring_large_dict(self):
try:
raise sa_exceptions.DBAPIError.instance('this is a message'
- ,
+ ,
{'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h':
8, 'i': 9, 'j': 10, 'k': 11,
}, OperationalError(), DatabaseError)
def test_tostring_large_list(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
- [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
+ raise sa_exceptions.DBAPIError.instance('this is a message',
+ [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError, exc:
assert str(exc).startswith("(OperationalError) 'this is a "
def test_tostring_large_executemany(self):
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
- [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1: 1}, {1:1}, {1: 1}, {1: 1},],
+ raise sa_exceptions.DBAPIError.instance('this is a message',
+ [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
+ {1: 1}, {1:1}, {1: 1}, {1: 1},],
OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError, exc:
eq_(str(exc) ,
"1}, {1: 1}, {1: 1}]")
try:
raise sa_exceptions.DBAPIError.instance('this is a message', [
- {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
+ {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
{1:1}, {1: 1}, {1: 1}, {1: 1},
], OperationalError(), DatabaseError)
except sa_exceptions.DBAPIError, exc:
"bound parameter sets ... {1: 1}, {1: 1}]"
)
try:
- raise sa_exceptions.DBAPIError.instance('this is a message',
+ raise sa_exceptions.DBAPIError.instance('this is a message',
[
(1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
(1, ),
def test_db_error_noncompliant_dbapi(self):
try:
- raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
+ raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
DatabaseError)
except sa_exceptions.DBAPIError, e:
self.assert_(e.__class__ is sa_exceptions.DBAPIError)
ids2 - ids1,
IdentitySet([o2, o3])
)
-
+
ids2 -= ids1
eq_(ids2, IdentitySet([o2, o3]))
_create_testing_engine, _engine_pool, _engine_strategy, _engine_uri, _list_dbs, _log,
_prep_testing_database, _require, _reverse_topological, _server_side_cursors,
_monkeypatch_cdecimal, _zero_timeout,
- _set_table_options, base_config, db, db_label, db_url, file_config, post_configure,
+ _set_table_options, base_config, db, db_label, db_url, file_config, post_configure,
pre_configure)
log = logging.getLogger('nose.plugins.sqlalchemy')
class BuggyDomainReflectionTest(fixtures.TestBase, AssertsExecutionResults):
- """Test Firebird domains (and some other reflection bumps),
+ """Test Firebird domains (and some other reflection bumps),
see [ticket:1663] and http://tracker.firebirdsql.org/browse/CORE-356"""
__only_on__ = 'firebird'
@testing.provide_metadata
def test_infinite_float(self):
metadata = self.metadata
- t = Table('t', metadata,
+ t = Table('t', metadata,
Column('data', Float)
)
metadata.create_all()
# TODO
-# - add "Database" test, a quick check for join behavior on different
+# - add "Database" test, a quick check for join behavior on different
# max versions
# - full max-specific reflection suite
# - datetime tests
Column("master_ssl_verify_server_cert", Integer))
x = select([table.c.col1, table.c.master_ssl_verify_server_cert])
- self.assert_compile(x,
+ self.assert_compile(x,
'''SELECT mysql_table.col1, mysql_table.`master_ssl_verify_server_cert` FROM mysql_table''')
def test_create_index_simple(self):
def test_create_pk_plain(self):
m = MetaData()
- tbl = Table('testtbl', m, Column('data', String(255)),
+ tbl = Table('testtbl', m, Column('data', String(255)),
PrimaryKeyConstraint('data'))
self.assert_compile(schema.CreateTable(tbl),
def test_create_pk_with_using(self):
m = MetaData()
- tbl = Table('testtbl', m, Column('data', String(255)),
+ tbl = Table('testtbl', m, Column('data', String(255)),
PrimaryKeyConstraint('data', mysql_using='btree'))
self.assert_compile(schema.CreateTable(tbl),
class DialectTest(fixtures.TestBase):
__only_on__ = 'mysql'
- @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
+ @testing.only_on(['mysql+mysqldb', 'mysql+oursql'],
'requires particular SSL arguments')
def test_ssl_arguments(self):
dialect = testing.db.dialect
for k in ('use_unicode', 'found_rows', 'client_flag'):
kwarg.pop(k, None)
eq_(
- kwarg,
+ kwarg,
{
- 'passwd': 'tiger', 'db': 'test',
- 'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem',
- 'key': '/key.pem'},
- 'host': 'localhost', 'user': 'scott',
+ 'passwd': 'tiger', 'db': 'test',
+ 'ssl': {'ca': '/ca.pem', 'cert': '/cert.pem',
+ 'key': '/key.pem'},
+ 'host': 'localhost', 'user': 'scott',
'port': 3306
}
)
columns = [
# column type, args, kwargs, expected ddl
- # e.g. Column(Integer(10, unsigned=True)) ==
+ # e.g. Column(Integer(10, unsigned=True)) ==
# 'INTEGER(10) UNSIGNED'
(mysql.MSNumeric, [], {},
'NUMERIC'),
# there's a slight assumption here that this test can
# complete within the scope of a single second.
# if needed, can break out the eq_() just to check for
- # timestamps that are within a few seconds of "now"
+ # timestamps that are within a few seconds of "now"
# using timedelta.
now = testing.db.execute("select now()").scalar()
enum_table.drop(checkfirst=True)
enum_table.create()
- assert_raises(exc.DBAPIError, enum_table.insert().execute,
+ assert_raises(exc.DBAPIError, enum_table.insert().execute,
e1=None, e2=None, e3=None, e4=None)
assert_raises(exc.StatementError, enum_table.insert().execute,
res = enum_table.select().execute().fetchall()
- expected = [(None, 'a', 'a', None, 'a', None, None, None),
- ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
+ expected = [(None, 'a', 'a', None, 'a', None, None, None),
+ ('a', 'a', 'a', 'a', 'a', 'a', 'a', "'a'"),
('b', 'b', 'b', 'b', 'b', 'b', 'b', 'b')]
# This is known to fail with MySQLDB 1.2.2 beta versions
t1.insert().execute(value=u'drôle', value2=u'drôle')
t1.insert().execute(value=u'réveillé', value2=u'réveillé')
t1.insert().execute(value=u'S’il', value2=u'S’il')
- eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
- [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
+ eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+ [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
(3, u'S’il', u'S’il')]
)
assert t2.c.value.type.enums[0:2] == \
(u'réveillé', u'drôle') #, u'S’il') # eh ?
assert t2.c.value2.type.enums[0:2] == \
- (u'réveillé', u'drôle') #, u'S’il') # eh ?
+ (u'réveillé', u'drôle') #, u'S’il') # eh ?
finally:
metadata.drop_all()
)
eq_(gen(prefixes=['ALL']), 'SELECT ALL q')
- eq_(gen(prefixes=['DISTINCTROW']),
+ eq_(gen(prefixes=['DISTINCTROW']),
'SELECT DISTINCTROW q')
# Interaction with MySQL prefix extensions
)
self.assert_compile(
- select(['q'], distinct='ALL',
+ select(['q'], distinct='ALL',
prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']),
'SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL q'
)
)
self.assert_compile(
select([t]).limit(10),
- "SELECT t.col1, t.col2 FROM t LIMIT %s",
+ "SELECT t.col1, t.col2 FROM t LIMIT %s",
{'param_1':10})
self.assert_compile(
):
type_ = sqltypes.to_instance(type_)
assert_raises_message(
- exc.CompileError,
+ exc.CompileError,
"VARCHAR requires a length on dialect mysql",
- type_.compile,
+ type_.compile,
dialect=mysql.dialect())
t1 = Table('sometable', MetaData(),
# 'SIGNED INTEGER' is a bigint, so this is ok.
(m.MSBigInteger, "CAST(t.col AS SIGNED INTEGER)"),
(m.MSBigInteger(unsigned=False), "CAST(t.col AS SIGNED INTEGER)"),
- (m.MSBigInteger(unsigned=True),
+ (m.MSBigInteger(unsigned=True),
"CAST(t.col AS UNSIGNED INTEGER)"),
(m.MSBit, "t.col"),
tname = 'zyrenian_zyme_zyzzogeton_zyzzogeton'
cname = 'zyrenian_zyme_zyzzogeton_zo'
- t1 = Table(tname, MetaData(),
+ t1 = Table(tname, MetaData(),
Column(cname, Integer, index=True),
)
ix1 = list(t1.indexes)[0]
def test_create_index_with_ops(self):
m = MetaData()
tbl = Table('testtbl', m,
- Column('data', String),
+ Column('data', String),
Column('data2', Integer, key='d2'))
idx = Index('test_idx1', tbl.c.data,
Column("variadic", Integer))
x = select([table.c.col1, table.c.variadic])
- self.assert_compile(x,
+ self.assert_compile(x,
'''SELECT pg_table.col1, pg_table."variadic" FROM pg_table''')
def test_from_only(self):
@testing.provide_metadata
def test_arrays(self):
metadata = self.metadata
- t1 = Table('t', metadata,
+ t1 = Table('t', metadata,
Column('x', postgresql.ARRAY(Float)),
Column('y', postgresql.ARRAY(REAL)),
Column('z', postgresql.ARRAY(postgresql.DOUBLE_PRECISION)),
t1.insert().execute(x=[5], y=[5], z=[6], q=[decimal.Decimal("6.4")])
row = t1.select().execute().first()
eq_(
- row,
+ row,
([5], [5], [6], [decimal.Decimal("6.4")])
)
metadata = MetaData(testing.db)
t1 = Table('table', metadata,
Column('id', Integer, primary_key=True),
- Column('value',
+ Column('value',
Enum(u'réveillé', u'drôle', u'S’il',
name='onetwothreetype'))
)
t1.insert().execute(value=u'drôle')
t1.insert().execute(value=u'réveillé')
t1.insert().execute(value=u'S’il')
- eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
+ eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
[(1, u'drôle'), (2, u'réveillé'), (3, u'S’il')]
)
m2 = MetaData(testing.db)
def test_disable_create(self):
metadata = self.metadata
- e1 = postgresql.ENUM('one', 'two', 'three',
+ e1 = postgresql.ENUM('one', 'two', 'three',
name="myenum",
create_type=False)
- t1 = Table('e1', metadata,
+ t1 = Table('e1', metadata,
Column('c1', e1)
)
# table can be created separately
"""
metadata = self.metadata
- e1 = Enum('one', 'two', 'three',
+ e1 = Enum('one', 'two', 'three',
name="myenum")
t1 = Table('e1', metadata,
Column('c1', e1)
@testing.provide_metadata
def test_numeric_default(self):
metadata = self.metadata
- # pg8000 appears to fail when the value is 0,
+ # pg8000 appears to fail when the value is 0,
# returns an int instead of decimal.
- t =Table('t', metadata,
+ t =Table('t', metadata,
Column('id', Integer, primary_key=True),
Column('nd', Numeric(asdecimal=True), default=1),
Column('nf', Numeric(asdecimal=False), default=1),
__dialect__ = postgresql.dialect()
def setup(self):
- self.table = Table('t', MetaData(),
- Column('id',Integer, primary_key=True),
+ self.table = Table('t', MetaData(),
+ Column('id',Integer, primary_key=True),
Column('a', String),
Column('b', String),
)
def test_on_columns_inline_list(self):
self.assert_compile(
- select([self.table],
+ select([self.table],
distinct=[self.table.c.a, self.table.c.b]).
order_by(self.table.c.a, self.table.c.b),
"SELECT DISTINCT ON (t.a, t.b) t.id, "
m1 = MetaData()
- t2_schema = Table('some_other_table',
- m1,
- schema="test_schema_2",
- autoload=True,
+ t2_schema = Table('some_other_table',
+ m1,
+ schema="test_schema_2",
+ autoload=True,
autoload_with=conn)
- t1_schema = Table('some_table',
- m1,
- schema="test_schema",
+ t1_schema = Table('some_table',
+ m1,
+ schema="test_schema",
autoload=True,
autoload_with=conn)
- t2_no_schema = Table('some_other_table',
- m1,
- autoload=True,
+ t2_no_schema = Table('some_other_table',
+ m1,
+ autoload=True,
autoload_with=conn)
- t1_no_schema = Table('some_table',
- m1,
- autoload=True,
+ t1_no_schema = Table('some_table',
+ m1,
+ autoload=True,
autoload_with=conn)
- # OK, this because, "test_schema" is
+ # OK, this because, "test_schema" is
# in the search path, and might as well be
# the default too. why would we assign
# a "schema" to the Table ?
@testing.provide_metadata
def test_index_reflection_modified(self):
- """reflect indexes when a column name has changed - PG 9
+ """reflect indexes when a column name has changed - PG 9
does not update the name of the column in the index def.
[ticket:2141]
current_encoding = c.connection.connection.encoding
c.close()
- # attempt to use an encoding that's not
+ # attempt to use an encoding that's not
# already set
if current_encoding == 'UTF8':
test_encoding = 'LATIN1'
for v in value
]
- arrtable = Table('arrtable', metadata,
- Column('id', Integer, primary_key=True),
- Column('intarr',postgresql.ARRAY(Integer)),
+ arrtable = Table('arrtable', metadata,
+ Column('id', Integer, primary_key=True),
+ Column('intarr',postgresql.ARRAY(Integer)),
Column('strarr',postgresql.ARRAY(Unicode())),
Column('dimarr', ProcValue)
)
metadata.create_all()
testing.db.execute(t1.insert(), id=1, data=["1","2","3"], data2=[5.4, 5.6])
testing.db.execute(t1.insert(), id=2, data=["4", "5", "6"], data2=[1.0])
- testing.db.execute(t1.insert(), id=3, data=[["4", "5"], ["6", "7"]],
+ testing.db.execute(t1.insert(), id=3, data=[["4", "5"], ["6", "7"]],
data2=[[5.4, 5.6], [1.0, 1.1]])
r = testing.db.execute(t1.select().order_by(t1.c.id)).fetchall()
eq_(
- r,
+ r,
[
- (1, ('1', '2', '3'), (5.4, 5.6)),
- (2, ('4', '5', '6'), (1.0,)),
+ (1, ('1', '2', '3'), (5.4, 5.6)),
+ (2, ('4', '5', '6'), (1.0,)),
(3, (('4', '5'), ('6', '7')), ((5.4, 5.6), (1.0, 1.1)))
]
)
def test_uuid_string(self):
import uuid
self._test_round_trip(
- Table('utable', MetaData(),
+ Table('utable', MetaData(),
Column('data', postgresql.UUID())
),
str(uuid.uuid4()),
def test_uuid_uuid(self):
import uuid
self._test_round_trip(
- Table('utable', MetaData(),
+ Table('utable', MetaData(),
Column('data', postgresql.UUID(as_uuid=True))
),
uuid.uuid4(),
{'id': 2, 'description': 'Ruby'}])
matchtable.insert().execute([{'id': 1, 'title'
: 'Agile Web Development with Rails'
- , 'category_id': 2},
+ , 'category_id': 2},
{'id': 2,
'title': 'Dive Into Python',
- 'category_id': 1},
+ 'category_id': 1},
{'id': 3, 'title'
: "Programming Matz's Ruby",
- 'category_id': 2},
+ 'category_id': 2},
{'id': 4, 'title'
: 'The Definitive Guide to Django',
- 'category_id': 1},
+ 'category_id': 1},
{'id': 5, 'title'
: 'Python in a Nutshell',
'category_id': 1}])
testing.db.execute(
select([
tuple_(
- literal_column("'a'"),
+ literal_column("'a'"),
literal_column("'b'")
).\
in_([
tuple_(*[
- literal_column("'%s'" % letter)
+ literal_column("'%s'" % letter)
for letter in elem
]) for elem in test
])
("crap.crap.crap", ()),
]:
eq_(
- connector._parse_dbapi_version(vers),
+ connector._parse_dbapi_version(vers),
expected
)
\ No newline at end of file
t.create(engine)
try:
engine.execute(t.insert(), {'d1': datetime.date(2010, 5,
- 10),
+ 10),
'd2': datetime.datetime( 2010, 5, 10, 12, 15, 25,
)})
row = engine.execute(t.select()).first()
- eq_(row, (1, datetime.date(2010, 5, 10),
+ eq_(row, (1, datetime.date(2010, 5, 10),
datetime.datetime( 2010, 5, 10, 12, 15, 25, )))
r = engine.execute(func.current_date()).scalar()
assert isinstance(r, basestring)
m2 = MetaData(db)
t2 = Table('r_defaults', m2, autoload=True)
self.assert_compile(
- CreateTable(t2),
+ CreateTable(t2),
"CREATE TABLE r_defaults (data VARCHAR(40) "
"DEFAULT 'my_default', val INTEGER DEFAULT 0 "
"NOT NULL)"
@testing.provide_metadata
def test_boolean_default(self):
- t= Table("t", self.metadata,
+ t= Table("t", self.metadata,
Column("x", Boolean, server_default=sql.false()))
t.create(testing.db)
testing.db.execute(t.insert())
CREATE TABLE "django_admin_log" (
"id" integer NOT NULL PRIMARY KEY,
"action_time" datetime NOT NULL,
- "content_type_id" integer NULL
+ "content_type_id" integer NULL
REFERENCES "django_content_type" ("id"),
"object_id" text NULL,
"change_message" text NOT NULL
sql.false(), "0"
)
self.assert_compile(
- sql.true(),
+ sql.true(),
"1"
)
def test_constraints_with_schemas(self):
metadata = MetaData()
- t1 = Table('t1', metadata,
+ t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True),
schema='master')
- t2 = Table('t2', metadata,
+ t2 = Table('t2', metadata,
Column('id', Integer, primary_key=True),
Column('t1_id', Integer, ForeignKey('master.t1.id')),
schema='master'
)
- t3 = Table('t3', metadata,
+ t3 = Table('t3', metadata,
Column('id', Integer, primary_key=True),
Column('t1_id', Integer, ForeignKey('master.t1.id')),
schema='alternate'
)
- t4 = Table('t4', metadata,
+ t4 = Table('t4', metadata,
Column('id', Integer, primary_key=True),
Column('t1_id', Integer, ForeignKey('master.t1.id')),
)
metadata = MetaData(testing.db)
testing.db.execute("""
CREATE VIRTUAL TABLE cattable using FTS3 (
- id INTEGER NOT NULL,
- description VARCHAR(50),
+ id INTEGER NOT NULL,
+ description VARCHAR(50),
PRIMARY KEY (id)
)
""")
cattable = Table('cattable', metadata, autoload=True)
testing.db.execute("""
CREATE VIRTUAL TABLE matchtable using FTS3 (
- id INTEGER NOT NULL,
+ id INTEGER NOT NULL,
title VARCHAR(200),
- category_id INTEGER NOT NULL,
+ category_id INTEGER NOT NULL,
PRIMARY KEY (id)
)
""")
def test_name_not_none(self):
# we don't have names for PK constraints,
- # it appears we get back None in the pragma for
+ # it appears we get back None in the pragma for
# FKs also (also it doesn't even appear to be documented on sqlite's docs
# at http://www.sqlite.org/pragma.html#pragma_foreign_key_list
# how did we ever know that's the "name" field ??)
def test_deprecated_append_ddl_listener_table(self):
metadata, users, engine = self.metadata, self.users, self.engine
canary = []
- users.append_ddl_listener('before-create',
+ users.append_ddl_listener('before-create',
lambda e, t, b:canary.append('mxyzptlk')
)
- users.append_ddl_listener('after-create',
+ users.append_ddl_listener('after-create',
lambda e, t, b:canary.append('klptzyxm')
)
- users.append_ddl_listener('before-drop',
+ users.append_ddl_listener('before-drop',
lambda e, t, b:canary.append('xyzzy')
)
- users.append_ddl_listener('after-drop',
+ users.append_ddl_listener('after-drop',
lambda e, t, b:canary.append('fnord')
)
def test_deprecated_append_ddl_listener_metadata(self):
metadata, users, engine = self.metadata, self.users, self.engine
canary = []
- metadata.append_ddl_listener('before-create',
+ metadata.append_ddl_listener('before-create',
lambda e, t, b, tables=None:canary.append('mxyzptlk')
)
- metadata.append_ddl_listener('after-create',
+ metadata.append_ddl_listener('after-create',
lambda e, t, b, tables=None:canary.append('klptzyxm')
)
- metadata.append_ddl_listener('before-drop',
+ metadata.append_ddl_listener('before-drop',
lambda e, t, b, tables=None:canary.append('xyzzy')
)
- metadata.append_ddl_listener('after-drop',
+ metadata.append_ddl_listener('after-drop',
lambda e, t, b, tables=None:canary.append('fnord')
)
assert DDL('').execute_if(callable_=lambda d, y,z, **kw: True).\
_should_execute(tbl, cx)
assert(DDL('').execute_if(
- callable_=lambda d, y,z, **kw: z.engine.name
+ callable_=lambda d, y,z, **kw: z.engine.name
!= 'bogus').
_should_execute(tbl, cx))
assert e.echo is True
for param, values in [
- ('convert_unicode', ('true', 'false', 'force')),
+ ('convert_unicode', ('true', 'false', 'force')),
('echo', ('true', 'false', 'debug')),
('echo_pool', ('true', 'false', 'debug')),
('use_native_unicode', ('true', 'false')),
assert e.pool._reset_on_return is expected
assert_raises(
- exc.ArgumentError,
+ exc.ArgumentError,
create_engine, "postgresql://",
pool_reset_on_return='hi', module=dbapi,
_initialize=False
every backend.
"""
- # pretend pysqlite throws the
+ # pretend pysqlite throws the
# "Cannot operate on a closed database." error
# on connect. IRL we'd be getting Oracle's "shutdown in progress"
# note - using straight create_engine here
# since we are testing gc
db = create_engine(
- 'postgresql://foo:bar@localhost/test',
+ 'postgresql://foo:bar@localhost/test',
module=dbapi, _initialize=False)
# monkeypatch disconnect checker
dbapi = MDBAPI()
db = testing_engine(
- 'postgresql://foo:bar@localhost/test',
+ 'postgresql://foo:bar@localhost/test',
options=dict(module=dbapi, _initialize=False))
def test_cursor_explode(self):
# set the pool recycle down to 1.
# we aren't doing this inline with the
- # engine create since cx_oracle takes way
+ # engine create since cx_oracle takes way
# too long to create the 1st connection and don't
# want to build a huge delay into this test.
t2 = Table('t', m2, old_z, old_q)
eq_(t2.primary_key.columns, (t2.c.z, ))
t2 = Table('t', m2, old_y,
- extend_existing=True,
- autoload=True,
+ extend_existing=True,
+ autoload=True,
autoload_with=testing.db)
eq_(
- set(t2.columns.keys()),
+ set(t2.columns.keys()),
set(['x', 'y', 'z', 'q', 'id'])
)
eq_(t2.primary_key.columns, (t2.c.id, ))
m3 = MetaData()
t3 = Table('t', m3, Column('z', Integer))
- t3 = Table('t', m3, extend_existing=False,
- autoload=True,
+ t3 = Table('t', m3, extend_existing=False,
+ autoload=True,
autoload_with=testing.db)
eq_(
- set(t3.columns.keys()),
+ set(t3.columns.keys()),
set(['z'])
)
t4 = Table('t', m4, old_z, old_q)
eq_(t4.primary_key.columns, (t4.c.z, ))
t4 = Table('t', m4, old_y,
- extend_existing=True,
- autoload=True,
+ extend_existing=True,
+ autoload=True,
autoload_replace=False,
autoload_with=testing.db)
eq_(
- set(t4.columns.keys()),
+ set(t4.columns.keys()),
set(['x', 'y', 'z', 'q', 'id'])
)
eq_(t4.primary_key.columns, (t4.c.id, ))
@testing.provide_metadata
def test_autoload_replace_foreign_key_nonpresent(self):
- """test autoload_replace=False with col plus FK
+ """test autoload_replace=False with col plus FK
establishes the FK not present in the DB.
-
+
"""
a = Table('a', self.metadata, Column('id', Integer, primary_key=True))
b = Table('b', self.metadata, Column('id', Integer, primary_key=True),
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
- b2 = Table('b', m2, extend_existing=True, autoload=True,
- autoload_with=testing.db,
+ b2 = Table('b', m2, extend_existing=True, autoload=True,
+ autoload_with=testing.db,
autoload_replace=False)
assert b2.c.id is not None
@testing.provide_metadata
def test_autoload_replace_foreign_key_ispresent(self):
"""test autoload_replace=False with col plus FK mirroring
- DB-reflected FK skips the reflected FK and installs
+ DB-reflected FK skips the reflected FK and installs
the in-python one only.
-
+
"""
a = Table('a', self.metadata, Column('id', Integer, primary_key=True))
b = Table('b', self.metadata, Column('id', Integer, primary_key=True),
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer, sa.ForeignKey('a.id')))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
- b2 = Table('b', m2, extend_existing=True, autoload=True,
- autoload_with=testing.db,
+ b2 = Table('b', m2, extend_existing=True, autoload=True,
+ autoload_with=testing.db,
autoload_replace=False)
assert b2.c.id is not None
m2 = MetaData()
b2 = Table('b', m2, Column('a_id', Integer))
a2 = Table('a', m2, autoload=True, autoload_with=testing.db)
- b2 = Table('b', m2, extend_existing=True, autoload=True,
- autoload_with=testing.db,
+ b2 = Table('b', m2, extend_existing=True, autoload=True,
+ autoload_with=testing.db,
autoload_replace=False)
assert b2.c.id is not None
meta4 = MetaData(testing.db)
- u4 = Table('users', meta4,
+ u4 = Table('users', meta4,
Column('id', sa.Integer, key='u_id', primary_key=True),
autoload=True)
@testing.provide_metadata
def test_override_keys(self):
- """test that columns can be overridden with a 'key',
+ """test that columns can be overridden with a 'key',
and that ForeignKey targeting during reflection still works."""
meta = self.metadata
)
meta.create_all()
m2 = MetaData(testing.db)
- a2 = Table('a', m2,
+ a2 = Table('a', m2,
Column('x', sa.Integer, primary_key=True, key='x1'),
autoload=True)
b2 = Table('b', m2, autoload=True)
meta.create_all()
meta2 = MetaData(testing.db)
- a2 = Table('addresses', meta2,
+ a2 = Table('addresses', meta2,
Column('user_id',sa.Integer, sa.ForeignKey('users.id')),
autoload=True)
u2 = Table('users', meta2, autoload=True)
@testing.crashes('oracle', 'FIXME: unknown, confirm not fails_on')
- @testing.fails_on('+informixdb',
+ @testing.fails_on('+informixdb',
"FIXME: should be supported via the "
"DELIMITED env var but that breaks "
"everything else for now")
check_col = 'true'
quoter = meta.bind.dialect.identifier_preparer.quote_identifier
- table_b = Table('false', meta,
- Column('create', sa.Integer, primary_key=True),
+ table_b = Table('false', meta,
+ Column('create', sa.Integer, primary_key=True),
Column('true', sa.Integer,sa.ForeignKey('select.not')),
sa.CheckConstraint('%s <> 1'
% quoter(check_col), name='limit')
)
- table_c = Table('is', meta,
- Column('or', sa.Integer, nullable=False, primary_key=True),
+ table_c = Table('is', meta,
+ Column('or', sa.Integer, nullable=False, primary_key=True),
Column('join', sa.Integer, nullable=False, primary_key=True),
sa.PrimaryKeyConstraint('or', 'join', name='to')
)
m2.reflect(views=False)
eq_(
- set(m2.tables),
+ set(m2.tables),
set(['users', 'email_addresses', 'dingalings'])
)
m2 = MetaData(testing.db)
m2.reflect(views=True)
eq_(
- set(m2.tables),
- set(['email_addresses_v', 'users_v',
+ set(m2.tables),
+ set(['email_addresses_v', 'users_v',
'users', 'dingalings', 'email_addresses'])
)
finally:
def setup_class(cls):
global metadata, users
metadata = MetaData()
- users = Table('users', metadata,
+ users = Table('users', metadata,
Column('user_id', sa.Integer,
sa.Sequence('user_id_seq', optional=True),
- primary_key=True),
+ primary_key=True),
Column('user_name',sa.String(40)))
addresses = Table('email_addresses', metadata,
Column('address_id', sa.Integer,
sa.Sequence('address_id_seq', optional=True),
- primary_key=True),
+ primary_key=True),
Column('user_id',
sa.Integer, sa.ForeignKey(users.c.user_id)),
Column('email_address', sa.String(40)))
meta = MetaData()
users = Table('users', meta, Column('id', sa.Integer))
- addresses = Table('addresses', meta,
- Column('id', sa.Integer),
+ addresses = Table('addresses', meta,
+ Column('id', sa.Integer),
Column('user_id', sa.Integer))
fk = sa.ForeignKeyConstraint(['user_id'],[users.c.id])
(u'\u6e2c\u8a66', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
]
- # as you can see, our options for this kind of thing
+ # as you can see, our options for this kind of thing
# are really limited unless you're on PG or SQLite
# forget about it on these backends
elif testing.against("mysql") and \
not testing.requires._has_mysql_fully_case_sensitive():
names = no_multibyte_period.union(no_case_sensitivity)
- # mssql + pyodbc + freetds can't compare multibyte names to
+ # mssql + pyodbc + freetds can't compare multibyte names to
# information_schema.tables.table_name
elif testing.against("mssql"):
names = no_multibyte_period.union(no_has_table)
m2 = MetaData(schema="test_schema", bind=testing.db)
m2.reflect()
eq_(
- set(m2.tables),
- set(['test_schema.dingalings', 'test_schema.users',
+ set(m2.tables),
+ set(['test_schema.dingalings', 'test_schema.users',
'test_schema.email_addresses'])
)
)
dingalings = Table("dingalings", meta,
Column('dingaling_id', sa.Integer, primary_key=True),
- Column('address_id', sa.Integer,
+ Column('address_id', sa.Integer,
sa.ForeignKey('%semail_addresses.address_id' % schema_prefix)),
Column('data', sa.String(30)),
schema=schema,
@classmethod
def define_tables(cls, metadata):
- Table('SomeTable', metadata,
+ Table('SomeTable', metadata,
Column('x', Integer, primary_key=True),
test_needs_fk=True
)
- Table('SomeOtherTable', metadata,
+ Table('SomeOtherTable', metadata,
Column('x', Integer, primary_key=True),
Column('y', Integer, sa.ForeignKey("SomeTable.x")),
test_needs_fk=True
eq_(t1.name, "SomeTable")
assert t1.c.x is not None
- @testing.fails_if(lambda:
- testing.against(('mysql', '<', (5, 5))) and
+ @testing.fails_if(lambda:
+ testing.against(('mysql', '<', (5, 5))) and
not testing.requires._has_mysql_fully_case_sensitive()
)
def test_reflect_via_fk(self):
eng = testing_engine(options=dict())
conn = eng.connect()
eq_(
- eng.dialect.get_isolation_level(conn.connection),
+ eng.dialect.get_isolation_level(conn.connection),
self._default_isolation_level()
)
conn.connection, self._non_default_isolation_level()
)
eq_(
- eng.dialect.get_isolation_level(conn.connection),
+ eng.dialect.get_isolation_level(conn.connection),
self._non_default_isolation_level()
)
eng.dialect.reset_isolation_level(conn.connection)
eq_(
- eng.dialect.get_isolation_level(conn.connection),
+ eng.dialect.get_isolation_level(conn.connection),
self._default_isolation_level()
)
def test_invalid_level(self):
eng = testing_engine(options=dict(isolation_level='FOO'))
assert_raises_message(
- exc.ArgumentError,
+ exc.ArgumentError,
"Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
- ("FOO", eng.dialect.name,
+ "Valid isolation levels for %s are %s" %
+ ("FOO", eng.dialect.name,
", ".join(eng.dialect._isolation_lookup)),
eng.connect)
def test_per_connection(self):
from sqlalchemy.pool import QueuePool
eng = testing_engine(options=dict(
- poolclass=QueuePool,
+ poolclass=QueuePool,
pool_size=2, max_overflow=0))
c1 = eng.connect()
r"on Connection.execution_options\(\), or "
r"per-engine using the isolation_level "
r"argument to create_engine\(\).",
- select([1]).execution_options,
+ select([1]).execution_options,
isolation_level=self._non_default_isolation_level()
)
r"To set engine-wide isolation level, "
r"use the isolation_level argument to create_engine\(\).",
create_engine,
- testing.db.url,
+ testing.db.url,
execution_options={'isolation_level':
self._non_default_isolation_level}
)
)
class Parent(object):
- children = association_proxy('_children', 'name',
- proxy_factory=CustomProxy,
+ children = association_proxy('_children', 'name',
+ proxy_factory=CustomProxy,
proxy_bulk_set=CustomProxy.extend
)
@classmethod
def define_tables(cls, metadata):
- Table('userkeywords', metadata,
+ Table('userkeywords', metadata,
Column('keyword_id', Integer,ForeignKey('keywords.id'), primary_key=True),
Column('user_id', Integer, ForeignKey('users.id'))
)
- Table('users', metadata,
+ Table('users', metadata,
Column('id', Integer,
primary_key=True, test_needs_autoincrement=True),
Column('name', String(64)),
Column('singular_id', Integer, ForeignKey('singular.id'))
)
- Table('keywords', metadata,
+ Table('keywords', metadata,
Column('id', Integer,
primary_key=True, test_needs_autoincrement=True),
Column('keyword', String(64)),
})
mapper(UserKeyword, userkeywords, properties={
- 'user' : relationship(User, backref='user_keywords'),
+ 'user' : relationship(User, backref='user_keywords'),
'keyword' : relationship(Keyword)
})
mapper(Singular, singular, properties={
User = self.classes.User
self.assert_compile(
self.session.query(User).join(
- User.keywords.local_attr,
+ User.keywords.local_attr,
User.keywords.remote_attr),
"SELECT users.id AS users_id, users.name AS users_name, "
"users.singular_id AS users_singular_id "
m = MetaData()
a = Table('a', m, Column('id', Integer, primary_key=True))
- b = Table('b', m, Column('id', Integer, primary_key=True),
+ b = Table('b', m, Column('id', Integer, primary_key=True),
Column('aid', Integer, ForeignKey('a.id')))
mapper(A, a, properties={
'orig':relationship(B, collection_class=attribute_mapped_collection('key'))
)
def test_annotations(self):
- """test that annotated clause constructs use the
+ """test that annotated clause constructs use the
decorated class' compiler.
"""
return "BIND(%s)" % compiler.visit_bindparam(element, **kw)
self.assert_compile(
- t.select().where(t.c.c == 5),
+ t.select().where(t.c.c == 5),
"SELECT t.a, t.b, t.c FROM t WHERE t.c = BIND(:c_1)",
use_default_dialect=True
)
return "BIND(%s)" % compiler.visit_bindparam(element, **kw)
self.assert_compile(
- t.insert(),
+ t.insert(),
"INSERT INTO t (a, b) VALUES (BIND(:a), BIND(:b))",
{'a':1, 'b':2},
use_default_dialect=True
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
email = Column(String(50))
- user_id = Column(Integer)
- user = relationship("User",
+ user_id = Column(Integer)
+ user = relationship("User",
primaryjoin="remote(User.id)==foreign(Address.user_id)"
)
name = Column(String(50))
props = relationship('Prop', secondary='fooschema.user_to_prop',
primaryjoin='User.id==fooschema.user_to_prop.c.user_id',
- secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id',
+ secondaryjoin='fooschema.user_to_prop.c.prop_id==Prop.id',
backref='users')
class Prop(Base):
name = Column(String(50))
user_to_prop = Table('user_to_prop', Base.metadata,
- Column('user_id', Integer, ForeignKey('fooschema.users.id')),
+ Column('user_id', Integer, ForeignKey('fooschema.users.id')),
Column('prop_id',Integer, ForeignKey('fooschema.props.id')),
schema='fooschema')
configure_mappers()
except exc.InvalidRequestError:
assert sa.util.compat.py32
- # the exception is preserved. Remains the
+ # the exception is preserved. Remains the
# same through repeated calls.
for i in range(3):
assert_raises_message(sa.exc.InvalidRequestError,
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'user'
- id = Column(Integer, primary_key=True,
+ id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
- address = composite(AddressComposite,
+ address = composite(AddressComposite,
Column('street', String(50)),
Column('state', String(2)),
)
Base.metadata.create_all()
sess = Session()
sess.add(User(
- address=AddressComposite('123 anywhere street',
+ address=AddressComposite('123 anywhere street',
'MD')
))
sess.commit()
eq_(
- sess.query(User).all(),
- [User(address=AddressComposite('123 anywhere street',
+ sess.query(User).all(),
+ [User(address=AddressComposite('123 anywhere street',
'MD'))]
)
class User(Base, fixtures.ComparableEntity):
__tablename__ = 'user'
- id = Column(Integer, primary_key=True,
+ id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
street = Column(String(50))
state = Column(String(2))
- address = composite(AddressComposite,
+ address = composite(AddressComposite,
street, state)
Base.metadata.create_all()
sess = Session()
sess.add(User(
- address=AddressComposite('123 anywhere street',
+ address=AddressComposite('123 anywhere street',
'MD')
))
sess.commit()
eq_(
- sess.query(User).all(),
- [User(address=AddressComposite('123 anywhere street',
+ sess.query(User).all(),
+ [User(address=AddressComposite('123 anywhere street',
'MD'))]
)
pass
eq_(
- MyModel.__mapper__.polymorphic_on.name,
+ MyModel.__mapper__.polymorphic_on.name,
'type_'
)
assert MyModel.__mapper__.polymorphic_on.table is not None
class Model(Base, ColumnMixin):
- __table__ = Table('foo', Base.metadata,
- Column('data',Integer),
+ __table__ = Table('foo', Base.metadata,
+ Column('data',Integer),
Column('id', Integer,primary_key=True))
foo = relationship("Dest")
class Model(Base, ColumnMixin):
- __table__ = Table('foo', Base.metadata,
- Column('data',Integer),
+ __table__ = Table('foo', Base.metadata,
+ Column('data',Integer),
Column('tada', Integer),
Column('id', Integer,primary_key=True))
foo = relationship("Dest")
@classmethod
def define_tables(cls, metadata):
- Table('users', metadata,
+ Table('users', metadata,
Column('id', Integer,
primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
Column('id', Integer,
primary_key=True, test_needs_autoincrement=True),
)
- Table("b", metadata,
+ Table("b", metadata,
Column('id', Integer,
ForeignKey('a.id'),
primary_key=True),
)
def test_pk_fk(self):
- class B(decl.DeferredReflection, fixtures.ComparableEntity,
+ class B(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'b'
a = relationship("A")
- class A(decl.DeferredReflection, fixtures.ComparableEntity,
+ class A(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'a'
@classmethod
def define_tables(cls, metadata):
- Table('users', metadata,
+ Table('users', metadata,
Column('id', Integer,
primary_key=True, test_needs_autoincrement=True),
Column('name', String(50)), test_needs_fk=True)
eq_(a1.user, User(name='u1'))
def test_basic_deferred(self):
- class User(decl.DeferredReflection, fixtures.ComparableEntity,
+ class User(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'users'
addresses = relationship("Address", backref="user")
- class Address(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Address(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'addresses'
self._roundtrip()
def test_redefine_fk_double(self):
- class User(decl.DeferredReflection, fixtures.ComparableEntity,
+ class User(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'users'
addresses = relationship("Address", backref="user")
- class Address(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Address(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'addresses'
user_id = Column(Integer, ForeignKey('users.id'))
def test_mapper_args_deferred(self):
"""test that __mapper_args__ is not called until *after* table reflection"""
- class User(decl.DeferredReflection, fixtures.ComparableEntity,
+ class User(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'users'
Bar = Base._decl_class_registry['Bar']
s = Session(testing.db)
-
+
s.add_all([
Bar(data='d1', bar_data='b1'),
Bar(data='d2', bar_data='b2'),
@classmethod
def define_tables(cls, metadata):
Table("foo", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(32)),
Column('data', String(30)),
)
def test_basic(self):
- class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
+ __mapper_args__ = {"polymorphic_on":"type",
"polymorphic_identity":"foo"}
class Bar(Foo):
self._roundtrip()
def test_add_subclass_column(self):
- class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
+ __mapper_args__ = {"polymorphic_on":"type",
"polymorphic_identity":"foo"}
class Bar(Foo):
self._roundtrip()
def test_add_pk_column(self):
- class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
+ __mapper_args__ = {"polymorphic_on":"type",
"polymorphic_identity":"foo"}
id = Column(Integer, primary_key=True)
@classmethod
def define_tables(cls, metadata):
Table("foo", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(32)),
Column('data', String(30)),
)
def test_basic(self):
- class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
+ __mapper_args__ = {"polymorphic_on":"type",
"polymorphic_identity":"foo"}
class Bar(Foo):
self._roundtrip()
def test_add_subclass_column(self):
- class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
+ __mapper_args__ = {"polymorphic_on":"type",
"polymorphic_identity":"foo"}
class Bar(Foo):
self._roundtrip()
def test_add_pk_column(self):
- class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
+ __mapper_args__ = {"polymorphic_on":"type",
"polymorphic_identity":"foo"}
id = Column(Integer, primary_key=True)
self._roundtrip()
def test_add_fk_pk_column(self):
- class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
+ class Foo(decl.DeferredReflection, fixtures.ComparableEntity,
Base):
__tablename__ = 'foo'
- __mapper_args__ = {"polymorphic_on":"type",
+ __mapper_args__ = {"polymorphic_on":"type",
"polymorphic_identity":"foo"}
class Bar(Foo):
"""Testing environment and utilities.
-This package contains base classes and routines used by
+This package contains base classes and routines used by
the unit tests. Tests are based on Nose and bootstrapped
by noseplugin.NoseSQLAlchemy.
"""Run a setup method, framing the operation with a Base class
that will catch new subclasses to be established within
the "classes" registry.
-
+
"""
cls_registry = cls.classes
class FindFixture(type):
def _teardown_each_mappers(self):
# some tests create mappers in the test bodies
- # and will define setup_mappers as None -
+ # and will define setup_mappers as None -
# clear mappers in any case
if self.run_setup_mappers != 'once':
sa.orm.clear_mappers()
cls, classname, bases, dict_)
class DeclarativeBasic(object):
__table_cls__ = schema.Table
- _DeclBase = declarative_base(metadata=cls.declarative_meta,
+ _DeclBase = declarative_base(metadata=cls.declarative_meta,
metaclass=FindFixtureDeclarative,
cls=DeclarativeBasic)
cls.DeclarativeBasic = _DeclBase
else:
stats.print_stats()
- print_callers = target_opts.get('print_callers',
+ print_callers = target_opts.get('print_callers',
profile_config['print_callers'])
if print_callers:
stats.print_callers()
- print_callees = target_opts.get('print_callees',
+ print_callees = target_opts.get('print_callees',
profile_config['print_callees'])
if print_callees:
stats.print_callees()
def reflectable_autoincrement(fn):
"""Target database must support tables that can automatically generate
PKs assuming they were reflected.
-
+
this is essentially all the DBs in "identity" plus Postgresql, which
- has SERIAL support. FB and Oracle (and sybase?) require the Sequence to
+ has SERIAL support. FB and Oracle (and sybase?) require the Sequence to
be explicitly added, including if the table was reflected.
"""
return _chain_decorators_on(
"""Target must support UPDATE..FROM syntax"""
return _chain_decorators_on(
fn,
- only_on(('postgresql', 'mssql', 'mysql'),
+ only_on(('postgresql', 'mssql', 'mysql'),
"Backend does not support UPDATE..FROM")
)
def cpython(fn):
return _chain_decorators_on(
fn,
- skip_if(lambda: util.jython or util.pypy,
+ skip_if(lambda: util.jython or util.pypy,
"cPython interpreter needed"
)
)
def ad_hoc_engines(fn):
"""Test environment must allow ad-hoc engine/connection creation.
-
+
DBs that scale poorly for many connections, even when closed, i.e.
Oracle, may use the "--low-connections" option which flags this requirement
as not present.
-
+
"""
return _chain_decorators_on(
fn,
"""target driver must support the literal statement 'select 1'"""
return _chain_decorators_on(
fn,
- skip_if(lambda: testing.against('oracle'),
+ skip_if(lambda: testing.against('oracle'),
"non-standard SELECT scalar syntax")
)
def fails_on(dbs, reason):
- """Mark a test as expected to fail on the specified database
+ """Mark a test as expected to fail on the specified database
implementation.
Unlike ``crashes``, tests marked as ``fails_on`` will be run
util.warn = util.langhelpers.warn = testing_warn
warnings.filterwarnings('ignore',
- category=sa_exc.SAPendingDeprecationWarning)
+ category=sa_exc.SAPendingDeprecationWarning)
warnings.filterwarnings('error', category=sa_exc.SADeprecationWarning)
warnings.filterwarnings('error', category=sa_exc.SAWarning)
def run_as_contextmanager(ctx, fn, *arg, **kw):
"""Run the given function under the given contextmanager,
- simulating the behavior of 'with' to support older
+ simulating the behavior of 'with' to support older
Python versions.
-
+
"""
obj = ctx.__enter__()
class AssertsCompiledSQL(object):
- def assert_compile(self, clause, result, params=None,
- checkparams=None, dialect=None,
+ def assert_compile(self, clause, result, params=None,
+ checkparams=None, dialect=None,
checkpositional=None,
use_default_dialect=False,
allow_dialect_select=False):
mapper(Keyword, keywords)
mapper(Node, nodes, properties={
- 'children':relationship(Node,
+ 'children':relationship(Node,
backref=backref('parent', remote_side=[nodes.c.id])
)
})
parent_class = parent_mapper.class_
child_class = child_mapper.class_
- parent_mapper.add_property("collection",
- relationship(child_mapper,
- primaryjoin=relationshipjoin,
- foreign_keys=foreign_keys,
+ parent_mapper.add_property("collection",
+ relationship(child_mapper,
+ primaryjoin=relationshipjoin,
+ foreign_keys=foreign_keys,
remote_side=remote_side, uselist=True))
sess = create_session()
global people, managers
people = Table('people', metadata,
- Column('person_id', Integer, Sequence('person_id_seq',
- optional=True),
+ Column('person_id', Integer, Sequence('person_id_seq',
+ optional=True),
primary_key=True),
- Column('manager_id', Integer,
- ForeignKey('managers.person_id',
+ Column('manager_id', Integer,
+ ForeignKey('managers.person_id',
use_alter=True, name="mpid_fq")),
Column('name', String(50)),
Column('type', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('status', String(30)),
Column('manager_name', String(50))
mapper(Manager, managers, inherits=Person,
inherit_condition=people.c.person_id==managers.c.person_id)
- eq_(class_mapper(Person).get_property('manager').synchronize_pairs,
+ eq_(class_mapper(Person).get_property('manager').synchronize_pairs,
[(managers.c.person_id,people.c.manager_id)])
session = create_session()
pass
mapper(Person, people)
- mapper(Manager, managers, inherits=Person,
+ mapper(Manager, managers, inherits=Person,
inherit_condition=people.c.person_id==
- managers.c.person_id,
+ managers.c.person_id,
properties={
'employee':relationship(Person, primaryjoin=(
people.c.manager_id ==
def define_tables(cls, metadata):
global people, managers, data
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50)),
Column('type', String(30)))
)
data = Table('data', metadata,
- Column('person_id', Integer, ForeignKey('managers.person_id'),
+ Column('person_id', Integer, ForeignKey('managers.person_id'),
primary_key=True),
Column('data', String(30))
)
if jointype == "join1":
poly_union = polymorphic_union({
'person':people.select(people.c.type=='person'),
- 'manager':join(people, managers,
+ 'manager':join(people, managers,
people.c.person_id==managers.c.person_id)
}, None)
polymorphic_on=poly_union.c.type
elif jointype == "join2":
poly_union = polymorphic_union({
'person':people.select(people.c.type=='person'),
- 'manager':managers.join(people,
+ 'manager':managers.join(people,
people.c.person_id==managers.c.person_id)
}, None)
polymorphic_on=poly_union.c.type
self.data = data
mapper(Data, data)
- mapper(Person, people,
- with_polymorphic=('*', poly_union),
- polymorphic_identity='person',
+ mapper(Person, people,
+ with_polymorphic=('*', poly_union),
+ polymorphic_identity='person',
polymorphic_on=polymorphic_on)
if usedata:
- mapper(Manager, managers,
- inherits=Person,
+ mapper(Manager, managers,
+ inherits=Person,
inherit_condition=people.c.person_id==
- managers.c.person_id,
+ managers.c.person_id,
polymorphic_identity='manager',
properties={
'colleague':relationship(
- Person,
+ Person,
primaryjoin=managers.c.manager_id==
- people.c.person_id,
+ people.c.person_id,
lazy='select', uselist=False),
'data':relationship(Data, uselist=False)
}
)
else:
- mapper(Manager, managers, inherits=Person,
+ mapper(Manager, managers, inherits=Person,
inherit_condition=people.c.person_id==
- managers.c.person_id,
+ managers.c.person_id,
polymorphic_identity='manager',
properties={
- 'colleague':relationship(Person,
+ 'colleague':relationship(Person,
primaryjoin=managers.c.manager_id==
- people.c.person_id,
+ people.c.person_id,
lazy='select', uselist=False)
}
)
def define_tables(cls, metadata):
global people, managers, data
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('colleague_id', Integer, ForeignKey('people.person_id')),
Column('name', String(50)),
Column('type', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('status', String(30)),
)
data = Table('data', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('data', String(30))
)
if jointype == "join1":
poly_union = polymorphic_union({
- 'manager':managers.join(people,
+ 'manager':managers.join(people,
people.c.person_id==managers.c.person_id),
'person':people.select(people.c.type=='person')
}, None)
elif jointype =="join2":
poly_union = polymorphic_union({
- 'manager':join(people, managers,
+ 'manager':join(people, managers,
people.c.person_id==managers.c.person_id),
'person':people.select(people.c.type=='person')
}, None)
mapper(Data, data)
if usedata:
- mapper(Person, people,
- with_polymorphic=('*', poly_union),
- polymorphic_identity='person',
+ mapper(Person, people,
+ with_polymorphic=('*', poly_union),
+ polymorphic_identity='person',
polymorphic_on=people.c.type,
properties={
- 'colleagues':relationship(Person,
+ 'colleagues':relationship(Person,
primaryjoin=people.c.colleague_id==
- people.c.person_id,
- remote_side=people.c.colleague_id,
+ people.c.person_id,
+ remote_side=people.c.colleague_id,
uselist=True),
'data':relationship(Data, uselist=False)
}
)
else:
- mapper(Person, people,
- with_polymorphic=('*', poly_union),
- polymorphic_identity='person',
+ mapper(Person, people,
+ with_polymorphic=('*', poly_union),
+ polymorphic_identity='person',
polymorphic_on=people.c.type,
properties={
- 'colleagues':relationship(Person,
+ 'colleagues':relationship(Person,
primaryjoin=people.c.colleague_id==people.c.person_id,
remote_side=people.c.colleague_id, uselist=True)
}
)
- mapper(Manager, managers, inherits=Person,
+ mapper(Manager, managers, inherits=Person,
inherit_condition=people.c.person_id==
- managers.c.person_id,
+ managers.c.person_id,
polymorphic_identity='manager')
sess = create_session()
def define_tables(cls, metadata):
global people, engineers, managers, cars
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('status', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('longer_status', String(70)))
cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
+ Column('car_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('owner', Integer, ForeignKey('people.person_id')))
'manager':people.join(managers),
}, "type", 'employee_join')
- person_mapper = mapper(Person, people,
- with_polymorphic=('*', employee_join),
- polymorphic_on=employee_join.c.type,
+ person_mapper = mapper(Person, people,
+ with_polymorphic=('*', employee_join),
+ polymorphic_on=employee_join.c.type,
polymorphic_identity='person')
- engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
+ engineer_mapper = mapper(Engineer, engineers,
+ inherits=person_mapper,
polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
polymorphic_identity='manager')
- car_mapper = mapper(Car, cars,
+ car_mapper = mapper(Car, cars,
properties= {'employee':
relationship(person_mapper)})
def define_tables(cls, metadata):
global people, engineers, managers, cars
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50)),
Column('type', String(50)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('status', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('longer_status', String(70)))
cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
+ Column('car_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('owner', Integer, ForeignKey('people.person_id')))
def __repr__(self):
return "Car number %d" % self.car_id
- person_mapper = mapper(Person, people,
- polymorphic_on=people.c.type,
+ person_mapper = mapper(Person, people,
+ polymorphic_on=people.c.type,
polymorphic_identity='person')
- engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
+ engineer_mapper = mapper(Engineer, engineers,
+ inherits=person_mapper,
polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
polymorphic_identity='manager')
car_mapper = mapper(Car, cars, properties= {
'manager':relationship(
def define_tables(cls, metadata):
global people, managers, data
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50)),
)
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
- Column('colleague_id', Integer,
+ Column('colleague_id', Integer,
ForeignKey('managers.person_id')),
Column('status', String(30)),
)
mapper(Person, people)
- mapper(Manager, managers, inherits=Person,
+ mapper(Manager, managers, inherits=Person,
inherit_condition=people.c.person_id==\
managers.c.person_id,
properties={
- 'colleague':relationship(Manager,
+ 'colleague':relationship(Manager,
primaryjoin=managers.c.colleague_id==\
- managers.c.person_id,
+ managers.c.person_id,
lazy='select', uselist=False)
}
)
def define_tables(cls, metadata):
global people, engineers, managers, cars, offroad_cars
cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
+ Column('car_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(30)))
nullable=False,primary_key=True))
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
- Column('car_id', Integer, ForeignKey('cars.car_id'),
+ Column('car_id', Integer, ForeignKey('cars.car_id'),
nullable=False),
Column('name', String(50)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('field', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('category', String(70)))
class Engineer(Person):
def __repr__(self):
- return "Engineer %s, field %s" % (self.name,
+ return "Engineer %s, field %s" % (self.name,
self.field)
class Manager(Person):
def __repr__(self):
- return "Manager %s, category %s" % (self.name,
+ return "Manager %s, category %s" % (self.name,
self.category)
class Car(PersistentObject):
car_join = polymorphic_union(
{
'car' : cars.outerjoin(offroad_cars).\
- select(offroad_cars.c.car_id == None,
+ select(offroad_cars.c.car_id == None,
fold_equivalents=True),
'offroad' : cars.join(offroad_cars)
}, "type", 'car_join')
with_polymorphic=('*', car_join) ,polymorphic_on=car_join.c.type,
polymorphic_identity='car',
)
- offroad_car_mapper = mapper(Offraod_Car, offroad_cars,
+ offroad_car_mapper = mapper(Offraod_Car, offroad_cars,
inherits=car_mapper, polymorphic_identity='offroad')
person_mapper = mapper(Person, people,
- with_polymorphic=('*', employee_join),
+ with_polymorphic=('*', employee_join),
polymorphic_on=employee_join.c.type,
polymorphic_identity='person',
properties={
'car':relationship(car_mapper)
})
- engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
+ engineer_mapper = mapper(Engineer, engineers,
+ inherits=person_mapper,
polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
polymorphic_identity='manager')
session = create_session()
def define_tables(cls, metadata):
global taggable, users
taggable = Table('taggable', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(30)),
Column('owner_id', Integer, ForeignKey('taggable.id')),
)
users = Table ('users', metadata,
- Column('id', Integer, ForeignKey('taggable.id'),
+ Column('id', Integer, ForeignKey('taggable.id'),
primary_key=True),
Column('data', String(50)),
)
class User(Taggable):
pass
- mapper( Taggable, taggable,
- polymorphic_on=taggable.c.type,
- polymorphic_identity='taggable',
+ mapper( Taggable, taggable,
+ polymorphic_on=taggable.c.type,
+ polymorphic_identity='taggable',
properties = {
'owner' : relationship (User,
primaryjoin=taggable.c.owner_id ==taggable.c.id,
})
- mapper(User, users, inherits=Taggable,
+ mapper(User, users, inherits=Taggable,
polymorphic_identity='user',
inherit_condition=users.c.id == taggable.c.id,
)
metadata = MetaData(testing.db)
# table definitions
status = Table('status', metadata,
- Column('status_id', Integer, primary_key=True,
+ Column('status_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(20)))
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
- Column('status_id', Integer, ForeignKey('status.status_id'),
+ Column('status_id', Integer, ForeignKey('status.status_id'),
nullable=False),
Column('name', String(50)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('field', String(30)))
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('category', String(70)))
cars = Table('cars', metadata,
- Column('car_id', Integer, primary_key=True,
+ Column('car_id', Integer, primary_key=True,
test_needs_autoincrement=True),
- Column('status_id', Integer, ForeignKey('status.status_id'),
+ Column('status_id', Integer, ForeignKey('status.status_id'),
nullable=False),
- Column('owner', Integer, ForeignKey('people.person_id'),
+ Column('owner', Integer, ForeignKey('people.person_id'),
nullable=False))
metadata.create_all()
status_mapper = mapper(Status, status)
person_mapper = mapper(Person, people,
- with_polymorphic=('*', employee_join),
+ with_polymorphic=('*', employee_join),
polymorphic_on=employee_join.c.type,
- polymorphic_identity='person',
+ polymorphic_identity='person',
properties={'status':relationship(status_mapper)})
- engineer_mapper = mapper(Engineer, engineers,
- inherits=person_mapper,
+ engineer_mapper = mapper(Engineer, engineers,
+ inherits=person_mapper,
polymorphic_identity='engineer')
- manager_mapper = mapper(Manager, managers,
- inherits=person_mapper,
+ manager_mapper = mapper(Manager, managers,
+ inherits=person_mapper,
polymorphic_identity='manager')
car_mapper = mapper(Car, cars, properties= {
- 'employee':relationship(person_mapper),
+ 'employee':relationship(person_mapper),
'status':relationship(status_mapper)})
session = create_session()
session.add(dead)
session.flush()
- # TODO: we haven't created assertions for all
+ # TODO: we haven't created assertions for all
# the data combinations created here
- # creating 5 managers named from M1 to M5
+ # creating 5 managers named from M1 to M5
# and 5 engineers named from E1 to E5
# M4, M5, E4 and E5 are dead
for i in range(1,5):
"status Status active]")
r = session.query(Engineer).join('status').\
filter(Person.name.in_(
- ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) &
+ ['E2', 'E3', 'E4', 'M4', 'M2', 'M1']) &
(status.c.name=="active")).order_by(Person.name)
eq_(str(list(r)), "[Engineer E2, field X, status Status "
"active, Engineer E3, field X, status "
"Status active]")
- r = session.query(Person).filter(exists([1],
+ r = session.query(Person).filter(exists([1],
Car.owner==Person.person_id))
eq_(str(list(r)), "[Engineer E4, field X, status Status dead]")
global table_Employee, table_Engineer, table_Manager
table_Employee = Table( 'Employee', metadata,
Column( 'name', type_= String(100), ),
- Column( 'id', primary_key= True, type_= Integer,
+ Column( 'id', primary_key= True, type_= Integer,
test_needs_autoincrement=True),
Column( 'atype', type_= String(100), ),
)
table_Engineer = Table( 'Engineer', metadata,
Column( 'machine', type_= String(100), ),
- Column( 'id', Integer, ForeignKey( 'Employee.id', ),
+ Column( 'id', Integer, ForeignKey( 'Employee.id', ),
primary_key= True),
)
table_Manager = Table( 'Manager', metadata,
Column( 'duties', type_= String(100), ),
- Column( 'id', Integer, ForeignKey( 'Engineer.id', ),
+ Column( 'id', Integer, ForeignKey( 'Engineer.id', ),
primary_key= True, ),
)
def set( me, **kargs):
for k,v in kargs.iteritems(): setattr( me, k, v)
return me
- def __str__(me):
+ def __str__(me):
return str(me.__class__.__name__)+':'+str(me.name)
__repr__ = __str__
- class Engineer(Employee):
+ class Engineer(Employee):
pass
- class Manager(Engineer):
+ class Manager(Engineer):
pass
pu_Employee = polymorphic_union( {
- 'Manager': table_Employee.join(
+ 'Manager': table_Employee.join(
table_Engineer).join( table_Manager),
- 'Engineer': select([table_Employee,
- table_Engineer.c.machine],
- table_Employee.c.atype == 'Engineer',
+ 'Engineer': select([table_Employee,
+ table_Engineer.c.machine],
+ table_Employee.c.atype == 'Engineer',
from_obj=[
table_Employee.join(table_Engineer)]),
- 'Employee': table_Employee.select(
+ 'Employee': table_Employee.select(
table_Employee.c.atype == 'Employee'),
}, None, 'pu_employee', )
pu_Engineer = polymorphic_union( {
'Manager': table_Employee.join( table_Engineer).
join( table_Manager),
- 'Engineer': select([table_Employee,
- table_Engineer.c.machine],
- table_Employee.c.atype == 'Engineer',
+ 'Engineer': select([table_Employee,
+ table_Engineer.c.machine],
+ table_Employee.c.atype == 'Engineer',
from_obj=[
table_Employee.join(table_Engineer)
]),
a = Employee().set( name= 'one')
b = Engineer().set( egn= 'two', machine= 'any')
- c = Manager().set( name= 'head', machine= 'fast',
+ c = Manager().set( name= 'head', machine= 'fast',
duties= 'many')
session = create_session()
collection_table
base_item_table = Table(
'base_item', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('child_name', String(255), default=None))
item_table = Table(
'item', metadata,
- Column('id', Integer, ForeignKey('base_item.id'),
+ Column('id', Integer, ForeignKey('base_item.id'),
primary_key=True),
Column('dummy', Integer, default=0))
collection_table = Table(
'collection', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', Unicode(255)))
with_polymorphic=('*', item_join),
polymorphic_on=base_item_table.c.child_name,
polymorphic_identity='BaseItem',
- properties=dict(collections=relationship(Collection,
- secondary=base_item_collection_table,
+ properties=dict(collections=relationship(Collection,
+ secondary=base_item_collection_table,
backref="items")))
mapper(
def define_tables(cls, metadata):
global t1, t2
t1 = Table('t1', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(30), nullable=False),
Column('data', String(30)))
d['t2'] = t1.join(t2)
pjoin = polymorphic_union(d, None, 'pjoin')
- mapper(T1, t1, polymorphic_on=t1.c.type,
- polymorphic_identity='t1',
- with_polymorphic=('*', pjoin),
+ mapper(T1, t1, polymorphic_on=t1.c.type,
+ polymorphic_identity='t1',
+ with_polymorphic=('*', pjoin),
primary_key=[pjoin.c.id])
mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
ot1 = T1()
sess.flush()
sess.expunge_all()
- # query using get(), using only one value.
+ # query using get(), using only one value.
# this requires the select_table mapper
# has the same single-col primary key.
assert sess.query(T1).get(ot1.id).id == ot1.id
d['t2'] = t1.join(t2)
pjoin = polymorphic_union(d, None, 'pjoin')
- mapper(T1, t1, polymorphic_on=t1.c.type,
- polymorphic_identity='t1',
+ mapper(T1, t1, polymorphic_on=t1.c.type,
+ polymorphic_identity='t1',
with_polymorphic=('*', pjoin))
mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
assert len(class_mapper(T1).primary_key) == 1
sess.flush()
sess.expunge_all()
- # query using get(), using only one value. this requires the
+ # query using get(), using only one value. this requires the
# select_table mapper
# has the same single-col primary key.
assert sess.query(T1).get(ot1.id).id == ot1.id
global people, employees, tags, peopleTags
people = Table('people', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('_type', String(30), nullable=False),
)
)
tags = Table('tags', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('label', String(50), nullable=False),
)
def __init__(self, label):
self.label = label
- mapper(Person, people, polymorphic_on=people.c._type,
+ mapper(Person, people, polymorphic_on=people.c._type,
polymorphic_identity='person', properties={
- 'tags': relationship(Tag,
- secondary=peopleTags,
+ 'tags': relationship(Tag,
+ secondary=peopleTags,
backref='people', lazy='joined')
})
mapper(Employee, employees, inherits=Person,
class MissingPolymorphicOnTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- tablea = Table('tablea', metadata,
- Column('id', Integer, primary_key=True,
+ tablea = Table('tablea', metadata,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('adata', String(50)),
)
- tableb = Table('tableb', metadata,
- Column('id', Integer, primary_key=True,
+ tableb = Table('tableb', metadata,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('aid', Integer, ForeignKey('tablea.id')),
Column('data', String(50)),
)
- tablec = Table('tablec', metadata,
- Column('id', Integer, ForeignKey('tablea.id'),
+ tablec = Table('tablec', metadata,
+ Column('id', Integer, ForeignKey('tablea.id'),
primary_key=True),
Column('cdata', String(50)),
)
- tabled = Table('tabled', metadata,
- Column('id', Integer, ForeignKey('tablec.id'),
+ tabled = Table('tabled', metadata,
+ Column('id', Integer, ForeignKey('tablec.id'),
primary_key=True),
Column('ddata', String(50)),
)
A, B, C, D = self.classes.A, self.classes.B, self.classes.C, \
self.classes.D
poly_select = select(
- [tablea, tableb.c.data.label('discriminator')],
+ [tablea, tableb.c.data.label('discriminator')],
from_obj=tablea.join(tableb)).alias('poly')
mapper(B, tableb)
- mapper(A, tablea,
+ mapper(A, tablea,
with_polymorphic=('*', poly_select),
- polymorphic_on=poly_select.c.discriminator,
+ polymorphic_on=poly_select.c.discriminator,
properties={
'b':relationship(B, uselist=False)
})
sess.flush()
sess.expunge_all()
eq_(
- sess.query(A).all(),
+ sess.query(A).all(),
[
- C(cdata='c1', adata='a1'),
+ C(cdata='c1', adata='a1'),
D(cdata='c2', adata='a2', ddata='d2')
]
)
@classmethod
def define_tables(cls, metadata):
Table('people', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(30)),
)
Table('users', metadata,
- Column('id', Integer, ForeignKey('people.id'),
+ Column('id', Integer, ForeignKey('people.id'),
primary_key=True),
Column('supervisor_id', Integer, ForeignKey('people.id')),
)
Table('dudes', metadata,
- Column('id', Integer, ForeignKey('users.id'),
+ Column('id', Integer, ForeignKey('users.id'),
primary_key=True),
)
class A(Base):
__tablename__ = "a"
- id = Column(Integer, primary_key=True,
+ id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
class B(Base):
__tablename__ = "b"
- id = Column(Integer, primary_key=True,
+ id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
ds = relationship("D")
es = relationship("E")
test_needs_autoincrement=True)
b_id = Column(Integer, ForeignKey('b.id'))
- @testing.fails_on("oracle",
+ @testing.fails_on("oracle",
"seems like oracle's query engine can't "
"handle this, not clear if there's an "
"expression-level bug on our end though")
'magazine': relationship(Magazine, backref=backref('pages', order_by=page_table.c.page_no))
})
- classified_page_mapper = mapper(ClassifiedPage,
- classified_page_table,
- inherits=magazine_page_mapper,
- polymorphic_identity='c',
+ classified_page_mapper = mapper(ClassifiedPage,
+ classified_page_table,
+ inherits=magazine_page_mapper,
+ polymorphic_identity='c',
primary_key=[page_table.c.id])
global companies, people, engineers, managers, boss
companies = Table('companies', metadata,
- Column('company_id', Integer, primary_key=True,
+ Column('company_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50)))
people = Table('people', metadata,
- Column('person_id', Integer, primary_key=True,
+ Column('person_id', Integer, primary_key=True,
test_needs_autoincrement=True),
- Column('company_id', Integer, ForeignKey('companies.company_id'),
+ Column('company_id', Integer, ForeignKey('companies.company_id'),
nullable=False),
Column('name', String(50)),
Column('type', String(30)))
engineers = Table('engineers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('status', String(30)),
Column('engineer_name', String(50)),
)
managers = Table('managers', metadata,
- Column('person_id', Integer, ForeignKey('people.person_id'),
+ Column('person_id', Integer, ForeignKey('people.person_id'),
primary_key=True),
Column('status', String(30)),
Column('manager_name', String(50))
)
boss = Table('boss', metadata,
- Column('boss_id', Integer, ForeignKey('managers.person_id'),
+ Column('boss_id', Integer, ForeignKey('managers.person_id'),
primary_key=True),
Column('golf_swing', String(30)),
)
'person':people.select(people.c.type=='person'),
}, None, 'pjoin')
- person_mapper = mapper(Person, people,
- with_polymorphic=('*', person_join),
- polymorphic_on=person_join.c.type,
+ person_mapper = mapper(Person, people,
+ with_polymorphic=('*', person_join),
+ polymorphic_on=person_join.c.type,
polymorphic_identity='person')
- mapper(Engineer, engineers, inherits=person_mapper,
+ mapper(Engineer, engineers, inherits=person_mapper,
polymorphic_identity='engineer')
- mapper(Manager, managers, inherits=person_mapper,
+ mapper(Manager, managers, inherits=person_mapper,
polymorphic_identity='manager')
mapper(Company, companies, properties={
'employees': relationship(Person,
def _generate_round_trip_test(include_base, lazy_relationship,
redefine_colprop, with_polymorphic):
"""generates a round trip test.
-
+
include_base - whether or not to include the base 'person' type in
the union.
-
+
lazy_relationship - whether or not the Company relationship to
People is lazy or eager.
-
+
redefine_colprop - if we redefine the 'name' column to be
'people_name' on the base Person class
-
+
with_polymorphic - which with_polymorphic strategy ('unions', 'joins',
'auto' or 'none') is applied to the mappers
"""
def test_roundtrip(self):
manager_with_polymorphic = None
if redefine_colprop:
- person_mapper = mapper(Person, people,
- with_polymorphic=person_with_polymorphic,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
+ person_mapper = mapper(Person, people,
+ with_polymorphic=person_with_polymorphic,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
properties= {'person_name':people.c.name})
else:
- person_mapper = mapper(Person, people,
- with_polymorphic=person_with_polymorphic,
- polymorphic_on=people.c.type,
+ person_mapper = mapper(Person, people,
+ with_polymorphic=person_with_polymorphic,
+ polymorphic_on=people.c.type,
polymorphic_identity='person')
- mapper(Engineer, engineers, inherits=person_mapper,
+ mapper(Engineer, engineers, inherits=person_mapper,
polymorphic_identity='engineer')
- mapper(Manager, managers, inherits=person_mapper,
- with_polymorphic=manager_with_polymorphic,
+ mapper(Manager, managers, inherits=person_mapper,
+ with_polymorphic=manager_with_polymorphic,
polymorphic_identity='manager')
mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
person_attribute_name = 'name'
employees = [
- Manager(status='AAB', manager_name='manager1',
+ Manager(status='AAB', manager_name='manager1',
**{person_attribute_name:'pointy haired boss'}),
- Engineer(status='BBA', engineer_name='engineer1',
- primary_language='java',
+ Engineer(status='BBA', engineer_name='engineer1',
+ primary_language='java',
**{person_attribute_name:'dilbert'}),
]
if include_base:
employees.append(Person(**{person_attribute_name:'joesmith'}))
employees += [
- Engineer(status='CGG', engineer_name='engineer2',
- primary_language='python',
+ Engineer(status='CGG', engineer_name='engineer2',
+ primary_language='python',
**{person_attribute_name:'wally'}),
- Manager(status='ABA', manager_name='manager2',
+ Manager(status='ABA', manager_name='manager2',
**{person_attribute_name:'jsmith'})
]
session.expunge_all()
eq_(session.query(Person).filter(
- Person.person_id==dilbert.person_id).one(),
+ Person.person_id==dilbert.person_id).one(),
dilbert)
session.expunge_all()
else:
self.assert_sql_count(testing.db, go, 6)
- # test selecting from the query, using the base
+ # test selecting from the query, using the base
# mapped table (people) as the selection criterion.
- # in the case of the polymorphic Person query,
+ # in the case of the polymorphic Person query,
# the "people" selectable should be adapted to be "person_join"
eq_(
session.query(Person).filter(
dilbert
)
- # test selecting from the query, joining against
+ # test selecting from the query, joining against
# an alias of the base "people" table. test that
- # the "palias" alias does *not* get sucked up
+ # the "palias" alias does *not* get sucked up
# into the "person_join" conversion.
palias = people.alias("palias")
dilbert = session.query(Person).get(dilbert.person_id)
session.expunge_all()
def go():
- session.query(Person).filter(getattr(Person,
+ session.query(Person).filter(getattr(Person,
person_attribute_name)=='dilbert').first()
self.assert_sql_count(testing.db, go, 1)
session.expunge_all()
- dilbert = session.query(Person).filter(getattr(Person,
+ dilbert = session.query(Person).filter(getattr(Person,
person_attribute_name)=='dilbert').first()
def go():
- # assert that only primary table is queried for
+ # assert that only primary table is queried for
# already-present-in-session
- d = session.query(Person).filter(getattr(Person,
+ d = session.query(Person).filter(getattr(Person,
person_attribute_name)=='dilbert').first()
self.assert_sql_count(testing.db, go, 1)
# test standalone orphans
- daboss = Boss(status='BBB',
- manager_name='boss',
- golf_swing='fore',
+ daboss = Boss(status='BBB',
+ manager_name='boss',
+ golf_swing='fore',
**{person_attribute_name:'daboss'})
session.add(daboss)
assert_raises(sa_exc.DBAPIError, session.flush)
c = session.query(Company).first()
daboss.company = c
- manager_list = [e for e in c.employees
+ manager_list = [e for e in c.employees
if isinstance(e, Manager)]
session.flush()
session.expunge_all()
- eq_(session.query(Manager).order_by(Manager.person_id).all(),
+ eq_(session.query(Manager).order_by(Manager.person_id).all(),
manager_list)
c = session.query(Company).first()
for with_polymorphic in ['unions', 'joins', 'auto', 'none']:
if with_polymorphic == 'unions':
for include_base in [True, False]:
- _generate_round_trip_test(include_base,
- lazy_relationship,
+ _generate_round_trip_test(include_base,
+ lazy_relationship,
redefine_colprop, with_polymorphic)
else:
- _generate_round_trip_test(False,
- lazy_relationship,
+ _generate_round_trip_test(False,
+ lazy_relationship,
redefine_colprop, with_polymorphic)
def test_loads_at_once(self):
"""
- Test that all objects load from the full query, when
+ Test that all objects load from the full query, when
with_polymorphic is used.
"""
self.assert_sql_count(testing.db, go, count)
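# A minimal, self-contained sketch of the "loads at once" behavior: with the
# pre-2.0 Query.with_polymorphic('*') spelling used throughout this module,
# the base-class query joins the subclass tables up front, so iterating the
# results needs no additional per-row SELECT.  Assumes the declarative API and
# an in-memory SQLite engine; the two-class model is illustrative.
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Person(Base):
    __tablename__ = 'people'
    person_id = Column(Integer, primary_key=True)
    name = Column(String(50))
    type = Column(String(30))
    __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'person'}

class Engineer(Person):
    __tablename__ = 'engineers'
    person_id = Column(Integer, ForeignKey('people.person_id'),
                       primary_key=True)
    status = Column(String(30))
    __mapper_args__ = {'polymorphic_identity': 'engineer'}

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Person(name='p1'), Engineer(name='e1', status='new')])
session.commit()

# a single SELECT over people LEFT OUTER JOIN engineers; Engineer rows come
# back with their subclass columns already populated
for obj in session.query(Person).with_polymorphic('*').order_by(Person.name):
    print(type(obj).__name__, obj.name)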
def test_primary_eager_aliasing_one(self):
- # For both joinedload() and subqueryload(), if the original q is
+ # For both joinedload() and subqueryload(), if the original q is
# not loading the subclass table, the joinedload doesn't happen.
sess = create_session()
def test_get_one(self):
"""
- For all mappers, ensure the primary key has been calculated as
+ For all mappers, ensure the primary key has been calculated as
just the "person_id" column.
"""
sess = create_session()
def test_join_from_columns_or_subclass_six(self):
sess = create_session()
if self.select_type == '':
- # this now raises, due to [ticket:1892]. Manager.person_id
+ # this now raises, due to [ticket:1892]. Manager.person_id
# is now the "person_id" column on Manager. SQL is incorrect.
assert_raises(
sa_exc.DBAPIError,
Manager.person_id == paperwork.c.person_id)
.order_by(Person.name).all)
elif self.select_type == 'Unions':
- # with the union, not something anyone would really be using
- # here, it joins to the full result set. This is 0.6's
+ # with the union, not something anyone would really be using
+ # here, it joins to the full result set. This is 0.6's
# behavior and is more or less wrong.
expected = [
(u'dilbert',),
.order_by(Person.name).all(),
expected)
else:
- # when a join is present and managers.person_id is available,
+ # when a join is present and managers.person_id is available,
# you get the managers.
expected = [
(u'dogbert',),
# need it anymore.
def test_polymorphic_option(self):
"""
- Test that polymorphic loading sets state.load_path with its
+ Test that polymorphic loading sets state.load_path with its
actual mapper on a subclass, and not the superclass mapper.
This only works for non-aliased mappers.
def test_expire(self):
"""
- Test that individual column refresh doesn't get tripped up by
+ Test that individual column refresh doesn't get tripped up by
the select_table mapper.
"""
def test_with_polymorphic_five(self):
sess = create_session()
def go():
- # limit the polymorphic join down to just "Person",
+ # limit the polymorphic join down to just "Person",
# overriding select_table
eq_(sess.query(Person)
.with_polymorphic(Person).all(),
def test_with_polymorphic_seven(self):
sess = create_session()
- # compare to entities without related collections to prevent
+ # compare to entities without related collections to prevent
# additional lazy SQL from firing on loaded entities
eq_(sess.query(Person).with_polymorphic('*').all(),
self._emps_wo_relationships_fixture())
# query one is company->Person/Engineer->Machines
# query two is Person/Engineer subq
- # query three is Machines subq
- # (however this test can't tell if the Q was a
+ # query three is Machines subq
+ # (however this test can't tell if the Q was a
# lazyload or subqload ...)
# query four is managers + boss for row #3
# query five is managers for row #4
.filter(Engineer.engineer_name == 'vlad').one(),
c2)
- # same, using explicit join condition. Query.join() must
- # adapt the on clause here to match the subquery wrapped around
+ # same, using explicit join condition. Query.join() must
+ # adapt the on clause here to match the subquery wrapped around
# "people join engineers".
eq_(sess.query(Company)
.join(Engineer, Company.company_id == Engineer.company_id)
expected)
def test_nesting_queries(self):
- # query.statement places a flag "no_adapt" on the returned
- # statement. This prevents the polymorphic adaptation in the
- # second "filter" from hitting it, which would pollute the
- # subquery and usually results in recursion overflow errors
+ # query.statement places a flag "no_adapt" on the returned
+ # statement. This prevents the polymorphic adaptation in the
+ # second "filter" from hitting it, which would pollute the
+ # subquery and usually results in recursion overflow errors
# within the adaption.
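# A minimal sketch of the nesting pattern that note is protecting: the inner
# Query is rendered once as a subquery of the outer statement rather than
# being re-adapted by the outer query's polymorphic handling.  Assumes
# declarative single-table inheritance and an in-memory SQLite engine; the
# model is illustrative.
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Person(Base):
    __tablename__ = 'people'
    person_id = Column(Integer, primary_key=True)
    name = Column(String(50))
    type = Column(String(30))
    __mapper_args__ = {'polymorphic_on': type, 'polymorphic_identity': 'person'}

class Engineer(Person):
    primary_language = Column(String(30))
    __mapper_args__ = {'polymorphic_identity': 'engineer'}

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Engineer(name='vlad', primary_language='cobol'),
                 Person(name='joe')])
session.commit()

# the inner query becomes "IN (SELECT ...)" inside the outer statement
inner = session.query(Engineer.person_id).filter(
    Engineer.primary_language == 'cobol')
print([p.name for p in
       session.query(Person).filter(Person.person_id.in_(inner))])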
sess = create_session()
subq = (sess.query(engineers.c.person_id)
#def test_mixed_entities(self):
# sess = create_session()
- # TODO: I think raise error on these for now. different
- # inheritance/loading schemes have different results here,
+ # TODO: I think raise error on these for now. different
+ # inheritance/loading schemes have different results here,
# all incorrect
#
# eq_(
#def test_mixed_entities(self):
# sess = create_session()
# eq_(sess.query(
- # Person.name,
- # Engineer.primary_language,
+ # Person.name,
+ # Engineer.primary_language,
# Manager.manager_name)
# .all(),
# [])
sess.add(c1)
sess.flush()
- # test that the splicing of the join works here, doesn't break in
+ # test that the splicing of the join works here, doesn't break in
# the middle of "parent join child1"
q = sess.query(Child1).options(joinedload('left_child2'))
self.assert_compile(q.limit(1).with_labels().statement,
ealias = aliased(Engineer)
eq_(
- session.query(Manager, ealias).all(),
+ session.query(Manager, ealias).all(),
[(m1, e1), (m1, e2)]
)
# TODO: I think raise error on this for now
# self.assertEquals(
- # session.query(Employee.name, Manager.manager_data, Engineer.engineer_info).all(),
+ # session.query(Employee.name, Manager.manager_data, Engineer.engineer_info).all(),
# []
# )
sess.flush()
eq_(
- sess.query(Manager).select_from(employees.select().limit(10)).all(),
+ sess.query(Manager).select_from(employees.select().limit(10)).all(),
[m1, m2]
)
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, employees.name AS employees_name "
"FROM companies LEFT OUTER JOIN employees ON companies.company_id "
- "= employees.company_id AND employees.type IN (:type_1)"
+ "= employees.company_id AND employees.type IN (:type_1)"
)
def test_outer_join_alias(self):
eq_(c2.engineers, [e1])
sess.expunge_all()
- eq_(sess.query(Company).order_by(Company.name).all(),
+ eq_(sess.query(Company).order_by(Company.name).all(),
[
Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
Company(name='c2', engineers=[Engineer(name='Kurt')])
# eager load join should limit to only "Engineer"
sess.expunge_all()
- eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(),
+ eq_(sess.query(Company).options(joinedload('engineers')).order_by(Company.name).all(),
[
Company(name='c1', engineers=[JuniorEngineer(name='Ed')]),
Company(name='c2', engineers=[Engineer(name='Kurt')])
eq_(
sess.query(pa.name, pa.Engineer.primary_language, pa.Manager.manager_name).\
- filter(or_(pa.Engineer.primary_language=='java',
+ filter(or_(pa.Engineer.primary_language=='java',
pa.Manager.manager_name=='dogbert')).\
order_by(pa.Engineer.type).all(),
[
(u'dilbert', u'java', None),
- (u'dogbert', None, u'dogbert'),
+ (u'dogbert', None, u'dogbert'),
]
)
eq_(
[(p1.name, type(p1), p2.name, type(p2)) for (p1, p2) in sess.query(
pa, pa_alias
- ).join(pa_alias,
+ ).join(pa_alias,
or_(
pa.Engineer.primary_language==\
pa_alias.Engineer.primary_language,
)
).order_by(pa.name, pa_alias.name)],
[
- (u'dilbert', Engineer, u'dilbert', Engineer),
- (u'dogbert', Manager, u'pointy haired boss', Boss),
- (u'vlad', Engineer, u'vlad', Engineer),
+ (u'dilbert', Engineer, u'dilbert', Engineer),
+ (u'dogbert', Manager, u'pointy haired boss', Boss),
+ (u'vlad', Engineer, u'vlad', Engineer),
(u'wally', Engineer, u'wally', Engineer)
]
)
eq_(
[row for row in sess.query(
- pa.name, pa.Engineer.primary_language,
+ pa.name, pa.Engineer.primary_language,
pa_alias.name, pa_alias.Engineer.primary_language
- ).join(pa_alias,
+ ).join(pa_alias,
or_(
pa.Engineer.primary_language==\
pa_alias.Engineer.primary_language,
)
).order_by(pa.name, pa_alias.name)],
[
- (u'dilbert', u'java', u'dilbert', u'java'),
- (u'dogbert', None, u'pointy haired boss', None),
- (u'vlad', u'cobol', u'vlad', u'cobol'),
+ (u'dilbert', u'java', u'dilbert', u'java'),
+ (u'dogbert', None, u'pointy haired boss', None),
+ (u'vlad', u'cobol', u'vlad', u'cobol'),
(u'wally', u'c++', u'wally', u'c++')
]
)
Derived from mailing list-reported problems and trac tickets.
-These are generally very old 0.1-era tests and at some point should
+These are generally very old 0.1-era tests and at some point should
be cleaned up and modernized.
"""
# backref fires
assert a1.user is u2
- # everything expires, no changes in
+ # everything expires, no changes in
# u1.addresses, so all is fine
sess.commit()
assert a1 not in u1.addresses
u1.addresses
# direct set - the "old" is "fetched",
- # but only from the local session - not the
+ # but only from the local session - not the
# database, due to the PASSIVE_NO_FETCH flag.
# this is a more fine grained behavior introduced
# in 0.6
sess.add_all([u1, u2, a1])
sess.commit()
- # direct set - the fetching of the
+ # direct set - the fetching of the
# "old" u1 here allows the backref
# to remove it from the addresses collection
a1.user = u2
# u1.addresses is loaded
u1.addresses
- # direct set - the fetching of the
+ # direct set - the fetching of the
# "old" u1 here allows the backref
# to remove it from the addresses collection
a1.user = u2
mapper(Address, addresses)
mapper(User, users, properties = {
- 'address':relationship(Address, uselist=False,
- backref=backref('user', single_parent=True,
+ 'address':relationship(Address, uselist=False,
+ backref=backref('user', single_parent=True,
cascade="all, delete-orphan"))
})
cls.classes.Item)
mapper(Item, items, properties={
- 'keywords':relationship(Keyword, secondary=item_keywords,
+ 'keywords':relationship(Keyword, secondary=item_keywords,
backref='items')
})
mapper(Keyword, keywords)
cls.classes.Item)
mapper(Item, items, properties={
- 'keyword':relationship(Keyword, secondary=item_keywords,
- uselist=False,
+ 'keyword':relationship(Keyword, secondary=item_keywords,
+ uselist=False,
backref=backref("item", uselist=False))
})
mapper(Keyword, keywords)
cls.classes.Item)
mapper(Item, items, properties={
- 'keywords':relationship(Keyword, secondary=item_keywords,
+ 'keywords':relationship(Keyword, secondary=item_keywords,
backref='items')
})
mapper(Keyword, keywords)
def test_cascade_immutable(self):
assert isinstance(
- orm_util.CascadeOptions("all, delete-orphan"),
+ orm_util.CascadeOptions("all, delete-orphan"),
frozenset)
class O2MCascadeDeleteOrphanTest(fixtures.MappedTest):
@classmethod
def setup_mappers(cls):
users, User, Address, addresses = (
- cls.tables.users, cls.classes.User,
+ cls.tables.users, cls.classes.User,
cls.classes.Address, cls.tables.addresses)
mapper(Address, addresses)
run_inserts = None
- def _one_to_many_fixture(self, o2m_cascade=True,
- m2o_cascade=True,
+ def _one_to_many_fixture(self, o2m_cascade=True,
+ m2o_cascade=True,
o2m=False,
m2o=False,
o2m_cascade_backrefs=True,
if o2m:
if m2o:
addresses_rel = {'addresses':relationship(
- Address,
+ Address,
cascade_backrefs=o2m_cascade_backrefs,
cascade=o2m_cascade and 'save-update' or '',
- backref=backref('user',
+ backref=backref('user',
cascade=m2o_cascade and 'save-update' or '',
cascade_backrefs=m2o_cascade_backrefs
)
else:
addresses_rel = {'addresses':relationship(
- Address,
+ Address,
cascade=o2m_cascade and 'save-update' or '',
cascade_backrefs=o2m_cascade_backrefs,
)}
mapper(User, users, properties=addresses_rel)
mapper(Address, addresses, properties=user_rel)
- def _many_to_many_fixture(self, fwd_cascade=True,
- bkd_cascade=True,
+ def _many_to_many_fixture(self, fwd_cascade=True,
+ bkd_cascade=True,
fwd=False,
bkd=False,
fwd_cascade_backrefs=True,
if fwd:
if bkd:
keywords_rel = {'keywords':relationship(
- Keyword,
+ Keyword,
secondary=item_keywords,
cascade_backrefs=fwd_cascade_backrefs,
cascade=fwd_cascade and 'save-update' or '',
- backref=backref('items',
+ backref=backref('items',
cascade=bkd_cascade and 'save-update' or '',
cascade_backrefs=bkd_cascade_backrefs
)
else:
keywords_rel = {'keywords':relationship(
- Keyword,
+ Keyword,
secondary=item_keywords,
cascade=fwd_cascade and 'save-update' or '',
cascade_backrefs=fwd_cascade_backrefs,
def test_o2m_backref_child_transient(self):
User, Address = self.classes.User, self.classes.Address
- self._one_to_many_fixture(o2m=True, m2o=True,
+ self._one_to_many_fixture(o2m=True, m2o=True,
o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
def test_o2m_backref_child_transient_nochange(self):
User, Address = self.classes.User, self.classes.Address
- self._one_to_many_fixture(o2m=True, m2o=True,
+ self._one_to_many_fixture(o2m=True, m2o=True,
o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
def test_o2m_backref_child_expunged(self):
User, Address = self.classes.User, self.classes.Address
- self._one_to_many_fixture(o2m=True, m2o=True,
+ self._one_to_many_fixture(o2m=True, m2o=True,
o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
def test_o2m_backref_child_expunged_nochange(self):
User, Address = self.classes.User, self.classes.Address
- self._one_to_many_fixture(o2m=True, m2o=True,
+ self._one_to_many_fixture(o2m=True, m2o=True,
o2m_cascade=False)
sess = Session()
u1 = User(name='u1')
def test_m2m_backref_child_transient(self):
Item, Keyword = self.classes.Item, self.classes.Keyword
- self._many_to_many_fixture(fwd=True, bkd=True,
+ self._many_to_many_fixture(fwd=True, bkd=True,
fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
def test_m2m_backref_child_transient_nochange(self):
Item, Keyword = self.classes.Item, self.classes.Keyword
- self._many_to_many_fixture(fwd=True, bkd=True,
+ self._many_to_many_fixture(fwd=True, bkd=True,
fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
def test_m2m_backref_child_expunged(self):
Item, Keyword = self.classes.Item, self.classes.Keyword
- self._many_to_many_fixture(fwd=True, bkd=True,
+ self._many_to_many_fixture(fwd=True, bkd=True,
fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
def test_m2m_backref_child_expunged_nochange(self):
Item, Keyword = self.classes.Item, self.classes.Keyword
- self._many_to_many_fixture(fwd=True, bkd=True,
+ self._many_to_many_fixture(fwd=True, bkd=True,
fwd_cascade=False)
sess = Session()
i1 = Item(description='i1')
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata, Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data',String(50)),
+ test_needs_autoincrement=True),
+ Column('data',String(50)),
Column('t2id', Integer, ForeignKey('t2.id')))
- Table('t2', metadata,
+ Table('t2', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
- Column('data',String(50)),
+ test_needs_autoincrement=True),
+ Column('data',String(50)),
Column('t3id', Integer, ForeignKey('t3.id')))
- Table('t3', metadata,
+ Table('t3', metadata,
Column('id', Integer, primary_key=True,
- test_needs_autoincrement=True),
+ test_needs_autoincrement=True),
Column('data', String(50)))
@classmethod
self.tables.atob)
mapper(A, a, properties={
- 'bs':relationship(B, secondary=atob,
+ 'bs':relationship(B, secondary=atob,
cascade="all, delete-orphan")
})
mapper(B, b)
mapper(A, a, properties={
- 'bs':relationship(B,
- secondary=atob,
+ 'bs':relationship(B,
+ secondary=atob,
cascade="all, delete-orphan", single_parent=True,
backref=backref('a', uselist=False))
})
@classmethod
def define_tables(cls, metadata):
Table('node', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('parent_id', Integer, ForeignKey('node.id'))
)
node = cls.tables.node
mapper(Node, node, properties={
"children":relationship(
- Node,
- cascade="all, delete-orphan",
+ Node,
+ cascade="all, delete-orphan",
backref=backref(
- "parent",
+ "parent",
remote_side=node.c.id
)
)
mapper(Address, addresses)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user',
+ 'addresses':relationship(Address, backref='user',
cascade_backrefs=False)
})
mapper(Dingaling, dingalings, properties={
- 'address' : relationship(Address, backref='dingalings',
+ 'address' : relationship(Address, backref='dingalings',
cascade_backrefs=False)
})
pass
def test_pending_standalone_orphan(self):
- """Standalone 'orphan' objects can now be persisted, if the underlying
+ """Standalone 'orphan' objects can now be persisted, if the underlying
constraints of the database allow it.
This now supports persisting of objects based on foreign key
assert_raises(sa_exc.DBAPIError, s.commit)
s.rollback()
- # can assign o.user_id by foreign key,
+ # can assign o.user_id by foreign key,
# flush succeeds
u = User()
s.add(u)
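# A minimal sketch of what the docstring above describes: an Address never
# attached to any User is nonetheless INSERTed at flush time, and whether that
# succeeds is left to the database's own constraints (here user_id is
# nullable, so it does).  Assumes declarative + in-memory SQLite; names are
# illustrative.
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    addresses = relationship('Address', cascade='all, delete-orphan',
                             backref='user')

class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))   # nullable on purpose
    email = Column(String(50))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Address(email='nobody@example.com'))    # standalone "orphan"
session.commit()                                     # succeeds: user_id is NULL
print(session.query(Address).count())                # 1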
mapper(Address, addresses)
mapper(User, users, properties=dict(
- addresses=relationship(Address, cascade="all,delete-orphan",
+ addresses=relationship(Address, cascade="all,delete-orphan",
backref="user")
))
s = create_session()
class CollectionAssignmentOrphanTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table('table_a', metadata,
+ Table('table_a', metadata,
Column('id', Integer,
primary_key=True, test_needs_autoincrement=True),
Column('name', String(30)))
- Table('table_b', metadata,
+ Table('table_b', metadata,
Column('id', Integer,
primary_key=True, test_needs_autoincrement=True),
- Column('name', String(30)),
+ Column('name', String(30)),
Column('a_id', Integer, ForeignKey('table_a.id')))
def test_basic(self):
self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False,
+ 'child':relationship(Child, uselist=False,
cascade="all, delete, delete-orphan")
})
mapper(Child, child)
self.tables.child)
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False,
- cascade="all, delete, delete-orphan",
+ 'child':relationship(Child, uselist=False,
+ cascade="all, delete, delete-orphan",
backref='parent')
})
mapper(Child, child)
mapper(Parent, parent)
mapper(Child, child, properties = {
- 'parent' : relationship(Parent, uselist=False, single_parent=True,
- backref=backref('child', uselist=False),
+ 'parent' : relationship(Parent, uselist=False, single_parent=True,
+ backref=backref('child', uselist=False),
cascade="all,delete,delete-orphan")
})
self._do_move_test(True)
mapper(Parent, parent)
mapper(Child, child, properties = {
- 'parent' : relationship(Parent, uselist=False, single_parent=True,
- backref=backref('child', uselist=True),
+ 'parent' : relationship(Parent, uselist=False, single_parent=True,
+ backref=backref('child', uselist=True),
cascade="all,delete,delete-orphan")
})
self._do_move_test(True)
Column("descr", String(50))
)
- Table("noninh_child", metadata,
+ Table("noninh_child", metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('base_id', Integer, ForeignKey('base.id'))
sess.flush([b1])
# c1, c2 get cascaded into the session on o2m.
- # not sure if this is how I like this
+ # not sure if this is how I like this
# to work but that's how it works for now.
assert c1 in sess and c1 not in sess.new
assert c2 in sess and c2 not in sess.new
inherits=Base,
properties={'parent': relationship(
Parent,
- backref='children',
+ backref='children',
primaryjoin=inh_child.c.parent_id == parent.c.id
)}
)
((Foo.id, Foo.bar_id), Foo(id=3, bar_id=12), (3, 12))
):
eq_(
- collections.column_mapped_collection(spec)().keyfunc(obj),
+ collections.column_mapped_collection(spec)().keyfunc(obj),
expected
)
@classmethod
def define_tables(cls, metadata):
- Table('foo', metadata,
+ Table('foo', metadata,
Column('id', Integer(), primary_key=True),
Column('b', String(128))
)
- Table('bar', metadata,
+ Table('bar', metadata,
Column('id', Integer(), primary_key=True),
Column('foo_id', Integer, ForeignKey('foo.id')),
Column('bat_id', Integer),
for spec, obj, expected in specs:
coll = collections.column_mapped_collection(spec)()
eq_(
- coll.keyfunc(obj),
+ coll.keyfunc(obj),
expected
)
# ensure we do the right thing with __reduce__
meta = MetaData()
a = Table('a', meta, Column('id', Integer, primary_key=True))
- b = Table('b', meta, Column('id', Integer, primary_key=True),
+ b = Table('b', meta, Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('a.id')))
class A(object):pass
class BiDirectionalOneToManyTest2(fixtures.MappedTest):
- """Two mappers with a one-to-many relationship to each other,
+ """Two mappers with a one-to-many relationship to each other,
with a second one-to-many on one of the mappers"""
run_define_tables = 'each'
sess.delete(p)
self.assert_sql_execution(
- testing.db,
- sess.flush,
+ testing.db,
+ sess.flush,
ExactSQL("UPDATE person SET favorite_ball_id=:favorite_ball_id "
"WHERE person.id = :person_id",
lambda ctx: {'person_id': p.id, 'favorite_ball_id': None}),
p2, b1.person
)
- # do it the other way
+ # do it the other way
p3.balls.append(b1)
sess.commit()
eq_(
sess.delete(p)
- self.assert_sql_execution(testing.db, sess.flush,
+ self.assert_sql_execution(testing.db, sess.flush,
CompiledSQL("UPDATE ball SET person_id=:person_id "
"WHERE ball.id = :ball_id",
lambda ctx:[
# pre-trigger lazy loader on 'cats' to make the test easier
cats.children
self.assert_sql_execution(
- testing.db,
+ testing.db,
session.flush,
AllOf(
CompiledSQL("UPDATE node SET prev_sibling_id=:prev_sibling_id "
session.delete(root)
self.assert_sql_execution(
- testing.db,
+ testing.db,
session.flush,
CompiledSQL("UPDATE node SET next_sibling_id=:next_sibling_id "
- "WHERE node.id = :node_id",
+ "WHERE node.id = :node_id",
lambda ctx: [
- {'node_id': about.id, 'next_sibling_id': None},
+ {'node_id': about.id, 'next_sibling_id': None},
{'node_id': stories.id, 'next_sibling_id': None}
]
),
p1.c3 = c31
self.assert_sql_execution(
- testing.db,
+ testing.db,
sess.flush,
CompiledSQL(
"UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
p1.c1 = p1.c2 = p1.c3 = None
self.assert_sql_execution(
- testing.db,
+ testing.db,
sess.flush,
CompiledSQL(
"UPDATE parent SET c1_id=:c1_id, c2_id=:c2_id, "
# keywords are not part of self.static.user_all_result, so
# verify all the item keywords were loaded, with no more sql.
# 'any' verifies at least some items have keywords; we build
- # a list for any([...]) instead of any(...) to prove we've
+ # a list for any([...]) instead of any(...) to prove we've
# iterated all the items with no sql.
f = util.flatten_iterator
- assert any([i.keywords for i in
- f([o.items for o in f([u.orders for u in users])])])
+ assert any([i.keywords for i in
+ f([o.items for o in f([u.orders for u in users])])])
self.assert_sql_count(testing.db, go, 0)
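# A tiny sketch of the any([...]) vs. any(...) distinction noted above: any()
# over a generator short-circuits at the first truthy element, while building
# the list first forces every element to be produced -- which is what proves
# that iterating every item emitted no further SQL.
def noisy(values):
    for v in values:
        print('produced', v)
        yield v

print(any(noisy([1, 2, 3])))                  # stops after 'produced 1'
print(any([v for v in noisy([1, 2, 3])]))     # produces all three first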
def _assert_addresses_loaded(self, users):
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy=True,
order_by=addresses.c.id),
- orders=relationship(Order,
+ orders=relationship(Order,
order_by=orders.c.id)))
return create_session()
def test_downgrade_baseline(self):
- """Mapper strategy defaults load as expected
+ """Mapper strategy defaults load as expected
(compare to rest of DefaultStrategyOptionsTest downgrade tests)."""
sess = self._downgrade_fixture()
users = []
self._assert_fully_loaded(users)
def test_disable_eagerloads(self):
- """Mapper eager load strategy defaults can be shut off
+ """Mapper eager load strategy defaults can be shut off
with enable_eagerloads(False)."""
- # While this isn't testing a mapper option, it is included
- # as baseline reference for how XYZload('*') option
+ # While this isn't testing a mapper option, it is included
+ # as baseline reference for how XYZload('*') option
# should work, namely, it shouldn't affect later queries
# (see other test_select_s)
sess = self._downgrade_fixture()
sa.orm.subqueryload, '*', User.addresses
)
def test_select_with_joinedload(self):
- """Mapper load strategy defaults can be downgraded with
- lazyload('*') option, while explicit joinedload() option
+ """Mapper load strategy defaults can be downgraded with
+ lazyload('*') option, while explicit joinedload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
self.assert_sql_count(testing.db, go, 3)
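# A minimal sketch of the precedence this family of tests exercises: the
# wildcard lazyload('*') downgrades every eager relationship on the mapper,
# while a more specific option in the same options() call -- here joinedload()
# for 'addresses' -- is still honored.  Assumes declarative + in-memory
# SQLite; the model is illustrative.
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker, lazyload, joinedload

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    addresses = relationship('Address', lazy='subquery')   # eager by default
    orders = relationship('Order', lazy='subquery')        # eager by default

class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))

class Order(Base):
    __tablename__ = 'orders'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))

engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(User(name='u1', addresses=[Address()], orders=[Order()]))
session.commit()
session.expunge_all()

# 'orders' is downgraded to lazy loading; 'addresses' stays eagerly joined
users = session.query(User).options(
    lazyload('*'), joinedload(User.addresses)).all()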
def test_select_with_subqueryload(self):
- """Mapper load strategy defaults can be downgraded with
- lazyload('*') option, while explicit subqueryload() option
+ """Mapper load strategy defaults can be downgraded with
+ lazyload('*') option, while explicit subqueryload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
eq_(users, self.static.user_all_result)
def test_noload_with_joinedload(self):
- """Mapper load strategy defaults can be downgraded with
- noload('*') option, while explicit joinedload() option
+ """Mapper load strategy defaults can be downgraded with
+ noload('*') option, while explicit joinedload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
self.assert_sql_count(testing.db, go, 0)
def test_noload_with_subqueryload(self):
- """Mapper load strategy defaults can be downgraded with
- noload('*') option, while explicit subqueryload() option
+ """Mapper load strategy defaults can be downgraded with
+ noload('*') option, while explicit subqueryload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
self.assert_sql_count(testing.db, go, 0)
def test_joined(self):
- """Mapper load strategy defaults can be upgraded with
+ """Mapper load strategy defaults can be upgraded with
joinedload('*') option."""
sess = self._upgrade_fixture()
users = []
self._assert_fully_loaded(users)
def test_joined_with_lazyload(self):
- """Mapper load strategy defaults can be upgraded with
+ """Mapper load strategy defaults can be upgraded with
joinedload('*') option, while explicit lazyload() option
is still honored"""
sess = self._upgrade_fixture()
self.assert_sql_count(testing.db, go, 1)
def test_joined_with_subqueryload(self):
- """Mapper load strategy defaults can be upgraded with
+ """Mapper load strategy defaults can be upgraded with
joinedload('*') option, while explicit subqueryload() option
is still honored"""
sess = self._upgrade_fixture()
self._assert_fully_loaded(users)
def test_subquery(self):
- """Mapper load strategy defaults can be upgraded with
+ """Mapper load strategy defaults can be upgraded with
subqueryload('*') option."""
sess = self._upgrade_fixture()
users = []
self._assert_fully_loaded(users)
def test_subquery_with_lazyload(self):
- """Mapper load strategy defaults can be upgraded with
+ """Mapper load strategy defaults can be upgraded with
subqueryload('*') option, while explicit lazyload() option
is still honored"""
sess = self._upgrade_fixture()
self.assert_sql_count(testing.db, go, 1)
def test_subquery_with_joinedload(self):
- """Mapper load strategy defaults can be upgraded with
- subqueryload('*') option, while multiple explicit
+ """Mapper load strategy defaults can be upgraded with
+ subqueryload('*') option, while multiple explicit
joinedload() options are still honored"""
sess = self._upgrade_fixture()
users = []
sa.DDL("CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
"FOR EACH ROW BEGIN "
"SET NEW.col2='ins'; SET NEW.col4='ins'; END",
- on=lambda ddl, event, target, bind, **kw:
+ on=lambda ddl, event, target, bind, **kw:
bind.engine.name not in ('oracle', 'mssql', 'sqlite')
),
):
sa.DDL("CREATE TRIGGER dt_up BEFORE UPDATE ON dt "
"FOR EACH ROW BEGIN "
"SET NEW.col3='up'; SET NEW.col4='up'; END",
- on=lambda ddl, event, target, bind, **kw:
+ on=lambda ddl, event, target, bind, **kw:
bind.engine.name not in ('oracle', 'mssql', 'sqlite')
),
):
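# The on=lambda ... gating above is the 0.6-era spelling of conditional DDL; a
# sketch of the same idea with the later execute_if() hook, which lets the
# trigger DDL run only on backends whose syntax it matches.  The trigger body
# is MySQL syntax, so it is gated to that dialect; names are illustrative.
from sqlalchemy import (MetaData, Table, Column, Integer, String, DDL,
                        event, create_engine)

metadata = MetaData()
dt = Table('dt', metadata,
    Column('id', Integer, primary_key=True),
    Column('col2', String(20)))

trigger = DDL(
    "CREATE TRIGGER dt_ins BEFORE INSERT ON dt "
    "FOR EACH ROW BEGIN SET NEW.col2='ins'; END")

# emitted right after CREATE TABLE, but only when the dialect is mysql
event.listen(dt, 'after_create', trigger.execute_if(dialect='mysql'))

engine = create_engine('sqlite://')
metadata.create_all(engine)       # on SQLite the trigger DDL is skipped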
from test.lib.testing import eq_
class TestDescriptor(descriptor_props.DescriptorProperty):
- def __init__(self, cls, key, descriptor=None, doc=None,
+ def __init__(self, cls, key, descriptor=None, doc=None,
comparator_factory = None):
self.parent = cls.__mapper__
self.key = key
u = q.filter(User.id==7).first()
self.assert_compile(
- u.addresses.statement,
+ u.addresses.statement,
"SELECT addresses.id, addresses.user_id, addresses.email_address FROM "
"addresses WHERE :param_1 = addresses.user_id",
use_default_dialect=True
u = sess.query(User).get(8)
eq_(
list(u.addresses.order_by(desc(Address.email_address))),
- [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'),
+ [Address(email_address=u'ed@wood.com'), Address(email_address=u'ed@lala.com'),
Address(email_address=u'ed@bettyboop.com')]
)
assert o1 in i1.orders.all()
assert i1 in o1.items.all()
- @testing.exclude('mysql', 'between',
+ @testing.exclude('mysql', 'between',
((5, 1,49), (5, 1, 52)),
'https://bugs.launchpad.net/ubuntu/+source/mysql-5.1/+bug/706988')
def test_association_nonaliased(self):
self.classes.Item)
mapper(Order, orders, properties={
- 'items':relationship(Item, secondary=order_items,
- lazy="dynamic",
+ 'items':relationship(Item, secondary=order_items,
+ lazy="dynamic",
order_by=order_items.c.item_id)
})
mapper(Item, items)
use_default_dialect=True
)
- # filter criterion against the secondary table
+ # filter criterion against the secondary table
# works
eq_(
o.items.filter(order_items.c.item_id==2).all(),
sess.flush()
sess.commit()
u1.addresses.append(Address(email_address='foo@bar.com'))
- eq_(u1.addresses.order_by(Address.id).all(),
+ eq_(u1.addresses.order_by(Address.id).all(),
[Address(email_address='lala@hoho.com'), Address(email_address='foo@bar.com')])
sess.rollback()
eq_(u1.addresses.all(), [Address(email_address='lala@hoho.com')])
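
The hunks above touch tests for lazy="dynamic" relationships. As a side note, here is a small sketch of that behavior, with its own assumed User/Address mapping rather than the fixtures: the collection is a query object, so ordering and filtering happen per access.

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, desc
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, relationship

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    addresses = relationship("Address", lazy='dynamic')

class Address(Base):
    __tablename__ = 'addresses'
    id = Column(Integer, primary_key=True)
    user_id = Column(Integer, ForeignKey('users.id'))
    email_address = Column(String(50))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)

u = User(name='ed')
session.add(u)
u.addresses.append(Address(email_address='ed@wood.com'))
u.addresses.append(Address(email_address='ed@lala.com'))
session.commit()

# u.addresses is a query, not a list: SQL is emitted for each access
newest_first = list(u.addresses.order_by(desc(Address.email_address)))
ed_count = u.addresses.filter(Address.email_address.like('ed@%')).count()
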
sess.flush()
eq_(canary,
['init', 'before_insert',
- 'after_insert', 'expire', 'translate_row',
+ 'after_insert', 'expire', 'translate_row',
'populate_instance', 'refresh',
'append_result', 'translate_row', 'create_instance',
'populate_instance', 'load', 'append_result',
- 'before_update', 'after_update', 'before_delete',
+ 'before_update', 'after_update', 'before_delete',
'after_delete'])
def test_merge(self):
sess.add(k1)
sess.flush()
eq_(canary1,
- ['init',
+ ['init',
'before_insert', 'after_insert'])
eq_(canary2,
- ['init',
+ ['init',
'before_insert', 'after_insert'])
canary1[:]= []
assert my_listener in s.dispatch.before_flush
def test_sessionmaker_listen(self):
- """test that listen can be applied to individual
+ """test that listen can be applied to individual
scoped_session() classes."""
def my_listener_one(*arg, **kw):
mapper(User, users)
- sess, canary = self._listener_fixture(autoflush=False,
+ sess, canary = self._listener_fixture(autoflush=False,
autocommit=True, expire_on_commit=False)
u = User(name='u1')
sess.add(u)
sess.flush()
eq_(
- canary,
+ canary,
[ 'before_attach', 'after_attach', 'before_flush', 'after_begin',
- 'after_flush', 'after_flush_postexec',
+ 'after_flush', 'after_flush_postexec',
'before_commit', 'after_commit',]
)
sess.commit
)
sess.rollback()
- eq_(canary, ['before_attach', 'after_attach', 'before_commit', 'before_flush',
- 'after_begin', 'after_flush', 'after_flush_postexec',
- 'after_commit', 'before_attach', 'after_attach', 'before_commit',
- 'before_flush', 'after_begin', 'after_rollback',
+ eq_(canary, ['before_attach', 'after_attach', 'before_commit', 'before_flush',
+ 'after_begin', 'after_flush', 'after_flush_postexec',
+ 'after_commit', 'before_attach', 'after_attach', 'before_commit',
+ 'before_flush', 'after_begin', 'after_rollback',
'after_soft_rollback', 'after_soft_rollback'])
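
The canaries above record session event hooks. As a brief illustration (listener names here are assumptions of this sketch), such listeners can be attached to the Session class, to a particular sessionmaker(), or to an individual session:

from sqlalchemy import event
from sqlalchemy.orm import Session, sessionmaker

def my_before_flush(session, flush_context, instances):
    # inspect session.new / session.dirty / session.deleted here
    pass

# listen on the Session class itself ...
event.listen(Session, 'before_flush', my_before_flush)

# ... or only on sessions produced by one sessionmaker() factory
MySession = sessionmaker()
event.listen(MySession, 'after_commit', lambda session: None)
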
def test_can_use_session_in_outer_rollback_hook(self):
u = User(name='u1')
sess.add(u)
sess.flush()
- eq_(sess.query(User).order_by(User.name).all(),
+ eq_(sess.query(User).order_by(User.name).all(),
[
User(name='another u1'),
User(name='u1')
)
sess.flush()
- eq_(sess.query(User).order_by(User.name).all(),
+ eq_(sess.query(User).order_by(User.name).all(),
[
User(name='another u1'),
User(name='u1')
u.name='u2'
sess.flush()
- eq_(sess.query(User).order_by(User.name).all(),
+ eq_(sess.query(User).order_by(User.name).all(),
[
User(name='another u1'),
User(name='another u2'),
sess.delete(u)
sess.flush()
- eq_(sess.query(User).order_by(User.name).all(),
+ eq_(sess.query(User).order_by(User.name).all(),
[
User(name='another u1'),
]
u = User(name='u1')
sess.add(u)
sess.flush()
- eq_(sess.query(User).order_by(User.name).all(),
+ eq_(sess.query(User).order_by(User.name).all(),
[User(name='u1')]
)
sess.add(User(name='u2'))
sess.flush()
sess.expunge_all()
- eq_(sess.query(User).order_by(User.name).all(),
+ eq_(sess.query(User).order_by(User.name).all(),
[
User(name='u1 modified'),
User(name='u2')
class MapperExtensionTest(_fixtures.FixtureTest):
- """Superseded by MapperEventsTest - test backwards
+ """Superseded by MapperEventsTest - test backwards
compatibility of MapperExtension."""
run_inserts = None
sess.add(k1)
sess.flush()
eq_(methods1,
- ['instrument_class', 'init_instance',
+ ['instrument_class', 'init_instance',
'before_insert', 'after_insert'])
eq_(methods2,
- ['instrument_class', 'init_instance',
+ ['instrument_class', 'init_instance',
'before_insert', 'after_insert'])
del methods1[:]
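
As the docstring above notes, MapperExtension is superseded by the mapper event API. A minimal sketch of the event-based spelling of the hooks asserted here, using a hypothetical User class rather than the fixture:

from sqlalchemy import Column, Integer, String, event
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

def before_insert(mapper, connection, target):
    # called once per User instance, just before its INSERT is emitted
    pass

event.listen(User, 'before_insert', before_insert)
event.listen(User, 'after_insert', lambda mapper, connection, target: None)
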
class AttributeExtensionTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table('t1',
+ Table('t1',
metadata,
Column('id', Integer, primary_key=True),
Column('type', String(40)),
eq_(b1.data, 'ex1b2')
eq_(c1.data, 'ex2c2')
- eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'",
+ eq_(ext_msg, ["Ex1 'a1'", "Ex1 'b1'", "Ex2 'c1'",
"Ex1 'a2'", "Ex1 'b2'", "Ex2 'c2'"])
u = s.query(User).get(7)
s.expunge_all()
- assert_raises_message(sa_exc.InvalidRequestError,
+ assert_raises_message(sa_exc.InvalidRequestError,
r"is not persistent within this Session", s.expire, u)
def test_get_refreshes(self):
s.rollback()
assert u in s
- # but now its back, rollback has occurred, the
+ # but now its back, rollback has occurred, the
# _remove_newly_deleted is reverted
eq_(u.name, 'chuck')
def test_deferred(self):
- """test that unloaded, deferred attributes aren't included in the
+ """test that unloaded, deferred attributes aren't included in the
expiry list."""
Order, orders = self.classes.Order, self.tables.orders
self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address,
+ 'addresses':relationship(Address,
order_by=addresses.c.email_address)
})
mapper(Address, addresses)
u = s.query(User).get(8)
adlist = u.addresses
eq_(adlist, [
- Address(email_address='ed@bettyboop.com'),
+ Address(email_address='ed@bettyboop.com'),
Address(email_address='ed@lala.com'),
- Address(email_address='ed@wood.com'),
+ Address(email_address='ed@wood.com'),
])
a1 = u.addresses[2]
a1.email_address = 'aaaaa'
s.expire(u, ['addresses'])
eq_(u.addresses, [
- Address(email_address='aaaaa'),
- Address(email_address='ed@bettyboop.com'),
+ Address(email_address='aaaaa'),
+ Address(email_address='ed@bettyboop.com'),
Address(email_address='ed@lala.com'),
])
def test_refresh_collection_exception(self):
- """test graceful failure for currently unsupported
+ """test graceful failure for currently unsupported
immediate refresh of a collection"""
users, Address, addresses, User = (self.tables.users,
mapper(Address, addresses)
s = create_session(autoflush=True, autocommit=False)
u = s.query(User).get(8)
- assert_raises_message(sa_exc.InvalidRequestError,
- "properties specified for refresh",
+ assert_raises_message(sa_exc.InvalidRequestError,
+ "properties specified for refresh",
s.refresh, u, ['addresses'])
# in contrast to a regular query with no columns
- assert_raises_message(sa_exc.InvalidRequestError,
+ assert_raises_message(sa_exc.InvalidRequestError,
"no columns with which to SELECT", s.query().all)
def test_refresh_cancels_expire(self):
self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(Address, backref='user', lazy='joined',
+ 'addresses':relationship(Address, backref='user', lazy='joined',
order_by=addresses.c.id),
})
mapper(Address, addresses)
u1 = sess.query(User).options(undefer(User.name)).first()
assert 'name' not in attributes.instance_state(u1).callables
- # mass expire, the attribute was loaded,
+ # mass expire, the attribute was loaded,
# the attribute gets the callable
sess.expire(u1)
assert isinstance(
assert 'name' not in attributes.instance_state(u1).callables
# mass expire, attribute was loaded but then deleted,
- # the callable goes away - the state wants to flip
+ # the callable goes away - the state wants to flip
# it back to its "deferred" loader.
sess.expunge_all()
u1 = sess.query(User).options(undefer(User.name)).first()
# which attach to u1 will expect to be "pending"
sess.expire(u1, ['addresses'])
- # attach an Address. now its "pending"
+ # attach an Address. now its "pending"
# in user.addresses
a2 = Address(email_address='a2')
a2.user = u1
"""Test that the 'hasparent' flag gets flipped to False
only if we're sure this object is the real parent.
- In ambiguous cases a stale data exception is
+ In ambiguous cases a stale data exception is
raised.
"""
u1 = s.query(User).first()
- # primary key change. now we
- # can't rely on state.key as the
+ # primary key change. now we
+ # can't rely on state.key as the
# identifier.
u1.id = 5
a1.user_id = 5
def test_stale_state_negative_child_expired(self):
"""illustrate the current behavior of
expiration on the child.
-
+
there's some uncertainty here in how
this use case should work.
mapper(Keyword, keywords)
mapper(Node, nodes, properties={
- 'children':relationship(Node,
+ 'children':relationship(Node,
backref=backref('parent', remote_side=[nodes.c.id])
)
})
mapper(Machine, machines)
- mapper(Person, people,
- polymorphic_on=people.c.type,
- polymorphic_identity='person',
- order_by=people.c.person_id,
+ mapper(Person, people,
+ polymorphic_on=people.c.type,
+ polymorphic_identity='person',
+ order_by=people.c.person_id,
properties={
'paperwork':relationship(Paperwork, order_by=paperwork.c.paperwork_id)
})
mapper(Engineer, engineers, inherits=Person, polymorphic_identity='engineer', properties={
'machines':relationship(Machine, order_by=machines.c.machine_id)
})
- mapper(Manager, managers,
+ mapper(Manager, managers,
inherits=Person, polymorphic_identity='manager')
mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss')
mapper(Paperwork, paperwork)
)
def test_multi_tuple_form(self):
- """test the 'tuple' form of join, now superseded
+ """test the 'tuple' form of join, now superseded
by the two-element join() form.
Not deprecating this style as of yet.
self.assert_compile(
sess.query(User).join(
- (Order, User.id==Order.user_id),
+ (Order, User.id==Order.user_id),
(Item, Order.items)),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id "
for oalias,ialias in [(True, True), (False, False), (True, False), (False, True)]:
eq_(
sess.query(User).join('orders', aliased=oalias).\
- join('items',
- from_joinpoint=True,
+ join('items',
+ from_joinpoint=True,
aliased=ialias).\
filter(Item.description == 'item 4').all(),
[User(name='jack')]
eq_(
sess.query(User).join('orders', aliased=oalias).\
filter(Order.user_id==9).\
- join('items', from_joinpoint=True,
+ join('items', from_joinpoint=True,
aliased=ialias).\
filter(Item.description=='item 4').all(),
[]
orderalias = aliased(Order)
itemalias = aliased(Item)
eq_(
- sess.query(User).join(orderalias, 'orders').
+ sess.query(User).join(orderalias, 'orders').
join(itemalias, 'items', from_joinpoint=True).
filter(itemalias.description == 'item 4').all(),
[User(name='jack')]
sess.query(User).join, Address, Address.user,
)
- # but this one would silently fail
+ # but this one would silently fail
adalias = aliased(Address)
assert_raises(
sa_exc.InvalidRequestError,
# be using the aliased flag in this way.
self.assert_compile(
sess.query(User).join(User.orders, aliased=True).
- join(Item,
+ join(Item,
and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id),
from_joinpoint=True, aliased=True
),
oalias = orders.select()
self.assert_compile(
sess.query(User).join(oalias, User.orders).
- join(Item,
+ join(Item,
and_(Order.id==order_items.c.order_id, order_items.c.item_id==Item.id),
from_joinpoint=True
),
)
eq_(
- sess.query(User.name).join(Order, User.id==Order.user_id).
+ sess.query(User.name).join(Order, User.id==Order.user_id).
join(order_items, Order.id==order_items.c.order_id).
join(Item, order_items.c.item_id==Item.id).
filter(Item.description == 'item 4').all(),
sess.query(OrderAlias).join('items').filter_by(description='item 3').\
order_by(OrderAlias.id).all(),
[
- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
- Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2),
+ Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
+ Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2),
Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3)
]
)
filter_by(description='item 3').\
order_by(User.id, OrderAlias.id).all(),
[
- (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'),
- (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'),
+ (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'),
+ (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'),
(User(name=u'fred',id=9), Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), u'item 3')
]
)
IAlias = aliased(Item)
q = sess.query(Order, IAlias).select_from(join(Order, IAlias, 'items')).filter(IAlias.description=='item 3')
l = q.all()
- eq_(l,
+ eq_(l,
[
(order1, item3),
(order2, item3),
eq_(
sess.query(User, ualias).filter(User.id > ualias.id).order_by(desc(ualias.id), User.name).all(),
[
- (User(id=10,name=u'chuck'), User(id=9,name=u'fred')),
- (User(id=10,name=u'chuck'), User(id=8,name=u'ed')),
- (User(id=9,name=u'fred'), User(id=8,name=u'ed')),
- (User(id=10,name=u'chuck'), User(id=7,name=u'jack')),
+ (User(id=10,name=u'chuck'), User(id=9,name=u'fred')),
+ (User(id=10,name=u'chuck'), User(id=8,name=u'ed')),
+ (User(id=9,name=u'fred'), User(id=8,name=u'ed')),
+ (User(id=10,name=u'chuck'), User(id=7,name=u'jack')),
(User(id=8,name=u'ed'), User(id=7,name=u'jack')),
(User(id=9,name=u'fred'), User(id=7,name=u'jack'))
]
@classmethod
def define_tables(cls, metadata):
- Table('table1', metadata,
+ Table('table1', metadata,
Column('id', Integer, primary_key=True)
)
Table('table2', metadata,
backref=backref('parent', remote_side=[nodes.c.id])
),
'subs' : relationship(Sub),
- 'assoc':relationship(Node,
- secondary=assoc_table,
- primaryjoin=nodes.c.id==assoc_table.c.left_id,
+ 'assoc':relationship(Node,
+ secondary=assoc_table,
+ primaryjoin=nodes.c.id==assoc_table.c.left_id,
secondaryjoin=nodes.c.id==assoc_table.c.right_id)
})
mapper(Sub, sub_table)
def _inherits_fixture(self):
m = MetaData()
base = Table('base', m, Column('id', Integer, primary_key=True))
- a = Table('a', m,
+ a = Table('a', m,
Column('id', Integer, ForeignKey('base.id'), primary_key=True),
Column('b_id', Integer, ForeignKey('b.id')))
- b = Table('b', m,
+ b = Table('b', m,
Column('id', Integer, ForeignKey('base.id'), primary_key=True),
Column('c_id', Integer, ForeignKey('c.id')))
- c = Table('c', m,
+ c = Table('c', m,
Column('id', Integer, ForeignKey('base.id'), primary_key=True))
class Base(object):
pass
filter(Node.data=='n122').filter(parent.data=='n12').\
filter(grandparent.data=='n1').from_self().limit(1)
- # parent, grandparent *are* inside the from_self(), so they
+ # parent, grandparent *are* inside the from_self(), so they
# should get aliased to the outside.
self.assert_compile(
q,
sess = create_session()
eq_(sess.query(Node).filter(Node.children.any(Node.data=='n1')).all(), [])
eq_(sess.query(Node).filter(Node.children.any(Node.data=='n12')).all(), [Node(data='n1')])
- eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(),
+ eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id).all(),
[Node(data='n11'), Node(data='n13'),Node(data='n121'),Node(data='n122'),Node(data='n123'),])
def test_has(self):
sess = create_session()
- eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n12')).order_by(Node.id).all(),
+ eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n12')).order_by(Node.id).all(),
[Node(data='n121'),Node(data='n122'),Node(data='n123')])
eq_(sess.query(Node).filter(Node.parent.has(Node.data=='n122')).all(), [])
eq_(sess.query(Node).filter(~Node.parent.has()).all(), [Node(data='n1')])
User(id=8, address=Address(id=3)),
User(id=9, address=None),
User(id=10, address=None),
- ],
+ ],
list(q)
)
SomeDBInteger,
]:
m = sa.MetaData()
- users = Table('users', m,
+ users = Table('users', m,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
)
self.assert_sql_count(testing.db, go, 1)
class GetterStateTest(_fixtures.FixtureTest):
- """test lazyloader on non-existent attribute returns
+ """test lazyloader on non-existent attribute returns
expected attribute symbols, maintain expected state"""
run_inserts = None
User, Address, sess, a1 = self._u_ad_fixture(False)
eq_(
Address.user.impl.get(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_RETURN_NEVER_SET),
attributes.NEVER_SET
)
User, Address, sess, a1 = self._u_ad_fixture(False)
eq_(
Address.user.impl.get_history(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_RETURN_NEVER_SET),
((), (), ())
)
User, Address, sess, a1 = self._u_ad_fixture(False)
eq_(
Address.user.impl.get(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_NO_INITIALIZE),
attributes.PASSIVE_NO_RESULT
)
User, Address, sess, a1 = self._u_ad_fixture(False)
eq_(
Address.user.impl.get_history(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_NO_INITIALIZE),
attributes.HISTORY_BLANK
)
User, Address, sess, a1 = self._u_ad_fixture(True)
eq_(
Address.user.impl.get(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_NO_INITIALIZE),
attributes.PASSIVE_NO_RESULT
)
User, Address, sess, a1 = self._u_ad_fixture(True)
eq_(
Address.user.impl.get_history(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_NO_INITIALIZE),
attributes.HISTORY_BLANK
)
User, Address, sess, a1 = self._u_ad_fixture(True)
eq_(
Address.user.impl.get(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_RETURN_NEVER_SET),
User(name='ed')
)
User, Address, sess, a1 = self._u_ad_fixture(True)
eq_(
Address.user.impl.get_history(
- attributes.instance_state(a1),
- attributes.instance_dict(a1),
+ attributes.instance_state(a1),
+ attributes.instance_dict(a1),
passive=attributes.PASSIVE_RETURN_NEVER_SET),
((), [User(name='ed'), ], ())
)
#if manualflush and (not loadrel or fake_autoexpire):
# # a flush occurs, we get p2
# assert c1.parent is p2
- #elif not loadrel and not loadfk:
+ #elif not loadrel and not loadfk:
# # problematically - we get None since committed state
# # is empty when c1.parent_id was mutated, since we want
# # to save on selects. this is
User = self.classes.User
sess = Session()
assert_raises_message(
- Exception, "Unknown lockmode 'unknown_mode'",
- self.assert_compile,
+ Exception, "Unknown lockmode 'unknown_mode'",
+ self.assert_compile,
sess.query(User.id).with_lockmode('unknown_mode'), None,
dialect=default.DefaultDialect()
)
mapper(Place, place, properties={
'places': relationship(
Place,
- secondary=place_place,
+ secondary=place_place,
primaryjoin=place.c.place_id==place_place.c.pl1_id,
secondaryjoin=place.c.place_id==place_place.c.pl2_id,
order_by=place_place.c.pl2_id
mapper(Place, place, properties={
'child_places': relationship(
Place,
- secondary=place_place,
+ secondary=place_place,
primaryjoin=place.c.place_id==place_place.c.pl1_id,
secondaryjoin=place.c.place_id==place_place.c.pl2_id,
order_by=place_place.c.pl2_id,
self.tables.transition)
mapper(Place, place, properties={
- 'transitions':relationship(Transition, secondary=place_input,
+ 'transitions':relationship(Transition, secondary=place_input,
passive_updates=False)
})
mapper(Transition, transition)
eq_(sess.query(User).first(), User(id=7, name='fred'))
def test_transient_to_pending_no_pk(self):
- """test that a transient object with no PK attribute
+ """test that a transient object with no PK attribute
doesn't trigger a needless load."""
User, users = self.classes.User, self.tables.users
Address(id=3, email_address='fred3')])))
def test_unsaved_cascade(self):
- """Merge of a transient entity with two child transient
+ """Merge of a transient entity with two child transient
entities, with a bidirectional relationship."""
users, Address, addresses, User = (self.tables.users,
sess = create_session()
# merge empty stuff. goes in as NULL.
- # not sure what this was originally trying to
+ # not sure what this was originally trying to
# test.
u1 = sess.merge(User(id=1))
sess.flush()
sess.flush()
# blow it away from u5, but don't
- # mark as expired. so it would just
+ # mark as expired. so it would just
# be blank.
del u5.data
assert u1.addresses.keys() == ['foo@bar.com']
def test_attribute_cascade(self):
- """Merge of a persistent entity with two child
+ """Merge of a persistent entity with two child
persistent entities."""
users, Address, addresses, User = (self.tables.users,
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
+ 'addresses':relationship(mapper(Address, addresses),
backref='user')
})
load = self.load_tracker(User)
sess2 = create_session()
a2 = sess2.merge(a1)
eq_(
- attributes.get_history(a2, 'user'),
+ attributes.get_history(a2, 'user'),
([u2], (), ())
)
assert a2 in sess2.dirty
sess2 = create_session()
a2 = sess2.merge(a1, load=False)
eq_(
- attributes.get_history(a2, 'user'),
+ attributes.get_history(a2, 'user'),
((), [u1], ())
)
assert a2 not in sess2.dirty
mapper(Order, orders, properties={
- 'items':relationship(mapper(Item, items),
+ 'items':relationship(mapper(Item, items),
secondary=order_items)})
load = self.load_tracker(Order)
uselist = False, backref='user')
})
sess = sessionmaker()()
- u = User(id=7, name="fred",
+ u = User(id=7, name="fred",
address=Address(id=1, email_address='foo@bar.com'))
sess.add(u)
sess.commit()
sess = create_session()
u = User()
- assert_raises_message(sa.exc.InvalidRequestError,
- "load=False option does not support",
+ assert_raises_message(sa.exc.InvalidRequestError,
+ "load=False option does not support",
sess.merge, u, load=False)
def test_no_load_with_backrefs(self):
- """load=False populates relationships in both
+ """load=False populates relationships in both
directions without requiring a load"""
users, Address, addresses, User = (self.tables.users,
self.classes.User)
mapper(User, users, properties={
- 'addresses':relationship(mapper(Address, addresses),
+ 'addresses':relationship(mapper(Address, addresses),
backref='user')
})
def test_dontload_with_eager(self):
"""
-
+
This test illustrates that with load=False, we can't just copy
the committed_state of the merged instance over; since it
references collection objects which themselves are to be merged.
moment I'd rather not support this use case; if you are merging
with load=False, you're typically dealing with caching and the
merged objects shouldnt be 'dirty'.
-
+
"""
users, Address, addresses, User = (self.tables.users,
def test_no_load_preserves_parents(self):
"""Merge with load=False does not trigger a 'delete-orphan'
operation.
-
+
merge with load=False sets attributes without using events.
this means the 'hasparent' flag is not propagated to the newly
merged instance. in fact this works out OK, because the
this collection when _is_orphan() is called, it does not count
as an orphan (i.e. this is the 'optimistic' logic in
mapper._is_orphan().)
-
+
"""
users, Address, addresses, User = (self.tables.users,
mapper(User, users, properties={
'addresses':relationship(mapper(Address, addresses),
- backref='user',
+ backref='user',
cascade="all, delete-orphan")})
sess = create_session()
u = User()
a1 = Address(user=s.merge(User(id=1, name='ed')), email_address='x')
before_id = id(a1.user)
- a2 = Address(user=s.merge(User(id=1, name='jack')),
+ a2 = Address(user=s.merge(User(id=1, name='jack')),
email_address='x')
after_id = id(a1.user)
other_id = id(a2.user)
m = mapper(User, users, properties={
'addresses':relationship(mapper(Address, addresses),
backref='user')})
- user = User(id=8, name='fred',
+ user = User(id=8, name='fred',
addresses=[Address(email_address='user')])
merged_user = sess.merge(user)
assert merged_user in sess.new
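
These hunks are all variations on Session.merge(). For orientation, a small self-contained sketch of the two modes the docstrings above discuss; the mapping, engine and data are assumptions of the sketch, not the fixtures.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

session = Session(engine)
session.add(User(id=7, name='fred'))
session.commit()

# merge() reconciles the given detached state with the persistent identity,
# loading the row if it is not already present in the Session.
detached = User(id=7, name='fred')
merged = session.merge(detached)
assert merged is not detached

# merge(load=False) trusts the given state and skips the SELECT entirely; this
# is the pattern used for cache-backed objects.  The state must be complete and
# clean, and a fresh Session is used here so nothing conflicts.
other_session = Session(engine)
from_cache = other_session.merge(User(id=7, name='fred'), load=False)
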
class M2ONoUseGetLoadingTest(fixtures.MappedTest):
- """Merge a one-to-many. The many-to-one on the other side is set up
+ """Merge a one-to-many. The many-to-one on the other side is set up
so that use_get is False. See if skipping the "m2o" merge
vs. doing it saves on SQL calls.
@classmethod
def define_tables(cls, metadata):
Table('user', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50)),
)
Table('address', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('user_id', Integer, ForeignKey('user.id')),
Column('email', String(50)),
user, address = cls.tables.user, cls.tables.address
mapper(User, user, properties={
'addresses':relationship(Address, backref=
- backref('user',
+ backref('user',
# needlessly complex primaryjoin so that the
# use_get flag is False
primaryjoin=and_(
- user.c.id==address.c.user_id,
+ user.c.id==address.c.user_id,
user.c.id==user.c.id
)
)
User, Address = cls.classes.User, cls.classes.Address
s = Session()
s.add_all([
- User(id=1, name='u1', addresses=[Address(id=1, email='a1'),
+ User(id=1, name='u1', addresses=[Address(id=1, email='a1'),
Address(id=2, email='a2')])
])
s.commit()
# "persistent" - we get at an Address that was already present.
- # With the "skip bidirectional" check removed, the "set" emits SQL
+ # With the "skip bidirectional" check removed, the "set" emits SQL
# for the "previous" version in any case,
# address.user_id is 1, you get a load.
def test_persistent_access_none(self):
User, Address = self.classes.User, self.classes.Address
s = Session()
def go():
- u1 = User(id=1,
+ u1 = User(id=1,
addresses =[Address(id=1), Address(id=2)]
)
u2 = s.merge(u1)
User, Address = self.classes.User, self.classes.Address
s = Session()
def go():
- u1 = User(id=1,
+ u1 = User(id=1,
addresses =[Address(id=1), Address(id=2)]
)
u2 = s.merge(u1)
User, Address = self.classes.User, self.classes.Address
s = Session()
def go():
- u1 = User(id=1,
+ u1 = User(id=1,
addresses =[Address(id=1), Address(id=2)]
)
u2 = s.merge(u1)
User, Address = self.classes.User, self.classes.Address
s = Session()
def go():
- u1 = User(id=1,
- addresses =[Address(id=1), Address(id=2),
+ u1 = User(id=1,
+ addresses =[Address(id=1), Address(id=2),
Address(id=3, email='a3')]
)
u2 = s.merge(u1)
User, Address = self.classes.User, self.classes.Address
s = Session()
def go():
- u1 = User(id=1,
- addresses =[Address(id=1), Address(id=2),
+ u1 = User(id=1,
+ addresses =[Address(id=1), Address(id=2),
Address(id=3, email='a3')]
)
u2 = s.merge(u1)
class MutableMergeTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table("data", metadata,
- Column('id', Integer, primary_key=True,
+ Table("data", metadata,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', PickleType(comparator=operator.eq))
)
class CompositeNullPksTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table("data", metadata,
+ Table("data", metadata,
Column('pk1', String(10), primary_key=True),
Column('pk2', String(10), primary_key=True),
)
r = self.classes.Rock(id=0, description='moldy')
r.bug = bug
m = self.sess.merge(r)
- # we've already passed ticket #2374 problem since merge() returned,
+ # we've already passed ticket #2374 problem since merge() returned,
# but for good measure:
assert m is not r
eq_(m,r)
self._merge_delete_orphan_o2o_with(self.classes.Bug(id=1))
class PolymorphicOnTest(fixtures.MappedTest):
- """Test merge() of polymorphic object when polymorphic_on
+ """Test merge() of polymorphic object when polymorphic_on
isn't a Column"""
@classmethod
def define_tables(cls, metadata):
Table('employees', metadata,
- Column('employee_id', Integer, primary_key=True,
+ Column('employee_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(1), nullable=False),
Column('data', String(50)),
pass
def _setup_polymorphic_on_mappers(self):
- employee_mapper = mapper(self.classes.Employee,
+ employee_mapper = mapper(self.classes.Employee,
self.tables.employees,
- polymorphic_on=case(value=self.tables.employees.c.type,
+ polymorphic_on=case(value=self.tables.employees.c.type,
whens={
'E': 'employee',
'M': 'manager',
"""
self._setup_polymorphic_on_mappers()
- m = self.classes.Manager(employee_id=55, type='M',
+ m = self.classes.Manager(employee_id=55, type='M',
data='original data')
self.sess.add(m)
self.sess.commit()
m = self.classes.Manager(employee_id=55, data='updated data')
merged = self.sess.merge(m)
- # we've already passed ticket #2449 problem since
+ # we've already passed ticket #2449 problem since
# merge() returned, but for good measure:
assert m is not merged
eq_(m,merged)
cls.classes.SubJob
return [
ParentThing(
- container=DataContainer(name="d1",
+ container=DataContainer(name="d1",
jobs=[
SubJob(attr="s1"),
SubJob(attr="s2")
q = s.query(ParentThing).\
options(
subqueryload_all(
- ParentThing.container,
+ ParentThing.container,
DataContainer.jobs.of_type(SubJob)
))
def go():
q = s.query(ParentThing).\
options(
joinedload_all(
- ParentThing.container,
+ ParentThing.container,
DataContainer.jobs.of_type(SubJob)
))
def go():
DataContainer.jobs.of_type(Job_P).\
any(Job_P.id < Job.id)
)
- self.assert_compile(q,
+ self.assert_compile(q,
"SELECT job.id AS job_id, job.type AS job_type, "
"job.container_id "
"AS job_container_id "
DataContainer.jobs.of_type(Job_A).\
any(and_(Job_A.id < Job.id, Job_A.type=='fred'))
)
- self.assert_compile(q,
+ self.assert_compile(q,
"SELECT job.id AS job_id, job.type AS job_type, "
"job.container_id AS job_container_id "
"FROM data_container JOIN job ON data_container.id = job.container_id "
s = Session()
q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_P))
- self.assert_compile(q,
+ self.assert_compile(q,
"SELECT data_container.id AS data_container_id, "
"data_container.name AS data_container_name "
"FROM data_container JOIN (SELECT job.id AS job_id, "
s = Session()
q = s.query(DataContainer).join(DataContainer.jobs.of_type(SubJob))
- # note the of_type() here renders JOIN for the Job->SubJob.
+ # note the of_type() here renders JOIN for the Job->SubJob.
# this is because it's using the SubJob mapper directly within
# query.join(). When we do joinedload() etc., we're instead
# doing a with_polymorphic(), and there we need the join to be
# outer by default.
- self.assert_compile(q,
+ self.assert_compile(q,
"SELECT data_container.id AS data_container_id, "
"data_container.name AS data_container_name "
"FROM data_container JOIN (SELECT job.id AS job_id, "
s = Session()
q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_P))
- self.assert_compile(q,
+ self.assert_compile(q,
"SELECT data_container.id AS data_container_id, "
"data_container.name AS data_container_name "
"FROM data_container JOIN (SELECT job.id AS job_id, "
s = Session()
q = s.query(DataContainer).join(DataContainer.jobs.of_type(Job_A))
- self.assert_compile(q,
+ self.assert_compile(q,
"SELECT data_container.id AS data_container_id, "
"data_container.name AS data_container_name "
"FROM data_container JOIN job AS job_1 "
s = Session()
q = s.query(DataContainer).join(Job_P, DataContainer.jobs)
- self.assert_compile(q,
+ self.assert_compile(q,
"SELECT data_container.id AS data_container_id, "
"data_container.name AS data_container_name "
"FROM data_container JOIN (SELECT job.id AS job_id, "
sess = Session()
u1 = User(name='ed', addresses=[
Address(
- email_address='ed@bar.com',
+ email_address='ed@bar.com',
)
])
sess.expunge_all()
u1 = sess.query(User).\
- options(sa.orm.defer('name'),
+ options(sa.orm.defer('name'),
sa.orm.defer('addresses.email_address')).\
get(u1.id)
assert 'name' not in u1.__dict__
u2 = pickle.loads(pickle.dumps(u1))
def test_collection_setstate(self):
- """test a particular cycle that requires CollectionAdapter
+ """test a particular cycle that requires CollectionAdapter
to not rely upon InstanceState to deserialize."""
m = MetaData()
- c1 = Table('c1', m,
- Column('parent_id', String,
+ c1 = Table('c1', m,
+ Column('parent_id', String,
ForeignKey('p.id'), primary_key=True)
)
c2 = Table('c2', m,
- Column('parent_id', String,
+ Column('parent_id', String,
ForeignKey('p.id'), primary_key=True)
)
p = Table('p', m,
mapper(User, users, properties={
'addresses':relationship(
- Address,
+ Address,
collection_class=
attribute_mapped_collection('email_address')
)
for loads, dumps in picklers():
repickled = loads(dumps(u1))
eq_(u1.addresses, repickled.addresses)
- eq_(repickled.addresses['email1'],
+ eq_(repickled.addresses['email1'],
Address(email_address="email1"))
def test_column_mapped_collection(self):
mapper(User, users, properties={
'addresses':relationship(
- Address,
+ Address,
collection_class=
column_mapped_collection(
addresses.c.email_address)
for loads, dumps in picklers():
repickled = loads(dumps(u1))
eq_(u1.addresses, repickled.addresses)
- eq_(repickled.addresses['email1'],
+ eq_(repickled.addresses['email1'],
Address(email_address="email1"))
def test_composite_column_mapped_collection(self):
mapper(User, users, properties={
'addresses':relationship(
- Address,
+ Address,
collection_class=
column_mapped_collection([
addresses.c.id,
for loads, dumps in picklers():
repickled = loads(dumps(u1))
eq_(u1.addresses, repickled.addresses)
- eq_(repickled.addresses[(1, 'email1')],
+ eq_(repickled.addresses[(1, 'email1')],
Address(id=1, email_address="email1"))
class PolymorphicDeferredTest(fixtures.MappedTest):
test_needs_fk=True
)
def test_rebuild_state(self):
- """not much of a 'test', but illustrate how to
+ """not much of a 'test', but illustrate how to
remove instance-level state before pickling.
"""
Column('sub_id', Integer, ForeignKey('rel_sub.id'))
)
cls.rel_sub = Table('rel_sub', m,
- Column('id', Integer, ForeignKey('base_w_sub_rel.id'),
+ Column('id', Integer, ForeignKey('base_w_sub_rel.id'),
primary_key=True)
)
cls.base = Table('base', m,
Column('id', Integer, primary_key=True),
)
cls.sub = Table('sub', m,
- Column('id', Integer, ForeignKey('base.id'),
+ Column('id', Integer, ForeignKey('base.id'),
primary_key=True),
)
cls.sub_w_base_rel = Table('sub_w_base_rel', m,
- Column('id', Integer, ForeignKey('base.id'),
+ Column('id', Integer, ForeignKey('base.id'),
primary_key=True),
Column('base_id', Integer, ForeignKey('base.id'))
)
- cls.sub_w_sub_rel = Table('sub_w_sub_rel', m,
- Column('id', Integer, ForeignKey('base.id'),
+ cls.sub_w_sub_rel = Table('sub_w_sub_rel', m,
+ Column('id', Integer, ForeignKey('base.id'),
primary_key=True),
Column('sub_id', Integer, ForeignKey('sub.id'))
)
Column('base_id', Integer, ForeignKey('base.id'))
)
- cls.three_tab_a = Table('three_tab_a', m,
+ cls.three_tab_a = Table('three_tab_a', m,
Column('id', Integer, primary_key=True),
)
- cls.three_tab_b = Table('three_tab_b', m,
+ cls.three_tab_b = Table('three_tab_b', m,
Column('id', Integer, primary_key=True),
Column('aid', Integer, ForeignKey('three_tab_a.id'))
)
- cls.three_tab_c = Table('three_tab_c', m,
+ cls.three_tab_c = Table('three_tab_c', m,
Column('id', Integer, primary_key=True),
Column('aid', Integer, ForeignKey('three_tab_a.id')),
Column('bid', Integer, ForeignKey('three_tab_b.id'))
else:
return True
return relationships.JoinCondition(
- self.three_tab_a,
+ self.three_tab_a,
self.three_tab_b,
- self.three_tab_a,
+ self.three_tab_a,
self.three_tab_b,
support_sync=False,
can_be_synced_fn=_can_sync,
def _join_fixture_m2m(self, **kw):
return relationships.JoinCondition(
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
self.m2mright,
secondary=self.m2msecondary,
**kw
def _join_fixture_o2m(self, **kw):
return relationships.JoinCondition(
- self.left,
- self.right,
- self.left,
+ self.left,
+ self.right,
+ self.left,
self.right,
**kw
)
def _join_fixture_m2o(self, **kw):
return relationships.JoinCondition(
- self.right,
- self.left,
+ self.right,
+ self.left,
self.right,
self.left,
**kw
self.composite_selfref,
self.composite_selfref,
self.composite_selfref,
- remote_side=set([self.composite_selfref.c.id,
+ remote_side=set([self.composite_selfref.c.id,
self.composite_selfref.c.group_id]),
**kw
)
)
def _join_fixture_o2m_joined_sub_to_base(self, **kw):
- left = self.base.join(self.sub_w_base_rel,
+ left = self.base.join(self.sub_w_base_rel,
self.base.c.id==self.sub_w_base_rel.c.id)
return relationships.JoinCondition(
left,
def _join_fixture_m2o_joined_sub_to_sub_on_base(self, **kw):
# this is a late add - a variant of the test case
- # in #2491 where we join on the base cols instead. only
+ # in #2491 where we join on the base cols instead. only
# m2o has a problem at the time of this test.
left = self.base.join(self.sub, self.base.c.id==self.sub.c.id)
right = self.base.join(self.sub_w_base_rel, self.base.c.id==self.sub_w_base_rel.c.id)
return relationships.JoinCondition(
- left,
+ left,
right,
self.sub,
self.sub_w_base_rel,
def _join_fixture_m2o_sub_to_joined_sub(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
- right = self.base.join(self.right_w_base_rel,
+ right = self.base.join(self.right_w_base_rel,
self.base.c.id==self.right_w_base_rel.c.id)
return relationships.JoinCondition(
self.right_w_base_rel,
def _join_fixture_m2o_sub_to_joined_sub_func(self, **kw):
# see test.orm.test_mapper:MapperTest.test_add_column_prop_deannotate,
- right = self.base.join(self.right_w_base_rel,
+ right = self.base.join(self.right_w_base_rel,
self.base.c.id==self.right_w_base_rel.c.id)
return relationships.JoinCondition(
self.right_w_base_rel,
)
def _join_fixture_o2o_joined_sub_to_base(self, **kw):
- left = self.base.join(self.sub,
+ left = self.base.join(self.sub,
self.base.c.id==self.sub.c.id)
# see test_relationships->AmbiguousJoinInterpretedAsSelfRef
return relationships.JoinCondition(
left,
self.sub,
- left,
+ left,
self.sub,
)
def _join_fixture_o2m_to_annotated_func(self, **kw):
return relationships.JoinCondition(
- self.left,
- self.right,
- self.left,
+ self.left,
+ self.right,
+ self.left,
self.right,
primaryjoin=self.left.c.id==
foreign(func.foo(self.right.c.lid)),
def _join_fixture_o2m_to_oldstyle_func(self, **kw):
return relationships.JoinCondition(
- self.left,
- self.right,
- self.left,
+ self.left,
+ self.right,
+ self.left,
self.right,
primaryjoin=self.left.c.id==
func.foo(self.right.c.lid),
fn
)
- def _assert_raises_no_relevant_fks(self, fn, expr, relname,
+ def _assert_raises_no_relevant_fks(self, fn, expr, relname,
primary, *arg, **kw):
assert_raises_message(
- exc.ArgumentError,
+ exc.ArgumentError,
r"Could not locate any relevant foreign key columns "
r"for %s join condition '%s' on relationship %s. "
r"Ensure that referencing columns are associated with "
fn, *arg, **kw
)
- def _assert_raises_no_equality(self, fn, expr, relname,
+ def _assert_raises_no_equality(self, fn, expr, relname,
primary, *arg, **kw):
assert_raises_message(
- sa.exc.ArgumentError,
+ sa.exc.ArgumentError,
"Could not locate any simple equality expressions "
"involving locally mapped foreign key columns for %s join "
"condition '%s' on relationship %s. "
exc.AmbiguousForeignKeysError,
"Could not determine join condition between "
"parent/child tables on relationship %s - "
- "there are no foreign keys linking these tables. "
+ "there are no foreign keys linking these tables. "
% (relname,),
fn, *arg, **kw)
eq_(
joincond.local_remote_pairs,
[
- (self.left.c.x, self.right.c.x),
- (self.left.c.x, self.right.c.y),
+ (self.left.c.x, self.right.c.x),
+ (self.left.c.x, self.right.c.y),
(self.left.c.y, self.right.c.x),
(self.left.c.y, self.right.c.y)
]
eq_(
joincond.local_remote_pairs,
[
- (self.left.c.x, self.right.c.x),
- (self.left.c.x, self.right.c.y),
+ (self.left.c.x, self.right.c.x),
+ (self.left.c.x, self.right.c.y),
(self.left.c.y, self.right.c.x),
(self.left.c.y, self.right.c.y)
]
joincond = self._join_fixture_m2o_composite_selfref()
eq_(
joincond.remote_columns,
- set([self.composite_selfref.c.id,
+ set([self.composite_selfref.c.id,
self.composite_selfref.c.group_id])
)
joincond = self._join_fixture_m2m()
eq_(
joincond.local_remote_pairs,
- [(self.m2mleft.c.id, self.m2msecondary.c.lid),
+ [(self.m2mleft.c.id, self.m2msecondary.c.lid),
(self.m2mright.c.id, self.m2msecondary.c.rid)]
)
)
eq_(
joincond.local_remote_pairs,
- [(self.m2mleft.c.id, self.m2msecondary.c.lid),
+ [(self.m2mleft.c.id, self.m2msecondary.c.lid),
(self.m2mright.c.id, self.m2msecondary.c.rid)]
)
"should be counted as containing a foreign "
"key reference to the parent table.",
relationships.JoinCondition,
- self.left,
- self.right_multi_fk,
- self.left,
- self.right_multi_fk,
+ self.left,
+ self.right_multi_fk,
+ self.left,
+ self.right_multi_fk,
)
def test_determine_join_no_fks_o2m(self):
self._assert_raises_no_join(
relationships.JoinCondition,
"None", None,
- self.left,
- self.selfref,
- self.left,
- self.selfref,
+ self.left,
+ self.selfref,
+ self.left,
+ self.selfref,
)
self._assert_raises_ambig_join(
relationships.JoinCondition,
"None", self.m2msecondary_ambig_fks,
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
- self.m2mright,
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
+ self.m2mright,
secondary=self.m2msecondary_ambig_fks
)
self._assert_raises_no_join(
relationships.JoinCondition,
"None", self.m2msecondary_no_fks,
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
- self.m2mright,
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
+ self.m2mright,
secondary=self.m2msecondary_no_fks
)
def _join_fixture_fks_ambig_m2m(self):
return relationships.JoinCondition(
- self.m2mleft,
- self.m2mright,
- self.m2mleft,
- self.m2mright,
+ self.m2mleft,
+ self.m2mright,
+ self.m2mleft,
+ self.m2mright,
secondary=self.m2msecondary_ambig_fks,
consider_as_foreign_keys=[
- self.m2msecondary_ambig_fks.c.lid1,
+ self.m2msecondary_ambig_fks.c.lid1,
self.m2msecondary_ambig_fks.c.rid1]
)
joincond = self._join_fixture_o2m_selfref()
left = select([joincond.parent_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- left,
- joincond.child_selectable,
+ left,
+ joincond.child_selectable,
True)
self.assert_compile(
pj, "pj.id = selfref.sid"
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
+ joincond.parent_selectable,
+ right,
True)
self.assert_compile(
pj, "selfref.id = pj.sid"
def test_join_targets_o2m_plain(self):
joincond = self._join_fixture_o2m()
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- joincond.child_selectable,
+ joincond.parent_selectable,
+ joincond.child_selectable,
False)
self.assert_compile(
pj, "lft.id = rgt.lid"
joincond = self._join_fixture_o2m()
left = select([joincond.parent_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- left,
- joincond.child_selectable,
+ left,
+ joincond.child_selectable,
True)
self.assert_compile(
pj, "pj.id = rgt.lid"
joincond = self._join_fixture_o2m()
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
+ joincond.parent_selectable,
+ right,
True)
self.assert_compile(
pj, "lft.id = pj.lid"
joincond = self._join_fixture_o2m_composite_selfref()
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
+ joincond.parent_selectable,
+ right,
True)
self.assert_compile(
- pj,
+ pj,
"pj.group_id = composite_selfref.group_id "
"AND composite_selfref.id = pj.parent_id"
)
joincond = self._join_fixture_m2o_composite_selfref()
right = select([joincond.child_selectable]).alias('pj')
pj, sj, sec, adapter, ds = joincond.join_targets(
- joincond.parent_selectable,
- right,
+ joincond.parent_selectable,
+ right,
True)
self.assert_compile(
- pj,
+ pj,
"pj.group_id = composite_selfref.group_id "
"AND pj.id = composite_selfref.parent_id"
)
def _test_lazy_clause_o2m_reverse(self):
joincond = self._join_fixture_o2m()
self.assert_compile(
- relationships.create_lazy_clause(joincond,
+ relationships.create_lazy_clause(joincond,
reverse_direction=True),
""
)
from sqlalchemy import exc
class _RelationshipErrors(object):
- def _assert_raises_no_relevant_fks(self, fn, expr, relname,
+ def _assert_raises_no_relevant_fks(self, fn, expr, relname,
primary, *arg, **kw):
assert_raises_message(
- sa.exc.ArgumentError,
+ sa.exc.ArgumentError,
"Could not locate any relevant foreign key columns "
"for %s join condition '%s' on relationship %s. "
"Ensure that referencing columns are associated with "
fn, *arg, **kw
)
- def _assert_raises_no_equality(self, fn, expr, relname,
+ def _assert_raises_no_equality(self, fn, expr, relname,
primary, *arg, **kw):
assert_raises_message(
- sa.exc.ArgumentError,
+ sa.exc.ArgumentError,
"Could not locate any simple equality expressions "
"involving locally mapped foreign key columns for %s join "
"condition '%s' on relationship %s. "
@classmethod
def define_tables(cls, metadata):
Table("tbl_a", metadata,
- Column("id", Integer, primary_key=True,
+ Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
Column("name", String(128)))
Table("tbl_b", metadata,
- Column("id", Integer, primary_key=True,
+ Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
Column("name", String(128)))
Table("tbl_c", metadata,
- Column("id", Integer, primary_key=True,
+ Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
- Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
+ Column("tbl_a_id", Integer, ForeignKey("tbl_a.id"),
nullable=False),
Column("name", String(128)))
Table("tbl_d", metadata,
- Column("id", Integer, primary_key=True,
+ Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
- Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
+ Column("tbl_c_id", Integer, ForeignKey("tbl_c.id"),
nullable=False),
Column("tbl_b_id", Integer, ForeignKey("tbl_b.id")),
Column("name", String(128)))
cls.tables.tbl_d)
mapper(A, tbl_a, properties=dict(
- c_rows=relationship(C, cascade="all, delete-orphan",
+ c_rows=relationship(C, cascade="all, delete-orphan",
backref="a_row")))
mapper(B, tbl_b)
mapper(C, tbl_c, properties=dict(
- d_rows=relationship(D, cascade="all, delete-orphan",
+ d_rows=relationship(D, cascade="all, delete-orphan",
backref="c_row")))
mapper(D, tbl_d, properties=dict(
b_row=relationship(B)))
class CompositeSelfRefFKTest(fixtures.MappedTest):
"""Tests a composite FK where, in
- the relationship(), one col points
+ the relationship(), one col points
to itself in the same table.
this is a very unusual case::
@classmethod
def define_tables(cls, metadata):
Table('company_t', metadata,
- Column('company_id', Integer, primary_key=True,
+ Column('company_id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(30)))
mapper(Company, company_t)
mapper(Employee, employee_t, properties= {
- 'company':relationship(Company,
+ 'company':relationship(Company,
primaryjoin=employee_t.c.company_id==
- company_t.c.company_id,
+ company_t.c.company_id,
backref='employees'),
'reports_to':relationship(Employee, primaryjoin=
sa.and_(
),
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
foreign_keys=[employee_t.c.reports_to_id, employee_t.c.company_id],
- backref=backref('employees',
- foreign_keys=[employee_t.c.reports_to_id,
+ backref=backref('employees',
+ foreign_keys=[employee_t.c.reports_to_id,
employee_t.c.company_id]))
})
'company':relationship(Company, backref='employees'),
'reports_to':relationship(Employee,
remote_side=[employee_t.c.emp_id, employee_t.c.company_id],
- foreign_keys=[employee_t.c.reports_to_id,
+ foreign_keys=[employee_t.c.reports_to_id,
employee_t.c.company_id],
backref=backref('employees', foreign_keys=
[employee_t.c.reports_to_id, employee_t.c.company_id])
(employee_t.c.reports_to_id, employee_t.c.emp_id),
(employee_t.c.company_id, employee_t.c.company_id)
],
- foreign_keys=[employee_t.c.reports_to_id,
+ foreign_keys=[employee_t.c.reports_to_id,
employee_t.c.company_id],
backref=backref('employees', foreign_keys=
[employee_t.c.reports_to_id, employee_t.c.company_id])
Column('z', Integer),
)
Table("child", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('x', Integer),
Column('y', Integer),
@classmethod
def define_tables(cls, metadata):
Table("tableA", metadata,
- Column("id",Integer,primary_key=True,
+ Column("id",Integer,primary_key=True,
test_needs_autoincrement=True),
Column("foo",Integer,),
test_needs_fk=True)
pass
def test_onetoone_switch(self):
- """test that active history is enabled on a
+ """test that active history is enabled on a
one-to-many/one that has use_get==True"""
tableB, A, B, tableA = (self.tables.tableB,
sess.flush()
def test_delete_cascade_BtoA(self):
- """No 'blank the PK' error when the child is to
+ """No 'blank the PK' error when the child is to
be deleted as part of a cascade"""
tableB, A, B, tableA = (self.tables.tableB,
sa.orm.clear_mappers()
def test_delete_cascade_AtoB(self):
- """No 'blank the PK' error when the child is to
+ """No 'blank the PK' error when the child is to
be deleted as part of a cascade"""
tableB, A, B, tableA = (self.tables.tableB,
assert b1 not in sess
class UniqueColReferenceSwitchTest(fixtures.MappedTest):
- """test a relationship based on a primary
+ """test a relationship based on a primary
join against a unique non-pk column"""
@classmethod
def define_tables(cls, metadata):
Table("table_a", metadata,
- Column("id", Integer, primary_key=True,
+ Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
- Column("ident", String(10), nullable=False,
+ Column("ident", String(10), nullable=False,
unique=True),
)
Table("table_b", metadata,
- Column("id", Integer, primary_key=True,
+ Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
- Column("a_ident", String(10),
- ForeignKey('table_a.ident'),
+ Column("a_ident", String(10),
+ ForeignKey('table_a.ident'),
nullable=False),
)
eq_(old.id, new.id)
class FKEquatedToConstantTest(fixtures.MappedTest):
- """test a relationship with a non-column entity in the primary join,
- is not viewonly, and also has the non-column's clause mentioned in the
+ """test a relationship with a non-column entity in the primary join,
+ is not viewonly, and also has the non-column's clause mentioned in the
foreign keys list.
"""
@classmethod
def define_tables(cls, metadata):
- Table('tags', metadata, Column("id", Integer, primary_key=True,
+ Table('tags', metadata, Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
Column("data", String(50)),
)
- Table('tag_foo', metadata,
- Column("id", Integer, primary_key=True,
+ Table('tag_foo', metadata,
+ Column("id", Integer, primary_key=True,
test_needs_autoincrement=True),
Column('tagid', Integer),
Column("data", String(50)),
pass
mapper(Tag, tags, properties={
- 'foo':relationship(TagInstance,
+ 'foo':relationship(TagInstance,
primaryjoin=sa.and_(tag_foo.c.data=='iplc_case',
tag_foo.c.tagid==tags.c.id),
foreign_keys=[tag_foo.c.tagid, tag_foo.c.data],
# relationship works
eq_(
- sess.query(Tag).all(),
+ sess.query(Tag).all(),
[Tag(data='some tag', foo=[TagInstance(data='iplc_case')])]
)
# both TagInstances were persisted
eq_(
- sess.query(TagInstance).order_by(TagInstance.data).all(),
+ sess.query(TagInstance).order_by(TagInstance.data).all(),
[TagInstance(data='iplc_case'), TagInstance(data='not_iplc_case')]
)
@classmethod
def define_tables(cls, metadata):
- Table('users', metadata,
- Column('id', Integer, primary_key=True,
+ Table('users', metadata,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(50))
)
- Table('addresses', metadata,
- Column('id', Integer, primary_key=True,
+ Table('addresses', metadata,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('user_id', Integer),
Column('email', String(50))
mapper(User, users, properties={
- 'addresses':relationship(Address,
- primaryjoin=addresses.c.user_id==users.c.id,
+ 'addresses':relationship(Address,
+ primaryjoin=addresses.c.user_id==users.c.id,
foreign_keys=addresses.c.user_id,
backref='user')
})
@classmethod
def define_tables(cls, metadata):
subscriber_table = Table('subscriber', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
)
address_table = Table('address',
metadata,
- Column('subscriber_id', Integer,
+ Column('subscriber_id', Integer,
ForeignKey('subscriber.id'), primary_key=True),
Column('type', String(1), primary_key=True),
)
def setup_mappers(cls):
subscriber, address = cls.tables.subscriber, cls.tables.address
- subscriber_and_address = subscriber.join(address,
- and_(address.c.subscriber_id==subscriber.c.id,
+ subscriber_and_address = subscriber.join(address,
+ and_(address.c.subscriber_id==subscriber.c.id,
address.c.type.in_(['A', 'B', 'C'])))
class Address(cls.Comparable):
mapper(Subscriber, subscriber_and_address, properties={
'id':[subscriber.c.id, address.c.subscriber_id],
- 'addresses' : relationship(Address,
+ 'addresses' : relationship(Address,
backref=backref("customer"))
})
eq_(
sess.query(Subscriber).order_by(Subscriber.type).all(),
[
- Subscriber(id=1, type=u'A'),
- Subscriber(id=2, type=u'B'),
+ Subscriber(id=1, type=u'A'),
+ Subscriber(id=2, type=u'B'),
Subscriber(id=2, type=u'C')
]
)
'dingaling':relationship(Dingaling)
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(sa.exc.ArgumentError,
r"reverse_property 'dingaling' on relationship "
"User.addresses references "
"relationship Address.dingaling, which does not "
- "reference mapper Mapper\|User\|users",
+ "reference mapper Mapper\|User\|users",
configure_mappers)
class JoinConditionErrorTest(fixtures.TestBase):
def test_only_column_elements(self):
m = MetaData()
- t1 = Table('t1', m,
+ t1 = Table('t1', m,
Column('id', Integer, primary_key=True),
Column('foo_id', Integer, ForeignKey('t2.id')),
)
c2 = relationship(C1, **kw)
assert_raises_message(
- sa.exc.ArgumentError,
+ sa.exc.ArgumentError,
"Column-based expression object expected "
- "for argument '%s'; got: '%s', type %r" %
+ "for argument '%s'; got: '%s', type %r" %
(argname, arg[0], type(arg[0])),
configure_mappers)
def test_fk_error_not_raised_unrelated(self):
m = MetaData()
- t1 = Table('t1', m,
+ t1 = Table('t1', m,
Column('id', Integer, primary_key=True),
Column('foo_id', Integer, ForeignKey('t2.nonexistent_id')),
)
def test_join_error_raised(self):
m = MetaData()
- t1 = Table('t1', m,
+ t1 = Table('t1', m,
Column('id', Integer, primary_key=True),
)
t2 = Table('t2', m,
clear_mappers()
class TypeMatchTest(fixtures.MappedTest):
- """test errors raised when trying to add items
+ """test errors raised when trying to add items
whose type is not handled by a relationship"""
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
- Column('aid', Integer, primary_key=True,
+ Column('aid', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(30)))
Table("b", metadata,
- Column('bid', Integer, primary_key=True,
+ Column('bid', Integer, primary_key=True,
test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('data', String(30)))
Table("c", metadata,
- Column('cid', Integer, primary_key=True,
+ Column('cid', Integer, primary_key=True,
test_needs_autoincrement=True),
Column("b_id", Integer, ForeignKey("b.bid")),
Column('data', String(30)))
Table("d", metadata,
- Column('did', Integer, primary_key=True,
+ Column('did', Integer, primary_key=True,
test_needs_autoincrement=True),
Column("a_id", Integer, ForeignKey("a.aid")),
Column('data', String(30)))
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
+ "Attempting to flush an item",
sess.flush)
def test_o2m_nopoly_onflush(self):
sess.add(b1)
sess.add(c1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
+ "Attempting to flush an item",
sess.flush)
def test_m2o_nopoly_onflush(self):
sess.add(b1)
sess.add(d1)
assert_raises_message(sa.orm.exc.FlushError,
- "Attempting to flush an item",
+ "Attempting to flush an item",
sess.flush)
def test_m2o_oncascade(self):
d1.a = b1
sess = create_session()
assert_raises_message(AssertionError,
- "doesn't handle objects of type",
+ "doesn't handle objects of type",
sess.add, d1)
class TypedAssociationTable(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)),
)
class B(fixtures.ComparableEntity):pass
mapper(A, t1, properties={
- 'bs':relationship(B, secondary=t1t2,
+ 'bs':relationship(B, secondary=t1t2,
backref=backref('as_', viewonly=True))
})
mapper(B, t2)
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)),
Column('t1id', Integer, ForeignKey('t1.id')))
Table("t3", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)),
Column('t2id', Integer, ForeignKey('t2.id')))
@classmethod
def define_tables(cls, metadata):
Table("t1", metadata,
- Column('t1id', Integer, primary_key=True,
+ Column('t1id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)))
Table("t2", metadata,
- Column('t2id', Integer, primary_key=True,
+ Column('t2id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)),
Column('t1id_ref', Integer, ForeignKey('t1.t1id')))
Table("t3", metadata,
- Column('t3id', Integer, primary_key=True,
+ Column('t3id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(40)),
Column('t2id_ref', Integer, ForeignKey('t2.t2id')))
@classmethod
def define_tables(cls, metadata):
Table('foos', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(50)))
- Table('bars', metadata, Column('id', Integer, primary_key=True,
+ Table('bars', metadata, Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('fid1', Integer, ForeignKey('foos.id')),
Column('fid2', Integer, ForeignKey('foos.id')),
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(50)),
Column('t1id', Integer, ForeignKey('t1.id')))
Table('t3', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2tot3', metadata,
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
- Column('id', String(50), primary_key=True,
+ Column('id', String(50), primary_key=True,
test_needs_autoincrement=True),
Column('data', String(50)))
Table('t2', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(50)),
Column('t1id', String(50)))
't1s':relationship(T1, backref='parent')
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
"both of the same direction <symbol 'ONETOMANY>. Did you "
- "mean to set remote_side on the many-to-one side ?",
+ "mean to set remote_side on the many-to-one side ?",
configure_mappers)
def test_m2o_backref(self):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1,
- backref=backref('parent', remote_side=t1.c.id),
+ 't1s':relationship(T1,
+ backref=backref('parent', remote_side=t1.c.id),
remote_side=t1.c.id)
})
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(sa.exc.ArgumentError,
"T1.t1s and back-reference T1.parent are "
"both of the same direction <symbol 'MANYTOONE>. Did you "
- "mean to set remote_side on the many-to-one side ?",
+ "mean to set remote_side on the many-to-one side ?",
configure_mappers)
def test_o2m_explicit(self):
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(sa.exc.ArgumentError,
"both of the same direction <symbol 'ONETOMANY>. Did you "
- "mean to set remote_side on the many-to-one side ?",
+ "mean to set remote_side on the many-to-one side ?",
configure_mappers)
def test_m2o_explicit(self):
T1, t1 = self.classes.T1, self.tables.t1
mapper(T1, t1, properties={
- 't1s':relationship(T1, back_populates='parent',
+ 't1s':relationship(T1, back_populates='parent',
remote_side=t1.c.id),
- 'parent':relationship(T1, back_populates='t1s',
+ 'parent':relationship(T1, back_populates='t1s',
remote_side=t1.c.id)
})
# can't be sure of ordering here
- assert_raises_message(sa.exc.ArgumentError,
+ assert_raises_message(sa.exc.ArgumentError,
"both of the same direction <symbol 'MANYTOONE>. Did you "
- "mean to set remote_side on the many-to-one side ?",
+ "mean to set remote_side on the many-to-one side ?",
configure_mappers)
class AmbiguousFKResolutionTest(_RelationshipErrors, fixtures.MappedTest):
Column('aid_1', Integer, ForeignKey('a.id')),
Column('aid_2', Integer, ForeignKey('a.id')),
)
- Table("atob", metadata,
+ Table("atob", metadata,
Column('aid', Integer),
Column('bid', Integer),
)
- Table("atob_ambiguous", metadata,
+ Table("atob_ambiguous", metadata,
Column('aid1', Integer, ForeignKey('a.id')),
Column('bid1', Integer, ForeignKey('b.id')),
Column('aid2', Integer, ForeignKey('a.id')),
A, B = self.classes.A, self.classes.B
a, b, a_to_b = self.tables.a, self.tables.b, self.tables.atob_ambiguous
mapper(A, a, properties={
- 'bs':relationship(B, secondary=a_to_b,
+ 'bs':relationship(B, secondary=a_to_b,
foreign_keys=[a_to_b.c.aid1, a_to_b.c.bid1])
})
mapper(B, b)
self.tables.bars_with_fks,
self.tables.foos)
- # very unique - the join between parent/child
+ # very unique - the join between parent/child
# has no fks, but there is an fk join between two other
# tables in the join condition, for those users that try creating
# these big-long-string-of-joining-many-tables primaryjoins.
self._assert_raises_no_equality(
sa.orm.configure_mappers,
"bars_with_fks.fid = foos_with_fks.id "
- "AND foos_with_fks.id = foos.id",
+ "AND foos_with_fks.id = foos.id",
"Foo.bars", "primary"
)
primaryjoin=foos.c.id>foos.c.fid)})
mapper(Bar, bars)
- self._assert_raises_no_relevant_fks(configure_mappers,
+ self._assert_raises_no_relevant_fks(configure_mappers,
"foos.id > foos.fid", "Foo.foos", "primary"
)
foreign_keys=[foos.c.fid])})
mapper(Bar, bars)
- self._assert_raises_no_equality(configure_mappers,
+ self._assert_raises_no_equality(configure_mappers,
"foos.id > foos.fid", "Foo.foos", "primary"
)
Column('id', Integer, primary_key=True))
Table('foobars_with_fks', metadata,
- Column('fid', Integer, ForeignKey('foos.id')),
+ Column('fid', Integer, ForeignKey('foos.id')),
Column('bid', Integer, ForeignKey('bars.id'))
)
Table('foobars_with_many_columns', metadata,
- Column('fid', Integer),
+ Column('fid', Integer),
Column('bid', Integer),
- Column('fid1', Integer),
+ Column('fid1', Integer),
Column('bid1', Integer),
- Column('fid2', Integer),
+ Column('fid2', Integer),
Column('bid2', Integer),
)
self._assert_raises_no_join(
configure_mappers,
- "Foo.bars",
+ "Foo.bars",
"foobars"
)
self._assert_raises_no_join(
configure_mappers,
- "Foo.bars",
+ "Foo.bars",
"foobars"
)
self.tables.foos)
mapper(Foo, foos, properties={
- 'bars': relationship(Bar, secondary=foobars,
+ 'bars': relationship(Bar, secondary=foobars,
primaryjoin=foos.c.id==foobars.c.fid,
secondaryjoin=foobars.c.bid==bars.c.id)})
mapper(Bar, bars)
sa.orm.clear_mappers()
mapper(Foo, foos, properties={
- 'bars': relationship(Bar,
- secondary=foobars_with_many_columns,
+ 'bars': relationship(Bar,
+ secondary=foobars_with_many_columns,
primaryjoin=foos.c.id==
foobars_with_many_columns.c.fid,
secondaryjoin=foobars_with_many_columns.c.bid==
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
+ configure_mappers,
'foos.id > foobars.fid',
"Foo.bars",
"primary")
secondaryjoin=foobars_with_fks.c.bid<=bars.c.id)})
mapper(Bar, bars)
self._assert_raises_no_equality(
- configure_mappers,
+ configure_mappers,
'foos.id > foobars_with_fks.fid',
"Foo.bars",
"primary")
User, users = self.classes.User, self.tables.users
mapper(User, users, properties={
- 'name':column_property(users.c.name,
+ 'name':column_property(users.c.name,
active_history=True)
})
u1 = User(name='jack')
other.description == self.description
mapper(Order, orders, properties={
'composite':composite(
- MyComposite,
- orders.c.description,
+ MyComposite,
+ orders.c.description,
orders.c.isopen,
active_history=True)
})
selectable = select(["x", "y", "z"]).alias()
assert_raises_message(
- sa.exc.ArgumentError,
+ sa.exc.ArgumentError,
"could not assemble any primary key columns",
mapper, Subset, selectable
)
s.expire_all()
u.name = 'newname'
- # can't predict result here
+ # can't predict result here
# deterministically, depending on if
# 'name' or 'addresses' is tested first
mod = s.is_modified(u)
User, Order, Item = self.classes.User, \
self.classes.Order, self.classes.Item
mapper(User, self.tables.users, properties={
- 'orders':relationship(Order),
+ 'orders':relationship(Order),
})
mapper(Order, self.tables.orders, properties={
'items':relationship(Item, secondary=self.tables.order_items),
class InheritanceToRelatedTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
- Table('foo', metadata,
+ Table('foo', metadata,
Column("id", Integer, primary_key=True),
Column("type", String(50)),
Column("related_id", Integer, ForeignKey("related.id"))
mapper(cls.classes.Foo, cls.tables.foo, properties={
'related':relationship(cls.classes.Related)
}, polymorphic_on=cls.tables.foo.c.type)
- mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar',
+ mapper(cls.classes.Bar, cls.tables.bar, polymorphic_identity='bar',
inherits=cls.classes.Foo)
- mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz',
+ mapper(cls.classes.Baz, cls.tables.baz, polymorphic_identity='baz',
inherits=cls.classes.Foo)
mapper(cls.classes.Related, cls.tables.related)
try:
conn = testing.db.connect()
trans = conn.begin()
- sess = create_session(bind=conn, autocommit=False,
+ sess = create_session(bind=conn, autocommit=False,
autoflush=True)
u1 = User(name='u1')
sess.add(u1)
mapper(Address, addresses)
engine2 = engines.testing_engine()
- sess = create_session(autocommit=True, autoflush=False,
+ sess = create_session(autocommit=True, autoflush=False,
twophase=True)
sess.bind_mapper(User, testing.db)
sess.bind_mapper(Address, engine2)
sess.add(u2)
def go():
sess.rollback()
- assert_warnings(go,
+ assert_warnings(go,
["Session's state has been changed on a "
"non-active transaction - this state "
"will be discarded."],
u1.name = 'newname'
def go():
sess.rollback()
- assert_warnings(go,
+ assert_warnings(go,
["Session's state has been changed on a "
"non-active transaction - this state "
"will be discarded."],
sess.delete(u1)
def go():
sess.rollback()
- assert_warnings(go,
+ assert_warnings(go,
["Session's state has been changed on a "
"non-active transaction - this state "
"will be discarded."],
users, addresses = cls.tables.users, cls.tables.addresses
mapper(User, users, properties={
'addresses':relationship(Address, backref='user',
- cascade="all, delete-orphan",
+ cascade="all, delete-orphan",
order_by=addresses.c.id),
})
mapper(Address, addresses)
class CleanSavepointTest(FixtureTest):
"""test the behavior for [ticket:2452] - rollback on begin_nested()
only expires objects tracked as being modified in that transaction.
-
+
"""
run_inserts = None
def test_rollback_ignores_clean_on_savepoint_agg_upd_eval(self):
User, users = self.classes.User, self.tables.users
def update_fn(s, u2):
- s.query(User).filter_by(name='u2').update(dict(name='u2modified'),
+ s.query(User).filter_by(name='u2').update(dict(name='u2modified'),
synchronize_session='evaluate')
self._run_test(update_fn)
def test_rollback_ignores_clean_on_savepoint_agg_upd_fetch(self):
User, users = self.classes.User, self.tables.users
def update_fn(s, u2):
- s.query(User).filter_by(name='u2').update(dict(name='u2modified'),
+ s.query(User).filter_by(name='u2').update(dict(name='u2modified'),
synchronize_session='fetch')
self._run_test(update_fn)
u1.addresses.remove(a1)
s.flush()
- eq_(s.query(Address).filter(Address.email_address=='foo').all(),
+ eq_(s.query(Address).filter(Address.email_address=='foo').all(),
[])
s.rollback()
assert a1 not in s.deleted
s.commit()
eq_(
s.query(User).all(),
- [User(id=1, name='edward',
+ [User(id=1, name='edward',
addresses=[Address(email_address='foober')])]
)
s.commit()
eq_(
s.query(User).all(),
- [User(id=1, name='edward',
+ [User(id=1, name='edward',
addresses=[Address(email_address='foober')])]
)
u1.name = 'edward'
u2.name = 'jackward'
s.add_all([u3, u4])
- eq_(s.query(User.name).order_by(User.id).all(),
+ eq_(s.query(User.name).order_by(User.id).all(),
[('edward',), ('jackward',), ('wendy',), ('foo',)])
s.rollback()
assert u1.name == 'ed'
assert u2.name == 'jack'
- eq_(s.query(User.name).order_by(User.id).all(),
+ eq_(s.query(User.name).order_by(User.id).all(),
[('ed',), ('jack',)])
s.commit()
assert u1.name == 'ed'
assert u2.name == 'jack'
- eq_(s.query(User.name).order_by(User.id).all(),
+ eq_(s.query(User.name).order_by(User.id).all(),
[('ed',), ('jack',)])
@testing.requires.savepoints
u1.name = 'edward'
u2.name = 'jackward'
s.add_all([u3, u4])
- eq_(s.query(User.name).order_by(User.id).all(),
+ eq_(s.query(User.name).order_by(User.id).all(),
[('edward',), ('jackward',), ('wendy',), ('foo',)])
s.commit()
def go():
assert u1.name == 'edward'
assert u2.name == 'jackward'
- eq_(s.query(User.name).order_by(User.id).all(),
+ eq_(s.query(User.name).order_by(User.id).all(),
[('edward',), ('jackward',), ('wendy',), ('foo',)])
self.assert_sql_count(testing.db, go, 1)
s.commit()
- eq_(s.query(User.name).order_by(User.id).all(),
+ eq_(s.query(User.name).order_by(User.id).all(),
[('edward',), ('jackward',), ('wendy',), ('foo',)])
@testing.requires.savepoints
s.add(u2)
eq_(s.query(User).order_by(User.id).all(),
[
- User(name='edward', addresses=[Address(email_address='foo'),
+ User(name='edward', addresses=[Address(email_address='foo'),
Address(email_address='bar')]),
User(name='jack', addresses=[Address(email_address='bat')])
]
s.rollback()
eq_(s.query(User).order_by(User.id).all(),
[
- User(name='edward', addresses=[Address(email_address='foo'),
+ User(name='edward', addresses=[Address(email_address='foo'),
Address(email_address='bar')]),
]
)
s.commit()
eq_(s.query(User).order_by(User.id).all(),
[
- User(name='edward', addresses=[Address(email_address='foo'),
+ User(name='edward', addresses=[Address(email_address='foo'),
Address(email_address='bar')]),
]
)
def test_preflush_no_accounting(self):
User, users = self.classes.User, self.tables.users
- sess = Session(_enable_transaction_accounting=False,
+ sess = Session(_enable_transaction_accounting=False,
autocommit=True, autoflush=False)
u1 = User(name='ed')
sess.add(u1)
print postsort_actions
eq_(len(postsort_actions), expected, postsort_actions)
-class UOWTest(_fixtures.FixtureTest,
+class UOWTest(_fixtures.FixtureTest,
testing.AssertsExecutionResults, AssertsUOW):
run_inserts = None
sess.flush,
CompiledSQL(
"INSERT INTO users (name) VALUES (:name)",
- {'name': 'u1'}
+ {'name': 'u1'}
),
CompiledSQL(
"INSERT INTO addresses (user_id, email_address) "
"VALUES (:user_id, :email_address)",
- lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
+ lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
),
CompiledSQL(
"INSERT INTO addresses (user_id, email_address) "
"VALUES (:user_id, :email_address)",
- lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
+ lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
),
)
sess.flush,
CompiledSQL(
"INSERT INTO users (name) VALUES (:name)",
- {'name': 'u1'}
+ {'name': 'u1'}
),
CompiledSQL(
"INSERT INTO addresses (user_id, email_address) "
"VALUES (:user_id, :email_address)",
- lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
+ lambda ctx: {'email_address': 'a1', 'user_id':u1.id}
),
CompiledSQL(
"INSERT INTO addresses (user_id, email_address) "
"VALUES (:user_id, :email_address)",
- lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
+ lambda ctx: {'email_address': 'a2', 'user_id':u1.id}
),
)
session.delete(c2)
session.delete(parent)
- # testing that relationships
- # are loaded even if all ids/references are
+ # testing that relationships
+ # are loaded even if all ids/references are
# expired
self.assert_sql_execution(
testing.db,
testing.db,
sess.flush,
CompiledSQL(
- "INSERT INTO users (id, name) VALUES (:id, :name)",
+ "INSERT INTO users (id, name) VALUES (:id, :name)",
{'id':1, 'name':'u1'}),
CompiledSQL(
"INSERT INTO addresses (id, user_id, email_address) "
sess.flush,
CompiledSQL(
"INSERT INTO nodes (id, parent_id, data) VALUES "
- "(:id, :parent_id, :data)",
- [{'parent_id': None, 'data': None, 'id': 1},
- {'parent_id': 1, 'data': None, 'id': 2},
+ "(:id, :parent_id, :data)",
+ [{'parent_id': None, 'data': None, 'id': 1},
+ {'parent_id': 1, 'data': None, 'id': 2},
{'parent_id': 2, 'data': None, 'id': 3}]
),
)
testing.db,
sess.flush,
CompiledSQL("UPDATE items SET description=:description "
- "WHERE items.id = :items_id",
+ "WHERE items.id = :items_id",
lambda ctx:{'description':'i2', 'items_id':i1.id})
)
self.assert_sql_execution(
testing.db,
sess.flush,
- CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
+ CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx:[{'id':n2.id}, {'id':n3.id}]),
- CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
+ CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx: {'id':n1.id})
)
sess.flush,
AllOf(
CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
- "WHERE nodes.id = :nodes_id",
+ "WHERE nodes.id = :nodes_id",
lambda ctx: {'nodes_id':n3.id, 'parent_id':None}),
CompiledSQL("UPDATE nodes SET parent_id=:parent_id "
- "WHERE nodes.id = :nodes_id",
+ "WHERE nodes.id = :nodes_id",
lambda ctx: {'nodes_id':n2.id, 'parent_id':None}),
),
- CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
+ CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx:{'id':n1.id})
)
self.assert_sql_execution(
testing.db,
sess.flush,
- CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
+ CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx:[{'id':n2.id},{'id':n3.id}]),
- CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
+ CompiledSQL("DELETE FROM nodes WHERE nodes.id = :id",
lambda ctx: {'id':n1.id})
)
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
- 'children':relationship(Node,
+ 'children':relationship(Node,
backref=backref('parent',
remote_side=nodes.c.id))
})
Node, nodes = self.classes.Node, self.tables.nodes
mapper(Node, nodes, properties={
- 'children':relationship(Node,
+ 'children':relationship(Node,
backref=backref('parent', remote_side=nodes.c.id)
)
})
sess.flush,
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
- "(:parent_id, :data)",
+ "(:parent_id, :data)",
lambda ctx:{'parent_id':None, 'data':'n1'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
- "(:parent_id, :data)",
+ "(:parent_id, :data)",
lambda ctx:{'parent_id':n1.id, 'data':'n11'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
- "(:parent_id, :data)",
+ "(:parent_id, :data)",
lambda ctx:{'parent_id':n1.id, 'data':'n12'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
- "(:parent_id, :data)",
+ "(:parent_id, :data)",
lambda ctx:{'parent_id':n1.id, 'data':'n13'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
- "(:parent_id, :data)",
+ "(:parent_id, :data)",
lambda ctx:{'parent_id':n12.id, 'data':'n121'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
- "(:parent_id, :data)",
+ "(:parent_id, :data)",
lambda ctx:{'parent_id':n12.id, 'data':'n122'}
),
CompiledSQL(
"INSERT INTO nodes (parent_id, data) VALUES "
- "(:parent_id, :data)",
+ "(:parent_id, :data)",
lambda ctx:{'parent_id':n12.id, 'data':'n123'}
),
)
session.delete(c2)
session.delete(parent)
- # testing that relationships
- # are loaded even if all ids/references are
+ # testing that relationships
+ # are loaded even if all ids/references are
# expired
self.assert_sql_execution(
testing.db,
n1.foobars.append(FooBar())
# saveupdateall/deleteall for FooBar added here,
- # plus processstate node.foobars
+ # plus processstate node.foobars
# currently the "all" procs stay in pairs
self._assert_uow_size(sess, 6)
sess.flush()
-class SingleCycleM2MTest(fixtures.MappedTest,
+class SingleCycleM2MTest(fixtures.MappedTest,
testing.AssertsExecutionResults, AssertsUOW):
@classmethod
def define_tables(cls, metadata):
nodes = Table('nodes', metadata,
- Column('id', Integer,
- primary_key=True,
+ Column('id', Integer,
+ primary_key=True,
test_needs_autoincrement=True),
Column('data', String(30)),
Column('favorite_node_id', Integer, ForeignKey('nodes.id'))
)
node_to_nodes =Table('node_to_nodes', metadata,
- Column('left_node_id', Integer,
+ Column('left_node_id', Integer,
ForeignKey('nodes.id'),primary_key=True),
- Column('right_node_id', Integer,
+ Column('right_node_id', Integer,
ForeignKey('nodes.id'),primary_key=True),
)
node_to_nodes.c.right_node_id).\
order_by(node_to_nodes.c.left_node_id,
node_to_nodes.c.right_node_id).\
- all(),
+ all(),
sorted([
- (n1.id, n2.id), (n1.id, n3.id), (n1.id, n4.id),
- (n2.id, n3.id), (n2.id, n5.id),
+ (n1.id, n2.id), (n1.id, n3.id), (n1.id, n4.id),
+ (n2.id, n3.id), (n2.id, n5.id),
(n3.id, n5.id), (n3.id, n4.id)
])
)
"node_to_nodes.left_node_id = :left_node_id AND "
"node_to_nodes.right_node_id = :right_node_id",
lambda ctx:[
- {'right_node_id': n2.id, 'left_node_id': n1.id},
- {'right_node_id': n3.id, 'left_node_id': n1.id},
+ {'right_node_id': n2.id, 'left_node_id': n1.id},
+ {'right_node_id': n3.id, 'left_node_id': n1.id},
{'right_node_id': n4.id, 'left_node_id': n1.id}
]
),
"= :left_node_id AND node_to_nodes.right_node_id = "
":right_node_id",
lambda ctx:[
- {'right_node_id': n5.id, 'left_node_id': n3.id},
- {'right_node_id': n4.id, 'left_node_id': n3.id},
- {'right_node_id': n3.id, 'left_node_id': n2.id},
+ {'right_node_id': n5.id, 'left_node_id': n3.id},
+ {'right_node_id': n4.id, 'left_node_id': n3.id},
+ {'right_node_id': n3.id, 'left_node_id': n2.id},
{'right_node_id': n5.id, 'left_node_id': n2.id}
]
),
Table('parent', metadata,
Column('id', Integer, primary_key=True)
)
- Table('child', metadata,
+ Table('child', metadata,
Column('id', Integer, ForeignKey('parent.id'), primary_key=True)
)
pass
mapper(Parent, parent, properties={
- 'child':relationship(Child, uselist=False,
+ 'child':relationship(Child, uselist=False,
cascade="all, delete-orphan",
backref="parent")
})
@classmethod
def define_tables(cls, metadata):
Table('t', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(50)),
Column('def_', String(50), server_default='def1')
)
def test_batch_interaction(self):
- """test batching groups same-structured, primary
+ """test batching groups same-structured, primary
key present statements together.
"""
),
CompiledSQL(
"INSERT INTO t (id, data) VALUES (:id, :data)",
- [{'data': 't3', 'id': 3},
- {'data': 't4', 'id': 4},
+ [{'data': 't3', 'id': 3},
+ {'data': 't4', 'id': 4},
{'data': 't5', 'id': 5}]
),
CompiledSQL(
),
CompiledSQL(
"INSERT INTO t (id, data, def_) VALUES (:id, :data, :def_)",
- [{'data': 't9', 'id': 9, 'def_':'def2'},
+ [{'data': 't9', 'id': 9, 'def_':'def2'},
{'data': 't10', 'id': 10, 'def_':'def3'}]
),
CompiledSQL(
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(32)),
Column('age', Integer))
(s.query(User).distinct(), "distinct")
):
assert_raises_message(
- exc.InvalidRequestError,
- r"Can't call Query.update\(\) when %s\(\) has been called" % mname,
- q.update,
+ exc.InvalidRequestError,
+ r"Can't call Query.update\(\) when %s\(\) has been called" % mname,
+ q.update,
{'name':'ed'})
assert_raises_message(
- exc.InvalidRequestError,
- r"Can't call Query.delete\(\) when %s\(\) has been called" % mname,
+ exc.InvalidRequestError,
+ r"Can't call Query.delete\(\) when %s\(\) has been called" % mname,
q.delete)
assert_raises(exc.InvalidRequestError,
sess.query(User).
- filter(User.name == select([func.max(User.name)])).delete,
+ filter(User.name == select([func.max(User.name)])).delete,
synchronize_session='evaluate'
)
john.name = 'j2'
sess.query(User).filter_by(name='j2').\
- update({'age':42},
+ update({'age':42},
synchronize_session='evaluate')
eq_(john.age, 42)
john.name = 'j2'
sess.query(User).filter_by(name='j2').\
- update({'age':42},
+ update({'age':42},
synchronize_session='fetch')
eq_(john.age, 42)
sess.expire(john, ['age'])
# eval must be before the update. otherwise
- # we eval john, age has been expired and doesn't
+ # we eval john, age has been expired and doesn't
# match the new value coming in
sess.query(User).filter_by(name='john').filter_by(age=25).\
- update({'name':'j2', 'age':40},
+ update({'name':'j2', 'age':40},
synchronize_session='evaluate')
eq_(john.name, 'j2')
eq_(john.age, 40)
sess.expire(john, ['age'])
sess.query(User).filter_by(name='john').filter_by(age=25).\
- update({'name':'j2', 'age':40},
+ update({'name':'j2', 'age':40},
synchronize_session='fetch')
eq_(john.name, 'j2')
eq_(john.age, 40)
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(32)),
Column('age', Integer))
Table('documents', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
Column('title', String(32)))
mapper(User, users)
mapper(Document, documents, properties={
- 'user': relationship(User, lazy='joined',
+ 'user': relationship(User, lazy='joined',
backref=backref('documents', lazy='select'))
})
update({'title': Document.title+Document.title}, synchronize_session='fetch')
eq_([foo.title, bar.title, baz.title], ['foofoo','barbar', 'baz'])
- eq_(sess.query(Document.title).order_by(Document.id).all(),
+ eq_(sess.query(Document.title).order_by(Document.id).all(),
zip(['foofoo','barbar', 'baz']))
def test_update_with_explicit_joinedload(self):
@classmethod
def define_tables(cls, metadata):
data = Table('data', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('counter', Integer, nullable=False, default=0)
)
def _fixture(self):
Foo, version_table = self.classes.Foo, self.tables.version_table
- mapper(Foo, version_table,
+ mapper(Foo, version_table,
version_id_col=version_table.c.version_id)
s1 = Session()
return s1
# Only dialects with a sane rowcount can detect the
# StaleDataError
if testing.db.dialect.supports_sane_rowcount:
- assert_raises_message(sa.orm.exc.StaleDataError,
+ assert_raises_message(sa.orm.exc.StaleDataError,
r"UPDATE statement on table 'version_table' expected "
r"to update 1 row\(s\); 0 were matched.",
s1.commit),
if testing.db.dialect.supports_sane_rowcount:
assert_raises_message(
- sa.orm.exc.StaleDataError,
+ sa.orm.exc.StaleDataError,
r"DELETE statement on table 'version_table' expected "
r"to delete 2 row\(s\); 1 were matched.",
s1.commit)
def test_bump_version(self):
"""test that version number can be bumped.
- Ensures that the UPDATE or DELETE is against the
- last committed version of version_id_col, not the modified
+ Ensures that the UPDATE or DELETE is against the
+ last committed version of version_id_col, not the modified
state.
"""
# load, version is wrong
assert_raises_message(
- sa.orm.exc.StaleDataError,
+ sa.orm.exc.StaleDataError,
r"Instance .* has version id '\d+' which does not "
r"match database-loaded version id '\d+'",
s1.query(Foo).with_lockmode('read').get, f1s1.id
cls.classes.C,
cls.classes.P)
- mapper(P, p, version_id_col=p.c.version_id,
+ mapper(P, p, version_id_col=p.c.version_id,
properties={
'c':relationship(C, uselist=False, cascade='all, delete-orphan')
})
cls.classes.C,
cls.classes.P)
- mapper(P, p, version_id_col=p.c.version_id,
+ mapper(P, p, version_id_col=p.c.version_id,
version_id_generator=lambda x:make_uuid(),
properties={
'c':relationship(C, uselist=False, cascade='all, delete-orphan')
Session = sessionmaker()
- # TODO: not sure this test is
+ # TODO: not sure this test is
    # testing exactly what it's looking for
sess1 = Session()
self.tables.base,
self.classes.Sub)
- mapper(Base, base,
+ mapper(Base, base,
version_id_col=base.c.version_id)
mapper(Sub, sub, inherits=Base)
self.tables.base,
self.classes.Sub)
- mapper(Base, base,
+ mapper(Base, base,
version_id_col=base.c.version_id)
mapper(Sub, sub, inherits=Base)
self.classes.Sub)
mapper(Base, base)
- mapper(Sub, sub, inherits=Base,
+ mapper(Sub, sub, inherits=Base,
version_id_col=sub.c.version_id)
session = Session()
self.tables.base,
self.classes.Sub)
- mapper(Base, base,
+ mapper(Base, base,
version_id_col=base.c.version_id)
assert_raises_message(
"version_id_col should only be specified on "
"the base-most mapper that includes versioning.",
mapper,
- Sub, sub, inherits=Base,
+ Sub, sub, inherits=Base,
version_id_col=sub.c.version_id)
if test_types:
tests = [booleantest, datetimetest, decimaltest, intervaltest,
pickletypetest, typedecoratortest, unicodetest]
- for engineurl in ('postgresql://scott:tiger@localhost/test',
+ for engineurl in ('postgresql://scott:tiger@localhost/test',
'sqlite://', 'mysql://scott:tiger@localhost/test'):
print "\n%s\n" % engineurl
for datatype, genvalue, kwargs in tests:
getitem_str_results, getitem_fallback_results,
getitem_int_results, getitem_long_results, getitem_obj_results,
slice_results]
- for engineurl in ('postgresql://scott:tiger@localhost/test',
+ for engineurl in ('postgresql://scott:tiger@localhost/test',
'sqlite://', 'mysql://scott:tiger@localhost/test'):
print "\n%s\n" % engineurl
test_table = prepare(Unicode(20, assert_unicode=False),
('sometable', 'this_name_alsois_long', 'ix_sometable_t_3cf1'),
]:
- t1 = Table(tname, MetaData(),
+ t1 = Table(tname, MetaData(),
Column(cname, Integer, index=True),
)
ix1 = list(t1.indexes)[0]
assert_raises(
exc.IdentifierError,
schema.CreateIndex(Index(
- "this_other_name_is_too_long_for_what_were_doing",
+ "this_other_name_is_too_long_for_what_were_doing",
t1.c.c)).compile,
dialect=dialect
)
def test_index_declartion_inline(self):
- t1 = Table('t1', metadata,
+ t1 = Table('t1', metadata,
Column('x', Integer),
Column('y', Integer),
Index('foo', 'x', 'y')
)
self.assert_compile(
- schema.CreateIndex(list(t1.indexes)[0]),
+ schema.CreateIndex(list(t1.indexes)[0]),
"CREATE INDEX foo ON t1 (x, y)"
)
def test_index_asserts_cols_standalone(self):
- t1 = Table('t1', metadata,
+ t1 = Table('t1', metadata,
Column('x', Integer)
)
t2 = Table('t2', metadata,
)
def test_index_asserts_cols_inline(self):
- t1 = Table('t1', metadata,
+ t1 = Table('t1', metadata,
Column('x', Integer)
)
assert_raises_message(
def test_multiple(self):
m = MetaData()
- foo = Table("foo", m,
+ foo = Table("foo", m,
Column('id', Integer, primary_key=True),
Column('bar', Integer, primary_key=True)
)
m.drop_all(e)
e.assert_sql([
- 'CREATE TABLE t (a INTEGER)',
- 'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb FOREIGN KEY(b) REFERENCES t (a))',
- 'ALTER TABLE t2 ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)',
- 'ALTER TABLE t2 DROP CONSTRAINT fk_ta',
- 'DROP TABLE t2',
+ 'CREATE TABLE t (a INTEGER)',
+ 'CREATE TABLE t2 (a INTEGER, b INTEGER, CONSTRAINT fk_tb FOREIGN KEY(b) REFERENCES t (a))',
+ 'ALTER TABLE t2 ADD CONSTRAINT fk_ta FOREIGN KEY(a) REFERENCES t (a)',
+ 'ALTER TABLE t2 DROP CONSTRAINT fk_ta',
+ 'DROP TABLE t2',
'DROP TABLE t'
])
)
constraint = CheckConstraint('a < b',name="my_test_constraint",
- deferrable=True,initially='DEFERRED',
+ deferrable=True,initially='DEFERRED',
table=t)
def test_no_embed_in_sql(self):
"""Using a DefaultGenerator, Sequence, DefaultClause
- in the columns, where clause of a select, or in the values
+ in the columns, where clause of a select, or in the values
clause of insert, update, raises an informative error"""
for const in (
)
def test_missing_many_param(self):
- assert_raises_message(exc.StatementError,
+ assert_raises_message(exc.StatementError,
"A value is required for bind parameter 'col7', in parameter group 1",
t.insert().execute,
{'col4':7, 'col7':12, 'col8':19},
Column('id', Integer(), primary_key=True)
)
x = Table('x', m,
- Column('id', Integer(),
- ForeignKey('y.id'),
+ Column('id', Integer(),
+ ForeignKey('y.id'),
autoincrement="ignore_fk", primary_key=True)
)
assert x._autoincrement_column is x.c.id
Column('id', Integer(), primary_key=True)
)
x = Table('x', m,
- Column('id', Integer(),
- ForeignKey('y.id'),
+ Column('id', Integer(),
+ ForeignKey('y.id'),
primary_key=True)
)
assert x._autoincrement_column is None
self._assert_seq_result(s.execute(testing.db))
def test_explicit_optional(self):
- """test dialect executes a Sequence, returns nextval, whether
+ """test dialect executes a Sequence, returns nextval, whether
or not "optional" is set """
s = Sequence("my_sequence", optional=True)
@testing.provide_metadata
def test_inserted_pk_no_returning(self):
- """test inserted_primary_key contains [None] when
+ """test inserted_primary_key contains [None] when
pk_col=next_value(), implicit returning is not used."""
metadata = self.metadata
@testing.requires.returning
@testing.provide_metadata
def test_inserted_pk_implicit_returning(self):
- """test inserted_primary_key contains the result when
+ """test inserted_primary_key contains the result when
pk_col=next_value(), when implicit returning is used."""
metadata = self.metadata
@testing.fails_on('firebird', 'no FB support for start/increment')
def test_start_increment(self):
for seq in (
- Sequence('foo_seq'),
- Sequence('foo_seq', start=8),
+ Sequence('foo_seq'),
+ Sequence('foo_seq', start=8),
Sequence('foo_seq', increment=5)):
seq.create(testing.db)
try:
return testing.db.dialect.has_sequence(testing.db, name)
def test_nextval_render(self):
- """test dialect renders the "nextval" construct,
+ """test dialect renders the "nextval" construct,
whether or not "optional" is set """
for s in (
- Sequence("my_seq"),
+ Sequence("my_seq"),
Sequence("my_seq", optional=True)):
assert str(s.next_value().
compile(dialect=testing.db.dialect)) in (
)
def test_nextval_unsupported(self):
- """test next_value() used on non-sequence platform
+ """test next_value() used on non-sequence platform
raises NotImplementedError."""
s = Sequence("my_seq")
s1 = Sequence("s1", metadata=metadata)
s2 = Sequence("s2", metadata=metadata)
s3 = Sequence("s3")
- t = Table('t', metadata,
+ t = Table('t', metadata,
Column('c', Integer, s3, primary_key=True))
assert s3.metadata is metadata
class ServerDefaultsOnPKTest(fixtures.TestBase):
@testing.provide_metadata
def test_string_default_none_on_insert(self):
- """Test that without implicit returning, we return None for
+ """Test that without implicit returning, we return None for
a string server default.
That is, we don't want to attempt to pre-execute "server_default"
"""
metadata = self.metadata
- t = Table('x', metadata,
+ t = Table('x', metadata,
Column('y', String(10), server_default='key_one', primary_key=True),
Column('data', String(10)),
implicit_returning=False
"""With implicit_returning, we get a string PK default back no problem."""
metadata = self.metadata
- t = Table('x', metadata,
+ t = Table('x', metadata,
Column('y', String(10), server_default='key_one', primary_key=True),
Column('data', String(10))
)
@testing.provide_metadata
def test_int_default_none_on_insert(self):
metadata = self.metadata
- t = Table('x', metadata,
- Column('y', Integer,
+ t = Table('x', metadata,
+ Column('y', Integer,
server_default='5', primary_key=True),
Column('data', String(10)),
implicit_returning=False
@testing.provide_metadata
def test_autoincrement_reflected_from_server_default(self):
metadata = self.metadata
- t = Table('x', metadata,
- Column('y', Integer,
+ t = Table('x', metadata,
+ Column('y', Integer,
server_default='5', primary_key=True),
Column('data', String(10)),
implicit_returning=False
@testing.provide_metadata
def test_int_default_none_on_insert_reflected(self):
metadata = self.metadata
- t = Table('x', metadata,
- Column('y', Integer,
+ t = Table('x', metadata,
+ Column('y', Integer,
server_default='5', primary_key=True),
Column('data', String(10)),
implicit_returning=False
@testing.provide_metadata
def test_int_default_on_insert_with_returning(self):
metadata = self.metadata
- t = Table('x', metadata,
- Column('y', Integer,
+ t = Table('x', metadata,
+ Column('y', Integer,
server_default='5', primary_key=True),
Column('data', String(10))
)
GenericFunction.__init__(self, args=[arg], **kwargs)
self.assert_compile(
- fake_func('foo'),
- "fake_func(%s)" %
- bindtemplate % {'name':'param_1', 'position':1},
+ fake_func('foo'),
+ "fake_func(%s)" %
+ bindtemplate % {'name':'param_1', 'position':1},
dialect=dialect)
def test_use_labels(self):
- self.assert_compile(select([func.foo()], use_labels=True),
+ self.assert_compile(select([func.foo()], use_labels=True),
"SELECT foo() AS foo_1"
)
def test_underscores(self):
for fn in [func.coalesce, func.max, func.min, func.sum]:
for args, type_ in [
- ((datetime.date(2007, 10, 5),
+ ((datetime.date(2007, 10, 5),
datetime.date(2005, 10, 15)), sqltypes.Date),
((3, 5), sqltypes.Integer),
((decimal.Decimal(3), decimal.Decimal(5)), sqltypes.Numeric),
(("foo", "bar"), sqltypes.String),
- ((datetime.datetime(2007, 10, 5, 8, 3, 34),
+ ((datetime.datetime(2007, 10, 5, 8, 3, 34),
datetime.datetime(2005, 10, 15, 14, 45, 33)), sqltypes.DateTime)
]:
assert isinstance(fn(*args).type, type_), "%s / %s" % (fn(), type_)
self.assert_compile(func.lala.hoho(7), "lala.hoho(:hoho_1)")
# test None becomes NULL
- self.assert_compile(func.my_func(1,2,None,3),
+ self.assert_compile(func.my_func(1,2,None,3),
"my_func(:my_func_1, :my_func_2, NULL, :my_func_3)")
# test pickling
from test.lib.testing import eq_, ne_, assert_raises
class TraversalTest(fixtures.TestBase, AssertsExecutionResults):
- """test ClauseVisitor's traversal, particularly its
+ """test ClauseVisitor's traversal, particularly its
ability to copy and modify a ClauseElement in place."""
@classmethod
global A, B
    # establish two fictitious ClauseElements.
- # define deep equality semantics as well as deep
+ # define deep equality semantics as well as deep
# identity semantics.
class A(ClauseElement):
__visit_name__ = 'a'
a1 = A("expr1")
struct = B(a1, A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3"))
struct2 = B(a1, A("expr2"), B(A("expr1b"), A("expr2b")), A("expr3"))
- struct3 = B(a1, A("expr2"), B(A("expr1b"),
+ struct3 = B(a1, A("expr2"), B(A("expr1b"),
A("expr2bmodified")), A("expr3"))
assert a1.is_other(a1)
assert not struct.is_other(struct3)
def test_clone(self):
- struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
+ struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
A("expr2b")), A("expr3"))
class Vis(CloningVisitor):
assert not struct.is_other(s2)
def test_no_clone(self):
- struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
+ struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
A("expr2b")), A("expr3"))
class Vis(ClauseVisitor):
assert struct.is_other(s2)
def test_change_in_place(self):
- struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
+ struct = B(A("expr1"), A("expr2"), B(A("expr1b"),
A("expr2b")), A("expr3"))
- struct2 = B(A("expr1"), A("expr2modified"), B(A("expr1b"),
+ struct2 = B(A("expr1"), A("expr2modified"), B(A("expr1b"),
A("expr2b")), A("expr3"))
- struct3 = B(A("expr1"), A("expr2"), B(A("expr1b"),
+ struct3 = B(A("expr1"), A("expr2"), B(A("expr1b"),
A("expr2bmodified")), A("expr3"))
class Vis(CloningVisitor):
column("col2"),
column("col3"),
)
- t3 = Table('table3', MetaData(),
+ t3 = Table('table3', MetaData(),
Column('col1', Integer),
Column('col2', Integer)
)
f = t.c.col1 * 5
- self.assert_compile(select([f]),
+ self.assert_compile(select([f]),
"SELECT t1.col1 * :col1_1 AS anon_1 FROM t1")
f.anon_label
a = t.alias()
f = sql_util.ClauseAdapter(a).traverse(f)
- self.assert_compile(select([f]),
+ self.assert_compile(select([f]),
"SELECT t1_1.col1 * :col1_1 AS anon_1 FROM t1 AS t1_1")
def test_join(self):
aliased = t1.select().alias()
aliased2 = t1.alias()
-
+
adapter = sql_util.ColumnAdapter(aliased)
f = select([
s = select([aliased2]).select_from(aliased)
eq_(str(s), str(f))
-
+
f = select([
adapter.columns[func.count(aliased2.c.col1)]
]).select_from(aliased)
# fixed by [ticket:2419]. the inside columns
# on aliased3 have _is_clone_of pointers to those of
- # aliased2. corresponding_column checks these
+ # aliased2. corresponding_column checks these
# now.
adapter = sql_util.ColumnAdapter(aliased1)
f1 = select([
aliased3 = cloned_traverse(aliased2, {}, {})
# also fixed by [ticket:2419]. When we look at the
- # *outside* columns of aliased3, they previously did not
+ # *outside* columns of aliased3, they previously did not
# have an _is_clone_of pointer. But we now modified _make_proxy
# to assign this.
adapter = sql_util.ColumnAdapter(aliased1)
aliased3 = cloned_traverse(aliased2, {}, {})
# also fixed by [ticket:2419]. When we look at the
- # *outside* columns of aliased3, they previously did not
+ # *outside* columns of aliased3, they previously did not
# have an _is_clone_of pointer. But we now modified _make_proxy
# to assign this.
adapter = sql_util.ColumnAdapter(aliased1)
assert sql_util.ClauseAdapter(u).traverse(t1) is u
def test_binds(self):
- """test that unique bindparams change their name upon clone()
+ """test that unique bindparams change their name upon clone()
to prevent conflicts"""
s = select([t1], t1.c.col1==bindparam(None, unique=True)).alias()
s2 = CloningVisitor().traverse(s).alias()
s3 = select([s], s.c.col2==s2.c.col2)
- self.assert_compile(s3,
+ self.assert_compile(s3,
"SELECT anon_1.col1, anon_1.col2, anon_1.col3 FROM "
"(SELECT table1.col1 AS col1, table1.col2 AS col2, "
"table1.col3 AS col3 FROM table1 WHERE table1.col1 = :param_1) "
s = select([t1], t1.c.col1==4).alias()
s2 = CloningVisitor().traverse(s).alias()
s3 = select([s], s.c.col2==s2.c.col2)
- self.assert_compile(s3,
+ self.assert_compile(s3,
"SELECT anon_1.col1, anon_1.col2, anon_1.col3 FROM "
"(SELECT table1.col1 AS col1, table1.col2 AS col2, "
"table1.col3 AS col3 FROM table1 WHERE table1.col1 = :col1_1) "
def test_extract(self):
s = select([extract('foo', t1.c.col1).label('col1')])
- self.assert_compile(s,
+ self.assert_compile(s,
"SELECT EXTRACT(foo FROM table1.col1) AS col1 FROM table1")
s2 = CloningVisitor().traverse(s).alias()
s3 = select([s2.c.col1])
- self.assert_compile(s,
+ self.assert_compile(s,
"SELECT EXTRACT(foo FROM table1.col1) AS col1 FROM table1")
- self.assert_compile(s3,
+ self.assert_compile(s3,
"SELECT anon_1.col1 FROM (SELECT EXTRACT(foo FROM "
"table1.col1) AS col1 FROM table1) AS anon_1")
@testing.emits_warning('.*replaced by another column with the same key')
def test_alias(self):
subq = t2.select().alias('subq')
- s = select([t1.c.col1, subq.c.col1],
- from_obj=[t1, subq,
+ s = select([t1.c.col1, subq.c.col1],
+ from_obj=[t1, subq,
t1.join(subq, t1.c.col1==subq.c.col2)]
)
orig = str(s)
assert orig == str(s) == str(s3) == str(s4)
subq = subq.alias('subq')
- s = select([t1.c.col1, subq.c.col1],
- from_obj=[t1, subq,
+ s = select([t1.c.col1, subq.c.col1],
+ from_obj=[t1, subq,
t1.join(subq, t1.c.col1==subq.c.col2)]
)
s5 = CloningVisitor().traverse(s)
assert orig == str(s) == str(s5)
def test_correlated_select(self):
- s = select(['*'], t1.c.col1==t2.c.col1,
+ s = select(['*'], t1.c.col1==t2.c.col1,
from_obj=[t1, t2]).correlate(t2)
class Vis(CloningVisitor):
def visit_select(self, select):
select.append_whereclause(t1.c.col2==7)
- self.assert_compile(Vis().traverse(s),
+ self.assert_compile(Vis().traverse(s),
"SELECT * FROM table1 WHERE table1.col1 = table2.col1 "
"AND table1.col2 = :col2_1")
m = MetaData()
a=Table( 'a',m,
Column( 'id', Integer, primary_key=True),
- Column( 'xxx_id', Integer,
- ForeignKey( 'a.id', name='adf',use_alter=True )
+ Column( 'xxx_id', Integer,
+ ForeignKey( 'a.id', name='adf',use_alter=True )
)
)
alias = select([a]).select_from(a.join(b, a.c.x==b.c.x)).alias()
- # two levels of indirection from c.x->b.x->a.x, requires recursive
+ # two levels of indirection from c.x->b.x->a.x, requires recursive
# corresponding_column call
adapt = sql_util.ClauseAdapter(alias,
equivalents={b.c.x: set([a.c.x]), c.c.x: set([b.c.x])})
assert_raises(
exc.ArgumentError,
- select().execution_options,
+ select().execution_options,
isolation_level='READ_COMMITTED'
)
def define_tables(cls, metadata):
table1 = Table("some_large_named_table", metadata,
Column("this_is_the_primarykey_column", Integer,
- primary_key=True,
+ primary_key=True,
test_needs_autoincrement=True),
Column("this_is_the_data_column", String(30))
)
table2 = Table("table_with_exactly_29_characs", metadata,
Column("this_is_the_primarykey_column", Integer,
- primary_key=True,
+ primary_key=True,
test_needs_autoincrement=True),
Column("this_is_the_data_column", String(30))
)
table1 = cls.tables.table1
table2 = cls.tables.table2
for data in [
- {"this_is_the_primarykey_column":1,
+ {"this_is_the_primarykey_column":1,
"this_is_the_data_column":"data1"},
- {"this_is_the_primarykey_column":2,
+ {"this_is_the_primarykey_column":2,
"this_is_the_data_column":"data2"},
- {"this_is_the_primarykey_column":3,
+ {"this_is_the_primarykey_column":3,
"this_is_the_data_column":"data3"},
- {"this_is_the_primarykey_column":4,
+ {"this_is_the_primarykey_column":4,
"this_is_the_data_column":"data4"}
]:
testing.db.execute(
)
testing.db.execute(
table2.insert(),
- {"this_is_the_primary_key_column":1,
+ {"this_is_the_primary_key_column":1,
"this_is_the_data_column":"data"}
)
def test_too_long_name_disallowed(self):
m = MetaData(testing.db)
- t1 = Table("this_name_is_too_long_for_what_were_doing_in_this_test",
+ t1 = Table("this_name_is_too_long_for_what_were_doing_in_this_test",
m, Column('foo', Integer))
assert_raises(exceptions.IdentifierError, m.create_all)
assert_raises(exceptions.IdentifierError, m.drop_all)
def test_basic_result(self):
table1 = self.tables.table1
- s = table1.select(use_labels=True,
+ s = table1.select(use_labels=True,
order_by=[table1.c.this_is_the_primarykey_column])
result = [
- (row[table1.c.this_is_the_primarykey_column],
+ (row[table1.c.this_is_the_primarykey_column],
row[table1.c.this_is_the_data_column])
for row in testing.db.execute(s)
]
def test_result_limit(self):
table1 = self.tables.table1
- # some dialects such as oracle (and possibly ms-sql
+ # some dialects such as oracle (and possibly ms-sql
# in a future version)
# generate a subquery for limits/offsets.
- # ensure that the generated result map corresponds
+ # ensure that the generated result map corresponds
# to the selected table, not
# the select query
- s = table1.select(use_labels=True,
+ s = table1.select(use_labels=True,
order_by=[table1.c.this_is_the_primarykey_column]).\
limit(2)
result = [
- (row[table1.c.this_is_the_primarykey_column],
+ (row[table1.c.this_is_the_primarykey_column],
row[table1.c.this_is_the_data_column])
for row in testing.db.execute(s)
]
@testing.requires.offset
def test_result_limit_offset(self):
table1 = self.tables.table1
- s = table1.select(use_labels=True,
+ s = table1.select(use_labels=True,
order_by=[table1.c.this_is_the_primarykey_column]).\
limit(2).offset(1)
result = [
- (row[table1.c.this_is_the_primarykey_column],
+ (row[table1.c.this_is_the_primarykey_column],
row[table1.c.this_is_the_data_column])
for row in testing.db.execute(s)
]
dialect.max_identifier_length = IDENT_LENGTH
self.assert_compile(
select([table1, ta]).select_from(
- table1.join(ta,
+ table1.join(ta,
table1.c.this_is_the_data_column==
ta.c.this_is_the_data_column)).\
where(ta.c.this_is_the_data_column=='data3'),
@testing.provide_metadata
def test_insert_no_pk(self):
t = Table("some_other_large_named_table", self.metadata,
- Column("this_is_the_primarykey_column", Integer,
- Sequence("this_is_some_large_seq"),
+ Column("this_is_the_primarykey_column", Integer,
+ Sequence("this_is_some_large_seq"),
primary_key=True),
Column("this_is_the_data_column", String(30))
)
t.create(testing.db, checkfirst=True)
- testing.db.execute(t.insert(),
+ testing.db.execute(t.insert(),
**{"this_is_the_data_column":"data1"})
@testing.requires.subqueries
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
x = select([q], use_labels=True)
- self.assert_compile(x,
+ self.assert_compile(x,
"SELECT anon_1.this_is_the_primarykey_column AS "
"anon_1_this_is_the_prim_1, anon_1.this_is_the_data_column "
"AS anon_1_this_is_the_data_2 "
"AS this_is_the_data_column "
"FROM some_large_named_table "
"WHERE some_large_named_table.this_is_the_primarykey_column "
- "= :this_is_the_primarykey__1) AS anon_1",
+ "= :this_is_the_primarykey__1) AS anon_1",
dialect=compile_dialect)
eq_(
x = select([q])
compile_dialect = default.DefaultDialect(label_length=10)
- self.assert_compile(x,
+ self.assert_compile(x,
"SELECT foo.this_1, foo.this_2 FROM "
"(SELECT some_large_named_table."
"this_is_the_primarykey_column AS this_1, "
"some_large_named_table.this_is_the_data_column AS this_2 "
"FROM some_large_named_table WHERE "
- "some_large_named_table.this_is_the_primarykey_column = :this_1) AS foo",
+ "some_large_named_table.this_is_the_primarykey_column = :this_1) AS foo",
dialect=compile_dialect)
compile_dialect = default.DefaultDialect(label_length=4)
"(SELECT some_large_named_table.this_is_the_primarykey_column "
"AS _1, some_large_named_table.this_is_the_data_column AS _2 "
"FROM some_large_named_table WHERE "
- "some_large_named_table.this_is_the_primarykey_column = :_1) AS foo",
+ "some_large_named_table.this_is_the_primarykey_column = :_1) AS foo",
dialect=compile_dialect)
q = table1.select(table1.c.this_is_the_primarykey_column == 4).alias()
x = select([q], use_labels=True)
compile_dialect = default.DefaultDialect(label_length=10)
- self.assert_compile(x,
+ self.assert_compile(x,
"SELECT anon_1.this_2 AS anon_1, anon_1.this_4 AS anon_3 FROM "
"(SELECT some_large_named_table.this_is_the_primarykey_column "
"AS this_2, some_large_named_table.this_is_the_data_column AS this_4 "
"FROM some_large_named_table WHERE "
- "some_large_named_table.this_is_the_primarykey_column = :this_1) AS anon_1",
+ "some_large_named_table.this_is_the_primarykey_column = :this_1) AS anon_1",
dialect=compile_dialect)
compile_dialect = default.DefaultDialect(label_length=4)
"(SELECT some_large_named_table.this_is_the_primarykey_column "
"AS _2, some_large_named_table.this_is_the_data_column AS _4 "
"FROM some_large_named_table WHERE "
- "some_large_named_table.this_is_the_primarykey_column = :_1) AS _1",
+ "some_large_named_table.this_is_the_primarykey_column = :_1) AS _1",
dialect=compile_dialect)
def test_adjustable_result_schema_column(self):
def test_adjustable_result_lightweight_column(self):
- table1 = table("some_large_named_table",
+ table1 = table("some_large_named_table",
column("this_is_the_primarykey_column"),
column("this_is_the_data_column")
)
class MetaDataTest(fixtures.TestBase, ComparesTables):
def test_metadata_connect(self):
metadata = MetaData()
- t1 = Table('table1', metadata,
+ t1 = Table('table1', metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(20)))
metadata.bind = testing.db
Column('bar', Integer(), info={'foo':'bar'}),
]:
c2 = col.copy()
- for attr in ('name', 'type', 'nullable',
+ for attr in ('name', 'type', 'nullable',
'primary_key', 'key', 'unique', 'info',
'doc'):
eq_(getattr(col, attr), getattr(c2, attr))
@testing.provide_metadata
def test_dupe_tables(self):
metadata = self.metadata
- t1 = Table('table1', metadata,
+ t1 = Table('table1', metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(20)))
metadata.create_all()
t1 = Table('table1', metadata, autoload=True)
def go():
- t2 = Table('table1', metadata,
+ t2 = Table('table1', metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(20)))
assert_raises_message(
m = MetaData()
t1 = Table('t', m, c1, c2)
- kw = dict(onupdate="X",
+ kw = dict(onupdate="X",
ondelete="Y", use_alter=True, name='f1',
deferrable="Z", initially="Q", link_to_name=True)
- fk1 = ForeignKey(c1, **kw)
+ fk1 = ForeignKey(c1, **kw)
fk2 = ForeignKeyConstraint((c1,), (c2,), **kw)
t1.append_constraint(fk2)
def test_check_constraint_copy(self):
r = lambda x: x
- c = CheckConstraint("foo bar",
- name='name',
- initially=True,
- deferrable=True,
+ c = CheckConstraint("foo bar",
+ name='name',
+ initially=True,
+ deferrable=True,
_create_rule = r)
c2 = c.copy()
eq_(c2.name, 'name')
table2 = Table('othertable', meta,
Column('id', Integer, Sequence('foo_seq'), primary_key=True),
- Column('myid', Integer,
+ Column('myid', Integer,
ForeignKey('mytable.myid'),
),
test_needs_fk=True,
m2 = pickle.loads(pickle.dumps(m1))
s2 = Sequence("x_seq")
- t2 = Table('a', m2,
+ t2 = Table('a', m2,
Column('id',Integer,primary_key=True),
Column('x', Integer, s2),
extend_existing=True)
m1 = MetaData()
s1 = Sequence("x_seq")
- t = Table('a', m1,
+ t = Table('a', m1,
Column('x', Integer, s1)
)
assert m1._sequences['x_seq'] is s1
table_c = table.tometadata(meta2, schema=None)
table2_c = table2.tometadata(meta2, schema=None)
- eq_(str(table_c.join(table2_c).onclause),
+ eq_(str(table_c.join(table2_c).onclause),
str(table_c.c.myid == table2_c.c.myid))
- eq_(str(table_c.join(table2_c).onclause),
+ eq_(str(table_c.join(table2_c).onclause),
"someschema.mytable.myid = someschema.othertable.myid")
def test_tometadata_strip_schema(self):
ck = schema.CheckConstraint("x > y", name="someconstraint")
for const, exp in (
- (Sequence("my_seq"),
+ (Sequence("my_seq"),
"Sequence('my_seq')"),
- (Sequence("my_seq", start=5),
+ (Sequence("my_seq", start=5),
"Sequence('my_seq', start=5)"),
- (Column("foo", Integer),
+ (Column("foo", Integer),
"Column('foo', Integer(), table=None)"),
- (Table("bar", MetaData(), Column("x", String)),
+ (Table("bar", MetaData(), Column("x", String)),
"Table('bar', MetaData(bind=None), "
"Column('x', String(), table=<bar>), schema=None)"),
- (schema.DefaultGenerator(for_update=True),
+ (schema.DefaultGenerator(for_update=True),
"DefaultGenerator(for_update=True)"),
(schema.Index("bar", "c"), "Index('bar')"),
(i1, "Index('bar', Column('x', Integer(), table=<foo>))"),
(schema.FetchedValue(), "FetchedValue()"),
- (ck,
+ (ck,
"CheckConstraint("
"%s"
", name='someconstraint')" % repr(ck.sqltext)),
prefixes = ["TEMPORARY"])
self.assert_compile(
- schema.CreateTable(table1),
+ schema.CreateTable(table1),
"CREATE TEMPORARY TABLE temporary_table_1 (col1 INTEGER)"
)
Column("col1", Integer),
prefixes = ["VIRTUAL"])
self.assert_compile(
- schema.CreateTable(table2),
+ schema.CreateTable(table2),
"CREATE VIRTUAL TABLE temporary_table_2 (col1 INTEGER)"
)
def test_default_schema_metadata_fk_alt_remote(self):
m = MetaData(schema="foo")
t1 = Table('t1', m, Column('x', Integer))
- t2 = Table('t2', m, Column('x', Integer, ForeignKey('t1.x')),
+ t2 = Table('t2', m, Column('x', Integer, ForeignKey('t1.x')),
schema="bar")
assert t2.c.x.references(t1.c.x)
class UseExistingTest(fixtures.TablesTest):
@classmethod
def define_tables(cls, metadata):
- Table('users', metadata,
- Column('id', Integer, primary_key=True),
+ Table('users', metadata,
+ Column('id', Integer, primary_key=True),
Column('name', String(30)))
def _useexisting_fixture(self):
meta2 = self._useexisting_fixture()
assert_raises(
exc.ArgumentError,
- Table, 'users', meta2, keep_existing=True,
+ Table, 'users', meta2, keep_existing=True,
extend_existing=True
)
meta2 = self._useexisting_fixture()
assert_raises(
exc.ArgumentError,
- Table, 'users', meta2, useexisting=True,
+ Table, 'users', meta2, useexisting=True,
extend_existing=True
)
def test_keep_existing_no_dupe_constraints(self):
meta2 = self._notexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('id', Integer),
Column('name', Unicode),
UniqueConstraint('name'),
assert 'id' in users.c
eq_(len(users.constraints), 2)
- u2 = Table('users', meta2,
+ u2 = Table('users', meta2,
Column('id', Integer),
Column('name', Unicode),
UniqueConstraint('name'),
def test_extend_existing_dupes_constraints(self):
meta2 = self._notexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('id', Integer),
Column('name', Unicode),
UniqueConstraint('name'),
assert 'id' in users.c
eq_(len(users.constraints), 2)
- u2 = Table('users', meta2,
+ u2 = Table('users', meta2,
Column('id', Integer),
Column('name', Unicode),
UniqueConstraint('name'),
def test_keep_existing_add_column(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('foo', Integer),
autoload=True,
keep_existing=True)
def test_keep_existing_quote_no_orig(self):
meta2 = self._notexisting_fixture()
- users = Table('users', meta2, quote=True,
+ users = Table('users', meta2, quote=True,
autoload=True,
keep_existing=True)
assert users.quote
def test_keep_existing_add_column_no_orig(self):
meta2 = self._notexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('foo', Integer),
autoload=True,
keep_existing=True)
def test_keep_existing_quote_no_reflection(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2, quote=True,
+ users = Table('users', meta2, quote=True,
keep_existing=True)
assert not users.quote
def test_keep_existing_add_column_no_reflection(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('foo', Integer),
keep_existing=True)
assert "foo" not in users.c
def test_extend_existing_add_column(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('foo', Integer),
autoload=True,
extend_existing=True)
def test_extend_existing_quote_no_orig(self):
meta2 = self._notexisting_fixture()
- users = Table('users', meta2, quote=True,
+ users = Table('users', meta2, quote=True,
autoload=True,
extend_existing=True)
assert users.quote
def test_extend_existing_add_column_no_orig(self):
meta2 = self._notexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('foo', Integer),
autoload=True,
extend_existing=True)
def test_extend_existing_quote_no_reflection(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2, quote=True,
+ users = Table('users', meta2, quote=True,
extend_existing=True)
assert users.quote
def test_extend_existing_add_column_no_reflection(self):
meta2 = self._useexisting_fixture()
- users = Table('users', meta2,
+ users = Table('users', meta2,
Column('foo', Integer),
extend_existing=True)
assert "foo" in users.c
def _single_fixture(self):
m = MetaData()
- t1 = Table('t1', m,
+ t1 = Table('t1', m,
Column('a', Integer),
Column('b', Integer)
)
- t2 = Table('t2', m,
+ t2 = Table('t2', m,
Column('a', Integer, ForeignKey('t1.a'))
)
- t3 = Table('t3', m,
+ t3 = Table('t3', m,
Column('a', Integer)
)
return t1, t2, t3
c = Column(Integer)
assert_raises_message(
- exc.ArgumentError,
+ exc.ArgumentError,
"Column must be constructed with a non-blank name or assign a "
"non-blank .name ",
Table, 't', MetaData(), c)
c = Column('', Integer)
assert_raises_message(
- exc.ArgumentError,
+ exc.ArgumentError,
"Column must be constructed with a non-blank name or assign a "
"non-blank .name ",
Table, 't', MetaData(), c)
t = Table('t', MetaData(), c)
assert_raises_message(
- exc.ArgumentError,
+ exc.ArgumentError,
"Column object already assigned to Table 't'",
Table, 'q', MetaData(), c)
event.listen(schema.SchemaItem, "after_parent_attach", after_attach)
m = MetaData()
- t1 = Table('t1', m,
+ t1 = Table('t1', m,
Column('id', Integer, Sequence('foo_id'), primary_key=True),
Column('bar', String, ForeignKey('t2.id'))
)
evt(target)
m = MetaData()
- t1 = Table('t1', m,
+ t1 = Table('t1', m,
Column('id', Integer, Sequence('foo_id'), primary_key=True),
Column('bar', String, ForeignKey('t2.id')),
Column('bat', Integer, unique=True),
eq_(
canary,
[
- 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1',
+ 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t1',
'ForeignKeyConstraint->Table', 'ForeignKeyConstraint->t1',
'UniqueConstraint->Table', 'UniqueConstraint->t1',
- 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2',
+ 'PrimaryKeyConstraint->Table', 'PrimaryKeyConstraint->t2',
'CheckConstraint->Table', 'CheckConstraint->t2',
'UniqueConstraint->Table', 'UniqueConstraint->t2'
]
def test_insert_heterogeneous_params(self):
"""test that executemany parameters are asserted to match the parameter set of the first."""
- assert_raises_message(exc.StatementError,
+ assert_raises_message(exc.StatementError,
r"A value is required for bind parameter 'user_name', in "
"parameter group 2 \(original cause: (sqlalchemy.exc.)?InvalidRequestError: A "
"value is required for bind parameter 'user_name', in "
{'user_id':9}
)
- # this succeeds however. We aren't yet doing
+ # this succeeds however. We aren't yet doing
# a length check on all subsequent parameters.
users.insert().execute(
{'user_id':7},
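
A rough sketch of the behavior this test pins down, using the legacy Engine.execute(stmt, *multiparams) style seen throughout these tests and a throwaway SQLite table standing in for the fixture: the first parameter set determines which bind parameters are compiled into the INSERT, so a later set that omits one of those keys raises, while (per the comment above) extra keys in later sets are not length-checked.

from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String

engine = create_engine("sqlite://")
meta = MetaData()
users = Table("users", meta,
              Column("user_id", Integer, primary_key=True),
              Column("user_name", String(40)))
meta.create_all(engine)

try:
    # first set compiles both binds; the second set omits 'user_name' -> error
    engine.execute(users.insert(),
                   {"user_id": 1, "user_name": "jack"},
                   {"user_id": 2})
except Exception as err:
    print(err)

# first set compiles only 'user_id'; the extra key in the second set is ignored
engine.execute(users.insert(),
               {"user_id": 3},
               {"user_id": 4, "user_name": "ed"})
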
ret[col.key] = id
if result.lastrow_has_defaults():
- criterion = and_(*[col==id for col, id in
+ criterion = and_(*[col==id for col, id in
zip(table.primary_key, result.inserted_primary_key)])
row = engine.execute(table.select(criterion)).first()
for c in table.c:
for engine in test_engines:
- r = engine.execute(users.insert(),
+ r = engine.execute(users.insert(),
{'user_name':'jack'},
)
assert r.closed
content = Table('content', self.metadata,
Column('type', String(30)),
)
- bar = Table('bar', self.metadata,
+ bar = Table('bar', self.metadata,
Column('content_type', String(30))
)
self.metadata.create_all(testing.db)
result = util.pickle.loads(util.pickle.dumps(result))
eq_(
- result,
+ result,
[(7, "jack"), (8, "ed"), (9, "fred")]
)
if use_labels:
if not pickle or use_labels:
assert_raises(exc.NoSuchColumnError, lambda: result[0][addresses.c.user_id])
else:
- # test with a different table. name resolution is
+ # test with a different table. name resolution is
# causing 'user_id' to match when use_labels wasn't used.
eq_(result[0][addresses.c.user_id], 7)
(unprintable(), "unprintable element.*"),
]:
assert_raises_message(
- exc.NoSuchColumnError,
+ exc.NoSuchColumnError,
msg % repl,
lambda: row[accessor]
)
dict(user_id=1, user_name='john'),
)
- # test a little sqlite weirdness - with the UNION,
+ # test a little sqlite weirdness - with the UNION,
# cols come back as "query_users.user_id" in cursor.description
r = text("select query_users.user_id, query_users.user_name from query_users "
"UNION select query_users.user_id, query_users.user_name from query_users",
)
# test using literal tablename.colname
r = text('select query_users.user_id AS "query_users.user_id", '
- 'query_users.user_name AS "query_users.user_name" from query_users',
+ 'query_users.user_name AS "query_users.user_name" from query_users',
bind=testing.db).execution_options(sqlite_raw_colnames=True).execute().first()
eq_(r['query_users.user_id'], 1)
eq_(r['query_users.user_name'], "john")
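
For reference, the quirk the comments above describe can be seen with the sqlite3 DBAPI directly: on typical SQLite builds a compound SELECT reports the full expression text as the column name, which is why the dialect's sqlite_raw_colnames option is needed to look the columns up verbatim. The table below is a stand-in for the fixture.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table query_users (user_id integer, user_name text)")
conn.execute("insert into query_users values (1, 'john')")
cur = conn.execute(
    "select query_users.user_id, query_users.user_name from query_users "
    "union select query_users.user_id, query_users.user_name from query_users")
print([d[0] for d in cur.description])   # e.g. ['query_users.user_id', 'query_users.user_name']
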
)
shadowed.create(checkfirst=True)
try:
- shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light',
- row='Without light there is no shadow',
- _parent='Hidden parent',
+ shadowed.insert().execute(shadow_id=1, shadow_name='The Shadow', parent='The Light',
+ row='Without light there is no shadow',
+ _parent='Hidden parent',
_row='Hidden row')
r = shadowed.select(shadowed.c.shadow_id==1).execute().first()
self.assert_(r.shadow_id == r['shadow_id'] == r[shadowed.c.shadow_id] == 1)
@testing.fails_on('firebird', "uses sql-92 rules")
@testing.fails_on('sybase', "uses sql-92 rules")
@testing.fails_on('mssql+mxodbc', "uses sql-92 rules")
- @testing.fails_if(lambda:
+ @testing.fails_if(lambda:
testing.against('mssql+pyodbc') and not testing.db.dialect.freetds,
"uses sql-92 rules")
def test_bind_in(self):
@testing.emits_warning('.*empty sequence.*')
@testing.requires.boolean_col_expressions
def test_in_filtering_advanced(self):
- """test the behavior of the in_() function when
+ """test the behavior of the in_() function when
comparing against an empty collection, specifically
that a proper boolean value is generated.
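
As a quick illustration of what the docstring means (hedged: the exact SQL differs across SQLAlchemy versions; older releases emit the "empty sequence" warning that the decorator above filters):

from sqlalchemy import MetaData, Table, Column, Integer

t1 = Table("t1", MetaData(), Column("a", Integer))
print(t1.c.a.in_([]))    # older versions render an always-false expression, e.g. "t1.a != t1.a"
print(~t1.c.a.in_([]))   # and its negation is still a proper boolean, e.g. "t1.a = t1.a"
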
class PercentSchemaNamesTest(fixtures.TestBase):
"""tests using percent signs, spaces in table and column names.
- Doesn't pass for mysql, postgresql, but this is really a
+ Doesn't pass for mysql, postgresql, but this is really a
SQLAlchemy bug - we should be escaping out %% signs for this
operation the same way we do for text() and column labels.
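
The escaping issue the docstring refers to only bites DBAPIs whose paramstyle is "format" or "pyformat" (psycopg2, MySQLdb), where a literal % in the emitted statement must be doubled to %% so it is not read as a placeholder. A rough sketch with the lightweight constructs used below:

from sqlalchemy import sql

pt = sql.table("percent%table",
               sql.column("percent%"),
               sql.column("spaces % more spaces"))
print(pt.select())   # the rendered names contain bare "%" characters
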
Column("percent%", Integer),
Column("spaces % more spaces", Integer),
)
- lightweight_percent_table = sql.table('percent%table',
+ lightweight_percent_table = sql.table('percent%table',
sql.column("percent%"),
sql.column("spaces % more spaces"),
)
def teardown_class(cls):
metadata.drop_all()
- @testing.skip_if(lambda: testing.against('postgresql'),
+ @testing.skip_if(lambda: testing.against('postgresql'),
"psycopg2 2.4 no longer accepts % in bind placeholders")
def test_single_roundtrip(self):
percent_table.insert().execute(
)
self._assert_table()
- @testing.skip_if(lambda: testing.against('postgresql'),
+ @testing.skip_if(lambda: testing.against('postgresql'),
"psycopg2 2.4 no longer accepts % in bind placeholders")
@testing.crashes('mysql+mysqldb', "MySQLdb handles executemany() "
"inconsistently vs. execute()")
def _assert_table(self):
for table in (
- percent_table,
- percent_table.alias(),
- lightweight_percent_table,
+ percent_table,
+ percent_table.alias(),
+ lightweight_percent_table,
lightweight_percent_table.alias()):
eq_(
list(
content = Table('content', metadata,
Column('t', String(30), key="type"),
)
- bar = Table('bar', metadata,
+ bar = Table('bar', metadata,
Column('ctype', String(30), key="content_type")
)
eq_(found2, wanted)
def test_union_all_lightweight(self):
- """like test_union_all, but breaks the sub-union into
+ """like test_union_all, but breaks the sub-union into
a subquery with an explicit column reference on the outside,
more palatable to a wider variety of engines.
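
A minimal sketch of the "lightweight" form the docstring describes, using the list-style select() these tests use and a placeholder table: the inner union becomes a named subquery, and the outer SELECT references its column explicitly instead of selecting from the bare compound.

from sqlalchemy import MetaData, Table, Column, Integer, select, union_all

m = MetaData()
t = Table("t", m, Column("col1", Integer), Column("col2", Integer))

inner = union_all(select([t.c.col1]), select([t.c.col2])).alias("u")
print(select([inner.c.col1]))   # SELECT u.col1 FROM (SELECT ... UNION ALL SELECT ...) AS u
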
self.assert_compile(t1.select().apply_labels(), '''SELECT "foo"."t1"."col1" AS "foo_t1_col1" FROM "foo"."t1"''')
a = t1.select().alias('anon')
b = select([1], a.c.col1==2, from_obj=a)
- self.assert_compile(b,
+ self.assert_compile(b,
'''SELECT 1 FROM (SELECT "foo"."t1"."col1" AS "col1" FROM '''\
'''"foo"."t1") AS anon WHERE anon."col1" = :col1_1'''
)
Column('ColumnOne', Integer, quote=False), quote=False, schema="FooBar", quote_schema=False)
self.assert_compile(t1.select(), "SELECT FooBar.TableOne.ColumnOne FROM FooBar.TableOne")
- self.assert_compile(t1.select().apply_labels(),
+ self.assert_compile(t1.select().apply_labels(),
"SELECT FooBar.TableOne.ColumnOne AS "\
- "FooBar_TableOne_ColumnOne FROM FooBar.TableOne" # TODO: is this what we really want here ? what if table/schema
+ "FooBar_TableOne_ColumnOne FROM FooBar.TableOne" # TODO: is this what we really want here ? what if table/schema
# *are* quoted?
)
a = t1.select().alias('anon')
b = select([1], a.c.ColumnOne==2, from_obj=a)
- self.assert_compile(b,
+ self.assert_compile(b,
"SELECT 1 FROM (SELECT FooBar.TableOne.ColumnOne AS "\
"ColumnOne FROM FooBar.TableOne) AS anon WHERE anon.ColumnOne = :ColumnOne_1"
)
if labels aren't quoted, a query in postgresql in particular will fail since it produces:
SELECT LaLa.lowercase, LaLa."UPPERCASE", LaLa."MixedCase", LaLa."ASC"
- FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase,
- "WorstCase1"."UPPERCASE" AS UPPERCASE,
+ FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase,
+ "WorstCase1"."UPPERCASE" AS UPPERCASE,
"WorstCase1"."MixedCase" AS MixedCase, "WorstCase1"."ASC" AS ASC \nFROM "WorstCase1") AS LaLa
where the "UPPERCASE" column of "LaLa" doesnt exist.
Column("order", Integer))
x = select([table.c.col1, table.c['from'], table.c.louisville, table.c.order])
- self.assert_compile(x,
+ self.assert_compile(x,
'''SELECT "ImATable".col1, "ImATable"."from", "ImATable".louisville, "ImATable"."order" FROM "ImATable"''')
metadata = MetaData(testing.db)
employees_table = Table('employees', metadata,
- Column('employee_id', Integer,
- Sequence('employee_id_seq', optional=True),
+ Column('employee_id', Integer,
+ Sequence('employee_id_seq', optional=True),
primary_key=True),
Column('name', String(50)),
Column('department', String(1)),
]
def _all_dialects(self):
- return [d.base.dialect() for d in
+ return [d.base.dialect() for d in
self._all_dialect_modules()]
def _types_for_mod(self, mod):
self.assert_compile(t, "VARCHAR(50)", dialect=sl)
self.assert_compile(t, "FLOAT", dialect=pg)
eq_(
- t.dialect_impl(dialect=sl).impl.__class__,
+ t.dialect_impl(dialect=sl).impl.__class__,
String().dialect_impl(dialect=sl).__class__
)
eq_(
- t.dialect_impl(dialect=pg).impl.__class__,
+ t.dialect_impl(dialect=pg).impl.__class__,
Float().dialect_impl(pg).__class__
)
('oracle','cx_oracle'),
)), \
"name: %s driver %s returns_unicode_strings=%s" % \
- (testing.db.name,
- testing.db.driver,
+ (testing.db.name,
+ testing.db.driver,
testing.db.dialect.returns_unicode_strings)
def test_round_trip(self):
eq_(uni(unicodedata), unicodedata.encode('utf-8'))
- # using convert unicode at engine level -
+ # using convert unicode at engine level -
# this should not be raising a warning
unicode_engine = engines.utf8_engine(options={'convert_unicode':True,})
unicode_engine.dialect.supports_unicode_binds = False
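
For context, the engine-level option exercised here is the legacy convert_unicode/encoding pair on create_engine (since removed from SQLAlchemy): with it enabled, plain String columns send and receive Python unicode without the per-type warning. A rough sketch under those legacy flags:

from sqlalchemy import create_engine, MetaData, Table, Column, String

engine = create_engine("sqlite://", convert_unicode=True, encoding="utf-8")
meta = MetaData()
txt = Table("txt", meta, Column("data", String(30)))   # plain String, not Unicode
meta.create_all(engine)
engine.execute(txt.insert(), data=u"m\xe9il")
print(engine.execute(txt.select()).scalar())           # comes back as unicode
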
engine = engines.testing_engine(options={'encoding':'ascii'})
m.create_all(engine)
try:
- # insert a row that should be ascii and
+ # insert a row that should be ascii and
# coerce from unicode with ignore on the bind side
engine.execute(
table.insert(),
# one row will be ascii with ignores,
# the other will be either ascii with the ignores
- # or just the straight unicode+ utf8 value if the
+ # or just the straight unicode+ utf8 value if the
# dialect just returns unicode
result = engine.execute(table.select().order_by(table.c.sort))
ascii_row = result.fetchone()
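
The bind-side coercion the comments rely on is plain codec behavior: with the "ignore" error handler, characters the target encoding cannot represent are silently dropped, while utf-8 keeps them. A two-line refresher (the sample string is arbitrary):

print(u"m\xe9il".encode("ascii", "ignore"))   # 'mil' - the e-acute is dropped
print(u"m\xe9il".encode("utf-8"))             # 'm\xc3\xa9il'
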
def teardown_class(cls):
metadata.drop_all()
- @testing.fails_on('postgresql+zxjdbc',
+ @testing.fails_on('postgresql+zxjdbc',
'zxjdbc fails on ENUM: column "XXX" is of type XXX '
'but expression is of type character varying')
- @testing.fails_on('postgresql+pg8000',
+ @testing.fails_on('postgresql+pg8000',
'zxjdbc fails on ENUM: column "XXX" is of type XXX '
'but expression is of type text')
def test_round_trip(self):
])
eq_(
- enum_table.select().order_by(enum_table.c.id).execute().fetchall(),
+ enum_table.select().order_by(enum_table.c.id).execute().fetchall(),
[
(1, 'two'),
(2, 'two'),
eq_(
non_native_enum_table.select().
- order_by(non_native_enum_table.c.id).execute().fetchall(),
+ order_by(non_native_enum_table.c.id).execute().fetchall(),
[
(1, 'two'),
(2, 'two'),
eq_(e1.adapt(ENUM).name, 'foo')
eq_(e1.adapt(ENUM).schema, 'bar')
- @testing.crashes('mysql',
+ @testing.crashes('mysql',
'Inconsistent behavior across various OS/drivers'
)
def test_constraint(self):
- assert_raises(exc.DBAPIError,
+ assert_raises(exc.DBAPIError,
enum_table.insert().execute,
{'id':4, 'someenum':'four'}
)
- @testing.fails_on('mysql',
+ @testing.fails_on('mysql',
"the CHECK constraint doesn't raise an exception for unknown reason")
def test_non_native_constraint(self):
- assert_raises(exc.DBAPIError,
+ assert_raises(exc.DBAPIError,
non_native_enum_table.insert().execute,
{'id':4, 'someenum':'four'}
)
Column('x', Enum("x", "y", name="pge"))
)
t.create(e, checkfirst=False)
- # basically looking for the start of
+ # basically looking for the start of
# the constraint, or the ENUM def itself,
# depending on backend.
assert "('x'," in e.print_sql()
stream1 =self.load_stream('binary_data_one.dat')
stream2 =self.load_stream('binary_data_two.dat')
binary_table.insert().execute(
- primary_id=1,
- misc='binary_data_one.dat',
- data=stream1,
- data_slice=stream1[0:100],
- pickled=testobj1,
+ primary_id=1,
+ misc='binary_data_one.dat',
+ data=stream1,
+ data_slice=stream1[0:100],
+ pickled=testobj1,
mypickle=testobj3)
binary_table.insert().execute(
- primary_id=2,
- misc='binary_data_two.dat',
- data=stream2,
- data_slice=stream2[0:99],
+ primary_id=2,
+ misc='binary_data_two.dat',
+ data=stream2,
+ data_slice=stream2[0:99],
pickled=testobj2)
binary_table.insert().execute(
- primary_id=3,
- misc='binary_data_two.dat',
- data=None,
- data_slice=stream2[0:99],
+ primary_id=3,
+ misc='binary_data_two.dat',
+ data=None,
+ data_slice=stream2[0:99],
pickled=None)
for stmt in (
binary_table.select(order_by=binary_table.c.primary_id),
text(
- "select * from binary_table order by binary_table.primary_id",
- typemap={'pickled':PickleType,
- 'mypickle':MyPickleType,
- 'data':LargeBinary, 'data_slice':LargeBinary},
+ "select * from binary_table order by binary_table.primary_id",
+ typemap={'pickled':PickleType,
+ 'mypickle':MyPickleType,
+ 'data':LargeBinary, 'data_slice':LargeBinary},
bind=testing.db)
):
l = stmt.execute().fetchall()
meta.create_all()
test_table.insert().execute({
- 'id':1,
- 'data':'somedata',
- 'atimestamp':datetime.date(2007, 10, 15),
+ 'id':1,
+ 'data':'somedata',
+ 'atimestamp':datetime.date(2007, 10, 15),
'avalue':25, 'bvalue':'foo'})
@classmethod
eq_(
testing.db.execute(
select([test_table.c.id, test_table.c.data, test_table.c.atimestamp])
- .where(expr),
+ .where(expr),
{"thedate":datetime.date(2007, 10, 15)}).fetchall(),
[(1, 'somedata', datetime.date(2007, 10, 15))]
)
eq_(expr.right.type._type_affinity, String)
eq_(
- testing.db.execute(test_table.select().where(expr),
+ testing.db.execute(test_table.select().where(expr),
{"somevalue":"foo"}).fetchall(),
- [(1, 'somedata',
+ [(1, 'somedata',
datetime.date(2007, 10, 15), 25, 'BIND_INfooBIND_OUT')]
)
def test_null_comparison(self):
eq_(
- str(column('a', types.NullType()) + column('b', types.NullType())),
+ str(column('a', types.NullType()) + column('b', types.NullType())),
"a + b"
)
self._do_test(
Numeric(precision=8, scale=4),
[15.7563, decimal.Decimal("15.7563"), None],
- [decimal.Decimal("15.7563"), None],
+ [decimal.Decimal("15.7563"), None],
)
def test_numeric_as_float(self):
self._do_test(
Float(precision=8, asdecimal=True),
[15.7563, decimal.Decimal("15.7563"), None],
- [decimal.Decimal("15.7563"), None],
+ [decimal.Decimal("15.7563"), None],
filter_ = lambda n:n is not None and round(n, 5) or None
)
def test_precision_decimal(self):
numbers = set([
decimal.Decimal("54.234246451650"),
- decimal.Decimal("0.004354"),
- decimal.Decimal("900.0"),
+ decimal.Decimal("0.004354"),
+ decimal.Decimal("900.0"),
])
self._do_test(
def test_enotation_decimal(self):
"""test exceedingly small decimals.
- Decimal reports values with E notation when the exponent
+ Decimal reports values with E notation when the exponent
is greater than 6.
"""
numbers
)
- @testing.fails_on("sybase+pyodbc",
+ @testing.fails_on("sybase+pyodbc",
"Don't know how do get these values through FreeTDS + Sybase")
@testing.fails_on("firebird", "Precision must be from 1 to 18")
def test_enotation_decimal_large(self):
"this may be a bug due to the difficulty in handling "
"oracle precision numerics"
)
- @testing.fails_on('postgresql+pg8000',
+ @testing.fails_on('postgresql+pg8000',
"pg-8000 does native decimal but truncates the decimals.")
def test_numeric_no_decimal(self):
numbers = set([
small_delta = datetime.timedelta(days=15, seconds=5874)
delta = datetime.timedelta(414)
interval_table.insert().execute(
- native_interval=small_delta,
- native_interval_args=delta,
+ native_interval=small_delta,
+ native_interval_args=delta,
non_native_interval=delta
)
row = interval_table.select().execute().first()
res3 = select([bool_table.c.id, bool_table.c.value]).\
order_by(bool_table.c.id).\
execute().fetchall()
- eq_(res3, [(1, True), (2, False),
- (3, True), (4, True),
+ eq_(res3, [(1, True), (2, False),
+ (3, True), (4, True),
(5, True), (6, None)])
# ensure we're getting True/False, not just ints
assert res3[0][1] is True
assert res3[1][1] is False
- @testing.fails_on('mysql',
+ @testing.fails_on('mysql',
"The CHECK clause is parsed but ignored by all storage engines.")
- @testing.fails_on('mssql',
+ @testing.fails_on('mssql',
"FIXME: MS-SQL 2005 doesn't honor CHECK ?!?")
@testing.skip_if(lambda: testing.db.dialect.supports_native_boolean)
def test_constraint(self):
assert_raises((exc.IntegrityError, exc.ProgrammingError),
- testing.db.execute,
+ testing.db.execute,
"insert into booltest (id, value) values(1, 5)")
@testing.skip_if(lambda: testing.db.dialect.supports_native_boolean)
):
assert p1.compare_values(p1.copy_value(obj), obj)
- assert_raises(NotImplementedError,
+ assert_raises(NotImplementedError,
p1.compare_values,
pickleable.BrokenComparable('foo'),
pickleable.BrokenComparable('foo'))
select([column(u'special_col')]).select_from(t1).execute().close()
assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), unicode)
- # now execute, run the sequence. it should run in u"Special_col.nextid" or similar as
+ # now execute, run the sequence. it should run in u"Special_col.nextid" or similar as
# a unicode object; cx_oracle asserts that this is None or a String (postgresql lets it pass thru).
# ensure that executioncontext._exec_default() is encoding.
t1.insert().execute(data='foo')
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
)
Table('addresses', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
Column('name', String(30), nullable=False),
)
Table("dingalings", metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('address_id', None, ForeignKey('addresses.id')),
Column('data', String(30)),
"AND addresses.email_address = :email_address_1 "
"AND addresses.id = dingalings.address_id AND "
"dingalings.id = :id_1",
- checkparams={u'email_address_1': 'e1', u'id_1': 2,
+ checkparams={u'email_address_1': 'e1', u'id_1': 2,
'name': 'newname'}
)
def test_render_subquery(self):
users, addresses = self.tables.users, self.tables.addresses
- subq = select([addresses.c.id,
- addresses.c.user_id,
+ subq = select([addresses.c.id,
+ addresses.c.user_id,
addresses.c.email_address]).\
where(addresses.c.id==7).alias()
self.assert_compile(
"email_address FROM addresses WHERE addresses.id = "
":id_1) AS anon_1 WHERE users.id = anon_1.user_id "
"AND anon_1.email_address = :email_address_1",
- checkparams={u'email_address_1': 'e1',
+ checkparams={u'email_address_1': 'e1',
u'id_1': 7, 'name': 'newname'}
)
testing.db.execute(
addresses.update().\
values({
- addresses.c.email_address:users.c.name,
+ addresses.c.email_address:users.c.name,
users.c.name:'ed2'
}).\
where(users.c.id==addresses.c.user_id).\
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(30), nullable=False),
Column('some_update', String(30), onupdate="im the update")
)
Table('addresses', metadata,
- Column('id', Integer, primary_key=True,
+ Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('user_id', None, ForeignKey('users.id')),
Column('email_address', String(50), nullable=False),
ret = testing.db.execute(
addresses.update().\
values({
- addresses.c.email_address:users.c.name,
+ addresses.c.email_address:users.c.name,
users.c.name:'ed2'
}).\
where(users.c.id==addresses.c.user_id).\
ret = testing.db.execute(
addresses.update().\
values({
- 'email_address':users.c.name,
+ 'email_address':users.c.name,
}).\
where(users.c.id==addresses.c.user_id).\
where(users.c.name=='ed')
(4, 9, "fred@fred.com")
]
)
- # users table not actually updated,
+ # users table not actually updated,
# so no onupdate
eq_(
testing.db.execute(