]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
merged -r 5869:5936 of trunk, including dialect changes re: extract()
authorMike Bayer <mike_mp@zzzcomputing.com>
Sat, 2 May 2009 15:48:32 +0000 (15:48 +0000)
committerMike Bayer <mike_mp@zzzcomputing.com>
Sat, 2 May 2009 15:48:32 +0000 (15:48 +0000)
70 files changed:
CHANGES
doc/build/Makefile
doc/build/dbengine.rst
doc/build/ormtutorial.rst
doc/build/reference/ext/serializer.rst
examples/adjacencytree/basic_tree.py
examples/association/basic_association.py
examples/association/proxied_association.py
examples/collections/large_collection.py
examples/custom_attributes/custom_management.py
examples/custom_attributes/listen_for_events.py
examples/derived_attributes/attributes.py
examples/dynamic_dict/dynamic_dict.py
examples/elementtree/adjacency_list.py
examples/elementtree/optimized_al.py
examples/elementtree/pickle.py
examples/graphs/graph1.py
examples/nested_sets/nested_sets.py
examples/pickle/custom_pickler.py
examples/poly_assoc/poly_assoc.py
examples/poly_assoc/poly_assoc_fk.py
examples/poly_assoc/poly_assoc_generic.py
examples/polymorph/concrete.py
examples/polymorph/polymorph.py
examples/polymorph/single.py
examples/postgis/postgis.py
examples/sharding/attribute_shard.py
examples/vertical/dictlike-polymorphic.py
examples/vertical/dictlike.py
examples/vertical/vertical.py
lib/sqlalchemy/dialects/access/base.py
lib/sqlalchemy/dialects/mssql/base.py
lib/sqlalchemy/dialects/mysql/base.py
lib/sqlalchemy/dialects/postgres/base.py
lib/sqlalchemy/dialects/sqlite/base.py
lib/sqlalchemy/dialects/sybase/base.py
lib/sqlalchemy/ext/declarative.py
lib/sqlalchemy/orm/__init__.py
lib/sqlalchemy/orm/dynamic.py
lib/sqlalchemy/orm/evaluator.py
lib/sqlalchemy/orm/interfaces.py
lib/sqlalchemy/orm/properties.py
lib/sqlalchemy/orm/query.py
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/orm/unitofwork.py
lib/sqlalchemy/schema.py
lib/sqlalchemy/sql/compiler.py
lib/sqlalchemy/sql/expression.py
lib/sqlalchemy/sql/util.py
lib/sqlalchemy/util.py
test/base/utils.py
test/dialect/access.py
test/dialect/mssql.py
test/dialect/mysql.py
test/dialect/postgres.py
test/dialect/sqlite.py
test/dialect/sybase.py
test/ext/declarative.py
test/orm/alltests.py
test/orm/cycles.py
test/orm/dynamic.py
test/orm/eager_relations.py
test/orm/inheritance/polymorph2.py
test/orm/merge.py
test/orm/pickled.py
test/orm/query.py
test/sql/functions.py
test/sql/select.py
test/sql/testtypes.py
test/testlib/requires.py

diff --git a/CHANGES b/CHANGES
index 8aa1709c6e8b9215def8955ebc512fa50f541f1f..c3dc89916f4df464ce8521f9aecdfea972d7e8f1 100644 (file)
--- a/CHANGES
+++ b/CHANGES
@@ -3,33 +3,91 @@
 =======
 CHANGES
 =======
+
 0.5.4
 =====
+
 - orm
-    - Fixed the "set collection" function on "dynamic" relations
-      to initiate events correctly.  Previously a collection
-      could only be assigned to a pending parent instance,
-      otherwise modified events would not be fired correctly.
-      Set collection is now compatible with merge(), 
-      fixes [ticket:1352].
-
-    - Lazy loader will not use get() if the "lazy load"
-      SQL clause matches the clause used by get(), but
-      contains some parameters hardcoded.  Previously
-      the lazy strategy would fail with the get().  Ideally
-      get() would be used with the hardcoded parameters
-      but this would require further development.
+    - Modified query_cls on DynamicAttributeImpl to accept a full
+      mixin version of the AppenderQuery, which allows subclassing
+      the AppenderMixin.
+
+    - Fixed the evaluator not being able to evaluate IS NULL clauses.
+
+    - Fixed the "set collection" function on "dynamic" relations to
+      initiate events correctly.  Previously a collection could only
+      be assigned to a pending parent instance, otherwise modified
+      events would not be fired correctly.  Set collection is now
+      compatible with merge(), fixes [ticket:1352].
+    
+    - Allowed pickling of PropertyOption objects constructed with
+      instrumented descriptors; previously, pickle errors would occur 
+      when pickling an object which was loaded with a descriptor-based 
+      option, such as query.options(eagerload(MyClass.foo)).
+      
+    - Lazy loader will not use get() if the "lazy load" SQL clause
+      matches the clause used by get(), but contains some parameters
+      hardcoded.  Previously the lazy strategy would fail with the
+      get().  Ideally get() would be used with the hardcoded
+      parameters but this would require further development.
       [ticket:1357]
 
+    - Fixed another location where autoflush was interfering
+      with session.merge().  autoflush is disabled completely
+      for the duration of merge() now. [ticket:1360]
+
+    - Fixed bug in relation(), introduced in 0.5.3, 
+      whereby a self referential relation
+      from a base class to a joined-table subclass would 
+      not configure correctly.
+
+    - Fixed documentation for session weak_identity_map - 
+      the default value is True, indicating a weak
+      referencing map in use.
+      
+    - Fixed a unit of work issue whereby the foreign
+      key attribute on an item contained within a collection 
+      owned by an object being deleted would not be set to
+      None if the relation() was self-referential. [ticket:1376]
+
+    - Fixed Query.update() and Query.delete() failures with eagerloaded
+      relations. [ticket:1378]
+
+- schema
+    - Added a quote_schema() method to the IdentifierPreparer class
+      so that dialects can override how schemas get handled. This
+      enables the MSSQL dialect to treat schemas as multipart
+      identifiers, such as 'database.owner'. [ticket:594, 1341]
+
 - sql
-    - Fixed __repr__() and other _get_colspec() methods on 
+    - ``sqlalchemy.extract()`` is now dialect sensitive and can
+      extract components of timestamps idiomatically across the
+      supported databases, including SQLite.
+
+    - Fixed __repr__() and other _get_colspec() methods on
       ForeignKey constructed from __clause_element__() style
       construct (i.e. declarative columns).  [ticket:1353]
-      
+
 - mssql
+    - Modified how savepoint logic works to prevent it from
+      stepping on non-savepoint oriented routines. Savepoint
+      support is still very experimental.
+
+    - Added in reserved words for MSSQL that covers version 2008
+      and all prior versions. [ticket:1310]
+
     - Corrected problem with information schema not working with a
-      binary collation based database. Cleaned up information
-      schema since it is only used by mssql now. [ticket:1343]
+      binary collation based database. Cleaned up information schema
+      since it is only used by mssql now. [ticket:1343]
+
+- sqlite
+    - Corrected the float type so that it correctly maps to a
+      SLFloat type when being reflected. [ticket:1273]
+
+- extensions
+
+    - Fixed adding of deferred or other column properties to a
+      declarative class. [ticket:1379]
 
 0.5.3
 =====
index 6dcd0321ee953ac3f58c0fea78b8ad1d1c8368e2..f7ac2ca576dd349387c7f3a072660e9a8358a546 100644 (file)
@@ -3,7 +3,7 @@
 
 # You can set these variables from the command line.
 SPHINXOPTS    =
-SPHINXBUILD   = ./bin/sphinx-build
+SPHINXBUILD   = sphinx-build
 PAPER         =
 
 # Internal variables.
index 92e4d3063c766f2ea9a338f35765187af2466208..362526943dbe7892d816254e42299c95bd6f621d 100644 (file)
@@ -92,8 +92,12 @@ Available drivernames are ``sqlite``, ``mysql``, ``postgres``, ``oracle``, ``mss
     # mysql
     mysql_db = create_engine('mysql://localhost/foo')
 
+    # oracle
+    oracle_db = create_engine('oracle://scott:tiger@host:port/dbname?key1=value1&key2=value2')
+
     # oracle via TNS name
-    oracle_db = create_engine('oracle://scott:tiger@dsn')
+    oracle_db = create_engine('oracle://scott:tiger@tnsname')
+    oracle_db = create_engine('oracle://scott:tiger@tnsname/?key1=value1&key2=value2')
 
     # oracle will feed host/port/SID into cx_oracle.makedsn
     oracle_db = create_engine('oracle://scott:tiger@127.0.0.1:1521/sidname')
@@ -102,7 +106,8 @@ Available drivernames are ``sqlite``, ``mysql``, ``postgres``, ``oracle``, ``mss
     mssql_db = create_engine('mssql://username:password@localhost/database')
 
     # mssql via a DSN connection
-    mssql_db = create_engine('mssql://username:password@/?dsn=mydsn') 
+    mssql_db = create_engine('mssql://mydsn')
+    mssql_db = create_engine('mssql://username:password@mydsn')
 
 The :class:`~sqlalchemy.engine.base.Engine` will ask the connection pool for a connection when the ``connect()`` or ``execute()`` methods are called.  The default connection pool, :class:`~sqlalchemy.pool.QueuePool`, as well as the default connection pool used with SQLite, :class:`~sqlalchemy.pool.SingletonThreadPool`, will open connections to the database on an as-needed basis.  As concurrent statements are executed, :class:`~sqlalchemy.pool.QueuePool` will grow its pool of connections to a default size of five, and will allow a default "overflow" of ten.   Since the ``Engine`` is essentially "home base" for the connection pool, it follows that you should keep a single :class:`~sqlalchemy.engine.base.Engine` per database established within an application, rather than creating a new one for each connection.
 
index 1190f3c3365ed64c4b313618e447d06f8958e99c..423c235d245772dd1deba73d41a439b511a8cfff 100644 (file)
@@ -501,9 +501,10 @@ The ``all()``, ``one()``, and ``first()`` methods of ``Query`` immediately issue
 
 .. sourcecode:: python+sql
 
-    {sql}>>> try: #doctest: +NORMALIZE_WHITESPACE
+    {sql}>>> from sqlalchemy.orm.exc import MultipleResultsFound
+    >>> try: #doctest: +NORMALIZE_WHITESPACE
     ...     user = query.one()
-    ... except Exception, e:
+    ... except MultipleResultsFound, e:
     ...     print e
     SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password
     FROM users
@@ -514,9 +515,10 @@ The ``all()``, ``one()``, and ``first()`` methods of ``Query`` immediately issue
 
 .. sourcecode:: python+sql
 
-    {sql}>>> try: #doctest: +NORMALIZE_WHITESPACE
+    {sql}>>> from sqlalchemy.orm.exc import NoResultFound
+    >>> try: #doctest: +NORMALIZE_WHITESPACE
     ...     user = query.filter(User.id == 99).one()
-    ... except Exception, e:
+    ... except NoResultFound, e:
     ...     print e
     SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password
     FROM users
@@ -682,7 +684,7 @@ Querying for Jack, we get just Jack back.  No SQL is yet issued for Jack's addre
      LIMIT 2 OFFSET 0
     ['jack']
 
-    >>> jack
+    {stop}>>> jack
     <User('jack','Jack Bean', 'gjffdd')>
 
 Let's look at the ``addresses`` collection.  Watch the SQL:
@@ -709,14 +711,14 @@ If you want to reduce the number of queries (dramatically, in many cases), we ca
     anon_1.users_fullname AS anon_1_users_fullname, anon_1.users_password AS anon_1_users_password,
     addresses_1.id AS addresses_1_id, addresses_1.email_address AS addresses_1_email_address,
     addresses_1.user_id AS addresses_1_user_id
-        FROM (SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname,
-        users.password AS users_password
-        FROM users WHERE users.name = ?
-         LIMIT 2 OFFSET 0) AS anon_1 LEFT OUTER JOIN addresses AS addresses_1
-         ON anon_1.users_id = addresses_1.user_id ORDER BY addresses_1.id
-        ['jack']
-
-    >>> jack
+    FROM (SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname,
+    users.password AS users_password
+    FROM users WHERE users.name = ?
+    LIMIT 2 OFFSET 0) AS anon_1 LEFT OUTER JOIN addresses AS addresses_1
+    ON anon_1.users_id = addresses_1.user_id ORDER BY addresses_1.id
+    ['jack']
+
+    {stop}>>> jack
     <User('jack','Jack Bean', 'gjffdd')>
 
     >>> jack.addresses
@@ -1199,7 +1201,7 @@ Usage is not too different from what we've been doing.  Let's give Wendy some bl
     WHERE users.name = ?
      LIMIT 2 OFFSET 0
     ['wendy']
-
+    {stop}
     >>> post = BlogPost("Wendy's Blog Post", "This is a test", wendy)
     >>> session.add(post)
 
index 184e57f1bdbd4aa8ed28e89a8fe43aaf8c38212c..759ba35ce6131a66172295b090f9c54797a59398 100644 (file)
@@ -3,42 +3,6 @@ serializer
 
 :author: Mike Bayer
 
-Serializer/Deserializer objects for usage with SQLAlchemy structures.
-
-Any SQLAlchemy structure, including Tables, Columns, expressions, mappers,
-Query objects etc. can be serialized in a minimally-sized format,
-and deserialized when given a Metadata and optional ScopedSession object
-to use as context on the way out.
-
-Usage is nearly the same as that of the standard Python pickle module:
-
-.. sourcecode:: python+sql
-
-    from sqlalchemy.ext.serializer import loads, dumps
-    metadata = MetaData(bind=some_engine)
-    Session = scoped_session(sessionmaker())
-    
-    # ... define mappers
-    
-    query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
-    
-    # pickle the query
-    serialized = dumps(query)
-    
-    # unpickle.  Pass in metadata + scoped_session
-    query2 = loads(serialized, metadata, Session)
-    
-    print query2.all()
-
-Similar restrictions as when using raw pickle apply; mapped classes must be 
-themselves be pickleable, meaning they are importable from a module-level
-namespace.
-
-Note that instances of user-defined classes do not require this extension
-in order to be pickled; these contain no references to engines, sessions
-or expression constructs in the typical case and can be serialized directly.
-This module is specifically for ORM and expression constructs.
-
 .. automodule:: sqlalchemy.ext.serializer
    :members:
    :undoc-members:
index ac149d68a932c972bca92c871477c31dc06fe7fc..dec03e5504c8a9645ad2810b6fc836f9bc0d5f12 100644 (file)
@@ -72,7 +72,7 @@ print "Flushing:"
 print "----------------------------"
 
 session = create_session()
-session.save(node)
+session.add(node)
 session.flush()
 
 print "\n\n\n----------------------------"
@@ -113,7 +113,7 @@ print "Clearing session, selecting "
 print "tree new where node_id=%d:" % nodeid
 print "----------------------------"
 
-session.clear()
+session.expunge_all()
 t = session.query(TreeNode).filter(TreeNode.id==nodeid)[0]
 
 print "\n\n\n----------------------------"
index 8078a2bb9c80a7291f68928c32d6e25d43026056..a56d2383e6898bf82fbeabd724aa864e3d0b3a70 100644 (file)
@@ -10,13 +10,14 @@ the price paid by the user for that particular order, which is independent
 of the price on each Item (since those can change).
 """
 
-import logging
 from datetime import datetime
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
+    String, DateTime, Numeric, ForeignKey, and_)
+from sqlalchemy.orm import mapper, relation, create_session
 
 # Uncomment these to watch database activity.
+#import logging
 #logging.basicConfig(format='%(message)s')
 #logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
 
@@ -72,10 +73,10 @@ mapper(OrderItem, orderitems, properties={
 session = create_session()
 
 # create our catalog
-session.save(Item('SA T-Shirt', 10.99))
-session.save(Item('SA Mug', 6.50))
-session.save(Item('SA Hat', 8.99))
-session.save(Item('MySQL Crowbar', 16.99))
+session.add(Item('SA T-Shirt', 10.99))
+session.add(Item('SA Mug', 6.50))
+session.add(Item('SA Hat', 8.99))
+session.add(Item('MySQL Crowbar', 16.99))
 session.flush()
 
 # function to return items from the DB
@@ -89,10 +90,10 @@ order = Order('john smith')
 order.order_items.append(OrderItem(item('SA Mug')))
 order.order_items.append(OrderItem(item('MySQL Crowbar'), 10.99))
 order.order_items.append(OrderItem(item('SA Hat')))
-session.save(order)
+session.add(order)
 session.flush()
 
-session.clear()
+session.expunge_all()
 
 # query the order, print items
 order = session.query(Order).filter_by(customer_name='john smith').one()
index ed3afd597c750ebbd65048faf1ce6b74c3e246e8..3a78e3adfbcfc6a9b6ddc038b521426209dfbd83 100644 (file)
@@ -2,8 +2,9 @@
 the usage of the associationproxy extension."""
 
 from datetime import datetime
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
+    String, DateTime, Float, ForeignKey, and_)
+from sqlalchemy.orm import mapper, relation, create_session
 from sqlalchemy.ext.associationproxy import AssociationProxy
 
 engine = create_engine('sqlite://')
@@ -82,7 +83,7 @@ order.items.append(item('SA Hat'))
 session.add(order)
 session.flush()
 
-session.clear()
+session.expunge_all()
 
 # query the order, print items
 order = session.query(Order).filter_by(customer_name='john smith').one()
index 203aa6d230a1f0c9c0647447d902db75bdbc3ffb..2451ea5826bcbb9222ef158bd2fa80125b86e622 100644 (file)
@@ -4,8 +4,10 @@ Also see the docs regarding the new "dynamic" relation option, which
 presents a more refined version of some of these patterns.
 """
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import (mapper, relation, create_session, MapperExtension,
+    object_session)
+
 meta = MetaData('sqlite://')
 meta.bind.echo = True
 
@@ -56,11 +58,11 @@ org.members.append(Member('member one'))
 org.members.append(Member('member two'))
 org.members.append(Member('member three'))
 
-sess.save(org)
+sess.add(org)
 
 print "-------------------------\nflush one - save org + 3 members"
 sess.flush()
-sess.clear()
+sess.expunge_all()
 
 # reload. load the org and some child members
 print "-------------------------\nload subset of members"
@@ -68,7 +70,7 @@ org = sess.query(Organization).get(org.org_id)
 members = org.member_query.filter(member_table.c.name.like('%member t%')).all()
 print members
 
-sess.clear()
+sess.expunge_all()
 
 
 # reload.  create some more members and flush, without loading any of the original members
@@ -80,7 +82,7 @@ org.members.append(Member('member six'))
 print "-------------------------\nflush two - save 3 more members"
 sess.flush()
 
-sess.clear()
+sess.expunge_all()
 org = sess.query(Organization).get(org.org_id)
 
 # now delete.  note that this will explicitly delete members four, five and six because they are in the session,
index 707e182ba0fd9944bb1128a37053f2a709e84958..b8ea70dfd57a88999677646efe2aa005570a3cb1 100644 (file)
@@ -9,8 +9,10 @@ of "instance.__dict__".  Note that the default collection implementations can be
 with a custom attribute system as well.
 
 """
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, Text,
+    ForeignKey)
+from sqlalchemy.orm import (mapper, relation, create_session,
+    InstrumentationManager)
 
 from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute, is_instrumented
 from sqlalchemy.orm.collections import collection_adapter
@@ -170,10 +172,10 @@ if __name__ == '__main__':
     assert isinstance(a1.bs, MyCollection)
 
     sess = create_session()
-    sess.save(a1)
+    sess.add(a1)
 
     sess.flush()
-    sess.clear()
+    sess.expunge_all()
 
     a1 = sess.query(A).get(a1.id)
 
@@ -184,7 +186,7 @@ if __name__ == '__main__':
     a1.bs.remove(a1.bs[0])
 
     sess.flush()
-    sess.clear()
+    sess.expunge_all()
 
     a1 = sess.query(A).get(a1.id)
     assert len(a1.bs) == 1
index de28df5b3a01e76244d45465c5cd83402c4a9893..3264b02462d8d598222fac2b0ad6ad9bbfcc63ed 100644 (file)
@@ -39,8 +39,8 @@ class AttributeListener(AttributeExtension):
 
 if __name__ == '__main__':
 
-    from sqlalchemy import *
-    from sqlalchemy.orm import *
+    from sqlalchemy import Column, Integer, String, ForeignKey
+    from sqlalchemy.orm import relation
     from sqlalchemy.ext.declarative import declarative_base
 
     class Base(object):
@@ -82,4 +82,4 @@ if __name__ == '__main__':
     m1.related.mapped.append(MyMappedClass(data='m2'))
     del m1.data
     
-    
\ No newline at end of file
+    
index f53badc74574db59776979e3f137efcb9cacd0d9..4a1618985402c617162d821cb74febbd0eafdb66 100644 (file)
@@ -36,8 +36,8 @@ def hybrid_property(fget, fset=None, fdel=None):
 
 ### Example code
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer
+from sqlalchemy.orm import mapper, create_session
 
 metadata = MetaData('sqlite://')
 metadata.bind.echo = True
@@ -99,14 +99,14 @@ session = create_session()
 intervals = [Interval1(1,4), Interval1(3,15), Interval1(11,16)]
 
 for interval in intervals:
-    session.save(interval)
-    session.save(Interval2(interval.start, interval.length))
+    session.add(interval)
+    session.add(Interval2(interval.start, interval.length))
 
 session.flush()
 
 print "Clear the cache and do some queries"
 
-session.clear()
+session.expunge_all()
 
 for Interval in (Interval1, Interval2):
     print "Querying using interval class %s" % Interval.__name__
index b47a6d68f6313333b600820eee45f5899c5ba3de..3846c29944e5bd3e4f88bc6e98e21e898eed661f 100644 (file)
@@ -40,8 +40,8 @@ class MyProxyDict(object):
         self.collection.append(value)\r
 \r
 from sqlalchemy.ext.declarative import declarative_base\r
-from sqlalchemy import *\r
-from sqlalchemy.orm import *\r
+from sqlalchemy import create_engine, Column, Integer, String, ForeignKey\r
+from sqlalchemy.orm import sessionmaker, dynamic_loader\r
 \r
 Base = declarative_base(engine=create_engine('sqlite://'))\r
 \r
index 706cc88a07e6dab9a914c3ff28719bf415108334..58156dcb6776060b406b8da46281447e72f15b8f 100644 (file)
@@ -7,8 +7,9 @@ styles of persistence are identical, as is the structure of the main Document cl
 """
 
 ################################# PART I - Imports/Configuration ###########################################
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
+    Unicode, and_)
+from sqlalchemy.orm import mapper, relation, create_session, lazyload
 
 import sys, os, StringIO, re
 
@@ -22,8 +23,7 @@ logging.basicConfig()
 #logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
 
 
-from elementtree import ElementTree
-from elementtree.ElementTree import Element, SubElement
+from xml.etree import ElementTree
 
 meta = MetaData()
 meta.bind = 'sqlite://'
@@ -155,16 +155,16 @@ session = create_session()
 
 # get ElementTree documents
 for file in ('test.xml', 'test2.xml', 'test3.xml'):
-    filename = os.path.join(os.path.dirname(sys.argv[0]), file)
+    filename = os.path.join(os.path.dirname(__file__), file)
     doc = ElementTree.parse(filename)
-    session.save(Document(file, doc))
+    session.add(Document(file, doc))
 
 print "\nSaving three documents...", line
 session.flush()
 print "Done."
 
 # clear session (to illustrate a full load), restore
-session.clear()
+session.expunge_all()
 
 print "\nFull text of document 'text.xml':", line
 document = session.query(Document).filter_by(filename="test.xml").first()
index 8b50f6805d0a41ecabf4d2f6fed4c1ebcbdd4ffb..c03acee1d4ff7faa7fddad4562a0353b0dd85b9a 100644 (file)
@@ -6,8 +6,9 @@ which joins on only three tables.
 """
 
 ################################# PART I - Imports/Configuration ###########################################
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
+    Unicode, and_)
+from sqlalchemy.orm import mapper, relation, create_session, lazyload
 
 import sys, os, StringIO, re
 
@@ -21,8 +22,7 @@ logging.basicConfig()
 #logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
 
 
-from elementtree import ElementTree
-from elementtree.ElementTree import Element, SubElement
+from xml.etree import ElementTree
 
 meta = MetaData()
 meta.bind = 'sqlite://'
@@ -164,16 +164,16 @@ session = create_session()
 
 # get ElementTree documents
 for file in ('test.xml', 'test2.xml', 'test3.xml'):
-    filename = os.path.join(os.path.dirname(sys.argv[0]), file)
+    filename = os.path.join(os.path.dirname(__file__), file)
     doc = ElementTree.parse(filename)
-    session.save(Document(file, doc))
+    session.add(Document(file, doc))
 
 print "\nSaving three documents...", line
 session.flush()
 print "Done."
 
 # clear session (to illustrate a full load), restore
-session.clear()
+session.expunge_all()
 
 print "\nFull text of document 'text.xml':", line
 document = session.query(Document).filter_by(filename="test.xml").first()
index e7cd86984e2123c8d5f8404321fa8dc8edf4b0f1..220bb2295386a75a1311c597f76514c078ad8ffb 100644 (file)
@@ -6,8 +6,9 @@ structure in distinct rows using two additional mapped entities.  Note that the
 styles of persistence are identical, as is the structure of the main Document class.
 """
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, String,
+    PickleType)
+from sqlalchemy.orm import mapper, create_session
 
 import sys, os
 
@@ -20,7 +21,7 @@ logging.basicConfig()
 # uncomment to show SQL statements and result sets
 #logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
 
-from elementtree import ElementTree
+from xml.etree import ElementTree
 
 engine = create_engine('sqlite://')
 meta = MetaData(engine)
@@ -48,16 +49,16 @@ mapper(Document, documents)
 ###### time to test ! #########
 
 # get ElementTree document
-filename = os.path.join(os.path.dirname(sys.argv[0]), "test.xml")
+filename = os.path.join(os.path.dirname(__file__), "test.xml")
 doc = ElementTree.parse(filename)
     
 # save to DB
 session = create_session()
-session.save(Document("test.xml", doc))
+session.add(Document("test.xml", doc))
 session.flush()
 
 # clear session (to illustrate a full load), restore
-session.clear()
+session.expunge_all()
 document = session.query(Document).filter_by(filename="test.xml").first()
 
 # print
index 8188d7c8707541416303094f1f098cea1c367ffb..6122e65f1f3fa86882d2f753353e912798979c1c 100644 (file)
@@ -1,7 +1,7 @@
 """a directed graph example."""
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
+from sqlalchemy.orm import mapper, relation, create_session
 
 import logging
 logging.basicConfig()
@@ -70,10 +70,10 @@ n7.add_neighbor(n2)
 n1.add_neighbor(n3)
 n2.add_neighbor(n1)
 
-[session.save(x) for x in [n1, n2, n3, n4, n5, n6, n7]]
+[session.add(x) for x in [n1, n2, n3, n4, n5, n6, n7]]
 session.flush()
 
-session.clear()
+session.expunge_all()
 
 n2 = session.query(Node).get(2)
 n3 = session.query(Node).get(3)
index e83a263e93f3e70f339c1d87ae6403e8a9344463..fbb481759021affa738ed4641e812667c905a1bd 100644 (file)
@@ -4,9 +4,9 @@ http://www.intelligententerprise.com/001020/celko.jhtml
 
 """
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
-from sqlalchemy.orm import attributes
+from sqlalchemy import (create_engine, Column, Integer, String, select, case,
+    func)
+from sqlalchemy.orm import sessionmaker, MapperExtension, aliased
 from sqlalchemy.ext.declarative import declarative_base
 
 engine = create_engine('sqlite://', echo=True)
index 1c88c88e82b8fdfa43850de37daba9bb649ccc5a..79a8b3fa393dcc42632187c79371591a8632b69b 100644 (file)
@@ -1,7 +1,8 @@
 """illustrates one way to use a custom pickler that is session-aware."""
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, String, PickleType
+from sqlalchemy.orm import (mapper, create_session, MapperExtension,
+    class_mapper, EXT_CONTINUE)
 from sqlalchemy.orm.session import object_session
 from cStringIO import StringIO
 from pickle import Pickler, Unpickler
@@ -28,7 +29,7 @@ class MyPickler(object):
         if getattr(obj, "id", None) is None:
             sess = MyPickler.sessions.current
             newsess = create_session(bind=sess.connection(class_mapper(Bar)))
-            newsess.save(obj)
+            newsess.add(obj)
             newsess.flush()
         key = "%s:%s" % (type(obj).__name__, obj.id)
         return key
@@ -74,9 +75,9 @@ mapper(Bar, bar_table)
 sess = create_session()
 f = Foo()
 f.bar = Bar('some bar')
-sess.save(f)
+sess.add(f)
 sess.flush()
-sess.clear()
+sess.expunge_all()
 
 del MyPickler.sessions.current
 
index 3823f4b7f0e3856d2e28e4032fe292fd232a33ac..6e09935cb1e36c66f029787234582adeb03da722 100644 (file)
@@ -20,8 +20,9 @@ the associated target object from those which associate with it.
 
 """
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, String, and_
+from sqlalchemy.orm import (mapper, relation, create_session, class_mapper,
+    backref)
 
 metadata = MetaData('sqlite://')
 
@@ -124,11 +125,11 @@ a3 = o1.create_address()
 a3.street = '444 park ave.'
 
 sess = create_session()
-sess.save(u1)
-sess.save(o1)
+sess.add(u1)
+sess.add(o1)
 sess.flush()
 
-sess.clear()
+sess.expunge_all()
 
 # query objects, get their addresses
 
index 32600fcbf2a273e2f8aa8f486a732c1641abd820..f5dda2e39595c438d98738f9c547b172a50ed506 100644 (file)
@@ -19,8 +19,8 @@ For a more genericized version of this example, see
 poly_assoc_generic.py.
 """
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import mapper, relation, create_session, class_mapper
 
 metadata = MetaData('sqlite://')
 
@@ -137,11 +137,11 @@ o1.address = Address()
 o1.address.street = '444 park ave.'
 
 sess = create_session()
-sess.save(u1)
-sess.save(o1)
+sess.add(u1)
+sess.add(o1)
 sess.flush()
 
-sess.clear()
+sess.expunge_all()
 
 # query objects, get their addresses
 
index 8cc3edf60952610eccdb4b5b8367872078cc684f..233df51958dffa022fc57cb6f24b423224a2da75 100644 (file)
@@ -6,8 +6,8 @@ function "association" which creates a new polymorphic association
 "interface".
 """
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import mapper, relation, create_session, class_mapper
 
 metadata = MetaData('sqlite://')
 
@@ -133,11 +133,11 @@ o1.address = Address()
 o1.address.street = '444 park ave.'
 
 sess = create_session()
-sess.save(u1)
-sess.save(o1)
+sess.add(u1)
+sess.add(o1)
 sess.flush()
 
-sess.clear()
+sess.expunge_all()
 
 # query objects, get their addresses
 
index 5f12e9a3d73c67467de9d51045bd387f5e75c1f6..c50513b55559ad14d7e600d352a65c7576dfd3f9 100644 (file)
@@ -1,5 +1,5 @@
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String
+from sqlalchemy.orm import mapper, create_session, polymorphic_union
 
 metadata = MetaData()
 
@@ -56,11 +56,11 @@ m1 = Manager("pointy haired boss", "manager1")
 e1 = Engineer("wally", "engineer1")
 e2 = Engineer("dilbert", "engineer2")
 
-session.save(m1)
-session.save(e1)
-session.save(e2)
+session.add(m1)
+session.add(e1)
+session.add(e2)
 session.flush()
 
-employees = session.query(Employee).select()
+employees = session.query(Employee)
 print [e for e in employees]
 
index 4f3aeb7d2634a044e535bbbf32d987a20ddc14ca..ea56ffed1f36988e726ff2f19c0937e096351447 100644 (file)
@@ -1,5 +1,5 @@
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import mapper, relation, create_session
 import sets
 
 # this example illustrates a polymorphic load of two classes
@@ -58,12 +58,12 @@ class Company(object):
 
 person_join = people.outerjoin(engineers).outerjoin(managers)
 
-person_mapper = mapper(Person, people, select_table=person_join,polymorphic_on=people.c.type, polymorphic_identity='person')
+person_mapper = mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person')
 mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engineer')
 mapper(Manager, managers, inherits=person_mapper, polymorphic_identity='manager')
 
 mapper(Company, companies, properties={
-    'employees': relation(Person, lazy=False, private=True, backref='company')
+    'employees': relation(Person, lazy=False, backref='company', cascade="all, delete-orphan")
 })
 
 session = create_session(echo_uow=False)
@@ -73,30 +73,30 @@ c.employees.append(Engineer(name='dilbert', status='BBA', engineer_name='enginee
 c.employees.append(Person(name='joesmith', status='HHH'))
 c.employees.append(Engineer(name='wally', status='CGG', engineer_name='engineer2', primary_language='python'))
 c.employees.append(Manager(name='jsmith', status='ABA', manager_name='manager2'))
-session.save(c)
+session.add(c)
 
 print session.new
 session.flush()
-session.clear()
+session.expunge_all()
 
 c = session.query(Company).get(1)
 for e in c.employees:
-    print e, e._instance_key, e.company
+    print e, e._sa_instance_state.key, e.company
 assert sets.Set([e.name for e in c.employees]) == sets.Set(['pointy haired boss', 'dilbert', 'joesmith', 'wally', 'jsmith'])
 print "\n"
 
-dilbert = session.query(Person).get_by(name='dilbert')
-dilbert2 = session.query(Engineer).get_by(name='dilbert')
+dilbert = session.query(Person).filter_by(name='dilbert').one()
+dilbert2 = session.query(Engineer).filter_by(name='dilbert').one()
 assert dilbert is dilbert2
 
 dilbert.engineer_name = 'hes dibert!'
 
 session.flush()
-session.clear()
+session.expunge_all()
 
 c = session.query(Company).get(1)
 for e in c.employees:
-    print e, e._instance_key
+    print e, e._sa_instance_state.key
 
 session.delete(c)
 session.flush()
index dfc4264169f0563c9eb1cf68d18336554f8d672b..5a4f5c841357f83f586c92bff4045f86f425b3d6 100644 (file)
@@ -1,5 +1,5 @@
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import mapper, relation, create_session
 
 metadata = MetaData('sqlite://')
 metadata.bind.echo = 'debug'
@@ -48,7 +48,7 @@ engineer_mapper = mapper(Engineer, inherits=person_mapper, polymorphic_identity=
 
 
 mapper(Company, companies, properties={
-    'employees': relation(Person, lazy=True, private=True, backref='company')
+    'employees': relation(Person, lazy=True, backref='company')
 })
 
 session = create_session()
@@ -58,29 +58,29 @@ c.employees.append(Engineer(name='dilbert', status='BBA', engineer_name='enginee
 c.employees.append(Person(name='joesmith', status='HHH'))
 c.employees.append(Engineer(name='wally', status='CGG', engineer_name='engineer2', primary_language='python'))
 c.employees.append(Manager(name='jsmith', status='ABA', manager_name='manager2'))
-session.save(c)
+session.add(c)
 session.flush()
 
-session.clear()
+session.expunge_all()
 
 c = session.query(Company).get(1)
 for e in c.employees:
-    print e, e._instance_key, e.company
+    print e, e._sa_instance_state.key, e.company
 
 print "\n"
 
-dilbert = session.query(Person).get_by(name='dilbert')
-dilbert2 = session.query(Engineer).get_by(name='dilbert')
+dilbert = session.query(Person).filter_by(name='dilbert').one()
+dilbert2 = session.query(Engineer).filter_by(name='dilbert').one()
 assert dilbert is dilbert2
 
 dilbert.engineer_name = 'hes dibert!'
 
 session.flush()
-session.clear()
+session.expunge_all()
 
 c = session.query(Company).get(1)
 for e in c.employees:
-    print e, e._instance_key
+    print e, e._sa_instance_state.key
 
 session.delete(c)
 session.flush()
index 802aa0ea9005f3c25b04013c96e565bb493982b8..c482d82560f321ca0fb357aaa1fc042050213dc9 100644 (file)
@@ -28,11 +28,10 @@ Includes:
  * a standalone operator example.
 
 The implementation is limited to only public, well known
-and simple to use extension points, with the exception
-of one temporary monkeypatch in the DDL extension.  
-Future SQLAlchemy expansion points may allow more seamless
-integration of some features.
+and simple to use extension points. Future SQLAlchemy
+expansion points may allow more seamless integration of
+some features.
+
 """
 
 from sqlalchemy.orm.interfaces import AttributeExtension
@@ -227,8 +226,9 @@ def GISColumn(*args, **kw):
 
 # illustrate usage
 if __name__ == '__main__':
-    from sqlalchemy import *
-    from sqlalchemy.orm import *
+    from sqlalchemy import (create_engine, MetaData, Column, Integer, String,
+        func, literal, select)
+    from sqlalchemy.orm import sessionmaker, column_property
     from sqlalchemy.ext.declarative import declarative_base
 
     engine = create_engine('postgres://scott:tiger@localhost/gistest', echo=True)
index 0a9e992a32be70deea0a654aeaa8b8da3aba20c8..2f03a5a34488874d3307102b9f010be594df241e 100644 (file)
@@ -18,8 +18,9 @@ To set up a sharding system, you need:
 """
 
 # step 1. imports
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
+    String, ForeignKey, Float, DateTime)
+from sqlalchemy.orm import sessionmaker, mapper, relation
 from sqlalchemy.orm.shard import ShardedSession
 from sqlalchemy.sql import operators
 from sqlalchemy import sql
@@ -27,10 +28,10 @@ import datetime
 
 # step 2. databases
 echo = True
-db1 = create_engine('sqlite:///shard1.db', echo=echo)
-db2 = create_engine('sqlite:///shard2.db', echo=echo)
-db3 = create_engine('sqlite:///shard3.db', echo=echo)
-db4 = create_engine('sqlite:///shard4.db', echo=echo)
+db1 = create_engine('sqlite://', echo=echo)
+db2 = create_engine('sqlite://', echo=echo)
+db3 = create_engine('sqlite://', echo=echo)
+db4 = create_engine('sqlite://', echo=echo)
 
 
 # step 3. create session function.  this binds the shard ids
@@ -183,10 +184,10 @@ quito.reports.append(Report(85))
 
 sess = create_session()
 for c in [tokyo, newyork, toronto, london, dublin, brasilia, quito]:
-    sess.save(c)
+    sess.add(c)
 sess.flush()
 
-sess.clear()
+sess.expunge_all()
 
 t = sess.query(WeatherLocation).get(tokyo.id)
 assert t.city == tokyo.city
@@ -195,6 +196,6 @@ assert t.reports[0].temperature == 80.0
 north_american_cities = sess.query(WeatherLocation).filter(WeatherLocation.continent == 'North America')
 assert [c.city for c in north_american_cities] == ['New York', 'Toronto']
 
-asia_and_europe = sess.query(WeatherLocation).filter(WeatherLocation.continent.in_('Europe', 'Asia'))
+asia_and_europe = sess.query(WeatherLocation).filter(WeatherLocation.continent.in_(['Europe', 'Asia']))
 assert set([c.city for c in asia_and_europe]) == set(['Tokyo', 'London', 'Dublin'])
 
index 4065337c2ea04859e7f79e299d47f3a982d5f8dd..6180fa3034c004415130b4dbe296b97428a01a18 100644 (file)
@@ -29,8 +29,8 @@ Note: Something much like 'comparable_property' is slated for inclusion in a
       future version of SQLAlchemy.
 """
 
-from sqlalchemy.orm.interfaces import PropComparator, MapperProperty
-from sqlalchemy.orm import session as sessionlib, comparable_property
+from sqlalchemy.orm.interfaces import PropComparator
+from sqlalchemy.orm import comparable_property
 
 # Using the VerticalPropertyDictMixin from the base example
 from dictlike import VerticalPropertyDictMixin
@@ -131,7 +131,9 @@ class PolymorphicVerticalProperty(object):
 
 
 if __name__ == '__main__':
-    from sqlalchemy import *
+    from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
+        ForeignKey, UnicodeText, and_, not_, or_, String, Boolean, cast, text,
+        null, case)
     from sqlalchemy.orm import mapper, relation, create_session
     from sqlalchemy.orm.collections import attribute_mapped_collection
 
@@ -198,9 +200,9 @@ if __name__ == '__main__':
     stoat[u'cuteness'] = 7
     stoat[u'weasel-like'] = True
 
-    session.save(stoat)
+    session.add(stoat)
     session.flush()
-    session.clear()
+    session.expunge_all()
 
     critter = session.query(Animal).filter(Animal.name == u'stoat').one()
     print critter[u'color']
@@ -217,14 +219,14 @@ if __name__ == '__main__':
     marten[u'cuteness'] = 5
     marten[u'weasel-like'] = True
     marten[u'poisonous'] = False
-    session.save(marten)
+    session.add(marten)
 
     shrew = Animal(u'shrew')
     shrew[u'cuteness'] = 5
     shrew[u'weasel-like'] = False
     shrew[u'poisonous'] = True
 
-    session.save(shrew)
+    session.add(shrew)
     session.flush()
 
     q = (session.query(Animal).
index 5f478d7d0558c1a2c1317030bac62fbe056ba153..1e1635ed763ce6d0007c3877f26175f1cdd4dbbd 100644 (file)
@@ -122,7 +122,8 @@ class VerticalPropertyDictMixin(object):
 
 
 if __name__ == '__main__':
-    from sqlalchemy import *
+    from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
+        ForeignKey, UnicodeText, and_, not_)
     from sqlalchemy.orm import mapper, relation, create_session
     from sqlalchemy.orm.collections import attribute_mapped_collection
 
@@ -183,9 +184,9 @@ if __name__ == '__main__':
     # stoat.facts collection:
     print stoat.facts[u'color']
 
-    session.save(stoat)
+    session.add(stoat)
     session.flush()
-    session.clear()
+    session.expunge_all()
 
     critter = session.query(Animal).filter(Animal.name == u'stoat').one()
     print critter[u'color']
@@ -201,17 +202,17 @@ if __name__ == '__main__':
     marten = Animal(u'marten')
     marten[u'color'] = u'brown'
     marten[u'cuteness'] = u'somewhat'
-    session.save(marten)
+    session.add(marten)
 
     shrew = Animal(u'shrew')
     shrew[u'cuteness'] = u'somewhat'
     shrew[u'poisonous-part'] = u'saliva'
-    session.save(shrew)
+    session.add(shrew)
 
     loris = Animal(u'slow loris')
     loris[u'cuteness'] = u'fairly'
     loris[u'poisonous-part'] = u'elbows'
-    session.save(loris)
+    session.add(loris)
     session.flush()
 
     q = (session.query(Animal).
index 6c3a61919ef9140a40670dd1e3f6d93e88cc9696..4a8bf77871e1d28a9858d9c468ecb34c21c105c7 100644 (file)
@@ -2,8 +2,9 @@
 represented in distinct database rows.  This allows objects to be created with dynamically changing
 fields that are all persisted in a normalized fashion."""
 
-from sqlalchemy import *
-from sqlalchemy.orm import *
+from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, String,
+    ForeignKey, PickleType, DateTime, and_)
+from sqlalchemy.orm import mapper, relation, sessionmaker, scoped_session
 from sqlalchemy.orm.collections import mapped_collection
 import datetime
 
index de4af6bcb710d23c7f74fbfbc9f12c85ce607870..5b55eca97d46cebcc500b6198314327a98c7c8e9 100644 (file)
@@ -328,6 +328,20 @@ class AccessDialect(default.DefaultDialect):
 
 
 class AccessCompiler(compiler.SQLCompiler):
+    extract_map = compiler.SQLCompiler.extract_map.copy()
+    extract_map.update ({
+            'month': 'm',
+            'day': 'd',
+            'year': 'yyyy',
+            'second': 's',
+            'hour': 'h',
+            'doy': 'y',
+            'minute': 'n',
+            'quarter': 'q',
+            'dow': 'w',
+            'week': 'ww'
+    })
+        
     def visit_select_precolumns(self, select):
         """Access puts TOP, it's version of LIMIT here """
         s = select.distinct and "DISTINCT " or ""
@@ -375,6 +389,9 @@ class AccessCompiler(compiler.SQLCompiler):
         return (self.process(join.left, asfrom=True) + (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN ") + \
             self.process(join.right, asfrom=True) + " ON " + self.process(join.onclause))
 
+    def visit_extract(self, extract):
+        field = self.extract_map.get(extract.field, extract.field)
+        return 'DATEPART("%s", %s)' % (field, self.process(extract.expr))
 
 class AccessSchemaGenerator(compiler.SchemaGenerator):
     def get_column_specification(self, column, **kwargs):
index cfaf614890666ad2d365dcf8c24f63a6c5cd1b1f..1ce0209bf30a72b4870286e7c895f2b757226df8 100644 (file)
@@ -241,7 +241,36 @@ MS_2008_VERSION = (10,)
 MS_2005_VERSION = (9,)
 MS_2000_VERSION = (8,)
 
-MSSQL_RESERVED_WORDS = set(['function'])
+RESERVED_WORDS = set(
+    ['add', 'all', 'alter', 'and', 'any', 'as', 'asc', 'authorization',
+     'backup', 'begin', 'between', 'break', 'browse', 'bulk', 'by', 'cascade',
+     'case', 'check', 'checkpoint', 'close', 'clustered', 'coalesce',
+     'collate', 'column', 'commit', 'compute', 'constraint', 'contains',
+     'containstable', 'continue', 'convert', 'create', 'cross', 'current',
+     'current_date', 'current_time', 'current_timestamp', 'current_user',
+     'cursor', 'database', 'dbcc', 'deallocate', 'declare', 'default',
+     'delete', 'deny', 'desc', 'disk', 'distinct', 'distributed', 'double',
+     'drop', 'dump', 'else', 'end', 'errlvl', 'escape', 'except', 'exec',
+     'execute', 'exists', 'exit', 'external', 'fetch', 'file', 'fillfactor',
+     'for', 'foreign', 'freetext', 'freetexttable', 'from', 'full',
+     'function', 'goto', 'grant', 'group', 'having', 'holdlock', 'identity',
+     'identity_insert', 'identitycol', 'if', 'in', 'index', 'inner', 'insert',
+     'intersect', 'into', 'is', 'join', 'key', 'kill', 'left', 'like',
+     'lineno', 'load', 'merge', 'national', 'nocheck', 'nonclustered', 'not',
+     'null', 'nullif', 'of', 'off', 'offsets', 'on', 'open', 'opendatasource',
+     'openquery', 'openrowset', 'openxml', 'option', 'or', 'order', 'outer',
+     'over', 'percent', 'pivot', 'plan', 'precision', 'primary', 'print',
+     'proc', 'procedure', 'public', 'raiserror', 'read', 'readtext',
+     'reconfigure', 'references', 'replication', 'restore', 'restrict',
+     'return', 'revert', 'revoke', 'right', 'rollback', 'rowcount',
+     'rowguidcol', 'rule', 'save', 'schema', 'securityaudit', 'select',
+     'session_user', 'set', 'setuser', 'shutdown', 'some', 'statistics',
+     'system_user', 'table', 'tablesample', 'textsize', 'then', 'to', 'top',
+     'tran', 'transaction', 'trigger', 'truncate', 'tsequal', 'union',
+     'unique', 'unpivot', 'update', 'updatetext', 'use', 'user', 'values',
+     'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with',
+     'writetext',
+    ])
 
 
 class MSNumeric(sqltypes.Numeric):
@@ -859,6 +888,14 @@ class MSSQLCompiler(compiler.SQLCompiler):
         }
     )
 
+    extract_map = compiler.SQLCompiler.extract_map.copy()
+    extract_map.update ({
+        'doy': 'dayofyear',
+        'dow': 'weekday',
+        'milliseconds': 'millisecond',
+        'microseconds': 'microsecond'
+    })
+
     def __init__(self, *args, **kwargs):
         super(MSSQLCompiler, self).__init__(*args, **kwargs)
         self.tablealiases = {}
@@ -927,9 +964,9 @@ class MSSQLCompiler(compiler.SQLCompiler):
         kwargs['mssql_aliased'] = True
         return super(MSSQLCompiler, self).visit_alias(alias, **kwargs)
 
-    def visit_savepoint(self, savepoint_stmt):
-        util.warn("Savepoint support in mssql is experimental and may lead to data loss.")
-        return "SAVE TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt)
+    def visit_extract(self, extract):
+        field = self.extract_map.get(extract.field, extract.field)
+        return 'DATEPART("%s", %s)' % (field, self.process(extract.expr))
 
     def visit_rollback_to_savepoint(self, savepoint_stmt):
         return "ROLLBACK TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt)
@@ -1051,7 +1088,7 @@ class MSDDLCompiler(compiler.DDLCompiler):
 
 
 class MSIdentifierPreparer(compiler.IdentifierPreparer):
-    reserved_words = compiler.IdentifierPreparer.reserved_words.union(MSSQL_RESERVED_WORDS)
+    reserved_words = RESERVED_WORDS
 
     def __init__(self, dialect):
         super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[', final_quote=']')
@@ -1060,6 +1097,10 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer):
         #TODO: determine MSSQL's escaping rules
         return value
 
+    def quote_schema(self, schema, force=True):
+        """Prepare a quoted table and schema name."""
+        result = '.'.join([self.quote(x, force) for x in schema.split('.')])
+        return result
 
 class MSDialect(default.DefaultDialect):
     name = 'mssql'
@@ -1097,10 +1138,10 @@ class MSDialect(default.DefaultDialect):
                 self.max_identifier_length
         super(MSDialect, self).__init__(**opts)
     
-    def do_begin(self, connection):
-        cursor = connection.cursor()
-        cursor.execute("SET IMPLICIT_TRANSACTIONS OFF")
-        cursor.execute("BEGIN TRANSACTION")
+    def do_savepoint(self, connection, name):
+        util.warn("Savepoint support in mssql is experimental and may lead to data loss.")
+        connection.execute("IF @@TRANCOUNT = 0 BEGIN TRANSACTION")
+        connection.execute("SAVE TRANSACTION %s" % name)
 
     def do_release_savepoint(self, connection, name):
         pass
index eb4fb2d731c69d6ddd4502b365c11cf100196ca4..fd5ba7348e41105cbdf1ca8b36334b8cb4c63502 100644 (file)
@@ -1300,6 +1300,11 @@ class MySQLCompiler(compiler.SQLCompiler):
             "utc_timestamp":"UTC_TIMESTAMP"
         })
 
+    extract_map = compiler.SQLCompiler.extract_map.copy()
+    extract_map.update ({
+        'milliseconds': 'millisecond',
+    })
+
     def visit_typeclause(self, typeclause):
         type_ = typeclause.type.dialect_impl(self.dialect)
         if isinstance(type_, MSInteger):
index 448d3b90aeb75f270f3b3420bd91b202250b792d..3942e0d5709fd9140a1edb7464a6e14b3b3e5253 100644 (file)
@@ -277,6 +277,11 @@ class PGCompiler(compiler.SQLCompiler):
         else:
             return text
 
+    def visit_extract(self, extract, **kwargs):
+        field = self.extract_map.get(extract.field, extract.field)
+        return "EXTRACT(%s FROM %s::timestamp)" % (
+            field, self.process(extract.expr))
+
 class PGDDLCompiler(compiler.DDLCompiler):
     def get_column_specification(self, column, **kwargs):
         colspec = self.preparer.format_column(column)
@@ -326,6 +331,7 @@ class PGDDLCompiler(compiler.DDLCompiler):
             text += " WHERE " + inlined_clause
         return text
 
+
 class PGDefaultRunner(base.DefaultRunner):
     def __init__(self, context):
         base.DefaultRunner.__init__(self, context)
index def9b6f899e2e5ed05f09c4e426701f7e87d3636..0c7400c2bddb027a0d969ef7bcaa3d2ec5af5542 100644 (file)
@@ -177,12 +177,34 @@ class SQLiteCompiler(compiler.SQLCompiler):
         }
     )
 
+    extract_map = compiler.SQLCompiler.extract_map.copy()
+    extract_map.update({
+        'month': '%m',
+        'day': '%d',
+        'year': '%Y',
+        'second': '%S',
+        'hour': '%H',
+        'doy': '%j',
+        'minute': '%M',
+        'epoch': '%s',
+        'dow': '%w',
+        'week': '%W'
+    })
+
     def visit_cast(self, cast, **kwargs):
         if self.dialect.supports_cast:
             return super(SQLiteCompiler, self).visit_cast(cast)
         else:
             return self.process(cast.clause)
 
+    def visit_extract(self, extract):
+        try:
+            return "CAST(STRFTIME('%s', %s) AS INTEGER)" % (
+                self.extract_map[extract.field], self.process(extract.expr))
+        except KeyError:
+            raise exc.ArgumentError(
+                "%s is not a valid extract argument." % extract.field)
+
     def limit_clause(self, select):
         text = ""
         if select._limit is not None:
@@ -227,7 +249,7 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
         'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive',
         'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob',
         'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index',
-        'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is',
+        'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is',
         'isnull', 'join', 'key', 'left', 'like', 'limit', 'match', 'natural',
         'not', 'notnull', 'null', 'of', 'offset', 'on', 'or', 'order', 'outer',
         'plan', 'pragma', 'primary', 'query', 'raise', 'references',
index 16b739ae967886ecd3473670ab884c1f301e4e67..4204530ae27629cf87f4db652353b7d45a39869b 100644 (file)
@@ -217,6 +217,13 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
         sql_operators.mod: lambda x, y: "MOD(%s, %s)" % (x, y),
     })
 
+    extract_map = compiler.SQLCompiler.extract_map.copy()
+    extract_map.update ({
+        'doy': 'dayofyear',
+        'dow': 'weekday',
+        'milliseconds': 'millisecond'
+    })
+
     def bindparam_string(self, name):
         res = super(SybaseSQLCompiler, self).bindparam_string(name)
         if name.lower().startswith('literal'):
@@ -270,6 +277,10 @@ class SybaseSQLCompiler(compiler.SQLCompiler):
             res = "CAST(%s AS %s)" % (res, self.process(cast.typeclause))
         return res
 
+    def visit_extract(self, extract):
+        field = self.extract_map.get(extract.field, extract.field)
+        return 'DATEPART("%s", %s)' % (field, self.process(extract.expr))
+
     def for_update_clause(self, select):
         # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which SQLAlchemy doesn't use
         return ''
index 084ec2699abc3eec48af61d963ac3ffa829c646c..e22928b487e797c422cbc98e366237a3e240176b 100644 (file)
@@ -552,6 +552,12 @@ class DeclarativeMeta(type):
                 _undefer_column_name(key, value)
                 cls.__table__.append_column(value)
                 cls.__mapper__.add_property(key, value)
+            elif isinstance(value, ColumnProperty):
+                for col in value.columns:
+                    if isinstance(col, Column) and col.table is None:
+                        _undefer_column_name(key, col)
+                        cls.__table__.append_column(col)
+                cls.__mapper__.add_property(key, value)
             elif isinstance(value, MapperProperty):
                 cls.__mapper__.add_property(key, _deferred_relation(cls, value))
             else:
index bf3e3cc86409120a60cb7739b3ac9ec4d72bebb4..8a1de2b5074dcb821efe0bdfabf0d804968f05b8 100644 (file)
@@ -614,11 +614,6 @@ def mapper(class_, local_table=None, *args, **params):
         alternative, see the method `populate_existing()` on
         :class:`~sqlalchemy.orm.query.Query`.
 
-      allow_column_override
-        If True, allows the usage of a ``relation()`` which has the
-        same name as a column in the mapped table.  The table column
-        will no longer be mapped.
-
       allow_null_pks
         Indicates that composite primary keys where one or more (but not all)
         columns contain NULL is a valid primary key.  Primary keys which
index 4bc3f58c2e63efe17cb72b233272b2351771e884..3d31a686a2e5f75f3cb039682804b414e92d6007 100644 (file)
@@ -28,9 +28,9 @@ class DynaLoader(strategies.AbstractRelationLoader):
         strategies._register_attribute(self,
             mapper,
             useobject=True,
-            impl_class=DynamicAttributeImpl, 
-            target_mapper=self.parent_property.mapper, 
-            order_by=self.parent_property.order_by, 
+            impl_class=DynamicAttributeImpl,
+            target_mapper=self.parent_property.mapper,
+            order_by=self.parent_property.order_by,
             query_class=self.parent_property.query_class
         )
 
@@ -43,13 +43,15 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
     uses_objects = True
     accepts_scalar_loader = False
 
-    def __init__(self, class_, key, typecallable, 
+    def __init__(self, class_, key, typecallable,
                      target_mapper, order_by, query_class=None, **kwargs):
         super(DynamicAttributeImpl, self).__init__(class_, key, typecallable, **kwargs)
         self.target_mapper = target_mapper
         self.order_by = order_by
         if not query_class:
             self.query_class = AppenderQuery
+        elif AppenderMixin in query_class.mro():
+            self.query_class = query_class
         else:
             self.query_class = mixin_user_query(query_class)
 
@@ -87,7 +89,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
             ext.remove(state, value, initiator or self)
 
     def _modified_event(self, state):
-        
+
         if self.key not in state.committed_state:
             state.committed_state[self.key] = CollectionHistory(self, state)
 
@@ -108,7 +110,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
 
         collection_history = self._modified_event(state)
         new_values = list(iterable)
-        
+
         if _state_has_identity(state):
             old_collection = list(self.get(state))
         else:
@@ -128,31 +130,31 @@ class DynamicAttributeImpl(attributes.AttributeImpl):
             c = state.committed_state[self.key]
         else:
             c = CollectionHistory(self, state)
-            
+
         if not passive:
             return CollectionHistory(self, state, apply_to=c)
         else:
             return c
-        
+
     def append(self, state, value, initiator, passive=False):
         if initiator is not self:
             self.fire_append_event(state, value, initiator)
-    
+
     def remove(self, state, value, initiator, passive=False):
         if initiator is not self:
             self.fire_remove_event(state, value, initiator)
 
 class DynCollectionAdapter(object):
     """the dynamic analogue to orm.collections.CollectionAdapter"""
-    
+
     def __init__(self, attr, owner_state, data):
         self.attr = attr
         self.state = owner_state
         self.data = data
-    
+
     def __iter__(self):
         return iter(self.data)
-        
+
     def append_with_event(self, item, initiator=None):
         self.attr.append(self.state, item, initiator)
 
@@ -161,10 +163,10 @@ class DynCollectionAdapter(object):
 
     def append_without_event(self, item):
         pass
-    
+
     def remove_without_event(self, item):
         pass
-        
+
 class AppenderMixin(object):
     query_class = None
 
@@ -172,7 +174,7 @@ class AppenderMixin(object):
         Query.__init__(self, attr.target_mapper, None)
         self.instance = state.obj()
         self.attr = attr
-    
+
     def __session(self):
         sess = object_session(self.instance)
         if sess is not None and self.autoflush and sess.autoflush and self.instance in sess:
@@ -181,11 +183,11 @@ class AppenderMixin(object):
             return None
         else:
             return sess
-    
+
     def session(self):
         return self.__session()
     session = property(session, lambda s, x:None)
-    
+
     def __iter__(self):
         sess = self.__session()
         if sess is None:
@@ -203,7 +205,7 @@ class AppenderMixin(object):
                 passive=True).added_items.__getitem__(index)
         else:
             return self._clone(sess).__getitem__(index)
-    
+
     def count(self):
         sess = self.__session()
         if sess is None:
@@ -253,7 +255,7 @@ def mixin_user_query(cls):
     name = 'Appender' + cls.__name__
     return type(name, (AppenderMixin, cls), {'query_class': cls})
 
-class CollectionHistory(object): 
+class CollectionHistory(object):
     """Overrides AttributeHistory to receive append/remove events directly."""
 
     def __init__(self, attr, state, apply_to=None):
@@ -268,4 +270,4 @@ class CollectionHistory(object):
             self.deleted_items = []
             self.added_items = []
             self.unchanged_items = []
-        
+
index e78891353130971f7e6d219d7f6f9f44446717aa..05af5d8ca7d8955fd3ed5b1530b9c79b8e8c3828 100644 (file)
@@ -72,7 +72,7 @@ class EvaluatorCompiler(object):
         if operator is operators.is_:
             def evaluate(obj):
                 return eval_left(obj) == eval_right(obj)
-        if operator is operators.isnot:
+        elif operator is operators.isnot:
             def evaluate(obj):
                 return eval_left(obj) != eval_right(obj)
         elif operator in _straight_ops:
index 3b7507def64cb96b0ec3c2253454eed606b00a72..d36f51194e09aec35cd6bd08490998fca4e06f66 100644 (file)
@@ -682,6 +682,27 @@ class PropertyOption(MapperOption):
             else:
                 return None
 
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        d['key'] = ret = []
+        for token in util.to_list(self.key):
+            if isinstance(token, PropComparator):
+                ret.append((token.mapper.class_, token.key))
+            else:
+                ret.append(token)
+        return d
+
+    def __setstate__(self, state):
+        ret = []
+        for key in state['key']:
+            if isinstance(key, tuple):
+                cls, propkey = key
+                ret.append(getattr(cls, propkey))
+            else:
+                ret.append(key)
+        state['key'] = tuple(ret)
+        self.__dict__ = state
+
     def __get_paths(self, query, raiseerr):
         path = None
         entity = None
index 8999e7d8ece151b4c53b31c9d862490735669b53..398cbe5d989a8a2c9447189e22d012619b1754bb 100644 (file)
@@ -650,7 +650,7 @@ class RelationProperty(StrategizedProperty):
             dest_list = []
             for current in instances:
                 _recursive[(current, self)] = True
-                obj = session.merge(current, dont_load=dont_load, _recursive=_recursive)
+                obj = session._merge(current, dont_load=dont_load, _recursive=_recursive)
                 if obj is not None:
                     dest_list.append(obj)
             if dont_load:
@@ -663,7 +663,7 @@ class RelationProperty(StrategizedProperty):
             current = instances[0]
             if current is not None:
                 _recursive[(current, self)] = True
-                obj = session.merge(current, dont_load=dont_load, _recursive=_recursive)
+                obj = session._merge(current, dont_load=dont_load, _recursive=_recursive)
                 if obj is not None:
                     if dont_load:
                         dest_state.dict[self.key] = obj
@@ -1021,7 +1021,11 @@ class RelationProperty(StrategizedProperty):
         
 
     def _refers_to_parent_table(self):
-        return self.parent.mapped_table is self.target
+        for c, f in self.synchronize_pairs:
+            if c.table is f.table:
+                return True
+        else:
+            return False
 
     def _is_self_referential(self):
         return self.mapper.common_parent(self.parent)
index 7336b655626c8963d65237aad3a356004beec13f..219eed311ec1d87ef8440d98df9f65ad7cdc7eb4 100644 (file)
@@ -1549,6 +1549,8 @@ class Query(object):
         if synchronize_session not in [False, 'evaluate', 'fetch']:
             raise sa_exc.ArgumentError("Valid strategies for session synchronization are False, 'evaluate' and 'fetch'")
 
+        self = self.enable_eagerloads(False)
+
         context = self._compile_context()
         if len(context.statement.froms) != 1 or not isinstance(context.statement.froms[0], schema.Table):
             raise sa_exc.ArgumentError("Only deletion via a single table query is currently supported")
@@ -1646,6 +1648,8 @@ class Query(object):
         if synchronize_session not in [False, 'evaluate', 'fetch']:
             raise sa_exc.ArgumentError("Valid strategies for session synchronization are False, 'evaluate' and 'fetch'")
 
+        self = self.enable_eagerloads(False)
+
         context = self._compile_context()
         if len(context.statement.froms) != 1 or not isinstance(context.statement.froms[0], schema.Table):
             raise sa_exc.ArgumentError("Only update via a single table query is currently supported")
index d8af4e74f5a548adc380f323ce12242bbb0bc7cc..1e3a750d950fb51864d34f6b4c770fd8178986b4 100644 (file)
@@ -158,13 +158,13 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
       before each transaction is committed.
 
     weak_identity_map
-      When set to the default value of ``False``, a weak-referencing map is
+      When set to the default value of ``True``, a weak-referencing map is
       used; instances which are not externally referenced will be garbage
       collected immediately. For dereferenced instances which have pending
       changes present, the attribute management system will create a temporary
       strong-reference to the object which lasts until the changes are flushed
       to the database, at which point it's again dereferenced. Alternatively,
-      when using the value ``True``, the identity map uses a regular Python
+      when using the value ``False``, the identity map uses a regular Python
       dictionary to store instances. The session will maintain all instances
       present until they are removed using expunge(), clear(), or purge().
 
@@ -1141,8 +1141,7 @@ class Session(object):
         for state, m, o in cascade_states:
             self._delete_impl(state)
 
-    def merge(self, instance, dont_load=False,
-              _recursive=None):
+    def merge(self, instance, dont_load=False):
         """Copy the state an instance onto the persistent instance with the same identifier.
 
         If there is no persistent instance currently associated with the
@@ -1155,13 +1154,18 @@ class Session(object):
         mapped with ``cascade="merge"``.
 
         """
-        if _recursive is None:
-            # TODO: this should be an IdentityDict for instances, but will
-            # need a separate dict for PropertyLoader tuples
-            _recursive = {}
-            # Autoflush only on the topmost call
-            self._autoflush()
-
+        # TODO: this should be an IdentityDict for instances, but will
+        # need a separate dict for PropertyLoader tuples
+        _recursive = {}
+        self._autoflush()
+        autoflush = self.autoflush
+        try:
+            self.autoflush = False
+            return self._merge(instance, dont_load=dont_load, _recursive=_recursive)
+        finally:
+            self.autoflush = autoflush
+        
+    def _merge(self, instance, dont_load=False, _recursive=None):
         mapper = _object_mapper(instance)
         if instance in _recursive:
             return _recursive[instance]
@@ -1169,6 +1173,7 @@ class Session(object):
         new_instance = False
         state = attributes.instance_state(instance)
         key = state.key
+
         if key is None:
             if dont_load:
                 raise sa_exc.InvalidRequestError(
@@ -1194,7 +1199,7 @@ class Session(object):
                 self._update_impl(merged_state)
                 new_instance = True
             else:
-                merged = self.query(mapper.class_).autoflush(False).get(key[1])
+                merged = self.query(mapper.class_).get(key[1])
 
         if merged is None:
             merged = mapper.class_manager.new_instance()
index 8ad88982024fbbeae79b20f3abe3ba46397e4e37..4ac9c765e03bb9643f2e1c09f9e7b26f7c8d02c3 100644 (file)
@@ -720,6 +720,10 @@ class UOWExecutor(object):
 
     def execute_save_steps(self, trans, task):
         self.save_objects(trans, task)
+        for dep in task.polymorphic_cyclical_dependencies:
+            self.execute_dependency(trans, dep, False)
+        for dep in task.polymorphic_cyclical_dependencies:
+            self.execute_dependency(trans, dep, True)
         self.execute_cyclical_dependencies(trans, task, False)
         self.execute_dependencies(trans, task)
 
@@ -735,7 +739,5 @@ class UOWExecutor(object):
             self.execute_dependency(trans, dep, True)
 
     def execute_cyclical_dependencies(self, trans, task, isdelete):
-        for dep in task.polymorphic_cyclical_dependencies:
-            self.execute_dependency(trans, dep, isdelete)
         for t in task.dependent_tasks:
             self.execute(trans, [t], isdelete)
index a4e9aa5c4fd1a8c25d62463436ac65562d480281..8c0418a5e2c138b459e986cbde20c60269b0695b 100644 (file)
@@ -881,17 +881,33 @@ class ForeignKey(SchemaItem):
                 raise exc.ArgumentError(
                     "Parent column '%s' does not descend from a "
                     "table-attached Column" % str(self.parent))
-            m = re.match(r"^(.+?)(?:\.(.+?))?(?:\.(.+?))?$", self._colspec,
-                         re.UNICODE)
+
+            m = self._colspec.split('.')
+
             if m is None:
                 raise exc.ArgumentError(
                     "Invalid foreign key column specification: %s" %
                     self._colspec)
-            if m.group(3) is None:
-                (tname, colname) = m.group(1, 2)
-                schema = None
+
+            # A FK between column 'bar' and table 'foo' can be
+            # specified as 'foo', 'foo.bar', 'dbo.foo.bar',
+            # 'otherdb.dbo.foo.bar'. Once we have the column name and
+            # the table name, treat everything else as the schema
+            # name. Some databases (e.g. Sybase) support
+            # inter-database foreign keys. See ticket #1341 and --
+            # indirectly related -- Ticket #594. This assumes that '.'
+            # will never appear *within* any component of the FK.
+
+            (schema, tname, colname) = (None, None, None)
+            if (len(m) == 1):
+                tname   = m.pop()
             else:
-                (schema, tname, colname) = m.group(1, 2, 3)
+                colname = m.pop()
+                tname   = m.pop()
+
+            if (len(m) > 0):
+                schema = '.'.join(m)
+
             if _get_table_key(tname, schema) not in parenttable.metadata:
                 raise exc.NoReferencedTableError(
                     "Could not find table '%s' with which to generate a "
index 787827e8be063e6f7ff2dbd470bdc327c38e4b47..a419de431e72c4079007386479bc7f88402babb6 100644 (file)
@@ -110,6 +110,23 @@ FUNCTIONS = {
     functions.user: 'USER'
 }
 
+EXTRACT_MAP = {
+    'month': 'month',
+    'day': 'day',
+    'year': 'year',
+    'second': 'second',
+    'hour': 'hour',
+    'doy': 'doy',
+    'minute': 'minute',
+    'quarter': 'quarter',
+    'dow': 'dow',
+    'week': 'week',
+    'epoch': 'epoch',
+    'milliseconds': 'milliseconds',
+    'microseconds': 'microseconds',
+    'timezone_hour': 'timezone_hour',
+    'timezone_minute': 'timezone_minute'
+}
 
 class _CompileLabel(visitors.Visitable):
     """lightweight label object which acts as an expression._Label."""
@@ -135,6 +152,7 @@ class SQLCompiler(engine.Compiled):
 
     operators = OPERATORS
     functions = FUNCTIONS
+    extract_map = EXTRACT_MAP
 
     # class-level defaults which can be set at the instance
     # level to define if this Compiled instance represents
@@ -273,7 +291,7 @@ class SQLCompiler(engine.Compiled):
             return name
         else:
             if column.table.schema:
-                schema_prefix = self.preparer.quote(column.table.schema, column.table.quote_schema) + '.'
+                schema_prefix = self.preparer.quote_schema(column.table.schema, column.table.quote_schema) + '.'
             else:
                 schema_prefix = ''
             tablename = column.table.name
@@ -345,6 +363,10 @@ class SQLCompiler(engine.Compiled):
     def visit_cast(self, cast, **kwargs):
         return "CAST(%s AS %s)" % (self.process(cast.clause), self.process(cast.typeclause))
 
+    def visit_extract(self, extract, **kwargs):
+        field = self.extract_map.get(extract.field, extract.field)
+        return "EXTRACT(%s FROM %s)" % (field, self.process(extract.expr))
+
     def visit_function(self, func, result_map=None, **kwargs):
         if result_map is not None:
             result_map[func.name.lower()] = (func.name, None, func.type)
@@ -589,7 +611,7 @@ class SQLCompiler(engine.Compiled):
     def visit_table(self, table, asfrom=False, **kwargs):
         if asfrom:
             if getattr(table, "schema", None):
-                return self.preparer.quote(table.schema, table.quote_schema) + "." + self.preparer.quote(table.name, table.quote)
+                return self.preparer.quote_schema(table.schema, table.quote_schema) + "." + self.preparer.quote(table.name, table.quote)
             else:
                 return self.preparer.quote(table.name, table.quote)
         else:
@@ -1118,7 +1140,15 @@ class IdentifierPreparer(object):
                 or self.illegal_initial_characters.match(value[0])
                 or not self.legal_characters.match(unicode(value))
                 or (lc_value != value))
-    
+
+    def quote_schema(self, schema, force):
+        """Quote a schema.
+
+        Subclasses should override this to provide database-dependent 
+        quoting behavior.
+        """
+        return self.quote(schema, force)
+
     def quote(self, ident, force):
         if force is None:
             if ident in self._strings:
@@ -1137,7 +1167,7 @@ class IdentifierPreparer(object):
     def format_sequence(self, sequence, use_schema=True):
         name = self.quote(sequence.name, sequence.quote)
         if not self.omit_schema and use_schema and sequence.schema is not None:
-            name = self.quote(sequence.schema, sequence.quote) + "." + name
+            name = self.quote_schema(sequence.schema, sequence.quote) + "." + name
         return name
 
     def format_label(self, label, name=None):
@@ -1159,7 +1189,7 @@ class IdentifierPreparer(object):
             name = table.name
         result = self.quote(name, table.quote)
         if not self.omit_schema and use_schema and getattr(table, "schema", None):
-            result = self.quote(table.schema, table.quote_schema) + "." + result
+            result = self.quote_schema(table.schema, table.quote_schema) + "." + result
         return result
 
     def format_column(self, column, use_table=False, name=None, table_name=None):
@@ -1187,7 +1217,7 @@ class IdentifierPreparer(object):
         # a longer sequence.
 
         if not self.omit_schema and use_schema and getattr(table, 'schema', None):
-            return (self.quote(table.schema, table.quote_schema),
+            return (self.quote_schema(table.schema, table.quote_schema),
                     self.format_table(table, use_schema=False))
         else:
             return (self.format_table(table, use_schema=False), )
index 5e565cfe90f7670cf1c455d1ddeba6e5ce437ba4..4ecc7a06786faf008a5d1446202eb56342af2c23 100644 (file)
@@ -483,8 +483,7 @@ def cast(clause, totype, **kwargs):
 def extract(field, expr):
     """Return the clause ``extract(field FROM expr)``."""
 
-    expr = _BinaryExpression(text(field), expr, operators.from_)
-    return func.extract(expr)
+    return _Extract(field, expr)
 
 def collate(expression, collation):
     """Return the clause ``expression COLLATE collation``."""
@@ -2226,13 +2225,17 @@ class _Case(ColumnElement):
             whenlist = [(_literal_as_binds(c).self_group(), _literal_as_binds(r)) for (c, r) in whens]
         else:
             whenlist = [(_no_literals(c).self_group(), _literal_as_binds(r)) for (c, r) in whens]
-            
+
         if whenlist:
             type_ = list(whenlist[-1])[-1].type
         else:
             type_ = None
-            
-        self.value = value
+
+        if value is None:
+            self.value = None
+        else:
+            self.value = _literal_as_binds(value)
+
         self.type = type_
         self.whens = whenlist
         if else_ is not None:
@@ -2258,7 +2261,7 @@ class _Case(ColumnElement):
 
     @property
     def _from_objects(self):
-        return itertools.chain(*[x._from_objects for x in self.get_children()])
+        return list(itertools.chain(*[x._from_objects for x in self.get_children()]))
 
 class Function(ColumnElement, FromClause):
     """Describe a SQL function."""
@@ -2334,6 +2337,27 @@ class _Cast(ColumnElement):
         return self.clause._from_objects
 
 
+class _Extract(ColumnElement):
+
+    __visit_name__ = 'extract'
+
+    def __init__(self, field, expr, **kwargs):
+        self.type = sqltypes.Integer()
+        self.field = field
+        self.expr = _literal_as_binds(expr, None)
+
+    def _copy_internals(self, clone=_clone):
+        self.field = clone(self.field)
+        self.expr = clone(self.expr)
+
+    def get_children(self, **kwargs):
+        return self.field, self.expr
+
+    @property
+    def _from_objects(self):
+        return self.expr._from_objects
+
+
 class _UnaryExpression(ColumnElement):
 
     __visit_name__ = 'unary'
index a8de5c6352b696f253e00e06486b025344155b9f..36357faf505c5a171bb0a3896b6fd23737070c36 100644 (file)
@@ -45,6 +45,7 @@ def find_join_source(clauses, join_to):
                 return i, f
     else:
         return None, None
+
     
 def find_tables(clause, check_columns=False, include_aliases=False, include_joins=False, include_selects=False):
     """locate Table objects within the given expression."""
index 8ea8c36dd85a818f16e6d9293622d935e830c610..8c2847396e64b0b64401963d23605fdbd36176ca 100644 (file)
@@ -715,6 +715,12 @@ class OrderedDict(dict):
         self._list = []
         dict.clear(self)
 
+    def copy(self):
+        return self.__copy__()
+
+    def __copy__(self):
+        return OrderedDict(self)
+
     def sort(self, *arg, **kw):
         self._list.sort(*arg, **kw)
 
index fa3aff4807dd7ccc236dfa08bc17be11b4ee0440..1721f621d3ccf3dd5a4fe9b370a45bbceea719bd 100644 (file)
@@ -1,5 +1,5 @@
 import testenv; testenv.configure_for_tests()
-import threading, unittest
+import copy, threading, unittest
 from sqlalchemy import util, sql, exc
 from testlib import TestBase
 from testlib.compat import gc_collect
@@ -56,6 +56,18 @@ class OrderedDictTest(TestBase):
         o = util.OrderedDict([('name', 'jbe'), ('fullname', 'jonathan'), ('password', '')])
         eq_(o.keys(), ['name', 'fullname', 'password'])
 
+    def test_odict_copy(self):
+        o = util.OrderedDict()
+        o["zzz"] = 1
+        o["aaa"] = 2
+        eq_(o.keys(), ['zzz', 'aaa'])
+
+        o2 = o.copy()
+        eq_(o2.keys(), o.keys())
+
+        o3 = copy.copy(o)
+        eq_(o3.keys(), o.keys())
+
 class OrderedSetTest(TestBase):
     def test_mutators_against_iter(self):
         # testing a set modified against an iterator
index 311231947e567b005dabfb9dd75eacd63c9eab7e..57af45a9d6ac1642ff4cef5c50e32e542d0318f6 100644 (file)
@@ -1,14 +1,33 @@
 import testenv; testenv.configure_for_tests()
 from sqlalchemy import *
+from sqlalchemy import sql
 from sqlalchemy.databases import access
 from testlib import *
 
 
-class BasicTest(TestBase, AssertsExecutionResults):
-    # A simple import of the database/ module should work on all systems.
-    def test_import(self):
-        # we got this far, right?
-        return True
+class CompileTest(TestBase, AssertsCompiledSQL):
+    __dialect__ = access.dialect()
+
+    def test_extract(self):
+        t = sql.table('t', sql.column('col1'))
+
+        mapping = {
+            'month': 'm',
+            'day': 'd',
+            'year': 'yyyy',
+            'second': 's',
+            'hour': 'h',
+            'doy': 'y',
+            'minute': 'n',
+            'quarter': 'q',
+            'dow': 'w',
+            'week': 'ww'
+            }
+
+        for field, subst in mapping.items():
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst)
 
 
 if __name__ == "__main__":
index ecaf7b5a6d483a4dd8fe67d9db325d58783584f5..e62582ddc41d1b97d516590e3103be270074b816 100755 (executable)
@@ -89,6 +89,30 @@ class CompileTest(TestBase, AssertsCompiledSQL):
         s = select([tbl.c.id]).where(tbl.c.id==1)
         self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM paj.test WHERE paj.test.id IN (SELECT test_1.id FROM paj.test AS test_1 WHERE test_1.id = :id_1)")
 
+    def test_delete_schema_multipart(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana.paj')
+        self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM banana.paj.test WHERE banana.paj.test.id = :id_1")
+
+        s = select([tbl.c.id]).where(tbl.c.id==1)
+        self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM banana.paj.test WHERE banana.paj.test.id IN (SELECT test_1.id FROM banana.paj.test AS test_1 WHERE test_1.id = :id_1)")
+
+    def test_delete_schema_multipart_needs_quoting(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana split.paj')
+        self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM [banana split].paj.test WHERE [banana split].paj.test.id = :id_1")
+
+        s = select([tbl.c.id]).where(tbl.c.id==1)
+        self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM [banana split].paj.test WHERE [banana split].paj.test.id IN (SELECT test_1.id FROM [banana split].paj.test AS test_1 WHERE test_1.id = :id_1)")
+
+    def test_delete_schema_multipart_both_need_quoting(self):
+        metadata = MetaData()
+        tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana split.paj with a space')
+        self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM [banana split].[paj with a space].test WHERE [banana split].[paj with a space].test.id = :id_1")
+
+        s = select([tbl.c.id]).where(tbl.c.id==1)
+        self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM [banana split].[paj with a space].test WHERE [banana split].[paj with a space].test.id IN (SELECT test_1.id FROM [banana split].[paj with a space].test AS test_1 WHERE test_1.id = :id_1)")                
+
     def test_union(self):
         t1 = table('t1',
             column('col1'),
@@ -126,6 +150,14 @@ class CompileTest(TestBase, AssertsCompiledSQL):
         self.assert_compile(func.current_date(), "GETDATE()")
         self.assert_compile(func.length(3), "LEN(:length_1)")
 
+    def test_extract(self):
+        t = table('t', column('col1'))
+
+        for field in 'day', 'month', 'year':
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field)
+
 
 class IdentityInsertTest(TestBase, AssertsCompiledSQL):
     __only_on__ = 'mssql'
index acd1c5a43b18f40da751bce2c1cb9e442219d1d1..de4480a4f701b1d51eaf88d3376d03c75a989ccb 100644 (file)
@@ -982,6 +982,19 @@ class SQLTest(TestBase, AssertsCompiledSQL):
         for type_, expected in specs:
             self.assert_compile(cast(t.c.col, type_), expected)
 
+    def test_extract(self):
+        t = sql.table('t', sql.column('col1'))
+
+        for field in 'year', 'month', 'day':
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                "SELECT EXTRACT(%s FROM t.col1) AS anon_1 FROM t" % field)
+
+        # milliseconds to millisecond
+        self.assert_compile(
+            select([extract('milliseconds', t.c.col1)]),
+            "SELECT EXTRACT(millisecond FROM t.col1) AS anon_1 FROM t")
+
 
 class RawReflectionTest(TestBase):
     def setUp(self):
index 997c3fcb5e8b170387ef7b303c59c74c55e03101..fe72d474f20c03f2c62514a89eca4ba370d22a3e 100644 (file)
@@ -22,6 +22,8 @@ class SequenceTest(TestBase, AssertsCompiledSQL):
         assert dialect.identifier_preparer.format_sequence(seq) == '"Some_Schema"."My_Seq"'
 
 class CompileTest(TestBase, AssertsCompiledSQL):
+    __dialect__ = postgres.dialect()
+
     def test_update_returning(self):
         dialect = postgres.dialect()
         table1 = table('mytable',
@@ -65,6 +67,14 @@ class CompileTest(TestBase, AssertsCompiledSQL):
         self.assert_compile(schema.CreateIndex(idx), 
             "CREATE INDEX test_idx1 ON testtbl (data) WHERE testtbl.data > 5 AND testtbl.data < 10", dialect=postgres.dialect())
 
+    def test_extract(self):
+        t = table('t', column('col1'))
+
+        for field in 'year', 'month', 'day':
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                "SELECT EXTRACT(%s FROM t.col1::timestamp) AS anon_1 "
+                "FROM t" % field)
 
 class ReturningTest(TestBase, AssertsExecutionResults):
     __only_on__ = 'postgres'
index 35f6cbba72d6f5569140991b80b631da9c3eaf36..f11461949764985bea1069f89ca5607a6d405c18 100644 (file)
@@ -3,7 +3,7 @@
 import testenv; testenv.configure_for_tests()
 import datetime
 from sqlalchemy import *
-from sqlalchemy import exc
+from sqlalchemy import exc, sql
 from sqlalchemy.dialects.sqlite import base as sqlite, pysqlite as pysqlite_dialect
 from testlib import *
 
@@ -285,6 +285,35 @@ class DialectTest(TestBase, AssertsExecutionResults):
             isolation_level="FOO")
 
 
+class SQLTest(TestBase, AssertsCompiledSQL):
+    """Tests SQLite-dialect specific compilation."""
+
+    __dialect__ = sqlite.dialect()
+
+
+    def test_extract(self):
+        t = sql.table('t', sql.column('col1'))
+
+        mapping = {
+            'month': '%m',
+            'day': '%d',
+            'year': '%Y',
+            'second': '%S',
+            'hour': '%H',
+            'doy': '%j',
+            'minute': '%M',
+            'epoch': '%s',
+            'dow': '%w',
+            'week': '%W',
+            }
+
+        for field, subst in mapping.items():
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                "SELECT CAST(STRFTIME('%s', t.col1) AS INTEGER) AS anon_1 "
+                "FROM t" % subst)
+
+
 class InsertTest(TestBase, AssertsExecutionResults):
     """Tests inserts and autoincrement."""
 
index 19cca465bd0cf47bc1dc4960f41f82c65c727fbc..32b9904d8a5d6bf22dd49deb41b6c29bd3a0b48b 100644 (file)
@@ -1,14 +1,30 @@
 import testenv; testenv.configure_for_tests()
 from sqlalchemy import *
+from sqlalchemy import sql
 from sqlalchemy.databases import sybase
 from testlib import *
 
 
-class BasicTest(TestBase, AssertsExecutionResults):
-    # A simple import of the database/ module should work on all systems.
-    def test_import(self):
-        # we got this far, right?
-        return True
+class CompileTest(TestBase, AssertsCompiledSQL):
+    __dialect__ = sybase.dialect()
+
+    def test_extract(self):
+        t = sql.table('t', sql.column('col1'))
+
+        mapping = {
+            'day': 'day',
+            'doy': 'dayofyear',
+            'dow': 'weekday',
+            'milliseconds': 'millisecond',
+            'millisecond': 'millisecond',
+            'year': 'year',
+            }
+
+        for field, subst in mapping.items():
+            self.assert_compile(
+                select([extract(field, t.c.col1)]),
+                'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst)
+
 
 
 if __name__ == "__main__":
index c6d4fe681471c642f0064ad074335acd2c2e13b5..0c835641fb9189086d61f71a12633749a00e5ec0 100644 (file)
@@ -4,7 +4,7 @@ from sqlalchemy.ext import declarative as decl
 from sqlalchemy import exc
 from testlib import sa, testing
 from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey, ForeignKeyConstraint, asc, Index
-from testlib.sa.orm import relation, create_session, class_mapper, eagerload, compile_mappers, backref, clear_mappers, polymorphic_union
+from testlib.sa.orm import relation, create_session, class_mapper, eagerload, compile_mappers, backref, clear_mappers, polymorphic_union, deferred
 from testlib.testing import eq_
 from orm._base import ComparableEntity, MappedTest
 
@@ -1082,6 +1082,29 @@ class DeclarativeInheritanceTest(DeclarativeTestBase):
             Engineer(name="vlad", primary_language="cobol") 
         )
 
+    def test_add_deferred(self):
+        class Person(Base, ComparableEntity):
+            __tablename__ = 'people'
+            id = Column('id', Integer, primary_key=True)
+
+        Person.name = deferred(Column(String(10)))
+
+        Base.metadata.create_all()
+        sess = create_session()
+        p = Person(name='ratbert')
+
+        sess.add(p)
+        sess.flush()
+        sess.expunge_all()
+        eq_(
+            sess.query(Person).all(),
+            [
+                Person(name='ratbert')
+            ]
+        )
+        person = sess.query(Person).filter(Person.name == 'ratbert').one()
+        assert 'name' not in person.__dict__
+
     def test_single_fksonsub(self):
         """test single inheritance with a foreign key-holding column on a subclass.
 
index f628460c110888aeb217348916ca8ac2928556af..d0c9d8b045a004f0a172f7fafd6305ead7411978 100644 (file)
@@ -22,6 +22,7 @@ def suite():
         'orm.assorted_eager',
 
         'orm.naturalpks',
+        'orm.defaults',
         'orm.unitofwork',
         'orm.session',
         'orm.transaction',
@@ -40,6 +41,8 @@ def suite():
         'orm.onetoone',
         'orm.dynamic',
 
+        'orm.evaluator',
+
         'orm.deprecations',
         )
     alltests = unittest.TestSuite()
index 958afadaf32d0943f8a15e4625bf4459511432be..3e3636085247fcfabf22297d954f66581908414d 100644 (file)
@@ -108,7 +108,27 @@ class SelfReferentialTest(_base.MappedTest):
         sess.delete(a)
         sess.flush()
 
-
+    @testing.resolve_artifact_names
+    def test_setnull_ondelete(self):
+        mapper(C1, t1, properties={
+            'children':relation(C1)
+        })
+        
+        sess = create_session()
+        c1 = C1()
+        c2 = C1()
+        c1.children.append(c2)
+        sess.add(c1)
+        sess.flush()
+        assert c2.parent_c1 == c1.c1
+        
+        sess.delete(c1)
+        sess.flush()
+        assert c2.parent_c1 is None
+        
+        sess.expire_all()
+        assert c2.parent_c1 is None
+        
 class SelfReferentialNoPKTest(_base.MappedTest):
     """A self-referential relationship that joins on a column other than the primary key column"""
 
index f975f762f8b0fc81a7267cc05e47d8dc4d9161e9..3bd94b7c0ebc2dd7d54ca50b90065ab5a79b0a57 100644 (file)
@@ -4,6 +4,7 @@ from sqlalchemy.orm import dynamic_loader, backref
 from testlib import testing
 from testlib.sa import Table, Column, Integer, String, ForeignKey, desc, select, func
 from testlib.sa.orm import mapper, relation, create_session, Query, attributes
+from sqlalchemy.orm.dynamic import AppenderMixin
 from testlib.testing import eq_
 from testlib.compat import _function_named
 from orm import _base, _fixtures
@@ -151,6 +152,44 @@ class DynamicTest(_fixtures.FixtureTest):
         assert not hasattr(q, 'append')
         assert type(q).__name__ == 'MyQuery'
 
+    @testing.resolve_artifact_names
+    def test_custom_query_with_custom_mixin(self):
+        class MyAppenderMixin(AppenderMixin):
+            def add(self, items):
+                if isinstance(items, list):
+                    for item in items:
+                        self.append(item)
+                else:
+                    self.append(items)
+
+        class MyQuery(Query):
+            pass
+
+        class MyAppenderQuery(MyAppenderMixin, MyQuery):
+            query_class = MyQuery
+
+        mapper(User, users, properties={
+            'addresses':dynamic_loader(mapper(Address, addresses),
+                                       query_class=MyAppenderQuery)
+        })
+        sess = create_session()
+        u = User()
+        sess.add(u)
+
+        col = u.addresses
+        assert isinstance(col, Query)
+        assert isinstance(col, MyQuery)
+        assert hasattr(col, 'append')
+        assert hasattr(col, 'add')
+        assert type(col).__name__ == 'MyAppenderQuery'
+
+        q = col.limit(1)
+        assert isinstance(q, Query)
+        assert isinstance(q, MyQuery)
+        assert not hasattr(q, 'append')
+        assert not hasattr(q, 'add')
+        assert type(q).__name__ == 'MyQuery'
+
 
 class SessionTest(_fixtures.FixtureTest):
     run_inserts = None
@@ -165,7 +204,7 @@ class SessionTest(_fixtures.FixtureTest):
         a1 = Address(email_address='foo')
         sess.add_all([u1, a1])
         sess.flush()
-        
+
         assert testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)) == 0
         u1 = sess.query(User).get(u1.id)
         u1.addresses.append(a1)
@@ -174,11 +213,11 @@ class SessionTest(_fixtures.FixtureTest):
         assert testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall() == [
             (a1.id, u1.id, 'foo')
         ]
-        
+
         u1.addresses.remove(a1)
         sess.flush()
         assert testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)) == 0
-        
+
         u1.addresses.append(a1)
         sess.flush()
         assert testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall() == [
@@ -192,7 +231,7 @@ class SessionTest(_fixtures.FixtureTest):
         assert testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall() == [
             (a2.id, u1.id, 'bar')
         ]
-        
+
 
     @testing.resolve_artifact_names
     def test_merge(self):
@@ -204,30 +243,30 @@ class SessionTest(_fixtures.FixtureTest):
         a1 = Address(email_address='a1')
         a2 = Address(email_address='a2')
         a3 = Address(email_address='a3')
-        
+
         u1.addresses.append(a2)
         u1.addresses.append(a3)
-        
+
         sess.add_all([u1, a1])
         sess.flush()
-        
+
         u1 = User(id=u1.id, name='jack')
         u1.addresses.append(a1)
         u1.addresses.append(a3)
         u1 = sess.merge(u1)
         assert attributes.get_history(u1, 'addresses') == (
-            [a1], 
-            [a3], 
+            [a1],
+            [a3],
             [a2]
         )
 
         sess.flush()
-        
+
         eq_(
             list(u1.addresses),
             [a1, a3]
         )
-        
+
     @testing.resolve_artifact_names
     def test_flush(self):
         mapper(User, users, properties={
@@ -240,10 +279,10 @@ class SessionTest(_fixtures.FixtureTest):
         u1.addresses.append(Address(email_address='lala@hoho.com'))
         sess.add_all((u1, u2))
         sess.flush()
-        
+
         from sqlalchemy.orm import attributes
         self.assertEquals(attributes.get_history(attributes.instance_state(u1), 'addresses'), ([], [Address(email_address='lala@hoho.com')], []))
-        
+
         sess.expunge_all()
 
         # test the test fixture a little bit
@@ -254,18 +293,18 @@ class SessionTest(_fixtures.FixtureTest):
             User(name='jack', addresses=[Address(email_address='lala@hoho.com')]),
             User(name='ed', addresses=[Address(email_address='foo@bar.com')])
         ] == sess.query(User).all()
-    
+
     @testing.resolve_artifact_names
     def test_hasattr(self):
         mapper(User, users, properties={
             'addresses':dynamic_loader(mapper(Address, addresses))
         })
         u1 = User(name='jack')
-        
+
         assert 'addresses' not in u1.__dict__.keys()
         u1.addresses = [Address(email_address='test')]
         assert 'addresses' in dir(u1)
-    
+
     @testing.resolve_artifact_names
     def test_collection_set(self):
         mapper(User, users, properties={
@@ -277,7 +316,7 @@ class SessionTest(_fixtures.FixtureTest):
         a2 = Address(email_address='a2')
         a3 = Address(email_address='a3')
         a4 = Address(email_address='a4')
-        
+
         sess.add(u1)
         u1.addresses = [a1, a3]
         assert list(u1.addresses) == [a1, a3]
@@ -287,10 +326,10 @@ class SessionTest(_fixtures.FixtureTest):
         assert list(u1.addresses) == [a2, a3]
         u1.addresses = []
         assert list(u1.addresses) == []
-        
-        
 
-        
+
+
+
     @testing.resolve_artifact_names
     def test_rollback(self):
         mapper(User, users, properties={
index ce81b0ba3a574da6dba08e20e9ab4f9e337c4ee2..5981223c8332332d9b21c1507b7ce26a8aa6c731 100644 (file)
@@ -892,6 +892,20 @@ class SelfReferentialEagerTest(_base.MappedTest):
         n1.children[1].append(Node(data='n123'))
         sess.add(n1)
         sess.flush()
+        sess.expunge_all()
+        def go():
+            d = sess.query(Node).filter_by(data='n1').all()[0]
+            assert Node(data='n1', children=[
+                Node(data='n11'),
+                Node(data='n12', children=[
+                    Node(data='n121'),
+                    Node(data='n122'),
+                    Node(data='n123')
+                ]),
+                Node(data='n13')
+            ]) == d
+        self.assert_sql_count(testing.db, go, 1)
+
         sess.expunge_all()
         def go():
             d = sess.query(Node).filter_by(data='n1').first()
index c5b9b3fde63531cdea4826e178f4d23c84cc3d5b..569fdd52e327d231b57321c84192cbf4f1b0c0b2 100644 (file)
@@ -8,6 +8,8 @@ from sqlalchemy import util
 from sqlalchemy.orm import *
 from testlib import *
 from testlib import fixtures
+from orm import _base
+from testlib.testing import eq_
 
 class AttrSettable(object):
     def __init__(self, **kwargs):
@@ -610,7 +612,50 @@ class RelationTest7(ORMTest):
         for p in r:
             assert p.car_id == p.car.car_id
 
+class RelationTest8(ORMTest):
+    def define_tables(self, metadata):
+        global taggable, users
+        taggable = Table('taggable', metadata,
+                         Column('id', Integer, primary_key=True),
+                         Column('type', String(30)),
+                         Column('owner_id', Integer, ForeignKey('taggable.id')),
+                         )
+        users = Table ('users', metadata,
+                       Column('id', Integer, ForeignKey('taggable.id'), primary_key=True),
+                       Column('data', String(50)),
+                       )
+
+    def test_selfref_onjoined(self):
+        class Taggable(_base.ComparableEntity):
+            pass
+
+        class User(Taggable):
+            pass
+
+        mapper( Taggable, taggable, polymorphic_on=taggable.c.type, polymorphic_identity='taggable', properties = {
+            'owner' : relation (User,
+                               primaryjoin=taggable.c.owner_id ==taggable.c.id,
+                                remote_side=taggable.c.id
+                                ),
+        })
+
+
+        mapper(User, users, inherits=Taggable, polymorphic_identity='user',
+               inherit_condition=users.c.id == taggable.c.id,
+               )
+
+
+        u1 = User(data='u1')
+        t1 = Taggable(owner=u1)
+        sess = create_session()
+        sess.add(t1)
+        sess.flush()
         
+        sess.expunge_all()
+        eq_(
+            sess.query(Taggable).order_by(Taggable.id).all(),
+            [User(data='u1'), Taggable(owner=User(data='u1'))]
+        )
         
 class GenerativeTest(TestBase, AssertsExecutionResults):
     def setUpAll(self):
index 790cd449506dec0c1d7b85d3608a4feaf4daa3d8..02f8563c18f1d15910287c23d2373a08ec61b64c 100644 (file)
@@ -695,5 +695,34 @@ class MergeTest(_fixtures.FixtureTest):
         sess.flush()
         assert merged_user not in sess.new
 
+    @testing.resolve_artifact_names
+    def test_cascades_dont_autoflush_2(self):
+        mapper(User, users, properties={
+            'addresses':relation(Address,
+                        backref='user',
+                                 cascade="all, delete-orphan")
+        })
+        mapper(Address, addresses)
+
+        u = User(id=7, name='fred', addresses=[
+            Address(id=1, email_address='fred1'),
+        ])
+        sess = create_session(autoflush=True, autocommit=False)
+        sess.add(u)
+        sess.commit()
+
+        sess.expunge_all()
+
+        u = User(id=7, name='fred', addresses=[
+            Address(id=1, email_address='fred1'),
+            Address(id=2, email_address='fred2'),
+        ])
+        sess.merge(u)
+        assert sess.autoflush
+        sess.commit()
+
+
+
+
 if __name__ == "__main__":
     testenv.main()
index ca308bb5bee1b7f295ad19e974f4e4ca75a61dc8..878fe931e36bfbfc3eb74cbf7eb508ee688a806c 100644 (file)
@@ -99,7 +99,36 @@ class PickleTest(_fixtures.FixtureTest):
         self.assertEquals(ad.email_address, 'ed@bar.com')
         self.assertEquals(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')]))
 
+    @testing.resolve_artifact_names
+    def test_options_with_descriptors(self):
+        mapper(User, users, properties={
+            'addresses':relation(Address, backref="user")
+        })
+        mapper(Address, addresses)
+        sess = create_session()
+        u1 = User(name='ed')
+        u1.addresses.append(Address(email_address='ed@bar.com'))
+        sess.add(u1)
+        sess.flush()
+        sess.expunge_all()
 
+        for opt in [
+            sa.orm.eagerload(User.addresses),
+            sa.orm.eagerload("addresses"),
+            sa.orm.defer("name"),
+            sa.orm.defer(User.name),
+            sa.orm.defer([User.name]),
+            sa.orm.eagerload("addresses", User.addresses),
+            sa.orm.eagerload(["addresses", User.addresses]),
+        ]:
+            opt2 = pickle.loads(pickle.dumps(opt))
+            self.assertEquals(opt.key, opt2.key)
+        
+        u1 = sess.query(User).options(opt).first()
+        
+        u2 = pickle.loads(pickle.dumps(u1))
+        
+        
 class PolymorphicDeferredTest(_base.MappedTest):
     def define_tables(self, metadata):
         Table('users', metadata,
index 0d893031131a6f056a9a962583d729edcfd32d0a..6531b234c6da8c8329752e2f5750d1d3dfd39e22 100644 (file)
@@ -2786,10 +2786,18 @@ class UpdateDeleteTest(_base.MappedTest):
               Column('id', Integer, primary_key=True),
               Column('name', String(32)),
               Column('age', Integer))
+
+        Table('documents', metadata,
+              Column('id', Integer, primary_key=True),
+              Column('user_id', None, ForeignKey('users.id')),
+              Column('title', String(32)))
     
     def setup_classes(self):
         class User(_base.ComparableEntity):
             pass
+
+        class Document(_base.ComparableEntity):
+            pass
     
     @testing.resolve_artifact_names
     def insert_data(self):
@@ -2799,10 +2807,21 @@ class UpdateDeleteTest(_base.MappedTest):
             dict(id=3, name='jill', age=29),
             dict(id=4, name='jane', age=37),
         ])
+
+    @testing.resolve_artifact_names
+    def insert_documents(self):
+        documents.insert().execute([
+            dict(id=1, user_id=1, title='foo'),
+            dict(id=2, user_id=1, title='bar'),
+            dict(id=3, user_id=2, title='baz'),
+        ])
     
     @testing.resolve_artifact_names
     def setup_mappers(self):
         mapper(User, users)
+        mapper(Document, documents, properties={
+            'user': relation(User, lazy=False, backref=backref('documents', lazy=True))
+        })
     
     @testing.resolve_artifact_names
     def test_delete(self):
@@ -2989,5 +3008,37 @@ class UpdateDeleteTest(_base.MappedTest):
         rowcount = sess.query(User).filter(User.age > 26).delete(synchronize_session=False)
         self.assertEquals(rowcount, 3)
 
+    @testing.resolve_artifact_names
+    def test_update_with_eager_relations(self):
+        self.insert_documents()
+
+        sess = create_session(bind=testing.db, autocommit=False)
+
+        foo,bar,baz = sess.query(Document).order_by(Document.id).all()
+        sess.query(Document).filter(Document.user_id == 1).update({'title': Document.title+Document.title}, synchronize_session='fetch')
+
+        eq_([foo.title, bar.title, baz.title], ['foofoo','barbar', 'baz'])
+        eq_(sess.query(Document.title).order_by(Document.id).all(), zip(['foofoo','barbar', 'baz']))
+
+    @testing.resolve_artifact_names
+    def test_update_with_explicit_eagerload(self):
+        sess = create_session(bind=testing.db, autocommit=False)
+
+        john,jack,jill,jane = sess.query(User).order_by(User.id).all()
+        sess.query(User).options(eagerload(User.documents)).filter(User.age > 29).update({'age': User.age - 10}, synchronize_session='fetch')
+
+        eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
+        eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
+
+    @testing.resolve_artifact_names
+    def test_delete_with_eager_relations(self):
+        self.insert_documents()
+
+        sess = create_session(bind=testing.db, autocommit=False)
+
+        sess.query(Document).filter(Document.user_id == 1).delete(synchronize_session=False)
+
+        eq_(sess.query(Document.title).all(), zip(['baz']))
+
 if __name__ == '__main__':
     testenv.main()
index 9c61617fc6c7508f41eb2041575a723f612108da..9ab9dc979580afaa3c6ad8c9d66e4449f7f4fce3 100644 (file)
@@ -275,6 +275,44 @@ class ExecuteTest(TestBase):
 
         assert x == y == z == w == q == r
 
+    def test_extract_bind(self):
+        """Basic common denominator execution tests for extract()"""
+
+        date = datetime.date(2010, 5, 1)
+
+        def execute(field):
+            return testing.db.execute(select([extract(field, date)])).scalar()
+
+        assert execute('year') == 2010
+        assert execute('month') == 5
+        assert execute('day') == 1
+
+        date = datetime.datetime(2010, 5, 1, 12, 11, 10)
+
+        assert execute('year') == 2010
+        assert execute('month') == 5
+        assert execute('day') == 1
+
+    def test_extract_expression(self):
+        meta = MetaData(testing.db)
+        table = Table('test', meta,
+                      Column('dt', DateTime),
+                      Column('d', Date))
+        meta.create_all()
+        try:
+            table.insert().execute(
+                {'dt': datetime.datetime(2010, 5, 1, 12, 11, 10),
+                 'd': datetime.date(2010, 5, 1) })
+            rs = select([extract('year', table.c.dt),
+                         extract('month', table.c.d)]).execute()
+            row = rs.fetchone()
+            assert row[0] == 2010
+            assert row[1] == 5
+            rs.close()
+        finally:
+            meta.drop_all()
+
+
 def exec_sorted(statement, *args, **kw):
     """Executes a statement and returns a sorted list plain tuple rows."""
 
index d7ef36bce28ea8b04410dbb75c1b68711c30d4f0..e99b953b9bfb591b0c7419035d47c8bcd7346ffe 100644 (file)
@@ -834,12 +834,6 @@ FROM mytable, myothertable WHERE foo.id = foofoo(lala) AND datetime(foo) = Today
              "SELECT values.id FROM values WHERE values.val1 / (values.val2 - values.val1) / values.val1 > :param_1"
          )
 
-    def test_extract(self):
-        """test the EXTRACT function"""
-        self.assert_compile(select([extract("month", table3.c.otherstuff)]), "SELECT extract(month FROM thirdtable.otherstuff) AS extract_1 FROM thirdtable")
-
-        self.assert_compile(select([extract("day", func.to_date("03/20/2005", "MM/DD/YYYY"))]), "SELECT extract(day FROM to_date(:to_date_1, :to_date_2)) AS extract_1")
-
     def test_collate(self):
         for expr in (select([table1.c.name.collate('latin1_german2_ci')]),
                      select([collate(table1.c.name, 'latin1_german2_ci')])):
index e11810f5f34d86e176ee04ecc0b8c60eb923514a..5ef2b73f15a2fc5441b778b3fc12d134a6fd6ee1 100644 (file)
@@ -708,7 +708,6 @@ class NumericTest(TestBase, AssertsExecutionResults):
             ncasdec=Decimal("12.4"), fcasdec=Decimal("15.75"))
 
         l = numeric_table.select().execute().fetchall()
-        print l
         rounded = [
             (l[0][0], l[0][1], round(l[0][2], 5), l[0][3], l[0][4]),
             (l[1][0], l[1][1], round(l[1][2], 5), l[1][3], l[1][4]),
@@ -802,11 +801,9 @@ class BooleanTest(TestBase, AssertsExecutionResults):
         bool_table.insert().execute(id=5, value=True)
 
         res = bool_table.select(bool_table.c.value==True).execute().fetchall()
-        print res
         assert(res==[(1, True),(3, True),(4, True),(5, True)])
 
         res2 = bool_table.select(bool_table.c.value==False).execute().fetchall()
-        print res2
         assert(res2==[(2, False)])
 
 class PickleTest(TestBase):
index 1c66e3e51b81976a950f0dd3a9d7f19f1d6ed766..b51f6d2e858a8b737be36a31f052bbf55ee10a3f 100644 (file)
@@ -59,6 +59,7 @@ def row_triggers(fn):
     return _chain_decorators_on(
         fn,
         # no access to same table
+        no_support('mysql', 'requires SUPER priv'),
         exclude('mysql', '<', (5, 0, 10), 'not supported by database'),
         no_support('postgres', 'not supported by database: no statements'),
         )