From 477ef854b021c20f2d269e6a1171ff4f003a3791 Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Sat, 2 May 2009 15:48:32 +0000 Subject: [PATCH] merged -r 5869:5936 of trunk, including dialect changes re: extract() --- CHANGES | 92 +++++++++++++++---- doc/build/Makefile | 2 +- doc/build/dbengine.rst | 9 +- doc/build/ormtutorial.rst | 30 +++--- doc/build/reference/ext/serializer.rst | 36 -------- examples/adjacencytree/basic_tree.py | 4 +- examples/association/basic_association.py | 19 ++-- examples/association/proxied_association.py | 7 +- examples/collections/large_collection.py | 14 +-- .../custom_attributes/custom_management.py | 12 ++- .../custom_attributes/listen_for_events.py | 6 +- examples/derived_attributes/attributes.py | 10 +- examples/dynamic_dict/dynamic_dict.py | 4 +- examples/elementtree/adjacency_list.py | 14 +-- examples/elementtree/optimized_al.py | 14 +-- examples/elementtree/pickle.py | 13 +-- examples/graphs/graph1.py | 8 +- examples/nested_sets/nested_sets.py | 6 +- examples/pickle/custom_pickler.py | 11 ++- examples/poly_assoc/poly_assoc.py | 11 ++- examples/poly_assoc/poly_assoc_fk.py | 10 +- examples/poly_assoc/poly_assoc_generic.py | 10 +- examples/polymorph/concrete.py | 12 +-- examples/polymorph/polymorph.py | 22 ++--- examples/polymorph/single.py | 20 ++-- examples/postgis/postgis.py | 14 +-- examples/sharding/attribute_shard.py | 19 ++-- examples/vertical/dictlike-polymorphic.py | 16 ++-- examples/vertical/dictlike.py | 13 +-- examples/vertical/vertical.py | 5 +- lib/sqlalchemy/dialects/access/base.py | 17 ++++ lib/sqlalchemy/dialects/mssql/base.py | 59 ++++++++++-- lib/sqlalchemy/dialects/mysql/base.py | 5 + lib/sqlalchemy/dialects/postgres/base.py | 6 ++ lib/sqlalchemy/dialects/sqlite/base.py | 24 ++++- lib/sqlalchemy/dialects/sybase/base.py | 11 +++ lib/sqlalchemy/ext/declarative.py | 6 ++ lib/sqlalchemy/orm/__init__.py | 5 - lib/sqlalchemy/orm/dynamic.py | 42 +++++---- lib/sqlalchemy/orm/evaluator.py | 2 +- lib/sqlalchemy/orm/interfaces.py | 21 +++++ lib/sqlalchemy/orm/properties.py | 10 +- lib/sqlalchemy/orm/query.py | 4 + lib/sqlalchemy/orm/session.py | 29 +++--- lib/sqlalchemy/orm/unitofwork.py | 6 +- lib/sqlalchemy/schema.py | 28 ++++-- lib/sqlalchemy/sql/compiler.py | 42 +++++++-- lib/sqlalchemy/sql/expression.py | 36 ++++++-- lib/sqlalchemy/sql/util.py | 1 + lib/sqlalchemy/util.py | 6 ++ test/base/utils.py | 14 ++- test/dialect/access.py | 29 +++++- test/dialect/mssql.py | 32 +++++++ test/dialect/mysql.py | 13 +++ test/dialect/postgres.py | 10 ++ test/dialect/sqlite.py | 31 ++++++- test/dialect/sybase.py | 26 +++++- test/ext/declarative.py | 25 ++++- test/orm/alltests.py | 3 + test/orm/cycles.py | 22 ++++- test/orm/dynamic.py | 79 ++++++++++++---- test/orm/eager_relations.py | 14 +++ test/orm/inheritance/polymorph2.py | 45 +++++++++ test/orm/merge.py | 29 ++++++ test/orm/pickled.py | 29 ++++++ test/orm/query.py | 51 ++++++++++ test/sql/functions.py | 38 ++++++++ test/sql/select.py | 6 -- test/sql/testtypes.py | 3 - test/testlib/requires.py | 1 + 70 files changed, 999 insertions(+), 324 deletions(-) diff --git a/CHANGES b/CHANGES index 8aa1709c6e..c3dc89916f 100644 --- a/CHANGES +++ b/CHANGES @@ -3,33 +3,91 @@ ======= CHANGES ======= + 0.5.4 ===== + - orm - - Fixed the "set collection" function on "dynamic" relations - to initiate events correctly. Previously a collection - could only be assigned to a pending parent instance, - otherwise modified events would not be fired correctly. - Set collection is now compatible with merge(), - fixes [ticket:1352]. 
- - - Lazy loader will not use get() if the "lazy load" - SQL clause matches the clause used by get(), but - contains some parameters hardcoded. Previously - the lazy strategy would fail with the get(). Ideally - get() would be used with the hardcoded parameters - but this would require further development. + - Modified query_cls on DynamicAttributeImpl to accept a full + mixin version of the AppenderQuery, which allows subclassing + the AppenderMixin. + + - Fixed the evaluator not being able to evaluate IS NULL clauses. + + - Fixed the "set collection" function on "dynamic" relations to + initiate events correctly. Previously a collection could only + be assigned to a pending parent instance, otherwise modified + events would not be fired correctly. Set collection is now + compatible with merge(), fixes [ticket:1352]. + + - Allowed pickling of PropertyOption objects constructed with + instrumented descriptors; previously, pickle errors would occur + when pickling an object which was loaded with a descriptor-based + option, such as query.options(eagerload(MyClass.foo)). + + - Lazy loader will not use get() if the "lazy load" SQL clause + matches the clause used by get(), but contains some parameters + hardcoded. Previously the lazy strategy would fail with the + get(). Ideally get() would be used with the hardcoded + parameters but this would require further development. [ticket:1357] + - Fixed another location where autoflush was interfering + with session.merge(). autoflush is disabled completely + for the duration of merge() now. [ticket:1360] + + - Fixed bug in relation(), introduced in 0.5.3, + whereby a self referential relation + from a base class to a joined-table subclass would + not configure correctly. + + - Fixed documentation for session weak_identity_map - + the default value is True, indicating a weak + referencing map in use. + + - Fixed a unit of work issue whereby the foreign + key attribute on an item contained within a collection + owned by an object being deleted would not be set to + None if the relation() was self-referential. [ticket:1376] + + - Fixed Query.update() and Query.delete() failures with eagerloaded + relations. [ticket:1378] + +- schema + - Added a quote_schema() method to the IdentifierPreparer class + so that dialects can override how schemas get handled. This + enables the MSSQL dialect to treat schemas as multipart + identifiers, such as 'database.owner'. [ticket: 594, 1341] + - sql - - Fixed __repr__() and other _get_colspec() methods on + - ``sqlalchemy.extract()`` is now dialect sensitive and can + extract components of timestamps idiomatically across the + supported databases, including SQLite. + + - Fixed __repr__() and other _get_colspec() methods on ForeignKey constructed from __clause_element__() style construct (i.e. declarative columns). [ticket:1353] - + - mssql + - Modified how savepoint logic works to prevent it from + stepping on non-savepoint oriented routines. Savepoint + support is still very experimental. + + - Added in reserved words for MSSQL that covers version 2008 + and all prior versions. [ticket:1310] + - Corrected problem with information schema not working with a - binary collation based database. Cleaned up information - schema since it is only used by mssql now. [ticket:1343] + binary collation based database. Cleaned up information schema + since it is only used by mssql now. [ticket:1343] + +- sqlite + - Corrected the float type so that it correctly maps to a + SLFloat type when being reflected. 
[ticket:1273] + +- extensions + + - Fixed adding of deferred or other column properties to a + declarative class. [ticket:1379] 0.5.3 ===== diff --git a/doc/build/Makefile b/doc/build/Makefile index 6dcd0321ee..f7ac2ca576 100644 --- a/doc/build/Makefile +++ b/doc/build/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -SPHINXBUILD = ./bin/sphinx-build +SPHINXBUILD = sphinx-build PAPER = # Internal variables. diff --git a/doc/build/dbengine.rst b/doc/build/dbengine.rst index 92e4d3063c..362526943d 100644 --- a/doc/build/dbengine.rst +++ b/doc/build/dbengine.rst @@ -92,8 +92,12 @@ Available drivernames are ``sqlite``, ``mysql``, ``postgres``, ``oracle``, ``mss # mysql mysql_db = create_engine('mysql://localhost/foo') + # oracle + oracle_db = create_engine('oracle://scott:tiger@host:port/dbname?key1=value1&key2=value2') + # oracle via TNS name - oracle_db = create_engine('oracle://scott:tiger@dsn') + oracle_db = create_engine('oracle://scott:tiger@tnsname') + oracle_db = create_engine('oracle://scott:tiger@tnsname/?key1=value1&key2=value2') # oracle will feed host/port/SID into cx_oracle.makedsn oracle_db = create_engine('oracle://scott:tiger@127.0.0.1:1521/sidname') @@ -102,7 +106,8 @@ Available drivernames are ``sqlite``, ``mysql``, ``postgres``, ``oracle``, ``mss mssql_db = create_engine('mssql://username:password@localhost/database') # mssql via a DSN connection - mssql_db = create_engine('mssql://username:password@/?dsn=mydsn') + mssql_db = create_engine('mssql://mydsn') + mssql_db = create_engine('mssql://username:password@mydsn') The :class:`~sqlalchemy.engine.base.Engine` will ask the connection pool for a connection when the ``connect()`` or ``execute()`` methods are called. The default connection pool, :class:`~sqlalchemy.pool.QueuePool`, as well as the default connection pool used with SQLite, :class:`~sqlalchemy.pool.SingletonThreadPool`, will open connections to the database on an as-needed basis. As concurrent statements are executed, :class:`~sqlalchemy.pool.QueuePool` will grow its pool of connections to a default size of five, and will allow a default "overflow" of ten. Since the ``Engine`` is essentially "home base" for the connection pool, it follows that you should keep a single :class:`~sqlalchemy.engine.base.Engine` per database established within an application, rather than creating a new one for each connection. diff --git a/doc/build/ormtutorial.rst b/doc/build/ormtutorial.rst index 1190f3c336..423c235d24 100644 --- a/doc/build/ormtutorial.rst +++ b/doc/build/ormtutorial.rst @@ -501,9 +501,10 @@ The ``all()``, ``one()``, and ``first()`` methods of ``Query`` immediately issue .. sourcecode:: python+sql - {sql}>>> try: #doctest: +NORMALIZE_WHITESPACE + {sql}>>> from sqlalchemy.orm.exc import MultipleResultsFound + >>> try: #doctest: +NORMALIZE_WHITESPACE ... user = query.one() - ... except Exception, e: + ... except MultipleResultsFound, e: ... print e SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password FROM users @@ -514,9 +515,10 @@ The ``all()``, ``one()``, and ``first()`` methods of ``Query`` immediately issue .. sourcecode:: python+sql - {sql}>>> try: #doctest: +NORMALIZE_WHITESPACE + {sql}>>> from sqlalchemy.orm.exc import NoResultFound + >>> try: #doctest: +NORMALIZE_WHITESPACE ... user = query.filter(User.id == 99).one() - ... except Exception, e: + ... except NoResultFound, e: ... 
print e SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, users.password AS users_password FROM users @@ -682,7 +684,7 @@ Querying for Jack, we get just Jack back. No SQL is yet issued for Jack's addre LIMIT 2 OFFSET 0 ['jack'] - >>> jack + {stop}>>> jack Let's look at the ``addresses`` collection. Watch the SQL: @@ -709,14 +711,14 @@ If you want to reduce the number of queries (dramatically, in many cases), we ca anon_1.users_fullname AS anon_1_users_fullname, anon_1.users_password AS anon_1_users_password, addresses_1.id AS addresses_1_id, addresses_1.email_address AS addresses_1_email_address, addresses_1.user_id AS addresses_1_user_id - FROM (SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, - users.password AS users_password - FROM users WHERE users.name = ? - LIMIT 2 OFFSET 0) AS anon_1 LEFT OUTER JOIN addresses AS addresses_1 - ON anon_1.users_id = addresses_1.user_id ORDER BY addresses_1.id - ['jack'] - - >>> jack + FROM (SELECT users.id AS users_id, users.name AS users_name, users.fullname AS users_fullname, + users.password AS users_password + FROM users WHERE users.name = ? + LIMIT 2 OFFSET 0) AS anon_1 LEFT OUTER JOIN addresses AS addresses_1 + ON anon_1.users_id = addresses_1.user_id ORDER BY addresses_1.id + ['jack'] + + {stop}>>> jack >>> jack.addresses @@ -1199,7 +1201,7 @@ Usage is not too different from what we've been doing. Let's give Wendy some bl WHERE users.name = ? LIMIT 2 OFFSET 0 ['wendy'] - + {stop} >>> post = BlogPost("Wendy's Blog Post", "This is a test", wendy) >>> session.add(post) diff --git a/doc/build/reference/ext/serializer.rst b/doc/build/reference/ext/serializer.rst index 184e57f1bd..759ba35ce6 100644 --- a/doc/build/reference/ext/serializer.rst +++ b/doc/build/reference/ext/serializer.rst @@ -3,42 +3,6 @@ serializer :author: Mike Bayer -Serializer/Deserializer objects for usage with SQLAlchemy structures. - -Any SQLAlchemy structure, including Tables, Columns, expressions, mappers, -Query objects etc. can be serialized in a minimally-sized format, -and deserialized when given a Metadata and optional ScopedSession object -to use as context on the way out. - -Usage is nearly the same as that of the standard Python pickle module: - -.. sourcecode:: python+sql - - from sqlalchemy.ext.serializer import loads, dumps - metadata = MetaData(bind=some_engine) - Session = scoped_session(sessionmaker()) - - # ... define mappers - - query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey) - - # pickle the query - serialized = dumps(query) - - # unpickle. Pass in metadata + scoped_session - query2 = loads(serialized, metadata, Session) - - print query2.all() - -Similar restrictions as when using raw pickle apply; mapped classes must be -themselves be pickleable, meaning they are importable from a module-level -namespace. - -Note that instances of user-defined classes do not require this extension -in order to be pickled; these contain no references to engines, sessions -or expression constructs in the typical case and can be serialized directly. -This module is specifically for ORM and expression constructs. - .. 
automodule:: sqlalchemy.ext.serializer :members: :undoc-members: diff --git a/examples/adjacencytree/basic_tree.py b/examples/adjacencytree/basic_tree.py index ac149d68a9..dec03e5504 100644 --- a/examples/adjacencytree/basic_tree.py +++ b/examples/adjacencytree/basic_tree.py @@ -72,7 +72,7 @@ print "Flushing:" print "----------------------------" session = create_session() -session.save(node) +session.add(node) session.flush() print "\n\n\n----------------------------" @@ -113,7 +113,7 @@ print "Clearing session, selecting " print "tree new where node_id=%d:" % nodeid print "----------------------------" -session.clear() +session.expunge_all() t = session.query(TreeNode).filter(TreeNode.id==nodeid)[0] print "\n\n\n----------------------------" diff --git a/examples/association/basic_association.py b/examples/association/basic_association.py index 8078a2bb9c..a56d2383e6 100644 --- a/examples/association/basic_association.py +++ b/examples/association/basic_association.py @@ -10,13 +10,14 @@ the price paid by the user for that particular order, which is independent of the price on each Item (since those can change). """ -import logging from datetime import datetime -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, + String, DateTime, Numeric, ForeignKey, and_) +from sqlalchemy.orm import mapper, relation, create_session # Uncomment these to watch database activity. +#import logging #logging.basicConfig(format='%(message)s') #logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) @@ -72,10 +73,10 @@ mapper(OrderItem, orderitems, properties={ session = create_session() # create our catalog -session.save(Item('SA T-Shirt', 10.99)) -session.save(Item('SA Mug', 6.50)) -session.save(Item('SA Hat', 8.99)) -session.save(Item('MySQL Crowbar', 16.99)) +session.add(Item('SA T-Shirt', 10.99)) +session.add(Item('SA Mug', 6.50)) +session.add(Item('SA Hat', 8.99)) +session.add(Item('MySQL Crowbar', 16.99)) session.flush() # function to return items from the DB @@ -89,10 +90,10 @@ order = Order('john smith') order.order_items.append(OrderItem(item('SA Mug'))) order.order_items.append(OrderItem(item('MySQL Crowbar'), 10.99)) order.order_items.append(OrderItem(item('SA Hat'))) -session.save(order) +session.add(order) session.flush() -session.clear() +session.expunge_all() # query the order, print items order = session.query(Order).filter_by(customer_name='john smith').one() diff --git a/examples/association/proxied_association.py b/examples/association/proxied_association.py index ed3afd597c..3a78e3adfb 100644 --- a/examples/association/proxied_association.py +++ b/examples/association/proxied_association.py @@ -2,8 +2,9 @@ the usage of the associationproxy extension.""" from datetime import datetime -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, + String, DateTime, Float, ForeignKey, and_) +from sqlalchemy.orm import mapper, relation, create_session from sqlalchemy.ext.associationproxy import AssociationProxy engine = create_engine('sqlite://') @@ -82,7 +83,7 @@ order.items.append(item('SA Hat')) session.add(order) session.flush() -session.clear() +session.expunge_all() # query the order, print items order = session.query(Order).filter_by(customer_name='john smith').one() diff --git a/examples/collections/large_collection.py b/examples/collections/large_collection.py index 203aa6d230..2451ea5826 100644 --- 
a/examples/collections/large_collection.py +++ b/examples/collections/large_collection.py @@ -4,8 +4,10 @@ Also see the docs regarding the new "dynamic" relation option, which presents a more refined version of some of these patterns. """ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey +from sqlalchemy.orm import (mapper, relation, create_session, MapperExtension, + object_session) + meta = MetaData('sqlite://') meta.bind.echo = True @@ -56,11 +58,11 @@ org.members.append(Member('member one')) org.members.append(Member('member two')) org.members.append(Member('member three')) -sess.save(org) +sess.add(org) print "-------------------------\nflush one - save org + 3 members" sess.flush() -sess.clear() +sess.expunge_all() # reload. load the org and some child members print "-------------------------\nload subset of members" @@ -68,7 +70,7 @@ org = sess.query(Organization).get(org.org_id) members = org.member_query.filter(member_table.c.name.like('%member t%')).all() print members -sess.clear() +sess.expunge_all() # reload. create some more members and flush, without loading any of the original members @@ -80,7 +82,7 @@ org.members.append(Member('member six')) print "-------------------------\nflush two - save 3 more members" sess.flush() -sess.clear() +sess.expunge_all() org = sess.query(Organization).get(org.org_id) # now delete. note that this will explictily delete members four, five and six because they are in the session, diff --git a/examples/custom_attributes/custom_management.py b/examples/custom_attributes/custom_management.py index 707e182ba0..b8ea70dfd5 100644 --- a/examples/custom_attributes/custom_management.py +++ b/examples/custom_attributes/custom_management.py @@ -9,8 +9,10 @@ of "instance.__dict__". Note that the default collection implementations can be with a custom attribute system as well. 
""" -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, Text, + ForeignKey) +from sqlalchemy.orm import (mapper, relation, create_session, + InstrumentationManager) from sqlalchemy.orm.attributes import set_attribute, get_attribute, del_attribute, is_instrumented from sqlalchemy.orm.collections import collection_adapter @@ -170,10 +172,10 @@ if __name__ == '__main__': assert isinstance(a1.bs, MyCollection) sess = create_session() - sess.save(a1) + sess.add(a1) sess.flush() - sess.clear() + sess.expunge_all() a1 = sess.query(A).get(a1.id) @@ -184,7 +186,7 @@ if __name__ == '__main__': a1.bs.remove(a1.bs[0]) sess.flush() - sess.clear() + sess.expunge_all() a1 = sess.query(A).get(a1.id) assert len(a1.bs) == 1 diff --git a/examples/custom_attributes/listen_for_events.py b/examples/custom_attributes/listen_for_events.py index de28df5b3a..3264b02462 100644 --- a/examples/custom_attributes/listen_for_events.py +++ b/examples/custom_attributes/listen_for_events.py @@ -39,8 +39,8 @@ class AttributeListener(AttributeExtension): if __name__ == '__main__': - from sqlalchemy import * - from sqlalchemy.orm import * + from sqlalchemy import Column, Integer, String, ForeignKey + from sqlalchemy.orm import relation from sqlalchemy.ext.declarative import declarative_base class Base(object): @@ -82,4 +82,4 @@ if __name__ == '__main__': m1.related.mapped.append(MyMappedClass(data='m2')) del m1.data - \ No newline at end of file + diff --git a/examples/derived_attributes/attributes.py b/examples/derived_attributes/attributes.py index f53badc745..4a16189854 100644 --- a/examples/derived_attributes/attributes.py +++ b/examples/derived_attributes/attributes.py @@ -36,8 +36,8 @@ def hybrid_property(fget, fset=None, fdel=None): ### Example code -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer +from sqlalchemy.orm import mapper, create_session metadata = MetaData('sqlite://') metadata.bind.echo = True @@ -99,14 +99,14 @@ session = create_session() intervals = [Interval1(1,4), Interval1(3,15), Interval1(11,16)] for interval in intervals: - session.save(interval) - session.save(Interval2(interval.start, interval.length)) + session.add(interval) + session.add(Interval2(interval.start, interval.length)) session.flush() print "Clear the cache and do some queries" -session.clear() +session.expunge_all() for Interval in (Interval1, Interval2): print "Querying using interval class %s" % Interval.__name__ diff --git a/examples/dynamic_dict/dynamic_dict.py b/examples/dynamic_dict/dynamic_dict.py index b47a6d68f6..3846c29944 100644 --- a/examples/dynamic_dict/dynamic_dict.py +++ b/examples/dynamic_dict/dynamic_dict.py @@ -40,8 +40,8 @@ class MyProxyDict(object): self.collection.append(value) from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import create_engine, Column, Integer, String, ForeignKey +from sqlalchemy.orm import sessionmaker, dynamic_loader Base = declarative_base(engine=create_engine('sqlite://')) diff --git a/examples/elementtree/adjacency_list.py b/examples/elementtree/adjacency_list.py index 706cc88a07..58156dcb67 100644 --- a/examples/elementtree/adjacency_list.py +++ b/examples/elementtree/adjacency_list.py @@ -7,8 +7,9 @@ styles of persistence are identical, as is the structure of the main Document cl """ ################################# PART I - Imports/Coniguration 
########################################### -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey, + Unicode, and_) +from sqlalchemy.orm import mapper, relation, create_session, lazyload import sys, os, StringIO, re @@ -22,8 +23,7 @@ logging.basicConfig() #logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG) -from elementtree import ElementTree -from elementtree.ElementTree import Element, SubElement +from xml.etree import ElementTree meta = MetaData() meta.bind = 'sqlite://' @@ -155,16 +155,16 @@ session = create_session() # get ElementTree documents for file in ('test.xml', 'test2.xml', 'test3.xml'): - filename = os.path.join(os.path.dirname(sys.argv[0]), file) + filename = os.path.join(os.path.dirname(__file__), file) doc = ElementTree.parse(filename) - session.save(Document(file, doc)) + session.add(Document(file, doc)) print "\nSaving three documents...", line session.flush() print "Done." # clear session (to illustrate a full load), restore -session.clear() +session.expunge_all() print "\nFull text of document 'text.xml':", line document = session.query(Document).filter_by(filename="test.xml").first() diff --git a/examples/elementtree/optimized_al.py b/examples/elementtree/optimized_al.py index 8b50f6805d..c03acee1d4 100644 --- a/examples/elementtree/optimized_al.py +++ b/examples/elementtree/optimized_al.py @@ -6,8 +6,9 @@ which joins on only three tables. """ ################################# PART I - Imports/Configuration ########################################### -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey, + Unicode, and_) +from sqlalchemy.orm import mapper, relation, create_session, lazyload import sys, os, StringIO, re @@ -21,8 +22,7 @@ logging.basicConfig() #logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG) -from elementtree import ElementTree -from elementtree.ElementTree import Element, SubElement +from xml.etree import ElementTree meta = MetaData() meta.bind = 'sqlite://' @@ -164,16 +164,16 @@ session = create_session() # get ElementTree documents for file in ('test.xml', 'test2.xml', 'test3.xml'): - filename = os.path.join(os.path.dirname(sys.argv[0]), file) + filename = os.path.join(os.path.dirname(__file__), file) doc = ElementTree.parse(filename) - session.save(Document(file, doc)) + session.add(Document(file, doc)) print "\nSaving three documents...", line session.flush() print "Done." # clear session (to illustrate a full load), restore -session.clear() +session.expunge_all() print "\nFull text of document 'text.xml':", line document = session.query(Document).filter_by(filename="test.xml").first() diff --git a/examples/elementtree/pickle.py b/examples/elementtree/pickle.py index e7cd86984e..220bb22953 100644 --- a/examples/elementtree/pickle.py +++ b/examples/elementtree/pickle.py @@ -6,8 +6,9 @@ structure in distinct rows using two additional mapped entities. Note that the styles of persistence are identical, as is the structure of the main Document class. 
""" -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, String, + PickleType) +from sqlalchemy.orm import mapper, create_session import sys, os @@ -20,7 +21,7 @@ logging.basicConfig() # uncomment to show SQL statements and result sets #logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG) -from elementtree import ElementTree +from xml.etree import ElementTree engine = create_engine('sqlite://') meta = MetaData(engine) @@ -48,16 +49,16 @@ mapper(Document, documents) ###### time to test ! ######### # get ElementTree document -filename = os.path.join(os.path.dirname(sys.argv[0]), "test.xml") +filename = os.path.join(os.path.dirname(__file__), "test.xml") doc = ElementTree.parse(filename) # save to DB session = create_session() -session.save(Document("test.xml", doc)) +session.add(Document("test.xml", doc)) session.flush() # clear session (to illustrate a full load), restore -session.clear() +session.expunge_all() document = session.query(Document).filter_by(filename="test.xml").first() # print diff --git a/examples/graphs/graph1.py b/examples/graphs/graph1.py index 8188d7c870..6122e65f1f 100644 --- a/examples/graphs/graph1.py +++ b/examples/graphs/graph1.py @@ -1,7 +1,7 @@ """a directed graph example.""" -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey +from sqlalchemy.orm import mapper, relation, create_session import logging logging.basicConfig() @@ -70,10 +70,10 @@ n7.add_neighbor(n2) n1.add_neighbor(n3) n2.add_neighbor(n1) -[session.save(x) for x in [n1, n2, n3, n4, n5, n6, n7]] +[session.add(x) for x in [n1, n2, n3, n4, n5, n6, n7]] session.flush() -session.clear() +session.expunge_all() n2 = session.query(Node).get(2) n3 = session.query(Node).get(3) diff --git a/examples/nested_sets/nested_sets.py b/examples/nested_sets/nested_sets.py index e83a263e93..fbb4817590 100644 --- a/examples/nested_sets/nested_sets.py +++ b/examples/nested_sets/nested_sets.py @@ -4,9 +4,9 @@ http://www.intelligententerprise.com/001020/celko.jhtml """ -from sqlalchemy import * -from sqlalchemy.orm import * -from sqlalchemy.orm import attributes +from sqlalchemy import (create_engine, Column, Integer, String, select, case, + func) +from sqlalchemy.orm import sessionmaker, MapperExtension, aliased from sqlalchemy.ext.declarative import declarative_base engine = create_engine('sqlite://', echo=True) diff --git a/examples/pickle/custom_pickler.py b/examples/pickle/custom_pickler.py index 1c88c88e82..79a8b3fa39 100644 --- a/examples/pickle/custom_pickler.py +++ b/examples/pickle/custom_pickler.py @@ -1,7 +1,8 @@ """illustrates one way to use a custom pickler that is session-aware.""" -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, String, PickleType +from sqlalchemy.orm import (mapper, create_session, MapperExtension, + class_mapper, EXT_CONTINUE) from sqlalchemy.orm.session import object_session from cStringIO import StringIO from pickle import Pickler, Unpickler @@ -28,7 +29,7 @@ class MyPickler(object): if getattr(obj, "id", None) is None: sess = MyPickler.sessions.current newsess = create_session(bind=sess.connection(class_mapper(Bar))) - newsess.save(obj) + newsess.add(obj) newsess.flush() key = "%s:%s" % (type(obj).__name__, obj.id) return key @@ -74,9 +75,9 @@ mapper(Bar, bar_table) sess = create_session() f = Foo() f.bar = Bar('some bar') -sess.save(f) +sess.add(f) sess.flush() 
-sess.clear() +sess.expunge_all() del MyPickler.sessions.current diff --git a/examples/poly_assoc/poly_assoc.py b/examples/poly_assoc/poly_assoc.py index 3823f4b7f0..6e09935cb1 100644 --- a/examples/poly_assoc/poly_assoc.py +++ b/examples/poly_assoc/poly_assoc.py @@ -20,8 +20,9 @@ the associated target object from those which associate with it. """ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, String, and_ +from sqlalchemy.orm import (mapper, relation, create_session, class_mapper, + backref) metadata = MetaData('sqlite://') @@ -124,11 +125,11 @@ a3 = o1.create_address() a3.street = '444 park ave.' sess = create_session() -sess.save(u1) -sess.save(o1) +sess.add(u1) +sess.add(o1) sess.flush() -sess.clear() +sess.expunge_all() # query objects, get their addresses diff --git a/examples/poly_assoc/poly_assoc_fk.py b/examples/poly_assoc/poly_assoc_fk.py index 32600fcbf2..f5dda2e395 100644 --- a/examples/poly_assoc/poly_assoc_fk.py +++ b/examples/poly_assoc/poly_assoc_fk.py @@ -19,8 +19,8 @@ For a more genericized version of this example, see poly_assoc_generic.py. """ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey +from sqlalchemy.orm import mapper, relation, create_session, class_mapper metadata = MetaData('sqlite://') @@ -137,11 +137,11 @@ o1.address = Address() o1.address.street = '444 park ave.' sess = create_session() -sess.save(u1) -sess.save(o1) +sess.add(u1) +sess.add(o1) sess.flush() -sess.clear() +sess.expunge_all() # query objects, get their addresses diff --git a/examples/poly_assoc/poly_assoc_generic.py b/examples/poly_assoc/poly_assoc_generic.py index 8cc3edf609..233df51958 100644 --- a/examples/poly_assoc/poly_assoc_generic.py +++ b/examples/poly_assoc/poly_assoc_generic.py @@ -6,8 +6,8 @@ function "association" which creates a new polymorphic association "interface". """ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey +from sqlalchemy.orm import mapper, relation, create_session, class_mapper metadata = MetaData('sqlite://') @@ -133,11 +133,11 @@ o1.address = Address() o1.address.street = '444 park ave.' 
sess = create_session() -sess.save(u1) -sess.save(o1) +sess.add(u1) +sess.add(o1) sess.flush() -sess.clear() +sess.expunge_all() # query objects, get their addresses diff --git a/examples/polymorph/concrete.py b/examples/polymorph/concrete.py index 5f12e9a3d7..c50513b555 100644 --- a/examples/polymorph/concrete.py +++ b/examples/polymorph/concrete.py @@ -1,5 +1,5 @@ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String +from sqlalchemy.orm import mapper, create_session, polymorphic_union metadata = MetaData() @@ -56,11 +56,11 @@ m1 = Manager("pointy haired boss", "manager1") e1 = Engineer("wally", "engineer1") e2 = Engineer("dilbert", "engineer2") -session.save(m1) -session.save(e1) -session.save(e2) +session.add(m1) +session.add(e1) +session.add(e2) session.flush() -employees = session.query(Employee).select() +employees = session.query(Employee) print [e for e in employees] diff --git a/examples/polymorph/polymorph.py b/examples/polymorph/polymorph.py index 4f3aeb7d26..ea56ffed1f 100644 --- a/examples/polymorph/polymorph.py +++ b/examples/polymorph/polymorph.py @@ -1,5 +1,5 @@ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey +from sqlalchemy.orm import mapper, relation, create_session import sets # this example illustrates a polymorphic load of two classes @@ -58,12 +58,12 @@ class Company(object): person_join = people.outerjoin(engineers).outerjoin(managers) -person_mapper = mapper(Person, people, select_table=person_join,polymorphic_on=people.c.type, polymorphic_identity='person') +person_mapper = mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person') mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engineer') mapper(Manager, managers, inherits=person_mapper, polymorphic_identity='manager') mapper(Company, companies, properties={ - 'employees': relation(Person, lazy=False, private=True, backref='company') + 'employees': relation(Person, lazy=False, backref='company', cascade="all, delete-orphan") }) session = create_session(echo_uow=False) @@ -73,30 +73,30 @@ c.employees.append(Engineer(name='dilbert', status='BBA', engineer_name='enginee c.employees.append(Person(name='joesmith', status='HHH')) c.employees.append(Engineer(name='wally', status='CGG', engineer_name='engineer2', primary_language='python')) c.employees.append(Manager(name='jsmith', status='ABA', manager_name='manager2')) -session.save(c) +session.add(c) print session.new session.flush() -session.clear() +session.expunge_all() c = session.query(Company).get(1) for e in c.employees: - print e, e._instance_key, e.company + print e, e._sa_instance_state.key, e.company assert sets.Set([e.name for e in c.employees]) == sets.Set(['pointy haired boss', 'dilbert', 'joesmith', 'wally', 'jsmith']) print "\n" -dilbert = session.query(Person).get_by(name='dilbert') -dilbert2 = session.query(Engineer).get_by(name='dilbert') +dilbert = session.query(Person).filter_by(name='dilbert').one() +dilbert2 = session.query(Engineer).filter_by(name='dilbert').one() assert dilbert is dilbert2 dilbert.engineer_name = 'hes dibert!' 
session.flush() -session.clear() +session.expunge_all() c = session.query(Company).get(1) for e in c.employees: - print e, e._instance_key + print e, e._sa_instance_state.key session.delete(c) session.flush() diff --git a/examples/polymorph/single.py b/examples/polymorph/single.py index dfc4264169..5a4f5c8413 100644 --- a/examples/polymorph/single.py +++ b/examples/polymorph/single.py @@ -1,5 +1,5 @@ -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import MetaData, Table, Column, Integer, String, ForeignKey +from sqlalchemy.orm import mapper, relation, create_session metadata = MetaData('sqlite://') metadata.bind.echo = 'debug' @@ -48,7 +48,7 @@ engineer_mapper = mapper(Engineer, inherits=person_mapper, polymorphic_identity= mapper(Company, companies, properties={ - 'employees': relation(Person, lazy=True, private=True, backref='company') + 'employees': relation(Person, lazy=True, backref='company') }) session = create_session() @@ -58,29 +58,29 @@ c.employees.append(Engineer(name='dilbert', status='BBA', engineer_name='enginee c.employees.append(Person(name='joesmith', status='HHH')) c.employees.append(Engineer(name='wally', status='CGG', engineer_name='engineer2', primary_language='python')) c.employees.append(Manager(name='jsmith', status='ABA', manager_name='manager2')) -session.save(c) +session.add(c) session.flush() -session.clear() +session.expunge_all() c = session.query(Company).get(1) for e in c.employees: - print e, e._instance_key, e.company + print e, e._sa_instance_state.key, e.company print "\n" -dilbert = session.query(Person).get_by(name='dilbert') -dilbert2 = session.query(Engineer).get_by(name='dilbert') +dilbert = session.query(Person).filter_by(name='dilbert').one() +dilbert2 = session.query(Engineer).filter_by(name='dilbert').one() assert dilbert is dilbert2 dilbert.engineer_name = 'hes dibert!' session.flush() -session.clear() +session.expunge_all() c = session.query(Company).get(1) for e in c.employees: - print e, e._instance_key + print e, e._sa_instance_state.key session.delete(c) session.flush() diff --git a/examples/postgis/postgis.py b/examples/postgis/postgis.py index 802aa0ea90..c482d82560 100644 --- a/examples/postgis/postgis.py +++ b/examples/postgis/postgis.py @@ -28,11 +28,10 @@ Includes: * a standalone operator example. The implementation is limited to only public, well known -and simple to use extension points, with the exception -of one temporary monkeypatch in the DDL extension. -Future SQLAlchemy expansion points may allow more seamless -integration of some features. - +and simple to use extension points. Future SQLAlchemy +expansion points may allow more seamless integration of +some features. + """ from sqlalchemy.orm.interfaces import AttributeExtension @@ -227,8 +226,9 @@ def GISColumn(*args, **kw): # illustrate usage if __name__ == '__main__': - from sqlalchemy import * - from sqlalchemy.orm import * + from sqlalchemy import (create_engine, MetaData, Column, Integer, String, + func, literal, select) + from sqlalchemy.orm import sessionmaker, column_property from sqlalchemy.ext.declarative import declarative_base engine = create_engine('postgres://scott:tiger@localhost/gistest', echo=True) diff --git a/examples/sharding/attribute_shard.py b/examples/sharding/attribute_shard.py index 0a9e992a32..2f03a5a344 100644 --- a/examples/sharding/attribute_shard.py +++ b/examples/sharding/attribute_shard.py @@ -18,8 +18,9 @@ To set up a sharding system, you need: """ # step 1. 
imports -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, + String, ForeignKey, Float, DateTime) +from sqlalchemy.orm import sessionmaker, mapper, relation from sqlalchemy.orm.shard import ShardedSession from sqlalchemy.sql import operators from sqlalchemy import sql @@ -27,10 +28,10 @@ import datetime # step 2. databases echo = True -db1 = create_engine('sqlite:///shard1.db', echo=echo) -db2 = create_engine('sqlite:///shard2.db', echo=echo) -db3 = create_engine('sqlite:///shard3.db', echo=echo) -db4 = create_engine('sqlite:///shard4.db', echo=echo) +db1 = create_engine('sqlite://', echo=echo) +db2 = create_engine('sqlite://', echo=echo) +db3 = create_engine('sqlite://', echo=echo) +db4 = create_engine('sqlite://', echo=echo) # step 3. create session function. this binds the shard ids @@ -183,10 +184,10 @@ quito.reports.append(Report(85)) sess = create_session() for c in [tokyo, newyork, toronto, london, dublin, brasilia, quito]: - sess.save(c) + sess.add(c) sess.flush() -sess.clear() +sess.expunge_all() t = sess.query(WeatherLocation).get(tokyo.id) assert t.city == tokyo.city @@ -195,6 +196,6 @@ assert t.reports[0].temperature == 80.0 north_american_cities = sess.query(WeatherLocation).filter(WeatherLocation.continent == 'North America') assert [c.city for c in north_american_cities] == ['New York', 'Toronto'] -asia_and_europe = sess.query(WeatherLocation).filter(WeatherLocation.continent.in_('Europe', 'Asia')) +asia_and_europe = sess.query(WeatherLocation).filter(WeatherLocation.continent.in_(['Europe', 'Asia'])) assert set([c.city for c in asia_and_europe]) == set(['Tokyo', 'London', 'Dublin']) diff --git a/examples/vertical/dictlike-polymorphic.py b/examples/vertical/dictlike-polymorphic.py index 4065337c2e..6180fa3034 100644 --- a/examples/vertical/dictlike-polymorphic.py +++ b/examples/vertical/dictlike-polymorphic.py @@ -29,8 +29,8 @@ Note: Something much like 'comparable_property' is slated for inclusion in a future version of SQLAlchemy. """ -from sqlalchemy.orm.interfaces import PropComparator, MapperProperty -from sqlalchemy.orm import session as sessionlib, comparable_property +from sqlalchemy.orm.interfaces import PropComparator +from sqlalchemy.orm import comparable_property # Using the VerticalPropertyDictMixin from the base example from dictlike import VerticalPropertyDictMixin @@ -131,7 +131,9 @@ class PolymorphicVerticalProperty(object): if __name__ == '__main__': - from sqlalchemy import * + from sqlalchemy import (MetaData, Table, Column, Integer, Unicode, + ForeignKey, UnicodeText, and_, not_, or_, String, Boolean, cast, text, + null, case) from sqlalchemy.orm import mapper, relation, create_session from sqlalchemy.orm.collections import attribute_mapped_collection @@ -198,9 +200,9 @@ if __name__ == '__main__': stoat[u'cuteness'] = 7 stoat[u'weasel-like'] = True - session.save(stoat) + session.add(stoat) session.flush() - session.clear() + session.expunge_all() critter = session.query(Animal).filter(Animal.name == u'stoat').one() print critter[u'color'] @@ -217,14 +219,14 @@ if __name__ == '__main__': marten[u'cuteness'] = 5 marten[u'weasel-like'] = True marten[u'poisonous'] = False - session.save(marten) + session.add(marten) shrew = Animal(u'shrew') shrew[u'cuteness'] = 5 shrew[u'weasel-like'] = False shrew[u'poisonous'] = True - session.save(shrew) + session.add(shrew) session.flush() q = (session.query(Animal). 
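The example-file hunks above all converge on the 0.5 Session API. A minimal standalone sketch of that pattern, using only the calls shown in the diff (session.add() in place of the removed session.save(), session.expunge_all() in place of session.clear()), might look like the following; the Stuff table and class are hypothetical and not part of the patch:

from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String
from sqlalchemy.orm import mapper, create_session

engine = create_engine('sqlite://')
meta = MetaData(engine)
stuff_table = Table('stuff', meta,
    Column('id', Integer, primary_key=True),
    Column('name', String(50)))
meta.create_all()

class Stuff(object):
    def __init__(self, name):
        self.name = name

mapper(Stuff, stuff_table)

session = create_session()
session.add(Stuff('example'))     # add() replaces the removed session.save()
session.flush()
session.expunge_all()             # expunge_all() replaces session.clear()
print session.query(Stuff).filter_by(name='example').one().name
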
diff --git a/examples/vertical/dictlike.py b/examples/vertical/dictlike.py index 5f478d7d05..1e1635ed76 100644 --- a/examples/vertical/dictlike.py +++ b/examples/vertical/dictlike.py @@ -122,7 +122,8 @@ class VerticalPropertyDictMixin(object): if __name__ == '__main__': - from sqlalchemy import * + from sqlalchemy import (MetaData, Table, Column, Integer, Unicode, + ForeignKey, UnicodeText, and_, not_) from sqlalchemy.orm import mapper, relation, create_session from sqlalchemy.orm.collections import attribute_mapped_collection @@ -183,9 +184,9 @@ if __name__ == '__main__': # stoat.facts collection: print stoat.facts[u'color'] - session.save(stoat) + session.add(stoat) session.flush() - session.clear() + session.expunge_all() critter = session.query(Animal).filter(Animal.name == u'stoat').one() print critter[u'color'] @@ -201,17 +202,17 @@ if __name__ == '__main__': marten = Animal(u'marten') marten[u'color'] = u'brown' marten[u'cuteness'] = u'somewhat' - session.save(marten) + session.add(marten) shrew = Animal(u'shrew') shrew[u'cuteness'] = u'somewhat' shrew[u'poisonous-part'] = u'saliva' - session.save(shrew) + session.add(shrew) loris = Animal(u'slow loris') loris[u'cuteness'] = u'fairly' loris[u'poisonous-part'] = u'elbows' - session.save(loris) + session.add(loris) session.flush() q = (session.query(Animal). diff --git a/examples/vertical/vertical.py b/examples/vertical/vertical.py index 6c3a61919e..4a8bf77871 100644 --- a/examples/vertical/vertical.py +++ b/examples/vertical/vertical.py @@ -2,8 +2,9 @@ represented in distinct database rows. This allows objects to be created with dynamically changing fields that are all persisted in a normalized fashion.""" -from sqlalchemy import * -from sqlalchemy.orm import * +from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, String, + ForeignKey, PickleType, DateTime, and_) +from sqlalchemy.orm import mapper, relation, sessionmaker, scoped_session from sqlalchemy.orm.collections import mapped_collection import datetime diff --git a/lib/sqlalchemy/dialects/access/base.py b/lib/sqlalchemy/dialects/access/base.py index de4af6bcb7..5b55eca97d 100644 --- a/lib/sqlalchemy/dialects/access/base.py +++ b/lib/sqlalchemy/dialects/access/base.py @@ -328,6 +328,20 @@ class AccessDialect(default.DefaultDialect): class AccessCompiler(compiler.SQLCompiler): + extract_map = compiler.SQLCompiler.extract_map.copy() + extract_map.update ({ + 'month': 'm', + 'day': 'd', + 'year': 'yyyy', + 'second': 's', + 'hour': 'h', + 'doy': 'y', + 'minute': 'n', + 'quarter': 'q', + 'dow': 'w', + 'week': 'ww' + }) + def visit_select_precolumns(self, select): """Access puts TOP, it's version of LIMIT here """ s = select.distinct and "DISTINCT " or "" @@ -375,6 +389,9 @@ class AccessCompiler(compiler.SQLCompiler): return (self.process(join.left, asfrom=True) + (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN ") + \ self.process(join.right, asfrom=True) + " ON " + self.process(join.onclause)) + def visit_extract(self, extract): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART("%s", %s)' % (field, self.process(extract.expr)) class AccessSchemaGenerator(compiler.SchemaGenerator): def get_column_specification(self, column, **kwargs): diff --git a/lib/sqlalchemy/dialects/mssql/base.py b/lib/sqlalchemy/dialects/mssql/base.py index cfaf614890..1ce0209bf3 100644 --- a/lib/sqlalchemy/dialects/mssql/base.py +++ b/lib/sqlalchemy/dialects/mssql/base.py @@ -241,7 +241,36 @@ MS_2008_VERSION = (10,) MS_2005_VERSION = (9,) MS_2000_VERSION = 
(8,) -MSSQL_RESERVED_WORDS = set(['function']) +RESERVED_WORDS = set( + ['add', 'all', 'alter', 'and', 'any', 'as', 'asc', 'authorization', + 'backup', 'begin', 'between', 'break', 'browse', 'bulk', 'by', 'cascade', + 'case', 'check', 'checkpoint', 'close', 'clustered', 'coalesce', + 'collate', 'column', 'commit', 'compute', 'constraint', 'contains', + 'containstable', 'continue', 'convert', 'create', 'cross', 'current', + 'current_date', 'current_time', 'current_timestamp', 'current_user', + 'cursor', 'database', 'dbcc', 'deallocate', 'declare', 'default', + 'delete', 'deny', 'desc', 'disk', 'distinct', 'distributed', 'double', + 'drop', 'dump', 'else', 'end', 'errlvl', 'escape', 'except', 'exec', + 'execute', 'exists', 'exit', 'external', 'fetch', 'file', 'fillfactor', + 'for', 'foreign', 'freetext', 'freetexttable', 'from', 'full', + 'function', 'goto', 'grant', 'group', 'having', 'holdlock', 'identity', + 'identity_insert', 'identitycol', 'if', 'in', 'index', 'inner', 'insert', + 'intersect', 'into', 'is', 'join', 'key', 'kill', 'left', 'like', + 'lineno', 'load', 'merge', 'national', 'nocheck', 'nonclustered', 'not', + 'null', 'nullif', 'of', 'off', 'offsets', 'on', 'open', 'opendatasource', + 'openquery', 'openrowset', 'openxml', 'option', 'or', 'order', 'outer', + 'over', 'percent', 'pivot', 'plan', 'precision', 'primary', 'print', + 'proc', 'procedure', 'public', 'raiserror', 'read', 'readtext', + 'reconfigure', 'references', 'replication', 'restore', 'restrict', + 'return', 'revert', 'revoke', 'right', 'rollback', 'rowcount', + 'rowguidcol', 'rule', 'save', 'schema', 'securityaudit', 'select', + 'session_user', 'set', 'setuser', 'shutdown', 'some', 'statistics', + 'system_user', 'table', 'tablesample', 'textsize', 'then', 'to', 'top', + 'tran', 'transaction', 'trigger', 'truncate', 'tsequal', 'union', + 'unique', 'unpivot', 'update', 'updatetext', 'use', 'user', 'values', + 'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with', + 'writetext', + ]) class MSNumeric(sqltypes.Numeric): @@ -859,6 +888,14 @@ class MSSQLCompiler(compiler.SQLCompiler): } ) + extract_map = compiler.SQLCompiler.extract_map.copy() + extract_map.update ({ + 'doy': 'dayofyear', + 'dow': 'weekday', + 'milliseconds': 'millisecond', + 'microseconds': 'microsecond' + }) + def __init__(self, *args, **kwargs): super(MSSQLCompiler, self).__init__(*args, **kwargs) self.tablealiases = {} @@ -927,9 +964,9 @@ class MSSQLCompiler(compiler.SQLCompiler): kwargs['mssql_aliased'] = True return super(MSSQLCompiler, self).visit_alias(alias, **kwargs) - def visit_savepoint(self, savepoint_stmt): - util.warn("Savepoint support in mssql is experimental and may lead to data loss.") - return "SAVE TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt) + def visit_extract(self, extract): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART("%s", %s)' % (field, self.process(extract.expr)) def visit_rollback_to_savepoint(self, savepoint_stmt): return "ROLLBACK TRANSACTION %s" % self.preparer.format_savepoint(savepoint_stmt) @@ -1051,7 +1088,7 @@ class MSDDLCompiler(compiler.DDLCompiler): class MSIdentifierPreparer(compiler.IdentifierPreparer): - reserved_words = compiler.IdentifierPreparer.reserved_words.union(MSSQL_RESERVED_WORDS) + reserved_words = RESERVED_WORDS def __init__(self, dialect): super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[', final_quote=']') @@ -1060,6 +1097,10 @@ class MSIdentifierPreparer(compiler.IdentifierPreparer): #TODO: determine MSSQL's 
escaping rules return value + def quote_schema(self, schema, force=True): + """Prepare a quoted table and schema name.""" + result = '.'.join([self.quote(x, force) for x in schema.split('.')]) + return result class MSDialect(default.DefaultDialect): name = 'mssql' @@ -1097,10 +1138,10 @@ class MSDialect(default.DefaultDialect): self.max_identifier_length super(MSDialect, self).__init__(**opts) - def do_begin(self, connection): - cursor = connection.cursor() - cursor.execute("SET IMPLICIT_TRANSACTIONS OFF") - cursor.execute("BEGIN TRANSACTION") + def do_savepoint(self, connection, name): + util.warn("Savepoint support in mssql is experimental and may lead to data loss.") + connection.execute("IF @@TRANCOUNT = 0 BEGIN TRANSACTION") + connection.execute("SAVE TRANSACTION %s" % name) def do_release_savepoint(self, connection, name): pass diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py index eb4fb2d731..fd5ba7348e 100644 --- a/lib/sqlalchemy/dialects/mysql/base.py +++ b/lib/sqlalchemy/dialects/mysql/base.py @@ -1300,6 +1300,11 @@ class MySQLCompiler(compiler.SQLCompiler): "utc_timestamp":"UTC_TIMESTAMP" }) + extract_map = compiler.SQLCompiler.extract_map.copy() + extract_map.update ({ + 'milliseconds': 'millisecond', + }) + def visit_typeclause(self, typeclause): type_ = typeclause.type.dialect_impl(self.dialect) if isinstance(type_, MSInteger): diff --git a/lib/sqlalchemy/dialects/postgres/base.py b/lib/sqlalchemy/dialects/postgres/base.py index 448d3b90ae..3942e0d570 100644 --- a/lib/sqlalchemy/dialects/postgres/base.py +++ b/lib/sqlalchemy/dialects/postgres/base.py @@ -277,6 +277,11 @@ class PGCompiler(compiler.SQLCompiler): else: return text + def visit_extract(self, extract, **kwargs): + field = self.extract_map.get(extract.field, extract.field) + return "EXTRACT(%s FROM %s::timestamp)" % ( + field, self.process(extract.expr)) + class PGDDLCompiler(compiler.DDLCompiler): def get_column_specification(self, column, **kwargs): colspec = self.preparer.format_column(column) @@ -326,6 +331,7 @@ class PGDDLCompiler(compiler.DDLCompiler): text += " WHERE " + inlined_clause return text + class PGDefaultRunner(base.DefaultRunner): def __init__(self, context): base.DefaultRunner.__init__(self, context) diff --git a/lib/sqlalchemy/dialects/sqlite/base.py b/lib/sqlalchemy/dialects/sqlite/base.py index def9b6f899..0c7400c2bd 100644 --- a/lib/sqlalchemy/dialects/sqlite/base.py +++ b/lib/sqlalchemy/dialects/sqlite/base.py @@ -177,12 +177,34 @@ class SQLiteCompiler(compiler.SQLCompiler): } ) + extract_map = compiler.SQLCompiler.extract_map.copy() + extract_map.update({ + 'month': '%m', + 'day': '%d', + 'year': '%Y', + 'second': '%S', + 'hour': '%H', + 'doy': '%j', + 'minute': '%M', + 'epoch': '%s', + 'dow': '%w', + 'week': '%W' + }) + def visit_cast(self, cast, **kwargs): if self.dialect.supports_cast: return super(SQLiteCompiler, self).visit_cast(cast) else: return self.process(cast.clause) + def visit_extract(self, extract): + try: + return "CAST(STRFTIME('%s', %s) AS INTEGER)" % ( + self.extract_map[extract.field], self.process(extract.expr)) + except KeyError: + raise exc.ArgumentError( + "%s is not a valid extract argument." 
% extract.field) + def limit_clause(self, select): text = "" if select._limit is not None: @@ -227,7 +249,7 @@ class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): 'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive', 'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob', 'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index', - 'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is', + 'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is', 'isnull', 'join', 'key', 'left', 'like', 'limit', 'match', 'natural', 'not', 'notnull', 'null', 'of', 'offset', 'on', 'or', 'order', 'outer', 'plan', 'pragma', 'primary', 'query', 'raise', 'references', diff --git a/lib/sqlalchemy/dialects/sybase/base.py b/lib/sqlalchemy/dialects/sybase/base.py index 16b739ae96..4204530ae2 100644 --- a/lib/sqlalchemy/dialects/sybase/base.py +++ b/lib/sqlalchemy/dialects/sybase/base.py @@ -217,6 +217,13 @@ class SybaseSQLCompiler(compiler.SQLCompiler): sql_operators.mod: lambda x, y: "MOD(%s, %s)" % (x, y), }) + extract_map = compiler.SQLCompiler.extract_map.copy() + extract_map.update ({ + 'doy': 'dayofyear', + 'dow': 'weekday', + 'milliseconds': 'millisecond' + }) + def bindparam_string(self, name): res = super(SybaseSQLCompiler, self).bindparam_string(name) if name.lower().startswith('literal'): @@ -270,6 +277,10 @@ class SybaseSQLCompiler(compiler.SQLCompiler): res = "CAST(%s AS %s)" % (res, self.process(cast.typeclause)) return res + def visit_extract(self, extract): + field = self.extract_map.get(extract.field, extract.field) + return 'DATEPART("%s", %s)' % (field, self.process(extract.expr)) + def for_update_clause(self, select): # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which SQLAlchemy doesn't use return '' diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py index 084ec2699a..e22928b487 100644 --- a/lib/sqlalchemy/ext/declarative.py +++ b/lib/sqlalchemy/ext/declarative.py @@ -552,6 +552,12 @@ class DeclarativeMeta(type): _undefer_column_name(key, value) cls.__table__.append_column(value) cls.__mapper__.add_property(key, value) + elif isinstance(value, ColumnProperty): + for col in value.columns: + if isinstance(col, Column) and col.table is None: + _undefer_column_name(key, col) + cls.__table__.append_column(col) + cls.__mapper__.add_property(key, value) elif isinstance(value, MapperProperty): cls.__mapper__.add_property(key, _deferred_relation(cls, value)) else: diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index bf3e3cc864..8a1de2b507 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -614,11 +614,6 @@ def mapper(class_, local_table=None, *args, **params): alternative, see the method `populate_existing()` on :class:`~sqlalchemy.orm.query.Query`. - allow_column_override - If True, allows the usage of a ``relation()`` which has the - same name as a column in the mapped table. The table column - will no longer be mapped. - allow_null_pks Indicates that composite primary keys where one or more (but not all) columns contain NULL is a valid primary key. 
Primary keys which diff --git a/lib/sqlalchemy/orm/dynamic.py b/lib/sqlalchemy/orm/dynamic.py index 4bc3f58c2e..3d31a686a2 100644 --- a/lib/sqlalchemy/orm/dynamic.py +++ b/lib/sqlalchemy/orm/dynamic.py @@ -28,9 +28,9 @@ class DynaLoader(strategies.AbstractRelationLoader): strategies._register_attribute(self, mapper, useobject=True, - impl_class=DynamicAttributeImpl, - target_mapper=self.parent_property.mapper, - order_by=self.parent_property.order_by, + impl_class=DynamicAttributeImpl, + target_mapper=self.parent_property.mapper, + order_by=self.parent_property.order_by, query_class=self.parent_property.query_class ) @@ -43,13 +43,15 @@ class DynamicAttributeImpl(attributes.AttributeImpl): uses_objects = True accepts_scalar_loader = False - def __init__(self, class_, key, typecallable, + def __init__(self, class_, key, typecallable, target_mapper, order_by, query_class=None, **kwargs): super(DynamicAttributeImpl, self).__init__(class_, key, typecallable, **kwargs) self.target_mapper = target_mapper self.order_by = order_by if not query_class: self.query_class = AppenderQuery + elif AppenderMixin in query_class.mro(): + self.query_class = query_class else: self.query_class = mixin_user_query(query_class) @@ -87,7 +89,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl): ext.remove(state, value, initiator or self) def _modified_event(self, state): - + if self.key not in state.committed_state: state.committed_state[self.key] = CollectionHistory(self, state) @@ -108,7 +110,7 @@ class DynamicAttributeImpl(attributes.AttributeImpl): collection_history = self._modified_event(state) new_values = list(iterable) - + if _state_has_identity(state): old_collection = list(self.get(state)) else: @@ -128,31 +130,31 @@ class DynamicAttributeImpl(attributes.AttributeImpl): c = state.committed_state[self.key] else: c = CollectionHistory(self, state) - + if not passive: return CollectionHistory(self, state, apply_to=c) else: return c - + def append(self, state, value, initiator, passive=False): if initiator is not self: self.fire_append_event(state, value, initiator) - + def remove(self, state, value, initiator, passive=False): if initiator is not self: self.fire_remove_event(state, value, initiator) class DynCollectionAdapter(object): """the dynamic analogue to orm.collections.CollectionAdapter""" - + def __init__(self, attr, owner_state, data): self.attr = attr self.state = owner_state self.data = data - + def __iter__(self): return iter(self.data) - + def append_with_event(self, item, initiator=None): self.attr.append(self.state, item, initiator) @@ -161,10 +163,10 @@ class DynCollectionAdapter(object): def append_without_event(self, item): pass - + def remove_without_event(self, item): pass - + class AppenderMixin(object): query_class = None @@ -172,7 +174,7 @@ class AppenderMixin(object): Query.__init__(self, attr.target_mapper, None) self.instance = state.obj() self.attr = attr - + def __session(self): sess = object_session(self.instance) if sess is not None and self.autoflush and sess.autoflush and self.instance in sess: @@ -181,11 +183,11 @@ class AppenderMixin(object): return None else: return sess - + def session(self): return self.__session() session = property(session, lambda s, x:None) - + def __iter__(self): sess = self.__session() if sess is None: @@ -203,7 +205,7 @@ class AppenderMixin(object): passive=True).added_items.__getitem__(index) else: return self._clone(sess).__getitem__(index) - + def count(self): sess = self.__session() if sess is None: @@ -253,7 +255,7 @@ def 
mixin_user_query(cls): name = 'Appender' + cls.__name__ return type(name, (AppenderMixin, cls), {'query_class': cls}) -class CollectionHistory(object): +class CollectionHistory(object): """Overrides AttributeHistory to receive append/remove events directly.""" def __init__(self, attr, state, apply_to=None): @@ -268,4 +270,4 @@ class CollectionHistory(object): self.deleted_items = [] self.added_items = [] self.unchanged_items = [] - + diff --git a/lib/sqlalchemy/orm/evaluator.py b/lib/sqlalchemy/orm/evaluator.py index e788913531..05af5d8ca7 100644 --- a/lib/sqlalchemy/orm/evaluator.py +++ b/lib/sqlalchemy/orm/evaluator.py @@ -72,7 +72,7 @@ class EvaluatorCompiler(object): if operator is operators.is_: def evaluate(obj): return eval_left(obj) == eval_right(obj) - if operator is operators.isnot: + elif operator is operators.isnot: def evaluate(obj): return eval_left(obj) != eval_right(obj) elif operator in _straight_ops: diff --git a/lib/sqlalchemy/orm/interfaces.py b/lib/sqlalchemy/orm/interfaces.py index 3b7507def6..d36f51194e 100644 --- a/lib/sqlalchemy/orm/interfaces.py +++ b/lib/sqlalchemy/orm/interfaces.py @@ -682,6 +682,27 @@ class PropertyOption(MapperOption): else: return None + def __getstate__(self): + d = self.__dict__.copy() + d['key'] = ret = [] + for token in util.to_list(self.key): + if isinstance(token, PropComparator): + ret.append((token.mapper.class_, token.key)) + else: + ret.append(token) + return d + + def __setstate__(self, state): + ret = [] + for key in state['key']: + if isinstance(key, tuple): + cls, propkey = key + ret.append(getattr(cls, propkey)) + else: + ret.append(key) + state['key'] = tuple(ret) + self.__dict__ = state + def __get_paths(self, query, raiseerr): path = None entity = None diff --git a/lib/sqlalchemy/orm/properties.py b/lib/sqlalchemy/orm/properties.py index 8999e7d8ec..398cbe5d98 100644 --- a/lib/sqlalchemy/orm/properties.py +++ b/lib/sqlalchemy/orm/properties.py @@ -650,7 +650,7 @@ class RelationProperty(StrategizedProperty): dest_list = [] for current in instances: _recursive[(current, self)] = True - obj = session.merge(current, dont_load=dont_load, _recursive=_recursive) + obj = session._merge(current, dont_load=dont_load, _recursive=_recursive) if obj is not None: dest_list.append(obj) if dont_load: @@ -663,7 +663,7 @@ class RelationProperty(StrategizedProperty): current = instances[0] if current is not None: _recursive[(current, self)] = True - obj = session.merge(current, dont_load=dont_load, _recursive=_recursive) + obj = session._merge(current, dont_load=dont_load, _recursive=_recursive) if obj is not None: if dont_load: dest_state.dict[self.key] = obj @@ -1021,7 +1021,11 @@ class RelationProperty(StrategizedProperty): def _refers_to_parent_table(self): - return self.parent.mapped_table is self.target + for c, f in self.synchronize_pairs: + if c.table is f.table: + return True + else: + return False def _is_self_referential(self): return self.mapper.common_parent(self.parent) diff --git a/lib/sqlalchemy/orm/query.py b/lib/sqlalchemy/orm/query.py index 7336b65562..219eed311e 100644 --- a/lib/sqlalchemy/orm/query.py +++ b/lib/sqlalchemy/orm/query.py @@ -1549,6 +1549,8 @@ class Query(object): if synchronize_session not in [False, 'evaluate', 'fetch']: raise sa_exc.ArgumentError("Valid strategies for session synchronization are False, 'evaluate' and 'fetch'") + self = self.enable_eagerloads(False) + context = self._compile_context() if len(context.statement.froms) != 1 or not isinstance(context.statement.froms[0], schema.Table): raise 
sa_exc.ArgumentError("Only deletion via a single table query is currently supported") @@ -1646,6 +1648,8 @@ class Query(object): if synchronize_session not in [False, 'evaluate', 'fetch']: raise sa_exc.ArgumentError("Valid strategies for session synchronization are False, 'evaluate' and 'fetch'") + self = self.enable_eagerloads(False) + context = self._compile_context() if len(context.statement.froms) != 1 or not isinstance(context.statement.froms[0], schema.Table): raise sa_exc.ArgumentError("Only update via a single table query is currently supported") diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index d8af4e74f5..1e3a750d95 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -158,13 +158,13 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False, before each transaction is committed. weak_identity_map - When set to the default value of ``False``, a weak-referencing map is + When set to the default value of ``True``, a weak-referencing map is used; instances which are not externally referenced will be garbage collected immediately. For dereferenced instances which have pending changes present, the attribute management system will create a temporary strong-reference to the object which lasts until the changes are flushed to the database, at which point it's again dereferenced. Alternatively, - when using the value ``True``, the identity map uses a regular Python + when using the value ``False``, the identity map uses a regular Python dictionary to store instances. The session will maintain all instances present until they are removed using expunge(), clear(), or purge(). @@ -1141,8 +1141,7 @@ class Session(object): for state, m, o in cascade_states: self._delete_impl(state) - def merge(self, instance, dont_load=False, - _recursive=None): + def merge(self, instance, dont_load=False): """Copy the state an instance onto the persistent instance with the same identifier. If there is no persistent instance currently associated with the @@ -1155,13 +1154,18 @@ class Session(object): mapped with ``cascade="merge"``. 
""" - if _recursive is None: - # TODO: this should be an IdentityDict for instances, but will - # need a separate dict for PropertyLoader tuples - _recursive = {} - # Autoflush only on the topmost call - self._autoflush() - + # TODO: this should be an IdentityDict for instances, but will + # need a separate dict for PropertyLoader tuples + _recursive = {} + self._autoflush() + autoflush = self.autoflush + try: + self.autoflush = False + return self._merge(instance, dont_load=dont_load, _recursive=_recursive) + finally: + self.autoflush = autoflush + + def _merge(self, instance, dont_load=False, _recursive=None): mapper = _object_mapper(instance) if instance in _recursive: return _recursive[instance] @@ -1169,6 +1173,7 @@ class Session(object): new_instance = False state = attributes.instance_state(instance) key = state.key + if key is None: if dont_load: raise sa_exc.InvalidRequestError( @@ -1194,7 +1199,7 @@ class Session(object): self._update_impl(merged_state) new_instance = True else: - merged = self.query(mapper.class_).autoflush(False).get(key[1]) + merged = self.query(mapper.class_).get(key[1]) if merged is None: merged = mapper.class_manager.new_instance() diff --git a/lib/sqlalchemy/orm/unitofwork.py b/lib/sqlalchemy/orm/unitofwork.py index 8ad8898202..4ac9c765e0 100644 --- a/lib/sqlalchemy/orm/unitofwork.py +++ b/lib/sqlalchemy/orm/unitofwork.py @@ -720,6 +720,10 @@ class UOWExecutor(object): def execute_save_steps(self, trans, task): self.save_objects(trans, task) + for dep in task.polymorphic_cyclical_dependencies: + self.execute_dependency(trans, dep, False) + for dep in task.polymorphic_cyclical_dependencies: + self.execute_dependency(trans, dep, True) self.execute_cyclical_dependencies(trans, task, False) self.execute_dependencies(trans, task) @@ -735,7 +739,5 @@ class UOWExecutor(object): self.execute_dependency(trans, dep, True) def execute_cyclical_dependencies(self, trans, task, isdelete): - for dep in task.polymorphic_cyclical_dependencies: - self.execute_dependency(trans, dep, isdelete) for t in task.dependent_tasks: self.execute(trans, [t], isdelete) diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index a4e9aa5c4f..8c0418a5e2 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -881,17 +881,33 @@ class ForeignKey(SchemaItem): raise exc.ArgumentError( "Parent column '%s' does not descend from a " "table-attached Column" % str(self.parent)) - m = re.match(r"^(.+?)(?:\.(.+?))?(?:\.(.+?))?$", self._colspec, - re.UNICODE) + + m = self._colspec.split('.') + if m is None: raise exc.ArgumentError( "Invalid foreign key column specification: %s" % self._colspec) - if m.group(3) is None: - (tname, colname) = m.group(1, 2) - schema = None + + # A FK between column 'bar' and table 'foo' can be + # specified as 'foo', 'foo.bar', 'dbo.foo.bar', + # 'otherdb.dbo.foo.bar'. Once we have the column name and + # the table name, treat everything else as the schema + # name. Some databases (e.g. Sybase) support + # inter-database foreign keys. See tickets#1341 and -- + # indirectly related -- Ticket #594. This assumes that '.' + # will never appear *within* any component of the FK. 
+ + (schema, tname, colname) = (None, None, None) + if (len(m) == 1): + tname = m.pop() else: - (schema, tname, colname) = m.group(1, 2, 3) + colname = m.pop() + tname = m.pop() + + if (len(m) > 0): + schema = '.'.join(m) + if _get_table_key(tname, schema) not in parenttable.metadata: raise exc.NoReferencedTableError( "Could not find table '%s' with which to generate a " diff --git a/lib/sqlalchemy/sql/compiler.py b/lib/sqlalchemy/sql/compiler.py index 787827e8be..a419de431e 100644 --- a/lib/sqlalchemy/sql/compiler.py +++ b/lib/sqlalchemy/sql/compiler.py @@ -110,6 +110,23 @@ FUNCTIONS = { functions.user: 'USER' } +EXTRACT_MAP = { + 'month': 'month', + 'day': 'day', + 'year': 'year', + 'second': 'second', + 'hour': 'hour', + 'doy': 'doy', + 'minute': 'minute', + 'quarter': 'quarter', + 'dow': 'dow', + 'week': 'week', + 'epoch': 'epoch', + 'milliseconds': 'milliseconds', + 'microseconds': 'microseconds', + 'timezone_hour': 'timezone_hour', + 'timezone_minute': 'timezone_minute' +} class _CompileLabel(visitors.Visitable): """lightweight label object which acts as an expression._Label.""" @@ -135,6 +152,7 @@ class SQLCompiler(engine.Compiled): operators = OPERATORS functions = FUNCTIONS + extract_map = EXTRACT_MAP # class-level defaults which can be set at the instance # level to define if this Compiled instance represents @@ -273,7 +291,7 @@ class SQLCompiler(engine.Compiled): return name else: if column.table.schema: - schema_prefix = self.preparer.quote(column.table.schema, column.table.quote_schema) + '.' + schema_prefix = self.preparer.quote_schema(column.table.schema, column.table.quote_schema) + '.' else: schema_prefix = '' tablename = column.table.name @@ -345,6 +363,10 @@ class SQLCompiler(engine.Compiled): def visit_cast(self, cast, **kwargs): return "CAST(%s AS %s)" % (self.process(cast.clause), self.process(cast.typeclause)) + def visit_extract(self, extract, **kwargs): + field = self.extract_map.get(extract.field, extract.field) + return "EXTRACT(%s FROM %s)" % (field, self.process(extract.expr)) + def visit_function(self, func, result_map=None, **kwargs): if result_map is not None: result_map[func.name.lower()] = (func.name, None, func.type) @@ -589,7 +611,7 @@ class SQLCompiler(engine.Compiled): def visit_table(self, table, asfrom=False, **kwargs): if asfrom: if getattr(table, "schema", None): - return self.preparer.quote(table.schema, table.quote_schema) + "." + self.preparer.quote(table.name, table.quote) + return self.preparer.quote_schema(table.schema, table.quote_schema) + "." + self.preparer.quote(table.name, table.quote) else: return self.preparer.quote(table.name, table.quote) else: @@ -1118,7 +1140,15 @@ class IdentifierPreparer(object): or self.illegal_initial_characters.match(value[0]) or not self.legal_characters.match(unicode(value)) or (lc_value != value)) - + + def quote_schema(self, schema, force): + """Quote a schema. + + Subclasses should override this to provide database-dependent + quoting behavior. + """ + return self.quote(schema, force) + def quote(self, ident, force): if force is None: if ident in self._strings: @@ -1137,7 +1167,7 @@ class IdentifierPreparer(object): def format_sequence(self, sequence, use_schema=True): name = self.quote(sequence.name, sequence.quote) if not self.omit_schema and use_schema and sequence.schema is not None: - name = self.quote(sequence.schema, sequence.quote) + "." + name + name = self.quote_schema(sequence.schema, sequence.quote) + "." 
+ name return name def format_label(self, label, name=None): @@ -1159,7 +1189,7 @@ class IdentifierPreparer(object): name = table.name result = self.quote(name, table.quote) if not self.omit_schema and use_schema and getattr(table, "schema", None): - result = self.quote(table.schema, table.quote_schema) + "." + result + result = self.quote_schema(table.schema, table.quote_schema) + "." + result return result def format_column(self, column, use_table=False, name=None, table_name=None): @@ -1187,7 +1217,7 @@ class IdentifierPreparer(object): # a longer sequence. if not self.omit_schema and use_schema and getattr(table, 'schema', None): - return (self.quote(table.schema, table.quote_schema), + return (self.quote_schema(table.schema, table.quote_schema), self.format_table(table, use_schema=False)) else: return (self.format_table(table, use_schema=False), ) diff --git a/lib/sqlalchemy/sql/expression.py b/lib/sqlalchemy/sql/expression.py index 5e565cfe90..4ecc7a0678 100644 --- a/lib/sqlalchemy/sql/expression.py +++ b/lib/sqlalchemy/sql/expression.py @@ -483,8 +483,7 @@ def cast(clause, totype, **kwargs): def extract(field, expr): """Return the clause ``extract(field FROM expr)``.""" - expr = _BinaryExpression(text(field), expr, operators.from_) - return func.extract(expr) + return _Extract(field, expr) def collate(expression, collation): """Return the clause ``expression COLLATE collation``.""" @@ -2226,13 +2225,17 @@ class _Case(ColumnElement): whenlist = [(_literal_as_binds(c).self_group(), _literal_as_binds(r)) for (c, r) in whens] else: whenlist = [(_no_literals(c).self_group(), _literal_as_binds(r)) for (c, r) in whens] - + if whenlist: type_ = list(whenlist[-1])[-1].type else: type_ = None - - self.value = value + + if value is None: + self.value = None + else: + self.value = _literal_as_binds(value) + self.type = type_ self.whens = whenlist if else_ is not None: @@ -2258,7 +2261,7 @@ class _Case(ColumnElement): @property def _from_objects(self): - return itertools.chain(*[x._from_objects for x in self.get_children()]) + return list(itertools.chain(*[x._from_objects for x in self.get_children()])) class Function(ColumnElement, FromClause): """Describe a SQL function.""" @@ -2334,6 +2337,27 @@ class _Cast(ColumnElement): return self.clause._from_objects +class _Extract(ColumnElement): + + __visit_name__ = 'extract' + + def __init__(self, field, expr, **kwargs): + self.type = sqltypes.Integer() + self.field = field + self.expr = _literal_as_binds(expr, None) + + def _copy_internals(self, clone=_clone): + self.field = clone(self.field) + self.expr = clone(self.expr) + + def get_children(self, **kwargs): + return self.field, self.expr + + @property + def _from_objects(self): + return self.expr._from_objects + + class _UnaryExpression(ColumnElement): __visit_name__ = 'unary' diff --git a/lib/sqlalchemy/sql/util.py b/lib/sqlalchemy/sql/util.py index a8de5c6352..36357faf50 100644 --- a/lib/sqlalchemy/sql/util.py +++ b/lib/sqlalchemy/sql/util.py @@ -45,6 +45,7 @@ def find_join_source(clauses, join_to): return i, f else: return None, None + def find_tables(clause, check_columns=False, include_aliases=False, include_joins=False, include_selects=False): """locate Table objects within the given expression.""" diff --git a/lib/sqlalchemy/util.py b/lib/sqlalchemy/util.py index 8ea8c36dd8..8c2847396e 100644 --- a/lib/sqlalchemy/util.py +++ b/lib/sqlalchemy/util.py @@ -715,6 +715,12 @@ class OrderedDict(dict): self._list = [] dict.clear(self) + def copy(self): + return self.__copy__() + + def 
__copy__(self): + return OrderedDict(self) + def sort(self, *arg, **kw): self._list.sort(*arg, **kw) diff --git a/test/base/utils.py b/test/base/utils.py index fa3aff4807..1721f621d3 100644 --- a/test/base/utils.py +++ b/test/base/utils.py @@ -1,5 +1,5 @@ import testenv; testenv.configure_for_tests() -import threading, unittest +import copy, threading, unittest from sqlalchemy import util, sql, exc from testlib import TestBase from testlib.compat import gc_collect @@ -56,6 +56,18 @@ class OrderedDictTest(TestBase): o = util.OrderedDict([('name', 'jbe'), ('fullname', 'jonathan'), ('password', '')]) eq_(o.keys(), ['name', 'fullname', 'password']) + def test_odict_copy(self): + o = util.OrderedDict() + o["zzz"] = 1 + o["aaa"] = 2 + eq_(o.keys(), ['zzz', 'aaa']) + + o2 = o.copy() + eq_(o2.keys(), o.keys()) + + o3 = copy.copy(o) + eq_(o3.keys(), o.keys()) + class OrderedSetTest(TestBase): def test_mutators_against_iter(self): # testing a set modified against an iterator diff --git a/test/dialect/access.py b/test/dialect/access.py index 311231947e..57af45a9d6 100644 --- a/test/dialect/access.py +++ b/test/dialect/access.py @@ -1,14 +1,33 @@ import testenv; testenv.configure_for_tests() from sqlalchemy import * +from sqlalchemy import sql from sqlalchemy.databases import access from testlib import * -class BasicTest(TestBase, AssertsExecutionResults): - # A simple import of the database/ module should work on all systems. - def test_import(self): - # we got this far, right? - return True +class CompileTest(TestBase, AssertsCompiledSQL): + __dialect__ = access.dialect() + + def test_extract(self): + t = sql.table('t', sql.column('col1')) + + mapping = { + 'month': 'm', + 'day': 'd', + 'year': 'yyyy', + 'second': 's', + 'hour': 'h', + 'doy': 'y', + 'minute': 'n', + 'quarter': 'q', + 'dow': 'w', + 'week': 'ww' + } + + for field, subst in mapping.items(): + self.assert_compile( + select([extract(field, t.c.col1)]), + 'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst) if __name__ == "__main__": diff --git a/test/dialect/mssql.py b/test/dialect/mssql.py index ecaf7b5a6d..e62582ddc4 100755 --- a/test/dialect/mssql.py +++ b/test/dialect/mssql.py @@ -89,6 +89,30 @@ class CompileTest(TestBase, AssertsCompiledSQL): s = select([tbl.c.id]).where(tbl.c.id==1) self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM paj.test WHERE paj.test.id IN (SELECT test_1.id FROM paj.test AS test_1 WHERE test_1.id = :id_1)") + def test_delete_schema_multipart(self): + metadata = MetaData() + tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana.paj') + self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM banana.paj.test WHERE banana.paj.test.id = :id_1") + + s = select([tbl.c.id]).where(tbl.c.id==1) + self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM banana.paj.test WHERE banana.paj.test.id IN (SELECT test_1.id FROM banana.paj.test AS test_1 WHERE test_1.id = :id_1)") + + def test_delete_schema_multipart_needs_quoting(self): + metadata = MetaData() + tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana split.paj') + self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM [banana split].paj.test WHERE [banana split].paj.test.id = :id_1") + + s = select([tbl.c.id]).where(tbl.c.id==1) + self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM [banana split].paj.test WHERE [banana split].paj.test.id IN (SELECT test_1.id FROM [banana split].paj.test AS test_1 WHERE test_1.id = :id_1)") + + def 
test_delete_schema_multipart_both_need_quoting(self): + metadata = MetaData() + tbl = Table('test', metadata, Column('id', Integer, primary_key=True), schema='banana split.paj with a space') + self.assert_compile(tbl.delete(tbl.c.id == 1), "DELETE FROM [banana split].[paj with a space].test WHERE [banana split].[paj with a space].test.id = :id_1") + + s = select([tbl.c.id]).where(tbl.c.id==1) + self.assert_compile(tbl.delete().where(tbl.c.id==(s)), "DELETE FROM [banana split].[paj with a space].test WHERE [banana split].[paj with a space].test.id IN (SELECT test_1.id FROM [banana split].[paj with a space].test AS test_1 WHERE test_1.id = :id_1)") + def test_union(self): t1 = table('t1', column('col1'), @@ -126,6 +150,14 @@ class CompileTest(TestBase, AssertsCompiledSQL): self.assert_compile(func.current_date(), "GETDATE()") self.assert_compile(func.length(3), "LEN(:length_1)") + def test_extract(self): + t = table('t', column('col1')) + + for field in 'day', 'month', 'year': + self.assert_compile( + select([extract(field, t.c.col1)]), + 'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % field) + class IdentityInsertTest(TestBase, AssertsCompiledSQL): __only_on__ = 'mssql' diff --git a/test/dialect/mysql.py b/test/dialect/mysql.py index acd1c5a43b..de4480a4f7 100644 --- a/test/dialect/mysql.py +++ b/test/dialect/mysql.py @@ -982,6 +982,19 @@ class SQLTest(TestBase, AssertsCompiledSQL): for type_, expected in specs: self.assert_compile(cast(t.c.col, type_), expected) + def test_extract(self): + t = sql.table('t', sql.column('col1')) + + for field in 'year', 'month', 'day': + self.assert_compile( + select([extract(field, t.c.col1)]), + "SELECT EXTRACT(%s FROM t.col1) AS anon_1 FROM t" % field) + + # millsecondS to millisecond + self.assert_compile( + select([extract('milliseconds', t.c.col1)]), + "SELECT EXTRACT(millisecond FROM t.col1) AS anon_1 FROM t") + class RawReflectionTest(TestBase): def setUp(self): diff --git a/test/dialect/postgres.py b/test/dialect/postgres.py index 997c3fcb5e..fe72d474f2 100644 --- a/test/dialect/postgres.py +++ b/test/dialect/postgres.py @@ -22,6 +22,8 @@ class SequenceTest(TestBase, AssertsCompiledSQL): assert dialect.identifier_preparer.format_sequence(seq) == '"Some_Schema"."My_Seq"' class CompileTest(TestBase, AssertsCompiledSQL): + __dialect__ = postgres.dialect() + def test_update_returning(self): dialect = postgres.dialect() table1 = table('mytable', @@ -65,6 +67,14 @@ class CompileTest(TestBase, AssertsCompiledSQL): self.assert_compile(schema.CreateIndex(idx), "CREATE INDEX test_idx1 ON testtbl (data) WHERE testtbl.data > 5 AND testtbl.data < 10", dialect=postgres.dialect()) + def test_extract(self): + t = table('t', column('col1')) + + for field in 'year', 'month', 'day': + self.assert_compile( + select([extract(field, t.c.col1)]), + "SELECT EXTRACT(%s FROM t.col1::timestamp) AS anon_1 " + "FROM t" % field) class ReturningTest(TestBase, AssertsExecutionResults): __only_on__ = 'postgres' diff --git a/test/dialect/sqlite.py b/test/dialect/sqlite.py index 35f6cbba72..f114619497 100644 --- a/test/dialect/sqlite.py +++ b/test/dialect/sqlite.py @@ -3,7 +3,7 @@ import testenv; testenv.configure_for_tests() import datetime from sqlalchemy import * -from sqlalchemy import exc +from sqlalchemy import exc, sql from sqlalchemy.dialects.sqlite import base as sqlite, pysqlite as pysqlite_dialect from testlib import * @@ -285,6 +285,35 @@ class DialectTest(TestBase, AssertsExecutionResults): isolation_level="FOO") +class SQLTest(TestBase, AssertsCompiledSQL): + 
"""Tests SQLite-dialect specific compilation.""" + + __dialect__ = sqlite.dialect() + + + def test_extract(self): + t = sql.table('t', sql.column('col1')) + + mapping = { + 'month': '%m', + 'day': '%d', + 'year': '%Y', + 'second': '%S', + 'hour': '%H', + 'doy': '%j', + 'minute': '%M', + 'epoch': '%s', + 'dow': '%w', + 'week': '%W', + } + + for field, subst in mapping.items(): + self.assert_compile( + select([extract(field, t.c.col1)]), + "SELECT CAST(STRFTIME('%s', t.col1) AS INTEGER) AS anon_1 " + "FROM t" % subst) + + class InsertTest(TestBase, AssertsExecutionResults): """Tests inserts and autoincrement.""" diff --git a/test/dialect/sybase.py b/test/dialect/sybase.py index 19cca465bd..32b9904d8a 100644 --- a/test/dialect/sybase.py +++ b/test/dialect/sybase.py @@ -1,14 +1,30 @@ import testenv; testenv.configure_for_tests() from sqlalchemy import * +from sqlalchemy import sql from sqlalchemy.databases import sybase from testlib import * -class BasicTest(TestBase, AssertsExecutionResults): - # A simple import of the database/ module should work on all systems. - def test_import(self): - # we got this far, right? - return True +class CompileTest(TestBase, AssertsCompiledSQL): + __dialect__ = sybase.dialect() + + def test_extract(self): + t = sql.table('t', sql.column('col1')) + + mapping = { + 'day': 'day', + 'doy': 'dayofyear', + 'dow': 'weekday', + 'milliseconds': 'millisecond', + 'millisecond': 'millisecond', + 'year': 'year', + } + + for field, subst in mapping.items(): + self.assert_compile( + select([extract(field, t.c.col1)]), + 'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst) + if __name__ == "__main__": diff --git a/test/ext/declarative.py b/test/ext/declarative.py index c6d4fe6814..0c835641fb 100644 --- a/test/ext/declarative.py +++ b/test/ext/declarative.py @@ -4,7 +4,7 @@ from sqlalchemy.ext import declarative as decl from sqlalchemy import exc from testlib import sa, testing from testlib.sa import MetaData, Table, Column, Integer, String, ForeignKey, ForeignKeyConstraint, asc, Index -from testlib.sa.orm import relation, create_session, class_mapper, eagerload, compile_mappers, backref, clear_mappers, polymorphic_union +from testlib.sa.orm import relation, create_session, class_mapper, eagerload, compile_mappers, backref, clear_mappers, polymorphic_union, deferred from testlib.testing import eq_ from orm._base import ComparableEntity, MappedTest @@ -1082,6 +1082,29 @@ class DeclarativeInheritanceTest(DeclarativeTestBase): Engineer(name="vlad", primary_language="cobol") ) + def test_add_deferred(self): + class Person(Base, ComparableEntity): + __tablename__ = 'people' + id = Column('id', Integer, primary_key=True) + + Person.name = deferred(Column(String(10))) + + Base.metadata.create_all() + sess = create_session() + p = Person(name='ratbert') + + sess.add(p) + sess.flush() + sess.expunge_all() + eq_( + sess.query(Person).all(), + [ + Person(name='ratbert') + ] + ) + person = sess.query(Person).filter(Person.name == 'ratbert').one() + assert 'name' not in person.__dict__ + def test_single_fksonsub(self): """test single inheritance with a foreign key-holding column on a subclass. 
diff --git a/test/orm/alltests.py b/test/orm/alltests.py index f628460c11..d0c9d8b045 100644 --- a/test/orm/alltests.py +++ b/test/orm/alltests.py @@ -22,6 +22,7 @@ def suite(): 'orm.assorted_eager', 'orm.naturalpks', + 'orm.defaults', 'orm.unitofwork', 'orm.session', 'orm.transaction', @@ -40,6 +41,8 @@ def suite(): 'orm.onetoone', 'orm.dynamic', + 'orm.evaluator', + 'orm.deprecations', ) alltests = unittest.TestSuite() diff --git a/test/orm/cycles.py b/test/orm/cycles.py index 958afadaf3..3e36360852 100644 --- a/test/orm/cycles.py +++ b/test/orm/cycles.py @@ -108,7 +108,27 @@ class SelfReferentialTest(_base.MappedTest): sess.delete(a) sess.flush() - + @testing.resolve_artifact_names + def test_setnull_ondelete(self): + mapper(C1, t1, properties={ + 'children':relation(C1) + }) + + sess = create_session() + c1 = C1() + c2 = C1() + c1.children.append(c2) + sess.add(c1) + sess.flush() + assert c2.parent_c1 == c1.c1 + + sess.delete(c1) + sess.flush() + assert c2.parent_c1 is None + + sess.expire_all() + assert c2.parent_c1 is None + class SelfReferentialNoPKTest(_base.MappedTest): """A self-referential relationship that joins on a column other than the primary key column""" diff --git a/test/orm/dynamic.py b/test/orm/dynamic.py index f975f762f8..3bd94b7c0e 100644 --- a/test/orm/dynamic.py +++ b/test/orm/dynamic.py @@ -4,6 +4,7 @@ from sqlalchemy.orm import dynamic_loader, backref from testlib import testing from testlib.sa import Table, Column, Integer, String, ForeignKey, desc, select, func from testlib.sa.orm import mapper, relation, create_session, Query, attributes +from sqlalchemy.orm.dynamic import AppenderMixin from testlib.testing import eq_ from testlib.compat import _function_named from orm import _base, _fixtures @@ -151,6 +152,44 @@ class DynamicTest(_fixtures.FixtureTest): assert not hasattr(q, 'append') assert type(q).__name__ == 'MyQuery' + @testing.resolve_artifact_names + def test_custom_query_with_custom_mixin(self): + class MyAppenderMixin(AppenderMixin): + def add(self, items): + if isinstance(items, list): + for item in items: + self.append(item) + else: + self.append(items) + + class MyQuery(Query): + pass + + class MyAppenderQuery(MyAppenderMixin, MyQuery): + query_class = MyQuery + + mapper(User, users, properties={ + 'addresses':dynamic_loader(mapper(Address, addresses), + query_class=MyAppenderQuery) + }) + sess = create_session() + u = User() + sess.add(u) + + col = u.addresses + assert isinstance(col, Query) + assert isinstance(col, MyQuery) + assert hasattr(col, 'append') + assert hasattr(col, 'add') + assert type(col).__name__ == 'MyAppenderQuery' + + q = col.limit(1) + assert isinstance(q, Query) + assert isinstance(q, MyQuery) + assert not hasattr(q, 'append') + assert not hasattr(q, 'add') + assert type(q).__name__ == 'MyQuery' + class SessionTest(_fixtures.FixtureTest): run_inserts = None @@ -165,7 +204,7 @@ class SessionTest(_fixtures.FixtureTest): a1 = Address(email_address='foo') sess.add_all([u1, a1]) sess.flush() - + assert testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)) == 0 u1 = sess.query(User).get(u1.id) u1.addresses.append(a1) @@ -174,11 +213,11 @@ class SessionTest(_fixtures.FixtureTest): assert testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall() == [ (a1.id, u1.id, 'foo') ] - + u1.addresses.remove(a1) sess.flush() assert testing.db.scalar(select([func.count(1)]).where(addresses.c.user_id!=None)) == 0 - + u1.addresses.append(a1) sess.flush() assert 
testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall() == [ @@ -192,7 +231,7 @@ class SessionTest(_fixtures.FixtureTest): assert testing.db.execute(select([addresses]).where(addresses.c.user_id!=None)).fetchall() == [ (a2.id, u1.id, 'bar') ] - + @testing.resolve_artifact_names def test_merge(self): @@ -204,30 +243,30 @@ class SessionTest(_fixtures.FixtureTest): a1 = Address(email_address='a1') a2 = Address(email_address='a2') a3 = Address(email_address='a3') - + u1.addresses.append(a2) u1.addresses.append(a3) - + sess.add_all([u1, a1]) sess.flush() - + u1 = User(id=u1.id, name='jack') u1.addresses.append(a1) u1.addresses.append(a3) u1 = sess.merge(u1) assert attributes.get_history(u1, 'addresses') == ( - [a1], - [a3], + [a1], + [a3], [a2] ) sess.flush() - + eq_( list(u1.addresses), [a1, a3] ) - + @testing.resolve_artifact_names def test_flush(self): mapper(User, users, properties={ @@ -240,10 +279,10 @@ class SessionTest(_fixtures.FixtureTest): u1.addresses.append(Address(email_address='lala@hoho.com')) sess.add_all((u1, u2)) sess.flush() - + from sqlalchemy.orm import attributes self.assertEquals(attributes.get_history(attributes.instance_state(u1), 'addresses'), ([], [Address(email_address='lala@hoho.com')], [])) - + sess.expunge_all() # test the test fixture a little bit @@ -254,18 +293,18 @@ class SessionTest(_fixtures.FixtureTest): User(name='jack', addresses=[Address(email_address='lala@hoho.com')]), User(name='ed', addresses=[Address(email_address='foo@bar.com')]) ] == sess.query(User).all() - + @testing.resolve_artifact_names def test_hasattr(self): mapper(User, users, properties={ 'addresses':dynamic_loader(mapper(Address, addresses)) }) u1 = User(name='jack') - + assert 'addresses' not in u1.__dict__.keys() u1.addresses = [Address(email_address='test')] assert 'addresses' in dir(u1) - + @testing.resolve_artifact_names def test_collection_set(self): mapper(User, users, properties={ @@ -277,7 +316,7 @@ class SessionTest(_fixtures.FixtureTest): a2 = Address(email_address='a2') a3 = Address(email_address='a3') a4 = Address(email_address='a4') - + sess.add(u1) u1.addresses = [a1, a3] assert list(u1.addresses) == [a1, a3] @@ -287,10 +326,10 @@ class SessionTest(_fixtures.FixtureTest): assert list(u1.addresses) == [a2, a3] u1.addresses = [] assert list(u1.addresses) == [] - - - + + + @testing.resolve_artifact_names def test_rollback(self): mapper(User, users, properties={ diff --git a/test/orm/eager_relations.py b/test/orm/eager_relations.py index ce81b0ba3a..5981223c83 100644 --- a/test/orm/eager_relations.py +++ b/test/orm/eager_relations.py @@ -892,6 +892,20 @@ class SelfReferentialEagerTest(_base.MappedTest): n1.children[1].append(Node(data='n123')) sess.add(n1) sess.flush() + sess.expunge_all() + def go(): + d = sess.query(Node).filter_by(data='n1').all()[0] + assert Node(data='n1', children=[ + Node(data='n11'), + Node(data='n12', children=[ + Node(data='n121'), + Node(data='n122'), + Node(data='n123') + ]), + Node(data='n13') + ]) == d + self.assert_sql_count(testing.db, go, 1) + sess.expunge_all() def go(): d = sess.query(Node).filter_by(data='n1').first() diff --git a/test/orm/inheritance/polymorph2.py b/test/orm/inheritance/polymorph2.py index c5b9b3fde6..569fdd52e3 100644 --- a/test/orm/inheritance/polymorph2.py +++ b/test/orm/inheritance/polymorph2.py @@ -8,6 +8,8 @@ from sqlalchemy import util from sqlalchemy.orm import * from testlib import * from testlib import fixtures +from orm import _base +from testlib.testing import eq_ class 
AttrSettable(object): def __init__(self, **kwargs): @@ -610,7 +612,50 @@ class RelationTest7(ORMTest): for p in r: assert p.car_id == p.car.car_id +class RelationTest8(ORMTest): + def define_tables(self, metadata): + global taggable, users + taggable = Table('taggable', metadata, + Column('id', Integer, primary_key=True), + Column('type', String(30)), + Column('owner_id', Integer, ForeignKey('taggable.id')), + ) + users = Table ('users', metadata, + Column('id', Integer, ForeignKey('taggable.id'), primary_key=True), + Column('data', String(50)), + ) + + def test_selfref_onjoined(self): + class Taggable(_base.ComparableEntity): + pass + + class User(Taggable): + pass + + mapper( Taggable, taggable, polymorphic_on=taggable.c.type, polymorphic_identity='taggable', properties = { + 'owner' : relation (User, + primaryjoin=taggable.c.owner_id ==taggable.c.id, + remote_side=taggable.c.id + ), + }) + + + mapper(User, users, inherits=Taggable, polymorphic_identity='user', + inherit_condition=users.c.id == taggable.c.id, + ) + + + u1 = User(data='u1') + t1 = Taggable(owner=u1) + sess = create_session() + sess.add(t1) + sess.flush() + sess.expunge_all() + eq_( + sess.query(Taggable).order_by(Taggable.id).all(), + [User(data='u1'), Taggable(owner=User(data='u1'))] + ) class GenerativeTest(TestBase, AssertsExecutionResults): def setUpAll(self): diff --git a/test/orm/merge.py b/test/orm/merge.py index 790cd44950..02f8563c18 100644 --- a/test/orm/merge.py +++ b/test/orm/merge.py @@ -695,5 +695,34 @@ class MergeTest(_fixtures.FixtureTest): sess.flush() assert merged_user not in sess.new + @testing.resolve_artifact_names + def test_cascades_dont_autoflush_2(self): + mapper(User, users, properties={ + 'addresses':relation(Address, + backref='user', + cascade="all, delete-orphan") + }) + mapper(Address, addresses) + + u = User(id=7, name='fred', addresses=[ + Address(id=1, email_address='fred1'), + ]) + sess = create_session(autoflush=True, autocommit=False) + sess.add(u) + sess.commit() + + sess.expunge_all() + + u = User(id=7, name='fred', addresses=[ + Address(id=1, email_address='fred1'), + Address(id=2, email_address='fred2'), + ]) + sess.merge(u) + assert sess.autoflush + sess.commit() + + + + if __name__ == "__main__": testenv.main() diff --git a/test/orm/pickled.py b/test/orm/pickled.py index ca308bb5be..878fe931e3 100644 --- a/test/orm/pickled.py +++ b/test/orm/pickled.py @@ -99,7 +99,36 @@ class PickleTest(_fixtures.FixtureTest): self.assertEquals(ad.email_address, 'ed@bar.com') self.assertEquals(u2, User(name='ed', addresses=[Address(email_address='ed@bar.com')])) + @testing.resolve_artifact_names + def test_options_with_descriptors(self): + mapper(User, users, properties={ + 'addresses':relation(Address, backref="user") + }) + mapper(Address, addresses) + sess = create_session() + u1 = User(name='ed') + u1.addresses.append(Address(email_address='ed@bar.com')) + sess.add(u1) + sess.flush() + sess.expunge_all() + for opt in [ + sa.orm.eagerload(User.addresses), + sa.orm.eagerload("addresses"), + sa.orm.defer("name"), + sa.orm.defer(User.name), + sa.orm.defer([User.name]), + sa.orm.eagerload("addresses", User.addresses), + sa.orm.eagerload(["addresses", User.addresses]), + ]: + opt2 = pickle.loads(pickle.dumps(opt)) + self.assertEquals(opt.key, opt2.key) + + u1 = sess.query(User).options(opt).first() + + u2 = pickle.loads(pickle.dumps(u1)) + + class PolymorphicDeferredTest(_base.MappedTest): def define_tables(self, metadata): Table('users', metadata, diff --git a/test/orm/query.py 
b/test/orm/query.py index 0d89303113..6531b234c6 100644 --- a/test/orm/query.py +++ b/test/orm/query.py @@ -2786,10 +2786,18 @@ class UpdateDeleteTest(_base.MappedTest): Column('id', Integer, primary_key=True), Column('name', String(32)), Column('age', Integer)) + + Table('documents', metadata, + Column('id', Integer, primary_key=True), + Column('user_id', None, ForeignKey('users.id')), + Column('title', String(32))) def setup_classes(self): class User(_base.ComparableEntity): pass + + class Document(_base.ComparableEntity): + pass @testing.resolve_artifact_names def insert_data(self): @@ -2799,10 +2807,21 @@ class UpdateDeleteTest(_base.MappedTest): dict(id=3, name='jill', age=29), dict(id=4, name='jane', age=37), ]) + + @testing.resolve_artifact_names + def insert_documents(self): + documents.insert().execute([ + dict(id=1, user_id=1, title='foo'), + dict(id=2, user_id=1, title='bar'), + dict(id=3, user_id=2, title='baz'), + ]) @testing.resolve_artifact_names def setup_mappers(self): mapper(User, users) + mapper(Document, documents, properties={ + 'user': relation(User, lazy=False, backref=backref('documents', lazy=True)) + }) @testing.resolve_artifact_names def test_delete(self): @@ -2989,5 +3008,37 @@ class UpdateDeleteTest(_base.MappedTest): rowcount = sess.query(User).filter(User.age > 26).delete(synchronize_session=False) self.assertEquals(rowcount, 3) + @testing.resolve_artifact_names + def test_update_with_eager_relations(self): + self.insert_documents() + + sess = create_session(bind=testing.db, autocommit=False) + + foo,bar,baz = sess.query(Document).order_by(Document.id).all() + sess.query(Document).filter(Document.user_id == 1).update({'title': Document.title+Document.title}, synchronize_session='fetch') + + eq_([foo.title, bar.title, baz.title], ['foofoo','barbar', 'baz']) + eq_(sess.query(Document.title).order_by(Document.id).all(), zip(['foofoo','barbar', 'baz'])) + + @testing.resolve_artifact_names + def test_update_with_explicit_eagerload(self): + sess = create_session(bind=testing.db, autocommit=False) + + john,jack,jill,jane = sess.query(User).order_by(User.id).all() + sess.query(User).options(eagerload(User.documents)).filter(User.age > 29).update({'age': User.age - 10}, synchronize_session='fetch') + + eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27]) + eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27])) + + @testing.resolve_artifact_names + def test_delete_with_eager_relations(self): + self.insert_documents() + + sess = create_session(bind=testing.db, autocommit=False) + + sess.query(Document).filter(Document.user_id == 1).delete(synchronize_session=False) + + eq_(sess.query(Document.title).all(), zip(['baz'])) + if __name__ == '__main__': testenv.main() diff --git a/test/sql/functions.py b/test/sql/functions.py index 9c61617fc6..9ab9dc9795 100644 --- a/test/sql/functions.py +++ b/test/sql/functions.py @@ -275,6 +275,44 @@ class ExecuteTest(TestBase): assert x == y == z == w == q == r + def test_extract_bind(self): + """Basic common denominator execution tests for extract()""" + + date = datetime.date(2010, 5, 1) + + def execute(field): + return testing.db.execute(select([extract(field, date)])).scalar() + + assert execute('year') == 2010 + assert execute('month') == 5 + assert execute('day') == 1 + + date = datetime.datetime(2010, 5, 1, 12, 11, 10) + + assert execute('year') == 2010 + assert execute('month') == 5 + assert execute('day') == 1 + + def test_extract_expression(self): + meta = MetaData(testing.db) + table = Table('test', 
meta, + Column('dt', DateTime), + Column('d', Date)) + meta.create_all() + try: + table.insert().execute( + {'dt': datetime.datetime(2010, 5, 1, 12, 11, 10), + 'd': datetime.date(2010, 5, 1) }) + rs = select([extract('year', table.c.dt), + extract('month', table.c.d)]).execute() + row = rs.fetchone() + assert row[0] == 2010 + assert row[1] == 5 + rs.close() + finally: + meta.drop_all() + + def exec_sorted(statement, *args, **kw): """Executes a statement and returns a sorted list plain tuple rows.""" diff --git a/test/sql/select.py b/test/sql/select.py index d7ef36bce2..e99b953b9b 100644 --- a/test/sql/select.py +++ b/test/sql/select.py @@ -834,12 +834,6 @@ FROM mytable, myothertable WHERE foo.id = foofoo(lala) AND datetime(foo) = Today "SELECT values.id FROM values WHERE values.val1 / (values.val2 - values.val1) / values.val1 > :param_1" ) - def test_extract(self): - """test the EXTRACT function""" - self.assert_compile(select([extract("month", table3.c.otherstuff)]), "SELECT extract(month FROM thirdtable.otherstuff) AS extract_1 FROM thirdtable") - - self.assert_compile(select([extract("day", func.to_date("03/20/2005", "MM/DD/YYYY"))]), "SELECT extract(day FROM to_date(:to_date_1, :to_date_2)) AS extract_1") - def test_collate(self): for expr in (select([table1.c.name.collate('latin1_german2_ci')]), select([collate(table1.c.name, 'latin1_german2_ci')])): diff --git a/test/sql/testtypes.py b/test/sql/testtypes.py index e11810f5f3..5ef2b73f15 100644 --- a/test/sql/testtypes.py +++ b/test/sql/testtypes.py @@ -708,7 +708,6 @@ class NumericTest(TestBase, AssertsExecutionResults): ncasdec=Decimal("12.4"), fcasdec=Decimal("15.75")) l = numeric_table.select().execute().fetchall() - print l rounded = [ (l[0][0], l[0][1], round(l[0][2], 5), l[0][3], l[0][4]), (l[1][0], l[1][1], round(l[1][2], 5), l[1][3], l[1][4]), @@ -802,11 +801,9 @@ class BooleanTest(TestBase, AssertsExecutionResults): bool_table.insert().execute(id=5, value=True) res = bool_table.select(bool_table.c.value==True).execute().fetchall() - print res assert(res==[(1, True),(3, True),(4, True),(5, True)]) res2 = bool_table.select(bool_table.c.value==False).execute().fetchall() - print res2 assert(res2==[(2, False)]) class PickleTest(TestBase): diff --git a/test/testlib/requires.py b/test/testlib/requires.py index 1c66e3e51b..b51f6d2e85 100644 --- a/test/testlib/requires.py +++ b/test/testlib/requires.py @@ -59,6 +59,7 @@ def row_triggers(fn): return _chain_decorators_on( fn, # no access to same table + no_support('mysql', 'requires SUPER priv'), exclude('mysql', '<', (5, 0, 10), 'not supported by database'), no_support('postgres', 'not supported by database: no statements'), ) -- 2.47.3
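
Taken together, the dialect changes above make extract() compile to per-dialect SQL rather than a single generic EXTRACT call. A brief sketch of the behavior the new tests assert, using the 0.5-era API from those tests; the expected strings in the comments are copied from the test expectations in this patch:

from sqlalchemy import sql, select, extract
from sqlalchemy.dialects.sqlite import base as sqlite
from sqlalchemy.databases import sybase

t = sql.table('t', sql.column('col1'))
stmt = select([extract('year', t.c.col1)])

# SQLite renders a STRFTIME cast:
#   SELECT CAST(STRFTIME('%Y', t.col1) AS INTEGER) AS anon_1 FROM t
print(stmt.compile(dialect=sqlite.dialect()))

# Sybase maps the field through its extract_map and emits DATEPART:
#   SELECT DATEPART("year", t.col1) AS anon_1 FROM t
print(stmt.compile(dialect=sybase.dialect()))
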