]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
- mega example cleanup
authorMike Bayer <mike_mp@zzzcomputing.com>
Tue, 19 Jan 2010 00:53:12 +0000 (00:53 +0000)
committerMike Bayer <mike_mp@zzzcomputing.com>
Tue, 19 Jan 2010 00:53:12 +0000 (00:53 +0000)
- added READMEs to all examples in each __init__.py and added to sphinx documentation
- added versioning example
- removed vertical/vertical.py, the dictlikes are more straightforward

50 files changed:
doc/build/conf.py
doc/build/examples.rst [new file with mode: 0644]
doc/build/index.rst
doc/build/mappers.rst
examples/adjacency_list/__init__.py
examples/adjacency_list/adjacency_list.py
examples/association/__init__.py
examples/beaker_caching/README [deleted file]
examples/beaker_caching/__init__.py
examples/beaker_caching/advanced.py
examples/beaker_caching/environment.py [new file with mode: 0644]
examples/beaker_caching/helloworld.py
examples/beaker_caching/local_session_caching.py
examples/beaker_caching/relation_caching.py
examples/custom_attributes/__init__.py
examples/custom_attributes/custom_management.py
examples/derived_attributes/__init__.py
examples/derived_attributes/attributes.py
examples/dynamic_dict/__init__.py
examples/dynamic_dict/dynamic_dict.py
examples/elementtree/__init__.py
examples/elementtree/adjacency_list.py
examples/elementtree/optimized_al.py
examples/elementtree/pickle.py
examples/graphs/__init__.py
examples/graphs/directed_graph.py [moved from examples/graphs/graph1.py with 100% similarity]
examples/inheritance/__init__.py [new file with mode: 0644]
examples/inheritance/concrete.py [moved from examples/polymorph/concrete.py with 100% similarity]
examples/inheritance/polymorph.py [moved from examples/polymorph/polymorph.py with 96% similarity]
examples/inheritance/single.py [moved from examples/polymorph/single.py with 97% similarity]
examples/large_collection/__init__.py
examples/large_collection/large_collection.py
examples/nested_sets/__init__.py
examples/poly_assoc/__init__.py
examples/polymorph/__init__.py [deleted file]
examples/postgis/__init__.py
examples/postgis/postgis.py
examples/sharding/__init__.py
examples/sharding/attribute_shard.py
examples/versioning/__init__.py [new file with mode: 0644]
examples/versioning/history_meta.py [new file with mode: 0644]
examples/versioning/test_versioning.py [new file with mode: 0644]
examples/vertical/__init__.py
examples/vertical/dictlike-polymorphic.py
examples/vertical/dictlike.py
examples/vertical/vertical.py [deleted file]
lib/sqlalchemy/orm/session.py
lib/sqlalchemy/test/entities.py [new file with mode: 0644]
setup.cfg
test/orm/_base.py

index eaf51a62818afbdd5cf920b423e674976bbecd27..8b7650e14f5ef648d357d4a608b900879d2ec878 100644 (file)
@@ -17,6 +17,7 @@ import sys, os
 # is relative to the documentation root, use os.path.abspath to make it
 # absolute, like shown here.
 sys.path.insert(0, os.path.abspath('../../lib'))
+sys.path.insert(0, os.path.abspath('../../examples'))
 sys.path.insert(0, os.path.abspath('.'))
 
 import sqlalchemy
@@ -44,7 +45,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'SQLAlchemy'
-copyright = u'2007, 2008, 2009, the SQLAlchemy authors and contributors'
+copyright = u'2007, 2008, 2009, 2010, the SQLAlchemy authors and contributors'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
diff --git a/doc/build/examples.rst b/doc/build/examples.rst
new file mode 100644 (file)
index 0000000..f91b8a6
--- /dev/null
@@ -0,0 +1,121 @@
+.. _examples_toplevel:
+
+Examples
+========
+
+The SQLAlchemy distribution includes a variety of code examples illustrating a select set of patterns, some typical and some not so typical.   All are runnable and can be found in the ``/examples`` directory of the distribution.   Each example contains a README in its ``__init__.py`` file, each of which are listed below.
+
+Additional SQLAlchemy examples, some user contributed, are available on the wiki at `<http://www.sqlalchemy.org/trac/wiki/UsageRecipes>`_.
+
+Adjacency List
+--------------
+
+Location: /examples/adjacency_list/
+
+.. automodule:: adjacency_list
+
+Associations
+------------
+
+Location: /examples/association/
+
+.. automodule:: association
+
+Attribute Instrumentation
+-------------------------
+
+Location: /examples/custom_attributes/
+
+.. automodule:: custom_attributes
+
+Beaker Caching
+--------------
+
+Location: /examples/beaker_caching/
+
+.. automodule:: beaker_caching
+
+Derived Attributes
+------------------
+
+Location: /examples/derived_attributes/
+
+.. automodule:: derived_attributes
+
+
+Directed Graphs
+---------------
+
+Location: /examples/graphs/
+
+.. automodule:: graphs
+
+Dynamic Relations as Dictionaries
+----------------------------------
+
+Location: /examples/dynamic_dict/
+
+.. automodule:: dynamic_dict
+
+Horizontal Sharding
+-------------------
+
+Location: /examples/sharding
+
+.. automodule:: sharding
+
+Inheritance Mappings
+--------------------
+
+Location: /examples/inheritance/
+
+.. automodule:: inheritance
+
+Large Collections
+-----------------
+
+Location: /examples/large_collection/
+
+.. automodule:: large_collection
+
+Nested Sets
+-----------
+
+Location: /examples/nested_sets/
+
+.. automodule:: nested_sets
+
+Polymorphic Associations
+------------------------
+
+Location: /examples/poly_assoc/
+
+.. automodule:: poly_assoc
+
+PostGIS Integration
+-------------------
+
+Location: /examples/postgis
+
+.. automodule:: postgis
+
+Versioned Objects
+-----------------
+
+Location: /examples/versioning
+
+.. automodule:: versioning
+
+Vertical Attribute Mapping
+--------------------------
+
+Location: /examples/vertical
+
+.. automodule:: vertical
+
+XML Persistence
+---------------
+
+Location: /examples/elementtree/
+
+.. automodule:: elementtree
index c3385d4e055abaf077773d099f37e9aeec69eda3..e2338c0986e89bedc5da70a6230e3a1fe0427dfb 100644 (file)
@@ -10,6 +10,7 @@ Table of Contents
     session
     dbengine
     metadata
+    examples
     reference/index
 
 Indices and tables
index 9017cf2d54e28845b702cc7d38681615c12f44f2..4b0ff6f9f051e022497b03aba20467ad48b6a3ff 100644 (file)
@@ -337,6 +337,8 @@ Ordering for rows loaded through ``Query`` is usually specified using the ``orde
 
 Above, a ``Query`` issued for the ``User`` class will use the value of the mapper's ``order_by`` setting if the ``Query`` itself has no ordering specified.
 
+.. _datamapping_inheritance:
+
 Mapping Class Inheritance Hierarchies 
 --------------------------------------
 
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..44f27090bf90df269e042c9aa81ca6c7c5414caf 100644 (file)
@@ -0,0 +1,16 @@
+"""
+An example of a dictionary-of-dictionaries structure mapped using
+an adjacency list model.
+
+E.g.::
+
+    node = TreeNode('rootnode')
+    node.append('node1')
+    node.append('node3')
+    session.add(node)
+    session.commit()
+
+    dump_tree(node)
+
+"""
+
index 3bf20dc5f8386dd6b177464f4a0b318b5285749c..cc539c054534942aca16618b22af36a0c7d61a83 100644 (file)
@@ -1,9 +1,3 @@
-"""
-An example of a dictionary-of-dictionaries structure mapped using
-an adjacency list model
-
-"""
-
 from sqlalchemy import MetaData, Table, Column, Sequence, ForeignKey,\
                         Integer, String, create_engine
                         
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..46b596991f096ba998e8dbd8e76cb8e4ab941851 100644 (file)
@@ -0,0 +1,25 @@
+"""
+
+Examples illustrating the usage of the "association object" pattern,
+where an intermediary object associates two endpoint objects together.
+
+The first example illustrates a basic association from a User object
+to a collection or Order objects, each which references a collection of Item objects.
+
+The second example builds upon the first to add the Association Proxy extension.
+
+E.g.::
+
+    # create an order
+    order = Order('john smith')
+
+    # append an OrderItem association via the "itemassociations"
+    # collection with a custom price.
+    order.itemassociations.append(OrderItem(item('MySQL Crowbar'), 10.99))
+
+    # append two more Items via the transparent "items" proxy, which
+    # will create OrderItems automatically using the default price.
+    order.items.append(item('SA Mug'))
+    order.items.append(item('SA Hat'))
+
+"""
\ No newline at end of file
diff --git a/examples/beaker_caching/README b/examples/beaker_caching/README
deleted file mode 100644 (file)
index 7026900..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-Illustrates how to embed Beaker cache functionality within
-the Query object, allowing full cache control as well as the
-ability to pull "lazy loaded" attributes from long term cache 
-as well.
-
-In this demo, the following techniques are illustrated:
-
-  * Using custom subclasses of Query
-  * Basic technique of circumventing Query to pull from a 
-    custom cache source instead of the database.
-  * Rudimental caching with Beaker, using "regions" which allow
-    global control over a fixed set of configurations.
-  * Using custom MapperOption objects to configure options on 
-    a Query, including the ability to invoke the options 
-    deep within an object graph when lazy loads occur.
-
-To run, both SQLAlchemy and Beaker (1.4 or greater) must be
-installed or on the current PYTHONPATH. The demo will create a local
-directory for datafiles, insert initial data, and run. Running the
-demo a second time will utilize the cache files already present, and
-exactly one SQL statement against two tables will be emitted - the
-displayed result however will utilize dozens of lazyloads that all
-pull from cache.
-
-Three endpoint scripts, in order of complexity, are run as follows:
-
-   python examples/beaker_caching/helloworld.py
-
-   python examples/beaker_caching/relation_caching.py
-
-   python examples/beaker_caching/advanced.py
-
-   python examples/beaker_caching/local_session_caching.py
-
-
-Listing of files:
-
-__init__.py - Establish data / cache file paths, and configurations, 
-bootstrap fixture data if necessary.
-
-meta.py - Represent persistence structures which allow the usage of
-Beaker caching with SQLAlchemy.  Introduces a query option called
-FromCache.
-
-model.py - The datamodel, which represents Person that has multiple
-Address objects, each with PostalCode, City, Country
-
-fixture_data.py - creates demo PostalCode, Address, Person objects
-in the database.
-
-helloworld.py - the basic idea.
-
-relation_caching.py - Illustrates how to add cache options on
-relation endpoints, so that lazyloads load from cache.
-
-advanced.py - Further examples of how to use FromCache.  Combines
-techniques from the first two scripts.
-
-local_session_caching.py - Grok everything so far ?   This example
-creates a new Beaker container that will persist data in a dictionary
-which is local to the current session.   remove() the session
-and the cache is gone.
\ No newline at end of file
index 40652136e5e538733b2d5e67b612d12fa6aef405..ba06c6ee8a219cfec2ccdab71949b6f2eb5c6215 100644 (file)
@@ -1,44 +1,77 @@
-"""__init__.py
+"""
+Illustrates how to embed Beaker cache functionality within
+the Query object, allowing full cache control as well as the
+ability to pull "lazy loaded" attributes from long term cache 
+as well.
 
-Establish data / cache file paths, and configurations, 
-bootstrap fixture data if necessary.
+In this demo, the following techniques are illustrated:
 
-"""
-import meta, model, fixture_data
-from sqlalchemy import create_engine
-import os
-
-root = "./beaker_data/"
-
-if not os.path.exists(root):
-    raw_input("Will create datafiles in %r.\n"
-                "To reset the cache + database, delete this directory.\n"
-                "Press enter to continue.\n" % root
-                )
-    os.makedirs(root)
+* Using custom subclasses of Query
+* Basic technique of circumventing Query to pull from a 
+  custom cache source instead of the database.
+* Rudimental caching with Beaker, using "regions" which allow
+  global control over a fixed set of configurations.
+* Using custom MapperOption objects to configure options on 
+  a Query, including the ability to invoke the options 
+  deep within an object graph when lazy loads occur.
+
+E.g.::
+    
+    # query for Person objects, specifying cache
+    q = Session.query(Person).options(FromCache("default", "all_people"))
+    
+    # specify that each Person's "addresses" collection comes from
+    # cache too
+    q = q.options(FromCache("default", "by_person", Person.addresses))
+    
+    # query
+    print q.all()
     
-dbfile = os.path.join(root, "beaker_demo.db")
-engine = create_engine('sqlite:///%s' % dbfile, echo=True)
-meta.Session.configure(bind=engine)
-
-# configure the "default" cache region.
-meta.cache_manager.regions['default'] ={
-
-        # using type 'file' to illustrate
-        # serialized persistence.  In reality,
-        # use memcached.   Other backends
-        # are much, much slower.
-        'type':'file',
-        'data_dir':root,
-        'expire':3600,
-        
-        # set start_time to current time
-        # to re-cache everything
-        # upon application startup
-        #'start_time':time.time()
-    }
-
-installed = False
-if not os.path.exists(dbfile):
-    fixture_data.install()
-    installed = True
\ No newline at end of file
+To run, both SQLAlchemy and Beaker (1.4 or greater) must be
+installed or on the current PYTHONPATH. The demo will create a local
+directory for datafiles, insert initial data, and run. Running the
+demo a second time will utilize the cache files already present, and
+exactly one SQL statement against two tables will be emitted - the
+displayed result however will utilize dozens of lazyloads that all
+pull from cache.
+
+Three endpoint scripts, in order of complexity, are run as follows::
+
+   python examples/beaker_caching/helloworld.py
+
+   python examples/beaker_caching/relation_caching.py
+
+   python examples/beaker_caching/advanced.py
+
+   python examples/beaker_caching/local_session_caching.py
+
+
+Listing of files:
+
+    environment.py - Establish data / cache file paths, and configurations, 
+    bootstrap fixture data if necessary.
+
+    meta.py - Represent persistence structures which allow the usage of
+    Beaker caching with SQLAlchemy.  Introduces a query option called
+    FromCache.
+
+    model.py - The datamodel, which represents Person that has multiple
+    Address objects, each with PostalCode, City, Country
+
+    fixture_data.py - creates demo PostalCode, Address, Person objects
+    in the database.
+
+    helloworld.py - the basic idea.
+
+    relation_caching.py - Illustrates how to add cache options on
+    relation endpoints, so that lazyloads load from cache.
+
+    advanced.py - Further examples of how to use FromCache.  Combines
+    techniques from the first two scripts.
+
+    local_session_caching.py - Grok everything so far ?   This example
+    creates a new Beaker container that will persist data in a dictionary
+    which is local to the current session.   remove() the session
+    and the cache is gone.
+
+"""
index 6a8db082c53244f636ebaad33f875ab1ee89533d..ebb17cfe53de0d76a04f26ade3e4a5beca428cb0 100644 (file)
@@ -6,7 +6,7 @@ and collection caching.
 
 """
 
-import __init__ # if running as a script
+import environment
 from model import Person, Address, cache_address_bits
 from meta import Session, FromCache
 from sqlalchemy.orm import eagerload
diff --git a/examples/beaker_caching/environment.py b/examples/beaker_caching/environment.py
new file mode 100644 (file)
index 0000000..cdf1794
--- /dev/null
@@ -0,0 +1,44 @@
+"""environment.py
+
+Establish data / cache file paths, and configurations, 
+bootstrap fixture data if necessary.
+
+"""
+import meta, model, fixture_data
+from sqlalchemy import create_engine
+import os
+
+root = "./beaker_data/"
+
+if not os.path.exists(root):
+    raw_input("Will create datafiles in %r.\n"
+                "To reset the cache + database, delete this directory.\n"
+                "Press enter to continue.\n" % root
+                )
+    os.makedirs(root)
+    
+dbfile = os.path.join(root, "beaker_demo.db")
+engine = create_engine('sqlite:///%s' % dbfile, echo=True)
+meta.Session.configure(bind=engine)
+
+# configure the "default" cache region.
+meta.cache_manager.regions['default'] ={
+
+        # using type 'file' to illustrate
+        # serialized persistence.  In reality,
+        # use memcached.   Other backends
+        # are much, much slower.
+        'type':'file',
+        'data_dir':root,
+        'expire':3600,
+        
+        # set start_time to current time
+        # to re-cache everything
+        # upon application startup
+        #'start_time':time.time()
+    }
+
+installed = False
+if not os.path.exists(dbfile):
+    fixture_data.install()
+    installed = True
\ No newline at end of file
index 33454cf80bc4072fdfbdcb779efe938d08dbdd9f..3d37777d7c810c90e9cf207d646b08419b51c4b2 100644 (file)
@@ -4,7 +4,7 @@ Illustrate how to load some data, and cache the results.
 
 """
 
-import __init__ # if running as a script
+import environment
 from model import Person
 from meta import Session, FromCache
 
index c422600d87f17dbe76a8206d4b3bd6d8afc54057..b5ce31483750a731b5d311dd1c93c22b2ee2dda4 100644 (file)
@@ -53,7 +53,7 @@ class ScopedSessionNamespace(container.MemoryNamespaceManager):
 
 
 if __name__ == '__main__':
-    import __init__ # if running as a script
+    import environment
     import meta
     
     # create a Beaker container type called "ext:local_session".
index b4508ba1ec2abb8e69851c80cbfcf02dbf05e85a..2aeb87bc0cd7b6132e7c0ecf360da73acb3a07e5 100644 (file)
@@ -5,7 +5,7 @@ related PostalCode, City, Country objects should be pulled from long
 term cache.
 
 """
-import __init__ # if running as a script
+import environment
 from model import Person, Address, cache_address_bits
 from meta import Session
 from sqlalchemy.orm import eagerload
@@ -22,4 +22,4 @@ print "\n\nIf this was the first run of relation_caching.py, SQL was likely emit
         "To clear the cache, delete the directory %r.  \n"\
         "This will cause a re-load of cities, postal codes and countries on "\
         "the next run.\n"\
-        % os.path.join(__init__.root, 'container_file')
+        % os.path.join(environment.root, 'container_file')
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..6f7613e5cf6b6567f1c1a4b99fad7107de3c05d8 100644 (file)
@@ -0,0 +1,8 @@
+"""
+Two examples illustrating modifications to SQLAlchemy's attribute management system.
+
+``listen_for_events.py`` illustrates the usage of :class:`~sqlalchemy.orm.interfaces.AttributeExtension` to intercept attribute events.  It additionally illustrates a way to automatically attach these listeners to all class attributes using a :class:`~sqlalchemy.orm.interfaces.InstrumentationManager`.
+
+``custom_management.py`` illustrates much deeper usage of :class:`~sqlalchemy.orm.interfaces.InstrumentationManager` as well as collection adaptation, to completely change the underlying method used to store state on an object.   This example was developed to illustrate techniques which would be used by other third party object instrumentation systems to interact with SQLAlchemy's event system and is only intended for very intricate framework integrations.
+
+"""
\ No newline at end of file
index b8ea70dfd57a88999677646efe2aa005570a3cb1..3c80183e27e81ab29845890f7d6cb61c0b99443c 100644 (file)
@@ -82,6 +82,7 @@ class MyClass(object):
 
 class MyCollectionAdapter(object):
     """An wholly alternative instrumentation implementation."""
+    
     def __init__(self, key, state, collection):
         self.key = key
         self.state = state
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..98c946fca63a4faa24366b9fe7bc2aa89e52329b 100644 (file)
@@ -0,0 +1,10 @@
+"""Illustrates a clever technique using Python descriptors to create custom attributes representing SQL expressions when used at the class level, and Python expressions when used at the instance level.   In some cases this technique replaces the need to configure the attribute in the mapping, instead relying upon ordinary Python behavior to create custom expression components.
+
+E.g.::
+
+    class BaseInterval(object):
+        @hybrid
+        def contains(self,point):
+            return (self.start <= point) & (point < self.end)
+
+"""
index 4a1618985402c617162d821cb74febbd0eafdb66..15af7b2adb65645e9fe72e02f6e191f6b49ce82c 100644 (file)
@@ -1,6 +1,3 @@
-"""A couple of helper descriptors to allow to use the same code as query
-criterion creators and as instance code. As this doesn't do advanced
-magic recompiling, you can only use basic expression-like code."""
 
 import new
 
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..69ac409522a8f33a57545ff2a2e1dccb324b59cd 100644 (file)
@@ -0,0 +1,5 @@
+"""Illustrates how to place a dictionary-like facade on top of a "dynamic" relation, so
+that dictionary operations (assuming simple string keys) can operate upon a large 
+collection without loading the full collection at once.
+
+"""
\ No newline at end of file
index f58c19c00b59f96dc6e983e3c206526c2a883d4e..d94c2426ac0d0197f4087ee79ef98ea7d1cb25da 100644 (file)
@@ -1,12 +1,3 @@
-"""Illustrates how to place a dictionary-like facade on top of a dynamic_loader, so\r
-that dictionary operations (assuming simple string keys) can operate upon a large \r
-collection without loading the full collection at once.\r
-\r
-Similar approaches could be taken towards sets and dictionaries with non-string keys \r
-although the hash policy of the members would need to be distilled into a filter() criterion.\r
-\r
-"""\r
-\r
 class ProxyDict(object):\r
     def __init__(self, parent, collection_name, childclass, keyname):\r
         self.parent = parent\r
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..70554f5c90e61f01c024c03fb26ae1573f9bbd20 100644 (file)
@@ -0,0 +1,30 @@
+"""
+Illustrates three strategies for persisting and querying XML documents as represented by 
+ElementTree in a relational database.   The techniques do not apply any mappings to the ElementTree objects directly, so are compatible with the native cElementTree as well as lxml, and can be adapted to suit any kind of DOM representation system.   Querying along xpath-like strings is illustrated as well.
+
+In order of complexity:
+
+* ``pickle.py`` - Quick and dirty, serialize the whole DOM into a BLOB column.  While the example
+  is very brief, it has very limited functionality.
+* ``adjacency_list.py`` - Each DOM node is stored in an individual table row, with attributes
+  represented in a separate table.  The nodes are associated in a hierarchy using an adjacency list
+  structure.  A query function is introduced which can search for nodes along any path with a given
+  structure of attributes, basically a (very narrow) subset of xpath.
+* ``optimized_al.py`` - Uses the same strategy as ``adjacency_list.py``, but adds a
+  :class:`~sqlalchemy.orm.interfaces.MapperExtension` which optimizes how the hierarchical structure  
+  is loaded, such that the full set of DOM nodes are loaded within a single table result set, and
+  are organized hierarchically as they are received during a load.
+
+E.g.::
+    
+    # parse an XML file and persist in the database
+    doc = ElementTree.parse("test.xml")
+    session.add(Document(file, doc))
+    session.commit()
+    
+    # locate documents with a certain path/attribute structure 
+    for document in find_document('/somefile/header/field2[@attr=foo]'):
+        # dump the XML
+        print document
+    
+"""
\ No newline at end of file
index 58156dcb6776060b406b8da46281447e72f15b8f..d2151d6efe537a2ee91f541e84fdb5ebfea44554 100644 (file)
@@ -13,16 +13,6 @@ from sqlalchemy.orm import mapper, relation, create_session, lazyload
 
 import sys, os, StringIO, re
 
-import logging
-logging.basicConfig()
-
-# uncomment to show SQL statements
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
-# uncomment to show SQL statements and result sets
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-
-
 from xml.etree import ElementTree
 
 meta = MetaData()
index c03acee1d4ff7faa7fddad4562a0353b0dd85b9a..dcc3c00ba650aad369424a5cc1bea21bb99e4e07 100644 (file)
@@ -12,16 +12,6 @@ from sqlalchemy.orm import mapper, relation, create_session, lazyload
 
 import sys, os, StringIO, re
 
-import logging
-logging.basicConfig()
-
-# uncomment to show SQL statements
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
-# uncomment to show SQL statements and result sets
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-
-
 from xml.etree import ElementTree
 
 meta = MetaData()
index 2176512cf1d8c5532987fdb4b72b37f6997a1b7a..4eaaa2f8d8c44660fd93ad8f791e97d320faa72a 100644 (file)
@@ -12,15 +12,6 @@ from sqlalchemy.orm import mapper, create_session
 
 import sys, os
 
-import logging
-logging.basicConfig()
-
-# uncomment to show SQL statements
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
-# uncomment to show SQL statements and result sets
-#logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
-
 from xml.etree import ElementTree
 
 engine = create_engine('sqlite://')
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..28a064baddac56dcaf4ebcd6994bdf9017ca9246 100644 (file)
@@ -0,0 +1,9 @@
+"""
+An example of persistence for a directed graph structure.   The graph is stored as a collection of edges, each referencing both a "lower" and an "upper" node in a table of nodes.  Basic persistence and querying for lower- and upper- neighbors are illustrated::
+
+    n2 = Node(2)
+    n5 = Node(5)
+    n2.add_neighbor(n5)
+    print n2.higher_neighbors()
+
+"""
\ No newline at end of file
diff --git a/examples/inheritance/__init__.py b/examples/inheritance/__init__.py
new file mode 100644 (file)
index 0000000..a3b8546
--- /dev/null
@@ -0,0 +1,4 @@
+"""
+Working examples of single-table, joined-table, and concrete-table inheritance as described in :ref:`datamapping_inheritance`.
+
+"""
\ No newline at end of file
similarity index 96%
rename from examples/polymorph/polymorph.py
rename to examples/inheritance/polymorph.py
index 60ef98f57dc19e1949de30a388a37e858dcd15cf..4cf13985b4273892cb5d4c81c2ea9eb7feb05c60 100644 (file)
@@ -56,8 +56,6 @@ class Company(object):
         return "Company %s" % self.name
 
 
-person_join = people.outerjoin(engineers).outerjoin(managers)
-
 person_mapper = mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person')
 mapper(Engineer, engineers, inherits=person_mapper, polymorphic_identity='engineer')
 mapper(Manager, managers, inherits=person_mapper, polymorphic_identity='manager')
@@ -75,7 +73,6 @@ c.employees.append(Engineer(name='wally', status='CGG', engineer_name='engineer2
 c.employees.append(Manager(name='jsmith', status='ABA', manager_name='manager2'))
 session.add(c)
 
-print session.new
 session.flush()
 session.expunge_all()
 
@@ -96,7 +93,7 @@ session.expunge_all()
 
 c = session.query(Company).get(1)
 for e in c.employees:
-    print e, e._sa_instance_state.key
+    print e
 
 session.delete(c)
 session.flush()
similarity index 97%
rename from examples/polymorph/single.py
rename to examples/inheritance/single.py
index 5a4f5c841357f83f586c92bff4045f86f425b3d6..00feaefe8ccfbfd6aae79db9b5ebaa5f32c0f0cd 100644 (file)
@@ -45,8 +45,6 @@ person_mapper = mapper(Person, employees_table, polymorphic_on=employees_table.c
 manager_mapper = mapper(Manager, inherits=person_mapper, polymorphic_identity='manager')
 engineer_mapper = mapper(Engineer, inherits=person_mapper, polymorphic_identity='engineer')
 
-
-
 mapper(Company, companies, properties={
     'employees': relation(Person, lazy=True, backref='company')
 })
@@ -65,7 +63,7 @@ session.expunge_all()
 
 c = session.query(Company).get(1)
 for e in c.employees:
-    print e, e._sa_instance_state.key, e.company
+    print e, e.company
 
 print "\n"
 
@@ -80,7 +78,7 @@ session.expunge_all()
 
 c = session.query(Company).get(1)
 for e in c.employees:
-    print e, e._sa_instance_state.key
+    print e
 
 session.delete(c)
 session.flush()
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..a6c6179b81467c75de7d1424d1a1f563ec873780 100644 (file)
@@ -0,0 +1,8 @@
+"""Large collection example.
+
+Illustrates the options to use with :func:`~sqlalchemy.orm.relation()` when the list of related objects is very large, including:
+
+* "dynamic" relations which query slices of data as accessed
+* how to use ON DELETE CASCADE in conjunction with ``passive_deletes=True`` to greatly improve the performance of related collection deletion.
+
+"""
index 4d98eed2b4a961debaed1df13cf2c4d2d8c7fe0f..c6adf131068e5a75caf16bbf80fa8aa3ac1ce1b4 100644 (file)
@@ -1,9 +1,3 @@
-"""Large collection example.
-
-Illustrates the options to use on relation() when the list of related objects
-is very large.
-
-"""
 
 from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
                         create_engine)
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..b730f71734c49b7fe2f4b87684a74fca003e2c1c 100644 (file)
@@ -0,0 +1,4 @@
+"""
+Illustrates a rudimentary way to implement the "nested sets" pattern for hierarchical data using the SQLAlchemy ORM.
+
+"""
\ No newline at end of file
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..19836a742ec41be7fc8279b8216b255f8ecd9c70 100644 (file)
@@ -0,0 +1,10 @@
+"""
+Illustrates polymorphic associations, a method of associating a particular child object with many different types of parent object.
+
+This example is based off the original blog post at `<http://techspot.zzzeek.org/?p=13>`_ and illustrates three techniques:
+
+* ``poly_assoc.py`` - imitates the non-foreign-key schema used by Ruby on Rails' Active Record.
+* ``poly_assoc_fk.py`` - Adds a polymorphic association table so that referential integrity can be maintained.
+* ``poly_assoc_generic.py`` - further automates the approach of ``poly_assoc_fk.py`` to also generate the association table definitions automatically.
+
+"""
\ No newline at end of file
diff --git a/examples/polymorph/__init__.py b/examples/polymorph/__init__.py
deleted file mode 100644 (file)
index e69de29..0000000
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..1a351f7ae0c8f45895402a943730abe678f15f3f 100644 (file)
@@ -0,0 +1,34 @@
+"""A naive example illustrating techniques to help 
+embed PostGIS functionality.
+
+This example was originally developed in the hopes that it would be extrapolated into a comprehensive PostGIS integration layer.  We are pleased to announce that this has come to fruition as `GeoAlchemy <http://www.geoalchemy.org/>`_.
+
+The example illustrates:
+
+* a DDL extension which allows CREATE/DROP to work in 
+  conjunction with AddGeometryColumn/DropGeometryColumn
+  
+* a Geometry type, as well as a few subtypes, which
+  convert result row values to a GIS-aware object,
+  and also integrates with the DDL extension.
+
+* a GIS-aware object which stores a raw geometry value
+  and provides a factory for functions such as AsText().
+  
+* an ORM comparator which can override standard column
+  methods on mapped objects to produce GIS operators.
+  
+* an attribute event listener that intercepts strings
+  and converts to GeomFromText().
+  
+* a standalone operator example.
+
+The implementation is limited to only public, well known
+and simple to use extension points. 
+
+E.g.::
+
+    print session.query(Road).filter(Road.road_geom.intersects(r1.road_geom)).all()
+
+"""
+
index d84648a952b2930589b46c73302d2ad6ed648b55..d3f728293f25647e7b0acd1b1d1ef1ca311dd157 100644 (file)
@@ -1,39 +1,3 @@
-"""A naive example illustrating techniques to help 
-embed PostGIS functionality.
-
-The techniques here could be used by a capable developer 
-as the basis for a comprehensive PostGIS SQLAlchemy extension.
-Please note this is an entirely incomplete proof of concept
-only, and PostGIS support is *not* a supported feature 
-of SQLAlchemy.
-
-Includes:
-
- * a DDL extension which allows CREATE/DROP to work in 
-   conjunction with AddGeometryColumn/DropGeometryColumn
-   
- * a Geometry type, as well as a few subtypes, which
-   convert result row values to a GIS-aware object,
-   and also integrates with the DDL extension.
-
- * a GIS-aware object which stores a raw geometry value
-   and provides a factory for functions such as AsText().
-   
- * an ORM comparator which can override standard column
-   methods on mapped objects to produce GIS operators.
-   
- * an attribute event listener that intercepts strings
-   and converts to GeomFromText().
-   
- * a standalone operator example.
-
-The implementation is limited to only public, well known
-and simple to use extension points. Future SQLAlchemy
-expansion points may allow more seamless integration of
-some features.
-
-"""
-
 from sqlalchemy.orm.interfaces import AttributeExtension
 from sqlalchemy.orm.properties import ColumnProperty
 from sqlalchemy.types import TypeEngine
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..19f6db2459987e1cd5b8d49eca421ece265ac932 100644 (file)
@@ -0,0 +1,20 @@
+"""a basic example of using the SQLAlchemy Sharding API.
+Sharding refers to horizontally scaling data across multiple
+databases.
+
+The basic components of a "sharded" mapping are:
+
+* multiple databases, each assigned a 'shard id'
+* a function which can return a single shard id, given an instance
+  to be saved; this is called "shard_chooser"
+* a function which can return a list of shard ids which apply to a particular
+  instance identifier; this is called "id_chooser".  If it returns all shard ids,
+  all shards will be searched.
+* a function which can return a list of shard ids to try, given a particular 
+  Query ("query_chooser").  If it returns all shard ids, all shards will be 
+  queried and the results joined together.
+
+In this example, four sqlite databases will store information about
+weather data on a database-per-continent basis.  We provide example shard_chooser, id_chooser and query_chooser functions.  The query_chooser illustrates inspection of the SQL expression element in order to attempt to determine a single shard being requested.
+
+"""
index 2ac0c88ff8cb8ae8797a5080111bf2f91b9867fa..89d4243fc96929222d1840cedbd3812e0644827c 100644 (file)
@@ -1,21 +1,3 @@
-"""a basic example of using the SQLAlchemy Sharding API.
-Sharding refers to horizontally scaling data across multiple
-databases.
-
-In this example, four sqlite databases will store information about
-weather data on a database-per-continent basis.
-
-To set up a sharding system, you need:
-    1. multiple databases, each assined a 'shard id'
-    2. a function which can return a single shard id, given an instance
-    to be saved; this is called "shard_chooser"
-    3. a function which can return a list of shard ids which apply to a particular
-    instance identifier; this is called "id_chooser".  If it returns all shard ids,
-    all shards will be searched.
-    4. a function which can return a list of shard ids to try, given a particular 
-    Query ("query_chooser").  If it returns all shard ids, all shards will be 
-    queried and the results joined together.
-"""
 
 # step 1. imports
 from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
diff --git a/examples/versioning/__init__.py b/examples/versioning/__init__.py
new file mode 100644 (file)
index 0000000..f0e1cf5
--- /dev/null
@@ -0,0 +1,67 @@
+"""
+Illustrates an extension which creates version tables for entities and stores records for each change.  The same idea as Elixir's versioned extension, but more efficient (uses attribute API to get history) and handles class inheritance.  The given extensions generate an anonymous "history" class which represents historical versions of the target object.   
+
+Usage is illustrated via a unit test module ``test_versioning.py``, which can be run via nose::
+
+    nosetests -w examples/versioning/
+
+A fragment of example usage, using declarative::
+
+    from history_meta import VersionedMeta, VersionedListener
+
+    Base = declarative_base(metaclass=VersionedMeta, bind=engine)
+    Session = sessionmaker(extension=VersionedListener())
+
+    class SomeClass(Base):
+        __tablename__ = 'sometable'
+    
+        id = Column(Integer, primary_key=True)
+        name = Column(String(50))
+    
+        def __eq__(self, other):
+            assert type(other) is SomeClass and other.id == self.id
+        
+    sess = Session()
+    sc = SomeClass(name='sc1')
+    sess.add(sc)
+    sess.commit()
+
+    sc.name = 'sc1modified'
+    sess.commit()
+
+    assert sc.version == 2
+
+    SomeClassHistory = SomeClass.__history_mapper__.class_
+
+    assert sess.query(SomeClassHistory).\\
+                filter(SomeClassHistory.version == 1).\\
+                all() \\
+                == [SomeClassHistory(version=1, name='sc1')]
+
+To apply ``VersionedMeta`` to a subset of classes (probably more typical), the metaclass can be applied on a per-class basis::
+
+    from history_meta import VersionedMeta, VersionedListener
+
+    Base = declarative_base(bind=engine)
+
+    class SomeClass(Base):
+        __tablename__ = 'sometable'
+
+        # ...
+
+    class SomeVersionedClass(Base):
+        __metaclass__ = VersionedMeta
+        __tablename__ = 'someothertable'
+
+        # ...
+
+The ``VersionedMeta`` is a declarative metaclass - to use the extension with plain mappers, the ``_history_mapper`` function can be applied::
+
+    from history_meta import _history_mapper
+
+    m = mapper(SomeClass, sometable)
+    _history_mapper(m)
+
+    SomeHistoryClass = SomeClass.__history_mapper__.class_
+
+"""
\ No newline at end of file
diff --git a/examples/versioning/history_meta.py b/examples/versioning/history_meta.py
new file mode 100644 (file)
index 0000000..8a91679
--- /dev/null
@@ -0,0 +1,164 @@
+from sqlalchemy.ext.declarative import DeclarativeMeta
+from sqlalchemy.orm import mapper, class_mapper, attributes, object_mapper
+from sqlalchemy.orm.exc import UnmappedClassError, UnmappedColumnError
+from sqlalchemy import Table, Column, ForeignKeyConstraint, Integer
+from sqlalchemy.orm.interfaces import SessionExtension
+
+def col_references_table(col, table):
+    for fk in col.foreign_keys:
+        if fk.references(table):
+            return True
+    return False
+
+def _history_mapper(local_mapper):
+    cls = local_mapper.class_
+
+    # SLIGHT SQLA HACK #1 - set the "active_history" flag
+    # on column-mapped attributes so that the old version
+    # of the info is always loaded (currently sets it on all attributes)
+    for prop in local_mapper.iterate_properties:
+        getattr(local_mapper.class_, prop.key).impl.active_history = True
+    
+    super_mapper = local_mapper.inherits
+    super_history_mapper = getattr(cls, '__history_mapper__', None)
+    
+    polymorphic_on = None
+    super_fks = []
+    if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
+        cols = []
+        for column in local_mapper.local_table.c:
+            if column.name == 'version':
+                continue
+                
+            col = column.copy()
+
+            if super_mapper and col_references_table(column, super_mapper.local_table):
+                super_fks.append((col.key, list(super_history_mapper.base_mapper.local_table.primary_key)[0]))
+
+            cols.append(col)
+            
+            if column is local_mapper.polymorphic_on:
+                polymorphic_on = col
+        
+        if super_mapper:
+            super_fks.append(('version', super_history_mapper.base_mapper.local_table.c.version))
+            cols.append(Column('version', Integer, primary_key=True))
+        else:
+            cols.append(Column('version', Integer, primary_key=True))
+        
+        if super_fks:
+            cols.append(ForeignKeyConstraint(*zip(*super_fks)))
+
+        table = Table(local_mapper.local_table.name + '_history', local_mapper.local_table.metadata,
+           *cols
+        )
+    else:
+        # single table inheritance.  take any additional columns that may have
+        # been added and add them to the history table.
+        for column in local_mapper.local_table.c:
+            if column.key not in super_history_mapper.local_table.c:
+                col = column.copy()
+                super_history_mapper.local_table.append_column(col)
+        table = None
+        
+    if super_history_mapper:
+        bases = (super_history_mapper.class_,)
+    else:
+        bases = local_mapper.base_mapper.class_.__bases__
+    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})
+    
+    m = mapper(
+            versioned_cls, 
+            table, 
+            inherits=super_history_mapper, 
+            polymorphic_on=polymorphic_on,
+            polymorphic_identity=local_mapper.polymorphic_identity
+            )
+    cls.__history_mapper__ = m
+    
+    if not super_history_mapper:
+        cls.version = Column('version', Integer, default=1, nullable=False)
+    
+    
+class VersionedMeta(DeclarativeMeta):
+    def __init__(cls, classname, bases, dict_):
+        DeclarativeMeta.__init__(cls, classname, bases, dict_)
+
+        try:
+            mapper = class_mapper(cls)
+            _history_mapper(mapper)
+        except UnmappedClassError:
+            pass
+
+
+def versioned_objects(iter):
+    for obj in iter:
+        if hasattr(obj, '__history_mapper__'):
+            yield obj
+
+def create_version(obj, session, deleted = False):
+    obj_mapper = object_mapper(obj)
+    history_mapper = obj.__history_mapper__
+    history_cls = history_mapper.class_
+    
+    obj_state = attributes.instance_state(obj)
+    
+    attr = {}
+
+    obj_changed = False
+    
+    for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
+        if hm.single:
+            continue
+    
+        for hist_col in hm.local_table.c:
+            if hist_col.key == 'version':
+                continue
+                
+            obj_col = om.local_table.c[hist_col.key]
+
+            # SLIGHT SQLA HACK #3 - get the value of the
+            # attribute based on the MapperProperty related to the
+            # mapped column.  this will allow usage of MapperProperties
+            # that have a different keyname than that of the mapped column.
+            try:
+                prop = obj_mapper._get_col_to_prop(obj_col)
+            except UnmappedColumnError:
+                # in the case of single table inheritance, there may be 
+                # columns on the mapped table intended for the subclass only.
+                # the "unmapped" status of the subclass column on the 
+                # base class is a feature of the declarative module as of sqla 0.5.2.
+                continue
+                
+            # expired object attributes and also deferred cols might not be in the
+            # dict.  force it to load no matter what by using getattr().
+            if prop.key not in obj_state.dict:
+                getattr(obj, prop.key)
+
+            a, u, d = attributes.get_history(obj, prop.key)
+
+            if d:
+                attr[hist_col.key] = d[0]
+                obj_changed = True
+            elif u:
+                attr[hist_col.key] = u[0]
+            else:
+                raise Exception("TODO: what makes us arrive here ?")
+                
+    if not obj_changed and not deleted:            
+        return
+
+    attr['version'] = obj.version
+    hist = history_cls()
+    for key, value in attr.iteritems():
+        setattr(hist, key, value)
+    session.add(hist)
+    obj.version += 1
+    
+class VersionedListener(SessionExtension):
+    def before_flush(self, session, flush_context, instances):
+        for obj in versioned_objects(session.dirty):
+            create_version(obj, session)
+        for obj in versioned_objects(session.deleted):
+            create_version(obj, session, deleted = True)
+            
diff --git a/examples/versioning/test_versioning.py b/examples/versioning/test_versioning.py
new file mode 100644 (file)
index 0000000..1a72a06
--- /dev/null
@@ -0,0 +1,248 @@
+from sqlalchemy.ext.declarative import declarative_base
+from history_meta import VersionedMeta, VersionedListener
+from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
+from sqlalchemy.orm import clear_mappers, compile_mappers, sessionmaker, deferred
+from sqlalchemy.test.testing import TestBase, eq_
+from sqlalchemy.test.entities import ComparableEntity
+
+def setup():
+    global engine
+    engine = create_engine('sqlite://', echo=True)
+    
+class TestVersioning(TestBase):
+    def setup(self):
+        global Base, Session
+        Base = declarative_base(metaclass=VersionedMeta, bind=engine)
+        Session = sessionmaker(extension=VersionedListener())
+        
+    def teardown(self):
+        clear_mappers()
+        Base.metadata.drop_all()
+    
+    def create_tables(self):
+        Base.metadata.create_all()
+        
+    def test_plain(self):
+        class SomeClass(Base, ComparableEntity):
+            __tablename__ = 'sometable'
+            
+            id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+        
+        self.create_tables()
+        sess = Session()
+        sc = SomeClass(name='sc1')
+        sess.add(sc)
+        sess.commit()
+        
+        sc.name = 'sc1modified'
+        sess.commit()
+        
+        assert sc.version == 2
+        
+        SomeClassHistory = SomeClass.__history_mapper__.class_
+        
+        eq_(
+            sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+            [SomeClassHistory(version=1, name='sc1')]
+        )
+
+        sc.name = 'sc1modified2'
+
+        eq_(
+            sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+            [
+                SomeClassHistory(version=1, name='sc1'),
+                SomeClassHistory(version=2, name='sc1modified')
+            ]
+        )
+
+        assert sc.version == 3
+        
+        sess.commit()
+
+        sc.name = 'temp'
+        sc.name = 'sc1modified2'
+
+        sess.commit()
+
+        eq_(
+            sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+            [
+                SomeClassHistory(version=1, name='sc1'),
+                SomeClassHistory(version=2, name='sc1modified')
+            ]
+        )
+        
+        sess.delete(sc)
+        sess.commit()
+
+        eq_(
+            sess.query(SomeClassHistory).order_by(SomeClassHistory.version).all(),
+            [
+                SomeClassHistory(version=1, name='sc1'),
+                SomeClassHistory(version=2, name='sc1modified'),
+                SomeClassHistory(version=3, name='sc1modified2')
+            ]
+        )
+
+
+
+
+    def test_deferred(self):
+        """test versioning of unloaded, deferred columns."""
+        
+        class SomeClass(Base, ComparableEntity):
+            __tablename__ = 'sometable'
+
+            id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            data = deferred(Column(String(25)))
+            
+        self.create_tables()
+        sess = Session()
+        sc = SomeClass(name='sc1', data='somedata')
+        sess.add(sc)
+        sess.commit()
+        sess.close()
+        
+        sc = sess.query(SomeClass).first()
+        assert 'data' not in sc.__dict__
+        
+        sc.name = 'sc1modified'
+        sess.commit()
+
+        assert sc.version == 2
+
+        SomeClassHistory = SomeClass.__history_mapper__.class_
+
+        eq_(
+            sess.query(SomeClassHistory).filter(SomeClassHistory.version == 1).all(),
+            [SomeClassHistory(version=1, name='sc1', data='somedata')]
+        )
+        
+        
+    def test_joined_inheritance(self):
+        class BaseClass(Base, ComparableEntity):
+            __tablename__ = 'basetable'
+
+            id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            type = Column(String(20))
+            
+            __mapper_args__ = {'polymorphic_on':type, 'polymorphic_identity':'base'}
+            
+        class SubClassSeparatePk(BaseClass):
+            __tablename__ = 'subtable1'
+
+            id = Column(Integer, primary_key=True)
+            base_id = Column(Integer, ForeignKey('basetable.id'))
+            subdata1 = Column(String(50))
+
+            __mapper_args__ = {'polymorphic_identity':'sep'}
+
+        class SubClassSamePk(BaseClass):
+            __tablename__ = 'subtable2'
+
+            id = Column(Integer, ForeignKey('basetable.id'), primary_key=True)
+            subdata2 = Column(String(50))
+
+            __mapper_args__ = {'polymorphic_identity':'same'}
+
+        self.create_tables()
+        sess = Session()
+
+        sep1 = SubClassSeparatePk(name='sep1', subdata1='sep1subdata')
+        base1 = BaseClass(name='base1')
+        same1 = SubClassSamePk(name='same1', subdata2='same1subdata')
+        sess.add_all([sep1, base1, same1])
+        sess.commit()
+        
+        base1.name = 'base1mod'
+        same1.subdata2 = 'same1subdatamod'
+        sep1.name ='sep1mod'
+        sess.commit()
+
+        BaseClassHistory = BaseClass.__history_mapper__.class_
+        SubClassSeparatePkHistory = SubClassSeparatePk.__history_mapper__.class_
+        SubClassSamePkHistory = SubClassSamePk.__history_mapper__.class_
+        eq_(
+            sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
+            [
+                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), 
+                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), 
+                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1)            
+            ]
+        )
+        
+        same1.subdata2 = 'same1subdatamod2'
+
+        eq_(
+            sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+            [
+                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), 
+                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), 
+                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), 
+                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)            
+            ]
+        )
+
+        base1.name = 'base1mod2'
+        eq_(
+            sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+            [
+                SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1), 
+                BaseClassHistory(id=2, name=u'base1', type=u'base', version=1), 
+                BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2), 
+                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1), 
+                SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)            
+            ]
+        )
+
+    def test_single_inheritance(self):
+        class BaseClass(Base, ComparableEntity):
+            __tablename__ = 'basetable'
+
+            id = Column(Integer, primary_key=True)
+            name = Column(String(50))
+            type = Column(String(50))
+            __mapper_args__ = {'polymorphic_on':type, 'polymorphic_identity':'base'}
+            
+        class SubClass(BaseClass):
+
+            subname = Column(String(50))
+            __mapper_args__ = {'polymorphic_identity':'sub'}
+
+        self.create_tables()
+        sess = Session()
+
+        b1 = BaseClass(name='b1')
+        sc = SubClass(name='s1', subname='sc1')
+        
+        sess.add_all([b1, sc])
+        
+        sess.commit()
+        
+        b1.name='b1modified'
+
+        BaseClassHistory = BaseClass.__history_mapper__.class_
+        SubClassHistory = SubClass.__history_mapper__.class_
+        
+        eq_(
+            sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+            [BaseClassHistory(id=1, name=u'b1', type=u'base', version=1)]
+        )
+        
+        sc.name ='s1modified'
+        b1.name='b1modified2'
+
+        eq_(
+            sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
+            [
+                BaseClassHistory(id=1, name=u'b1', type=u'base', version=1),
+                BaseClassHistory(id=1, name=u'b1modified', type=u'base', version=2),
+                SubClassHistory(id=2, name=u's1', type=u'sub', version=1)
+            ]
+        )
+        
+
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..61ba2228bf3e43c9b11cf794260fbf480399e974 100644 (file)
@@ -0,0 +1,27 @@
+"""
+Illustrates "vertical table" mappings.
+
+A "vertical table" refers to a technique where individual attributes of an object are stored as distinct rows in a table.   
+The "vertical table" technique is used to persist objects which can have a varied set of attributes, at the expense of simple query control and brevity.   It is commonly found in content/document management systems in order to represent user-created structures flexibly.
+
+Two variants on the approach are given.  In the second, each row references a "datatype" which contains information about the type of information stored in the attribute, such as integer, string, or date.
+
+
+Example::
+
+    shrew = Animal(u'shrew')
+    shrew[u'cuteness'] = 5
+    shrew[u'weasel-like'] = False
+    shrew[u'poisonous'] = True
+
+    session.add(shrew)
+    session.flush()
+
+    q = (session.query(Animal).
+         filter(Animal.facts.any(
+           and_(AnimalFact.key == u'weasel-like',
+                AnimalFact.value == True))))
+    print 'weasel-like animals', q.all()
+
+
+"""
\ No newline at end of file
index 6180fa3034c004415130b4dbe296b97428a01a18..a66576945ca461fdd30b9c0611d549e4ef04f096 100644 (file)
@@ -25,8 +25,6 @@ we'll use a Python @property to build a smart '.value' attribute that wraps up
 reading and writing those various '_value' columns and keeps the '.type' up to
 date.
 
-Note: Something much like 'comparable_property' is slated for inclusion in a
-      future version of SQLAlchemy.
 """
 
 from sqlalchemy.orm.interfaces import PropComparator
index 1e1635ed763ce6d0007c3877f26175f1cdd4dbbd..683eda02916d680f8d41523fc21a2a4e57b2d433 100644 (file)
@@ -28,6 +28,7 @@ entity, and another related table holding key/value pairs::
 Because the key/value pairs in a vertical scheme are not fixed in advance,
 accessing them like a Python dict can be very convenient.  The example below
 can be used with many common vertical schemas as-is or with minor adaptations.
+
 """
 
 class VerticalProperty(object):
diff --git a/examples/vertical/vertical.py b/examples/vertical/vertical.py
deleted file mode 100644 (file)
index 4a8bf77..0000000
+++ /dev/null
@@ -1,201 +0,0 @@
-"""this example illustrates a "vertical table".  an object is stored with individual attributes 
-represented in distinct database rows.  This allows objects to be created with dynamically changing
-fields that are all persisted in a normalized fashion."""
-
-from sqlalchemy import (create_engine, MetaData, Table, Column, Integer, String,
-    ForeignKey, PickleType, DateTime, and_)
-from sqlalchemy.orm import mapper, relation, sessionmaker, scoped_session
-from sqlalchemy.orm.collections import mapped_collection
-import datetime
-
-engine = create_engine('sqlite://', echo=False)
-meta = MetaData(engine)
-
-Session = scoped_session(sessionmaker())
-
-# represent Entity objects
-entities = Table('entities', meta, 
-    Column('entity_id', Integer, primary_key=True),
-    Column('title', String(100), nullable=False),
-    )
-
-# represent named, typed fields
-entity_fields = Table('entity_fields', meta,
-    Column('field_id', Integer, primary_key=True),
-    Column('name', String(40), nullable=False),
-    Column('datatype', String(30), nullable=False))
-    
-# associate a field row with an entity row, including a typed value
-entity_values = Table('entity_values', meta, 
-    Column('value_id', Integer, primary_key=True),
-    Column('field_id', Integer, ForeignKey('entity_fields.field_id'), nullable=False),
-    Column('entity_id', Integer, ForeignKey('entities.entity_id'), nullable=False),
-    Column('int_value', Integer), 
-    Column('string_value', String(500)),
-    Column('binary_value', PickleType),
-    Column('datetime_value', DateTime))
-
-meta.create_all()
-
-class Entity(object):
-    """a persistable dynamic object.  
-    
-    Marshalls attributes into a dictionary which is 
-    mapped to the database.
-    
-    """
-    def __init__(self, **kwargs):
-        for k in kwargs:
-            setattr(self, k, kwargs[k])
-            
-    def __getattr__(self, key):
-        """Proxy requests for attributes to the underlying _entities dictionary."""
-
-        if key[0] == '_':
-            return super(Entity, self).__getattr__(key)
-        try:
-            return self._entities[key].value
-        except KeyError:
-            raise AttributeError(key)
-
-    def __setattr__(self, key, value):
-        """Proxy requests for attribute set operations to the underlying _entities dictionary."""
-
-        if key[0] == "_" or hasattr(Entity, key):
-            object.__setattr__(self, key, value)
-            return
-            
-        try:
-            ev = self._entities[key]
-            ev.value = value
-        except KeyError:
-            ev = _EntityValue(key, value)
-            self._entities[key] = ev
-        
-class _EntityField(object):
-    """Represents a field of a particular name and datatype."""
-
-    def __init__(self, name, datatype):
-        self.name = name
-        self.datatype = datatype
-
-class _EntityValue(object):
-    """Represents an individual value."""
-
-    def __init__(self, key, value):
-        datatype = self._figure_datatype(value)
-        field = \
-            Session.query(_EntityField).filter(
-                and_(_EntityField.name==key, _EntityField.datatype==datatype)
-            ).first()
-
-        if not field:
-            field = _EntityField(key, datatype)
-            Session.add(field)
-        
-        self.field = field
-        setattr(self, self.field.datatype + "_value", value)
-    
-    def _figure_datatype(self, value):
-        typemap = {
-            int:'int',
-            str:'string',
-            datetime.datetime:'datetime',
-        }
-        for k in typemap:
-            if isinstance(value, k):
-                return typemap[k]
-        else:
-            return 'binary'
-
-    def _get_value(self):
-        return getattr(self, self.field.datatype + "_value")
-
-    def _set_value(self, value):
-        setattr(self, self.field.datatype + "_value", value)
-    value = property(_get_value, _set_value)
-    
-    def name(self):
-        return self.field.name
-    name = property(name)
-
-
-# the mappers are a straightforward eager chain of 
-# Entity--(1->many)->EntityValue-(many->1)->EntityField
-# notice that we are identifying each mapper to its connecting
-# relation by just the class itself.
-mapper(_EntityField, entity_fields)
-mapper(
-    _EntityValue, entity_values,
-    properties = {
-        'field' : relation(_EntityField, lazy=False, cascade='all')
-    }
-)
-
-mapper(Entity, entities, properties = {
-    '_entities' : relation(
-                        _EntityValue, 
-                        lazy=False, 
-                        cascade='all', 
-                        collection_class=mapped_collection(lambda entityvalue: entityvalue.field.name)
-                    )
-})
-
-session = Session()
-entity1 = Entity(
-    title = 'this is the first entity',
-    name = 'this is the name',
-    price = 43,
-    data = ('hello', 'there')
-)
-
-entity2 = Entity(
-    title = 'this is the second entity',
-    name = 'this is another name',
-    price = 50,
-    data = ('hoo', 'ha')
-)
-
-session.add_all([entity1, entity2])
-session.commit()
-
-for entity in session.query(Entity):
-    print "Entity id %d:" % entity.entity_id, entity.title, entity.name, entity.price, entity.data
-
-# perform some changes, add a new Entity
-
-entity1.price = 90
-entity1.title = 'another new title'
-entity2.data = {'oof':5,'lala':8}
-
-entity3 = Entity(
-    title = 'third entity',
-    name = 'new name',
-    price = '$1.95',    # note we change 'price' to be a string.
-                        # this creates a new _EntityField separate from the
-                        # one used by integer 'price'.
-    data = 'some data'
-)
-session.add(entity3)
-
-session.commit()
-
-print "----------------"
-for entity in session.query(Entity):
-    print "Entity id %d:" % entity.entity_id, entity.title, entity.name, entity.price, entity.data
-
-print "----------------"
-# illustrate each _EntityField that's been created and list each Entity which uses it
-for ent_id, name, datatype in session.query(_EntityField.field_id, _EntityField.name, _EntityField.datatype):
-    print name, datatype, "(Enitites:",  ",".join([
-        str(entid) for entid in session.query(Entity.entity_id).\
-            join(
-                (_EntityValue, _EntityValue.entity_id==Entity.entity_id), 
-                (_EntityField, _EntityField.field_id==_EntityValue.field_id)
-            ).filter(_EntityField.field_id==ent_id)
-    ]), ")"
-
-# delete all the Entity objects
-for entity in session.query(Entity):
-    session.delete(entity)
-session.commit()
index 6590864d6a239eba81ac902d7391ae9745909dbf..77e15be5a92d62bd5763dfd6cf9fd7f32a39e6e9 100644 (file)
@@ -864,7 +864,7 @@ class Session(object):
             context.append('mapper %s' % c_mapper)
         if clause is not None:
             context.append('SQL expression')
-
+        
         raise sa_exc.UnboundExecutionError(
             "Could not locate a bind configured on %s or this Session" % (
             ', '.join(context)))
diff --git a/lib/sqlalchemy/test/entities.py b/lib/sqlalchemy/test/entities.py
new file mode 100644 (file)
index 0000000..0ec677e
--- /dev/null
@@ -0,0 +1,83 @@
+import sqlalchemy as sa
+from sqlalchemy import exc as sa_exc
+
+_repr_stack = set()
+class BasicEntity(object):
+    def __init__(self, **kw):
+        for key, value in kw.iteritems():
+            setattr(self, key, value)
+
+    def __repr__(self):
+        if id(self) in _repr_stack:
+            return object.__repr__(self)
+        _repr_stack.add(id(self))
+        try:
+            return "%s(%s)" % (
+                (self.__class__.__name__),
+                ', '.join(["%s=%r" % (key, getattr(self, key))
+                           for key in sorted(self.__dict__.keys())
+                           if not key.startswith('_')]))
+        finally:
+            _repr_stack.remove(id(self))
+
+_recursion_stack = set()
+class ComparableEntity(BasicEntity):
+    def __hash__(self):
+        return hash(self.__class__)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __eq__(self, other):
+        """Deep, sparse compare.
+
+        Deeply compare two entities, following the non-None attributes of the
+        non-persisted object, if possible.
+
+        """
+        if other is self:
+            return True
+        elif not self.__class__ == other.__class__:
+            return False
+
+        if id(self) in _recursion_stack:
+            return True
+        _recursion_stack.add(id(self))
+
+        try:
+            # pick the entity thats not SA persisted as the source
+            try:
+                self_key = sa.orm.attributes.instance_state(self).key
+            except sa.orm.exc.NO_STATE:
+                self_key = None
+                
+            if other is None:
+                a = self
+                b = other
+            elif self_key is not None:
+                a = other
+                b = self
+            else:
+                a = self
+                b = other
+
+            for attr in a.__dict__.keys():
+                if attr.startswith('_'):
+                    continue
+                value = getattr(a, attr)
+
+                try:
+                    # handle lazy loader errors
+                    battr = getattr(b, attr)
+                except (AttributeError, sa_exc.UnboundExecutionError):
+                    return False
+
+                if hasattr(value, '__iter__'):
+                    if list(value) != list(battr):
+                        return False
+                else:
+                    if value is not None and value != battr:
+                        return False
+            return True
+        finally:
+            _recursion_stack.remove(id(self))
index 517492791e0893c21e52772db3272f2fb0905e56..6401118038d1591a44c580612564aabc51ccef94 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,3 +4,4 @@ tag_svn_revision = true
 
 [nosetests]
 with-sqlalchemy = true
+exclude = ^examples
\ No newline at end of file
index f08d253d57666adceeaadc52e14d01dd3fd85237..4d0031f5a80d97cd21c6e7d5018f13e6acd9a646 100644 (file)
@@ -7,91 +7,10 @@ from sqlalchemy.test import config, testing
 from sqlalchemy.test.testing import resolve_artifact_names, adict
 from sqlalchemy.test.engines import drop_all_tables
 from sqlalchemy.util import function_named
-
-
-_repr_stack = set()
-class BasicEntity(object):
-    def __init__(self, **kw):
-        for key, value in kw.iteritems():
-            setattr(self, key, value)
-
-    def __repr__(self):
-        if id(self) in _repr_stack:
-            return object.__repr__(self)
-        _repr_stack.add(id(self))
-        try:
-            return "%s(%s)" % (
-                (self.__class__.__name__),
-                ', '.join(["%s=%r" % (key, getattr(self, key))
-                           for key in sorted(self.__dict__.keys())
-                           if not key.startswith('_')]))
-        finally:
-            _repr_stack.remove(id(self))
+from sqlalchemy.test.entities import BasicEntity, ComparableEntity
 
 Entity = BasicEntity
 
-_recursion_stack = set()
-class ComparableEntity(BasicEntity):
-    def __hash__(self):
-        return hash(self.__class__)
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __eq__(self, other):
-        """'Deep, sparse compare.
-
-        Deeply compare two entities, following the non-None attributes of the
-        non-persisted object, if possible.
-
-        """
-        if other is self:
-            return True
-        elif not self.__class__ == other.__class__:
-            return False
-
-        if id(self) in _recursion_stack:
-            return True
-        _recursion_stack.add(id(self))
-
-        try:
-            # pick the entity thats not SA persisted as the source
-            try:
-                self_key = sa.orm.attributes.instance_state(self).key
-            except sa.orm.exc.NO_STATE:
-                self_key = None
-                
-            if other is None:
-                a = self
-                b = other
-            elif self_key is not None:
-                a = other
-                b = self
-            else:
-                a = self
-                b = other
-
-            for attr in a.__dict__.keys():
-                if attr.startswith('_'):
-                    continue
-                value = getattr(a, attr)
-
-                try:
-                    # handle lazy loader errors
-                    battr = getattr(b, attr)
-                except (AttributeError, sa_exc.UnboundExecutionError):
-                    return False
-
-                if hasattr(value, '__iter__'):
-                    if list(value) != list(battr):
-                        return False
-                else:
-                    if value is not None and value != battr:
-                        return False
-            return True
-        finally:
-            _recursion_stack.remove(id(self))
-
 
 class ORMTest(testing.TestBase, testing.AssertsExecutionResults):
     __requires__ = ('subqueries',)