From: Mike Bayer
Date: Sun, 6 Jan 2019 03:42:08 +0000 (-0500)
Subject: Assorted pre-Black fixes
X-Git-Tag: rel_1_3_0b2~47
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=404e69426b05a82d905cbb3ad33adafccddb00dd;p=thirdparty%2Fsqlalchemy%2Fsqlalchemy.git

Assorted pre-Black fixes

Fixes to the test suite, a few errant imports, and setup.py:

- mysql and postgresql have unused 'json' imports; remove them
- postgresql is exporting the 'json' symbol; remove it
- make sure setup.py can find __version__ using " or '
- fix the retry logic in provision's create-database step for postgresql
- refactor test_magazine to use cls.tables rather than globals
- remove an unused class in test_scoping
- add a comment to test_deprecations noting that this test suite is itself
  deprecated
- don't use both mapper() and orm_mapper() in test_unitofwork; just use
  mapper()
- remove a duplicate test_scalar_set_None test in test_attributes
- Python 2.7 and above includes unittest.SkipTest; remove the pre-2.7
  fallback
- use the imported SkipTest in profiling
- the declarative test_reflection tests that require
  "reflectable_autoincrement" already don't run on oracle or firebird;
  remove the conditional logic for these, which also removes an "id" symbol
- clean up a test in test_functions and remove a print statement
- remove the duplicate
  test_literal_processor_coercion_native_int_out_of_range in
  test/sql/test_types.py
- fix the psycopg2_hstore ref (the reST target had a stray double colon)

Change-Id: I7b3444f8546aac82be81cd1e7b6d8b2ad6834fe6
---

diff --git a/lib/sqlalchemy/dialects/mysql/base.py b/lib/sqlalchemy/dialects/mysql/base.py
index d633c2f65c..673d4b9ff8 100644
--- a/lib/sqlalchemy/dialects/mysql/base.py
+++ b/lib/sqlalchemy/dialects/mysql/base.py
@@ -736,7 +736,6 @@ output::
 from collections import defaultdict
 import re
 import sys
-import json
 
 from ... import schema as sa_schema
 from ... import exc, log, sql, util
diff --git a/lib/sqlalchemy/dialects/mysql/json.py b/lib/sqlalchemy/dialects/mysql/json.py
index 84dcefc880..534fb989d0 100644
--- a/lib/sqlalchemy/dialects/mysql/json.py
+++ b/lib/sqlalchemy/dialects/mysql/json.py
@@ -7,8 +7,6 @@
 
 from __future__ import absolute_import
 
-import json
-
 from ...sql import elements
 from ... import types as sqltypes
 from ... import util
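The stdlib 'json' imports removed above were dead code inside the dialect
modules; JSON round-tripping is configured at the engine level rather than in
these type modules.  As a rough sketch of where the standard-library module
actually comes into play on the PostgreSQL side (the connection URL is a
placeholder and not part of this change):

    import json

    from sqlalchemy import create_engine

    # json_serializer / json_deserializer are the engine-level hooks the
    # psycopg2 dialect consults when binding and loading JSON columns;
    # the DSN below is illustrative only.
    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        json_serializer=json.dumps,
        json_deserializer=json.loads,
    )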
diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py
index d2f8057b67..84f7200285 100644
--- a/lib/sqlalchemy/dialects/postgresql/__init__.py
+++ b/lib/sqlalchemy/dialects/postgresql/__init__.py
@@ -14,7 +14,7 @@ from .base import \
     TIMESTAMP, TIME, DATE, BYTEA, BOOLEAN, INTERVAL, ENUM, TSVECTOR, \
     DropEnumType, CreateEnumType
 from .hstore import HSTORE, hstore
-from .json import JSON, JSONB, json
+from .json import JSON, JSONB
 from .array import array, ARRAY, Any, All
 from .ext import aggregate_order_by, ExcludeConstraint, array_agg
 from .dml import insert, Insert
@@ -31,7 +31,7 @@ __all__ = (
    'REGCLASS', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME', 'DATE', 'BYTEA',
    'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect', 'array', 'HSTORE',
    'hstore', 'INT4RANGE', 'INT8RANGE', 'NUMRANGE', 'DATERANGE',
-    'TSRANGE', 'TSTZRANGE', 'json', 'JSON', 'JSONB', 'Any', 'All',
+    'TSRANGE', 'TSTZRANGE', 'JSON', 'JSONB', 'Any', 'All',
    'DropEnumType', 'CreateEnumType', 'ExcludeConstraint',
    'aggregate_order_by', 'array_agg', 'insert', 'Insert'
 )
diff --git a/lib/sqlalchemy/dialects/postgresql/json.py b/lib/sqlalchemy/dialects/postgresql/json.py
index 1a1367f1b3..e9256daf31 100644
--- a/lib/sqlalchemy/dialects/postgresql/json.py
+++ b/lib/sqlalchemy/dialects/postgresql/json.py
@@ -6,8 +6,6 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 from __future__ import absolute_import
-import json
-
 from .base import ischema_names, colspecs
 from ... import types as sqltypes
 from ...sql import operators
diff --git a/lib/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
index 2a949c4430..baa0e00d52 100644
--- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py
+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py
@@ -300,7 +300,7 @@ The psycopg2 dialect will log PostgreSQL NOTICE messages via the
     import logging
     logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
 
-.. _psycopg2_hstore::
+.. _psycopg2_hstore:
 
 HSTORE type
 ------------
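With the accidental 'json' re-export gone from the postgresql package, the
public JSON types are still importable exactly as before; a minimal usage
sketch (table and column names invented for illustration):

    from sqlalchemy import Column, Integer, MetaData, Table
    from sqlalchemy.dialects.postgresql import JSON, JSONB

    metadata = MetaData()

    # JSON and JSONB remain in __all__; only the stray re-export of the
    # stdlib 'json' module goes away.
    documents = Table(
        "documents",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("payload", JSON),
        Column("payload_indexed", JSONB),
    )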
diff --git a/lib/sqlalchemy/testing/config.py b/lib/sqlalchemy/testing/config.py
index dfe0da94bb..e9cfb3de93 100644
--- a/lib/sqlalchemy/testing/config.py
+++ b/lib/sqlalchemy/testing/config.py
@@ -6,6 +6,7 @@
 # the MIT License: http://www.opensource.org/licenses/mit-license.php
 
 import collections
+from unittest import SkipTest as _skip_test_exception
 
 requirements = None
 db = None
@@ -16,11 +17,6 @@ test_schema = None
 test_schema_2 = None
 _current = None
 
-try:
-    from unittest import SkipTest as _skip_test_exception
-except ImportError:
-    _skip_test_exception = None
-
 
 class Config(object):
     def __init__(self, db, db_opts, options, file_config):
diff --git a/lib/sqlalchemy/testing/profiling.py b/lib/sqlalchemy/testing/profiling.py
index 42265f7cb5..fab99b186e 100644
--- a/lib/sqlalchemy/testing/profiling.py
+++ b/lib/sqlalchemy/testing/profiling.py
@@ -206,7 +206,7 @@ def function_call_count(variance=0.05):
     @contextlib.contextmanager
     def count_functions(variance=0.05):
         if cProfile is None:
-            raise SkipTest("cProfile is not installed")
+            raise config._skip_test_exception("cProfile is not installed")
 
         if not _profile_stats.has_stats() and not _profile_stats.write:
             config.skip_test(
diff --git a/lib/sqlalchemy/testing/provision.py b/lib/sqlalchemy/testing/provision.py
index 4c749d382d..c0ca7c1cbc 100644
--- a/lib/sqlalchemy/testing/provision.py
+++ b/lib/sqlalchemy/testing/provision.py
@@ -170,11 +170,16 @@ def _pg_create_db(cfg, eng, ident):
                 pass
         if not template_db:
             template_db = conn.scalar("select current_database()")
-        for attempt in range(3):
+
+        attempt = 0
+        while True:
             try:
                 conn.execute(
                     "CREATE DATABASE %s TEMPLATE %s" % (ident, template_db))
             except exc.OperationalError as err:
+                attempt += 1
+                if attempt >= 3:
+                    raise
                 if "accessed by other users" in str(err):
                     log.info(
                         "Waiting to create %s, URI %r, "
@@ -183,8 +188,6 @@
                 time.sleep(.5)
             else:
                 break
-        else:
-            raise err
 
 
 @_create_db.for_db("mysql")
diff --git a/setup.py b/setup.py
index 523a5db0d1..909a4ebdaf 100644
--- a/setup.py
+++ b/setup.py
@@ -114,7 +114,7 @@ with open(
         os.path.dirname(__file__),
         'lib', 'sqlalchemy', '__init__.py')) as v_file:
     VERSION = re.compile(
-        r".*__version__ = '(.*?)'",
+        r""".*__version__ = ["'](.*?)['"]""",
        re.S).match(v_file.read()).group(1)
 
 with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as r_file:
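The widened setup.py pattern accepts either quoting style around the version
string.  A standalone check of the new expression (sample strings invented for
illustration):

    import re

    # same pattern as the new setup.py line
    VERSION_RE = re.compile(r""".*__version__ = ["'](.*?)['"]""", re.S)

    for sample in ('__version__ = "1.3.0b2"', "__version__ = '1.3.0b2'"):
        match = VERSION_RE.match(sample)
        assert match is not None
        assert match.group(1) == "1.3.0b2"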
diff --git a/test/ext/declarative/test_reflection.py b/test/ext/declarative/test_reflection.py
index fd6a37867f..fef9d794c4 100644
--- a/test/ext/declarative/test_reflection.py
+++ b/test/ext/declarative/test_reflection.py
@@ -56,18 +56,12 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
             __tablename__ = 'users'
             __autoload__ = True
-            if testing.against('oracle', 'firebird'):
-                id = Column('id', Integer, primary_key=True,
-                            test_needs_autoincrement=True)
             addresses = relationship('Address', backref='user')
 
         class Address(Base, fixtures.ComparableEntity):
             __tablename__ = 'addresses'
             __autoload__ = True
-            if testing.against('oracle', 'firebird'):
-                id = Column('id', Integer, primary_key=True,
-                            test_needs_autoincrement=True)
 
         u1 = User(name='u1', addresses=[Address(email='one'),
                                         Address(email='two')])
@@ -87,9 +81,6 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
             __tablename__ = 'users'
             __autoload__ = True
-            if testing.against('oracle', 'firebird'):
-                id = Column('id', Integer, primary_key=True,
-                            test_needs_autoincrement=True)
             nom = Column('name', String(50), key='nom')
             addresses = relationship('Address', backref='user')
 
@@ -97,9 +88,6 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
             __tablename__ = 'addresses'
             __autoload__ = True
-            if testing.against('oracle', 'firebird'):
-                id = Column('id', Integer, primary_key=True,
-                            test_needs_autoincrement=True)
 
         u1 = User(nom='u1', addresses=[Address(email='one'),
                                        Address(email='two')])
@@ -120,18 +108,12 @@ class DeclarativeReflectionTest(DeclarativeReflectionBase):
             __tablename__ = 'imhandles'
             __autoload__ = True
-            if testing.against('oracle', 'firebird'):
-                id = Column('id', Integer, primary_key=True,
-                            test_needs_autoincrement=True)
             user_id = Column('user_id', Integer, ForeignKey('users.id'))
 
         class User(Base, fixtures.ComparableEntity):
             __tablename__ = 'users'
             __autoload__ = True
-            if testing.against('oracle', 'firebird'):
-                id = Column('id', Integer, primary_key=True,
-                            test_needs_autoincrement=True)
             handles = relationship('IMHandle', backref='user')
 
         u1 = User(name='u1', handles=[
diff --git a/test/orm/inheritance/test_magazine.py b/test/orm/inheritance/test_magazine.py
index 571fe431d7..27f5e71ce1 100644
--- a/test/orm/inheritance/test_magazine.py
+++ b/test/orm/inheritance/test_magazine.py
@@ -1,10 +1,17 @@
-from sqlalchemy import *
-from sqlalchemy.orm import *
-
-from sqlalchemy import testing
-from sqlalchemy.testing.util import function_named
+from sqlalchemy import CHAR
+from sqlalchemy import ForeignKey
+from sqlalchemy import Integer
+from sqlalchemy import String
+from sqlalchemy import Text
+from sqlalchemy.orm import backref
+from sqlalchemy.orm import create_session
+from sqlalchemy.orm import mapper
+from sqlalchemy.orm import polymorphic_union
+from sqlalchemy.orm import relationship
 from sqlalchemy.testing import fixtures
-from sqlalchemy.testing.schema import Table, Column
+from sqlalchemy.testing.schema import Column
+from sqlalchemy.testing.schema import Table
+from sqlalchemy.testing.util import function_named
 
 
 class BaseObject(object):
@@ -23,17 +30,22 @@ class Issue(BaseObject):
 
 class Location(BaseObject):
     def __repr__(self):
-        return "%s(%s, %s)" % (self.__class__.__name__,
-                               str(getattr(self, 'issue_id', None)),
-                               repr(str(self._name.name)))
+        return "%s(%s, %s)" % (
+            self.__class__.__name__,
+            str(getattr(self, "issue_id", None)),
+            repr(str(self._name.name)),
+        )
 
     def _get_name(self):
        return self._name
 
    def _set_name(self, name):
        session = create_session()
-        s = session.query(LocationName).filter(LocationName.name == name)\
+        s = (
+            session.query(LocationName)
+            .filter(LocationName.name == name)
            .first()
+        )
        session.expunge_all()
        if s is not None:
            self._name = s
@@ -62,14 +74,21 @@ class LocationName(BaseObject):
 
 class PageSize(BaseObject):
    def __repr__(self):
-        return "%s(%sx%s, %s)" % (self.__class__.__name__, self.width,
-                                  self.height, self.name)
+        return "%s(%sx%s, %s)" % (
+            self.__class__.__name__,
+            self.width,
+            self.height,
+            self.name,
+        )
 
 
 class Magazine(BaseObject):
    def __repr__(self):
-        return "%s(%s, %s)" % (self.__class__.__name__, repr(self.location),
-                               repr(self.size))
+        return "%s(%s, %s)" % (
+            self.__class__.__name__,
+            repr(self.location),
+            repr(self.size),
+        )
 
 
 class Page(BaseObject):
@@ -79,8 +98,11 @@ class MagazinePage(Page):
    def __repr__(self):
-        return "%s(%s, %s)" % (self.__class__.__name__, str(self.page_no),
-                               repr(self.magazine))
+        return "%s(%s, %s)" % (
+            self.__class__.__name__,
+            str(self.page_no),
+            repr(self.magazine),
+        )
 
 
 class ClassifiedPage(MagazinePage):
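The hunk that follows rewrites define_tables() to stop assigning module-level
globals; with fixtures.MappedTest, anything created against the supplied
metadata in define_tables() is reachable afterwards through the class-level
tables collection.  A condensed sketch of that pattern (class and test names
invented, not the real test):

    from sqlalchemy import Integer, String
    from sqlalchemy.testing import fixtures
    from sqlalchemy.testing.schema import Column, Table


    class MiniMagazineTest(fixtures.MappedTest):
        @classmethod
        def define_tables(cls, metadata):
            # registered on the fixture's metadata here ...
            Table(
                "publication",
                metadata,
                Column("id", Integer, primary_key=True),
                Column("name", String(45), default=""),
            )

        def test_lookup(self):
            # ... and looked up through cls.tables / self.tables instead of
            # module globals
            assert self.tables.publication.name == "publication"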
@@ -90,181 +112,255 @@ class MagazineTest(fixtures.MappedTest):
     @classmethod
     def define_tables(cls, metadata):
-        global publication_table, issue_table, location_table,\
-            location_name_table, magazine_table, page_table,\
-            magazine_page_table, classified_page_table, page_size_table
-
-        publication_table = Table('publication', metadata,
-                                  Column('id', Integer, primary_key=True,
-                                         test_needs_autoincrement=True),
-                                  Column('name', String(45), default=''))
-        issue_table = Table('issue', metadata,
-                            Column('id', Integer, primary_key=True,
-                                   test_needs_autoincrement=True),
-                            Column('publication_id', Integer,
-                                   ForeignKey('publication.id')),
-                            Column('issue', Integer))
-        location_table = Table(
-            'location', metadata,
-            Column('id', Integer, primary_key=True,
-                   test_needs_autoincrement=True),
-            Column('issue_id', Integer, ForeignKey('issue.id')),
-            Column('ref', CHAR(3), default=''),
-            Column('location_name_id', Integer,
-                   ForeignKey('location_name.id')))
-        location_name_table = Table('location_name', metadata,
-                                    Column('id', Integer, primary_key=True,
-                                           test_needs_autoincrement=True),
-                                    Column('name', String(45), default=''))
-        magazine_table = Table('magazine', metadata,
-                               Column('id', Integer, primary_key=True,
-                                      test_needs_autoincrement=True),
-                               Column('location_id', Integer,
-                                      ForeignKey('location.id')),
-                               Column('page_size_id', Integer,
-                                      ForeignKey('page_size.id')),)
-        page_table = Table('page', metadata,
-                           Column('id', Integer, primary_key=True,
-                                  test_needs_autoincrement=True),
-                           Column('page_no', Integer),
-                           Column('type', CHAR(1), default='p'))
-        magazine_page_table = Table('magazine_page', metadata,
-                                    Column('page_id', Integer,
-                                           ForeignKey('page.id'),
-                                           primary_key=True),
-                                    Column('magazine_id', Integer,
-                                           ForeignKey('magazine.id')),
-                                    Column('orders', Text, default=''))
-        classified_page_table = Table(
-            'classified_page', metadata,
-            Column('magazine_page_id', Integer,
-                   ForeignKey('magazine_page.page_id'),
-                   primary_key=True),
-            Column('titles', String(45),
-                   default=''),)
-        page_size_table = Table('page_size', metadata,
-                                Column('id', Integer, primary_key=True,
-                                       test_needs_autoincrement=True),
-                                Column('width', Integer),
-                                Column('height', Integer),
-                                Column('name', String(45), default=''))
+        Table(
+            "publication",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("name", String(45), default=""),
+        )
+        Table(
+            "issue",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("publication_id", Integer, ForeignKey("publication.id")),
+            Column("issue", Integer),
+        )
+        Table(
+            "location",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("issue_id", Integer, ForeignKey("issue.id")),
+            Column("ref", CHAR(3), default=""),
+            Column(
+                "location_name_id", Integer, ForeignKey("location_name.id")
+            ),
+        )
+        Table(
+            "location_name",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("name", String(45), default=""),
+        )
+        Table(
+            "magazine",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("location_id", Integer, ForeignKey("location.id")),
+            Column("page_size_id", Integer, ForeignKey("page_size.id")),
+        )
+        Table(
+            "page",
+            metadata,
+            Column(
+                "id", Integer, primary_key=True, test_needs_autoincrement=True
+            ),
+            Column("page_no", Integer),
+            Column("type", CHAR(1), default="p"),
+        )
+        Table(
+            "magazine_page",
+            metadata,
+            Column(
+                "page_id", Integer, ForeignKey("page.id"), primary_key=True
ForeignKey("page.id"), primary_key=True + ), + Column("magazine_id", Integer, ForeignKey("magazine.id")), + Column("orders", Text, default=""), + ) + Table( + "classified_page", + metadata, + Column( + "magazine_page_id", + Integer, + ForeignKey("magazine_page.page_id"), + primary_key=True, + ), + Column("titles", String(45), default=""), + ) + Table( + "page_size", + metadata, + Column( + "id", Integer, primary_key=True, test_needs_autoincrement=True + ), + Column("width", Integer), + Column("height", Integer), + Column("name", String(45), default=""), + ) def _generate_round_trip_test(use_unions=False, use_joins=False): def test_roundtrip(self): - publication_mapper = mapper(Publication, publication_table) + publication_mapper = mapper(Publication, self.tables.publication) issue_mapper = mapper( Issue, - issue_table, + self.tables.issue, properties={ - 'publication': relationship(Publication, - backref=backref( - 'issues', - cascade="all, delete-orphan")) - }) + "publication": relationship( + Publication, + backref=backref("issues", cascade="all, delete-orphan"), + ) + }, + ) - location_name_mapper = mapper(LocationName, location_name_table) + location_name_mapper = mapper(LocationName, self.tables.location_name) location_mapper = mapper( - Location, location_table, + Location, + self.tables.location, properties={ - 'issue': - relationship( + "issue": relationship( Issue, backref=backref( - 'locations', lazy='joined', - cascade="all, delete-orphan")), - '_name': relationship(LocationName), }) - - page_size_mapper = mapper(PageSize, page_size_table) - - magazine_mapper = mapper(Magazine, magazine_table, properties={ - 'location': relationship(Location, backref=backref('magazine', - uselist=False)), - 'size': relationship(PageSize), - }) + "locations", + lazy="joined", + cascade="all, delete-orphan", + ), + ), + "_name": relationship(LocationName), + }, + ) + + page_size_mapper = mapper(PageSize, self.tables.page_size) + + magazine_mapper = mapper( + Magazine, + self.tables.magazine, + properties={ + "location": relationship( + Location, backref=backref("magazine", uselist=False) + ), + "size": relationship(PageSize), + }, + ) if use_unions: - page_join = polymorphic_union({'m': page_table.join( - magazine_page_table), - 'c': page_table.join( - magazine_page_table).join( - classified_page_table), - 'p': page_table.select( - page_table.c.type == 'p'), }, - None, 'page_join') - page_mapper = mapper(Page, page_table, - with_polymorphic=('*', page_join), - polymorphic_on=page_join.c.type, - polymorphic_identity='p') + page_join = polymorphic_union( + { + "m": self.tables.page.join(self.tables.magazine_page), + "c": self.tables.page.join(self.tables.magazine_page).join( + self.tables.classified_page + ), + "p": self.tables.page.select( + self.tables.page.c.type == "p" + ), + }, + None, + "page_join", + ) + page_mapper = mapper( + Page, + self.tables.page, + with_polymorphic=("*", page_join), + polymorphic_on=page_join.c.type, + polymorphic_identity="p", + ) elif use_joins: - page_join = page_table.outerjoin( - magazine_page_table).outerjoin(classified_page_table) - page_mapper = mapper(Page, page_table, - with_polymorphic=('*', page_join), - polymorphic_on=page_table.c.type, - polymorphic_identity='p') + page_join = self.tables.page.outerjoin( + self.tables.magazine_page + ).outerjoin(self.tables.classified_page) + page_mapper = mapper( + Page, + self.tables.page, + with_polymorphic=("*", page_join), + polymorphic_on=self.tables.page.c.type, + polymorphic_identity="p", + ) else: page_mapper = 
-                Page, page_table, polymorphic_on=page_table.c.type,
-                polymorphic_identity='p')
+                Page,
+                self.tables.page,
+                polymorphic_on=self.tables.page.c.type,
+                polymorphic_identity="p",
+            )
 
         if use_unions:
-            magazine_join = polymorphic_union({'m': page_table.join(
-                magazine_page_table),
-                'c': page_table.join(
-                magazine_page_table).join(
-                classified_page_table), },
-                None, 'page_join')
+            magazine_join = polymorphic_union(
+                {
+                    "m": self.tables.page.join(self.tables.magazine_page),
+                    "c": self.tables.page.join(self.tables.magazine_page).join(
+                        self.tables.classified_page
+                    ),
+                },
+                None,
+                "page_join",
+            )
             magazine_page_mapper = mapper(
-                MagazinePage, magazine_page_table,
-                with_polymorphic=('*', magazine_join),
-                inherits=page_mapper, polymorphic_identity='m',
+                MagazinePage,
+                self.tables.magazine_page,
+                with_polymorphic=("*", magazine_join),
+                inherits=page_mapper,
+                polymorphic_identity="m",
                 properties={
-                    'magazine':
-                    relationship(
+                    "magazine": relationship(
                         Magazine,
                         backref=backref(
-                            'pages',
-                            order_by=magazine_join.c.page_no))})
+                            "pages", order_by=magazine_join.c.page_no
+                        ),
+                    )
+                },
+            )
         elif use_joins:
-            magazine_join = page_table.join(
-                magazine_page_table).outerjoin(classified_page_table)
+            magazine_join = self.tables.page.join(
+                self.tables.magazine_page
+            ).outerjoin(self.tables.classified_page)
             magazine_page_mapper = mapper(
-                MagazinePage, magazine_page_table,
-                with_polymorphic=('*', magazine_join),
-                inherits=page_mapper, polymorphic_identity='m',
+                MagazinePage,
+                self.tables.magazine_page,
+                with_polymorphic=("*", magazine_join),
+                inherits=page_mapper,
+                polymorphic_identity="m",
                 properties={
-                    'magazine':
-                    relationship(
+                    "magazine": relationship(
                         Magazine,
                         backref=backref(
-                            'pages', order_by=page_table.c.page_no))})
+                            "pages", order_by=self.tables.page.c.page_no
+                        ),
+                    )
+                },
+            )
         else:
             magazine_page_mapper = mapper(
-                MagazinePage, magazine_page_table, inherits=page_mapper,
-                polymorphic_identity='m',
+                MagazinePage,
+                self.tables.magazine_page,
+                inherits=page_mapper,
+                polymorphic_identity="m",
                 properties={
-                    'magazine':
-                    relationship(
+                    "magazine": relationship(
                         Magazine,
                         backref=backref(
-                            'pages', order_by=page_table.c.page_no))})
-
-        classified_page_mapper = mapper(ClassifiedPage,
-                                        classified_page_table,
-                                        inherits=magazine_page_mapper,
-                                        polymorphic_identity='c',
-                                        primary_key=[page_table.c.id])
+                            "pages", order_by=self.tables.page.c.page_no
+                        ),
+                    )
+                },
+            )
+
+        classified_page_mapper = mapper(
+            ClassifiedPage,
+            self.tables.classified_page,
+            inherits=magazine_page_mapper,
+            polymorphic_identity="c",
+            primary_key=[self.tables.page.c.id],
+        )
 
         session = create_session()
 
-        pub = Publication(name='Test')
+        pub = Publication(name="Test")
 
         issue = Issue(issue=46, publication=pub)
-        location = Location(ref='ABC', name='London', issue=issue)
+        location = Location(ref="ABC", name="London", issue=issue)
 
-        page_size = PageSize(name='A4', width=210, height=297)
+        page_size = PageSize(name="A4", width=210, height=297)
 
         magazine = Magazine(location=location, size=page_size)
 
@@ -283,14 +379,15 @@ def _generate_round_trip_test(use_unions=False, use_joins=False):
         print(p.issues[0].locations[0].magazine.pages)
         print([page, page2, page3])
 
-        assert repr(
-            p.issues[0].locations[0].magazine.pages) == repr(
-            [page, page2, page3]), repr(
-            p.issues[0].locations[0].magazine.pages)
+        assert repr(p.issues[0].locations[0].magazine.pages) == repr(
+            [page, page2, page3]
+        ), repr(p.issues[0].locations[0].magazine.pages)
 
     test_roundtrip = function_named(
test_roundtrip, "test_%s" % - (not use_union and(use_joins and "joins" or "select") or "unions")) + test_roundtrip, + "test_%s" + % (not use_union and (use_joins and "joins" or "select") or "unions"), + ) setattr(MagazineTest, test_roundtrip.__name__, test_roundtrip) diff --git a/test/orm/test_attributes.py b/test/orm/test_attributes.py index f8d453255e..a830b39b81 100644 --- a/test/orm/test_attributes.py +++ b/test/orm/test_attributes.py @@ -1983,13 +1983,6 @@ class HistoryTest(fixtures.TestBase): self._commit_someattr(f) eq_(self._someattr_history(f), ((), ['old'], ())) - def test_scalar_set_None(self): - Foo = self._fixture(uselist=False, useobject=False, - active_history=False) - f = Foo() - f.someattr = None - eq_(self._someattr_history(f), ([None], (), ())) - def test_scalar_set_None_from_dict_set(self): Foo = self._fixture(uselist=False, useobject=False, active_history=False) diff --git a/test/orm/test_deprecations.py b/test/orm/test_deprecations.py index 08f21ba6f1..e916e8985b 100644 --- a/test/orm/test_deprecations.py +++ b/test/orm/test_deprecations.py @@ -4,6 +4,13 @@ Collects specimens of old ORM code and explicitly covers the recommended modern (i.e. not deprecated) alternative to them. The tests snippets here can be migrated directly to the wiki, docs, etc. +.. deprecated:: + + This test suite is interested in extremely old (pre 0.5) patterns + and in modern use illustrates trivial use cases that don't need + an additional test suite. + + """ from sqlalchemy import Integer, String, ForeignKey, func, text from sqlalchemy.testing.schema import Table diff --git a/test/orm/test_scoping.py b/test/orm/test_scoping.py index 025ad4daaf..507d98f82c 100644 --- a/test/orm/test_scoping.py +++ b/test/orm/test_scoping.py @@ -10,22 +10,6 @@ from sqlalchemy.testing import fixtures from sqlalchemy.testing.mock import Mock -class _ScopedTest(fixtures.MappedTest): - """Adds another lookup bucket to emulate Session globals.""" - - run_setup_mappers = 'once' - - @classmethod - def setup_class(cls): - cls.scoping = _base.adict() - super(_ScopedTest, cls).setup_class() - - @classmethod - def teardown_class(cls): - cls.scoping.clear() - super(_ScopedTest, cls).teardown_class() - - class ScopedSessionTest(fixtures.MappedTest): @classmethod diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index 94b62eb5bf..663b86bfd6 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -3,7 +3,6 @@ from sqlalchemy.testing import eq_, assert_raises, assert_raises_message import datetime -from sqlalchemy.orm import mapper as orm_mapper from sqlalchemy.util import OrderedDict import sqlalchemy as sa @@ -2346,7 +2345,7 @@ class BooleanColTest(fixtures.MappedTest): # use the regular mapper class T(fixtures.ComparableEntity): pass - orm_mapper(T, t1_t) + mapper(T, t1_t) sess = create_session() t1 = T(value=True, name="t1") @@ -2641,8 +2640,8 @@ class TransactionTest(fixtures.MappedTest): cls.tables.t2, cls.tables.t1) - orm_mapper(T1, t1) - orm_mapper(T2, t2) + mapper(T1, t1) + mapper(T2, t2) def test_close_transaction_on_commit_fail(self): T2, t1 = self.classes.T2, self.tables.t1 @@ -2686,7 +2685,7 @@ class PartialNullPKTest(fixtures.MappedTest): @classmethod def setup_mappers(cls): - orm_mapper(cls.classes.T1, cls.tables.t1) + mapper(cls.classes.T1, cls.tables.t1) def test_key_switch(self): T1 = self.classes.T1 @@ -2801,8 +2800,8 @@ class EnsurePKSortableTest(fixtures.MappedTest): @classmethod def setup_mappers(cls): - orm_mapper(cls.classes.T1, cls.tables.t1) - 
-        orm_mapper(cls.classes.T2, cls.tables.t2)
+        mapper(cls.classes.T1, cls.tables.t1)
+        mapper(cls.classes.T2, cls.tables.t2)
 
     def test_exception_persistent_flush_py3k(self):
         s = Session()
diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py
index ffc72e9eee..6fba7519c6 100644
--- a/test/sql/test_functions.py
+++ b/test/sql/test_functions.py
@@ -800,6 +800,7 @@ class ExecuteTest(fixtures.TestBase):
             ret.close()
 
     @engines.close_first
+    @testing.provide_metadata
     def test_update(self):
         """
         Tests sending functions and SQL expressions to the VALUES and SET
@@ -807,7 +808,7 @@ class ExecuteTest(fixtures.TestBase):
         get overridden.
 
         """
-        meta = MetaData(testing.db)
+        meta = self.metadata
         t = Table('t1', meta,
                   Column('id', Integer, Sequence('t1idseq', optional=True),
                          primary_key=True),
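The @testing.provide_metadata decorator added above is what allows the
try/finally block in the next hunk to go away: the decorator supplies a
self.metadata bound to the test database and cleans up whatever was created on
it when the test ends, so the explicit meta.drop_all() is no longer needed.  A
schematic of the shape (condensed, not the real test body):

    from sqlalchemy import Column, Integer, Table
    from sqlalchemy import testing
    from sqlalchemy.testing import fixtures


    class ExampleTest(fixtures.TestBase):
        @testing.provide_metadata
        def test_roundtrip(self):
            # self.metadata comes from the decorator; tables created on it
            # are dropped at teardown, replacing the old
            # try: ... finally: meta.drop_all() pattern
            Table("t", self.metadata, Column("id", Integer, primary_key=True))
            self.metadata.create_all()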
@@ -820,51 +821,47 @@ class ExecuteTest(fixtures.TestBase):
                   Column('stuff', String(20), onupdate="thisisstuff")
                   )
         meta.create_all()
-        try:
-            t.insert(values=dict(value=func.length("one"))).execute()
-            assert t.select().execute().first()['value'] == 3
-            t.update(values=dict(value=func.length("asfda"))).execute()
-            assert t.select().execute().first()['value'] == 5
-
-            r = t.insert(values=dict(value=func.length("sfsaafsda"))).execute()
-            id = r.inserted_primary_key[0]
-            assert t.select(t.c.id == id).execute().first()['value'] == 9
-            t.update(values={t.c.value: func.length("asdf")}).execute()
-            assert t.select().execute().first()['value'] == 4
-            t2.insert().execute()
-            t2.insert(values=dict(value=func.length("one"))).execute()
-            t2.insert(values=dict(value=func.length("asfda") + -19)).\
-                execute(stuff="hi")
-
-            res = exec_sorted(select([t2.c.value, t2.c.stuff]))
-            eq_(res, [(-14, 'hi'), (3, None), (7, None)])
-
-            t2.update(values=dict(value=func.length("asdsafasd"))).\
-                execute(stuff="some stuff")
-            assert select([t2.c.value, t2.c.stuff]).execute().fetchall() == \
-                [(9, "some stuff"), (9, "some stuff"),
-                 (9, "some stuff")]
-
-            t2.delete().execute()
-
-            t2.insert(values=dict(value=func.length("one") + 8)).execute()
-            assert t2.select().execute().first()['value'] == 11
-
-            t2.update(values=dict(value=func.length("asfda"))).execute()
-            eq_(
-                select([t2.c.value, t2.c.stuff]).execute().first(),
-                (5, "thisisstuff")
+        t.insert(values=dict(value=func.length("one"))).execute()
+        assert t.select().execute().first()['value'] == 3
+        t.update(values=dict(value=func.length("asfda"))).execute()
+        assert t.select().execute().first()['value'] == 5
+
+        r = t.insert(values=dict(value=func.length("sfsaafsda"))).execute()
+        id = r.inserted_primary_key[0]
+        assert t.select(t.c.id == id).execute().first()['value'] == 9
+        t.update(values={t.c.value: func.length("asdf")}).execute()
+        assert t.select().execute().first()['value'] == 4
+        t2.insert().execute()
+        t2.insert(values=dict(value=func.length("one"))).execute()
+        t2.insert(values=dict(value=func.length("asfda") + -19)).\
+            execute(stuff="hi")
+
+        res = exec_sorted(select([t2.c.value, t2.c.stuff]))
+        eq_(res, [(-14, 'hi'), (3, None), (7, None)])
+
+        t2.update(values=dict(value=func.length("asdsafasd"))).\
+            execute(stuff="some stuff")
+        assert select([t2.c.value, t2.c.stuff]).execute().fetchall() == \
+            [(9, "some stuff"), (9, "some stuff"),
+             (9, "some stuff")]
+
+        t2.delete().execute()
+
+        t2.insert(values=dict(value=func.length("one") + 8)).execute()
+        assert t2.select().execute().first()['value'] == 11
+
+        t2.update(values=dict(value=func.length("asfda"))).execute()
+        eq_(
+            select([t2.c.value, t2.c.stuff]).execute().first(),
+            (5, "thisisstuff")
+        )
+
+        t2.update(values={t2.c.value: func.length("asfdaasdf"),
+                          t2.c.stuff: "foo"}).execute()
+        eq_(select([t2.c.value, t2.c.stuff]).execute().first(),
+            (9, "foo")
             )
-            t2.update(values={t2.c.value: func.length("asfdaasdf"),
-                              t2.c.stuff: "foo"}).execute()
-            print("HI", select([t2.c.value, t2.c.stuff]).execute().first())
-            eq_(select([t2.c.value, t2.c.stuff]).execute().first(),
-                (9, "foo")
-                )
-        finally:
-            meta.drop_all()
-
 
     @testing.fails_on_everything_except('postgresql')
     def test_as_from(self):
         # TODO: shouldn't this work on oracle too ?
diff --git a/test/sql/test_types.py b/test/sql/test_types.py
index b379ebdec0..7b464f8c0b 100644
--- a/test/sql/test_types.py
+++ b/test/sql/test_types.py
@@ -2924,15 +2924,6 @@ class BooleanTest(
             proc, "foo"
         )
 
-    def test_literal_processor_coercion_native_int_out_of_range(self):
-        proc = Boolean().literal_processor(
-            default.DefaultDialect(supports_native_boolean=True))
-        assert_raises_message(
-            ValueError,
-            "Value 15 is not None, True, or False",
-            proc, 15
-        )
-
 
 class PickleTest(fixtures.TestBase):