From 52a5ec18af4d5c3f9a95ee23d28c7292f66d1b9c Mon Sep 17 00:00:00 2001 From: Mike Bayer Date: Mon, 13 Jul 2009 00:46:44 +0000 Subject: [PATCH] renamed "postgres" to "postgresql", "Postgres" to "PostgreSQL". since that's been the thing's name since 1996 or so. backwards compatibility with the old name should be more or less complete. --- 06CHANGES | 27 +++- CHANGES | 12 +- README.unittests | 12 +- doc/build/dbengine.rst | 22 +-- doc/build/mappers.rst | 2 +- doc/build/metadata.rst | 4 +- doc/build/ormtutorial.rst | 2 +- doc/build/reference/dialects/index.rst | 2 +- doc/build/reference/dialects/postgres.rst | 6 +- doc/build/reference/sqlalchemy/pooling.rst | 2 +- doc/build/reference/sqlalchemy/types.rst | 2 +- doc/build/session.rst | 14 +- doc/build/testdocs.py | 2 +- examples/postgis/postgis.py | 2 +- lib/sqlalchemy/databases/__init__.py | 5 +- lib/sqlalchemy/dialects/__init__.py | 2 +- lib/sqlalchemy/dialects/postgres.py | 9 ++ lib/sqlalchemy/dialects/postgres/__init__.py | 3 - .../dialects/postgresql/__init__.py | 3 + .../dialects/{postgres => postgresql}/base.py | 40 ++++-- .../{postgres => postgresql}/pg8000.py | 18 +-- .../{postgres => postgresql}/psycopg2.py | 16 +-- .../{postgres => postgresql}/pypostgresql.py | 16 +-- .../{postgres => postgresql}/zxjdbc.py | 10 +- .../dialects/type_migration_guidelines.txt | 6 +- lib/sqlalchemy/engine/__init__.py | 4 +- lib/sqlalchemy/engine/base.py | 2 +- lib/sqlalchemy/engine/url.py | 2 +- lib/sqlalchemy/ext/compiler.py | 4 +- lib/sqlalchemy/ext/declarative.py | 2 +- lib/sqlalchemy/orm/__init__.py | 2 +- lib/sqlalchemy/orm/session.py | 6 +- lib/sqlalchemy/schema.py | 8 +- lib/sqlalchemy/test/config.py | 7 +- lib/sqlalchemy/test/requires.py | 4 +- lib/sqlalchemy/test/testing.py | 6 +- test/aaa_profiling/test_zoomark.py | 4 +- test/aaa_profiling/test_zoomark_orm.py | 4 +- .../{test_postgres.py => test_postgresql.py} | 130 +++++++++++------- test/engine/test_ddlevents.py | 6 +- test/engine/test_execute.py | 6 +- test/engine/test_parseconnect.py | 28 ++-- test/engine/test_reconnect.py | 2 +- test/engine/test_reflection.py | 6 +- test/engine/test_transaction.py | 6 +- test/ext/test_compiler.py | 6 +- test/orm/test_mapper.py | 2 +- test/orm/test_query.py | 4 +- test/orm/test_relationships.py | 2 +- test/orm/test_unitofwork.py | 6 +- test/sql/test_constraints.py | 4 +- test/sql/test_defaults.py | 12 +- test/sql/test_functions.py | 6 +- test/sql/test_query.py | 10 +- test/sql/test_quote.py | 2 +- test/sql/test_select.py | 16 +-- test/sql/test_types.py | 4 +- test/sql/test_unicode.py | 2 +- 58 files changed, 311 insertions(+), 243 deletions(-) create mode 100644 lib/sqlalchemy/dialects/postgres.py delete mode 100644 lib/sqlalchemy/dialects/postgres/__init__.py create mode 100644 lib/sqlalchemy/dialects/postgresql/__init__.py rename lib/sqlalchemy/dialects/{postgres => postgresql}/base.py (95%) rename lib/sqlalchemy/dialects/{postgres => postgresql}/pg8000.py (76%) rename lib/sqlalchemy/dialects/{postgres => postgresql}/psycopg2.py (91%) rename lib/sqlalchemy/dialects/{postgres => postgresql}/pypostgresql.py (77%) rename lib/sqlalchemy/dialects/{postgres => postgresql}/zxjdbc.py (56%) rename test/dialect/{test_postgres.py => test_postgresql.py} (89%) diff --git a/06CHANGES b/06CHANGES index 413d2972a1..8cadbcb02c 100644 --- a/06CHANGES +++ b/06CHANGES @@ -79,7 +79,28 @@ and "operators" dictionaries in compiler subclasses with straightforward visitor methods, and also allows compiler subclasses complete control over rendering, as the full _Function or 
_BinaryExpression object is passed in. + +- postgresql + - the "postgres" dialect is now named "postgresql" ! Connection strings look + like: + + postgresql://scott:tiger@localhost/test + postgresql+pg8000://scott:tiger@localhost/test + The "postgres" name remains for backwards compatiblity in the following ways: + + - There is a "postgres.py" dummy dialect which allows old URLs to work, + i.e. postgres://scott:tiger@localhost/test + + - The "postgres" name can be imported from the old "databases" module, + i.e. "from sqlalchemy.databases import postgres" as well as "dialects", + "from sqlalchemy.dialects.postgres import base as pg", will send + a deprecation warning. + + - Special expression arguments are now named "postgresql_returning" + and "postgresql_where", but the older "postgres_returning" and + "postgres_where" names still work with a deprecation warning. + - mysql - all the _detect_XXX() functions now run once underneath dialect.initialize() @@ -104,9 +125,9 @@ SQLAlchemy operations. - new dialects - - postgres+pg8000 - - postgres+pypostgresql (partial) - - postgres+zxjdbc + - postgresql+pg8000 + - postgresql+pypostgresql (partial) + - postgresql+zxjdbc - mysql+pyodbc - mysql+zxjdbc diff --git a/CHANGES b/CHANGES index ffe4c41fc3..97bc08b128 100644 --- a/CHANGES +++ b/CHANGES @@ -106,10 +106,10 @@ CHANGES - Repaired the printing of SQL exceptions which are not based on parameters or are not executemany() style. -- postgres +- postgresql - Deprecated the hardcoded TIMESTAMP function, which when used as func.TIMESTAMP(value) would render "TIMESTAMP value". - This breaks on some platforms as Postgres doesn't allow + This breaks on some platforms as PostgreSQL doesn't allow bind parameters to be used in this context. The hard-coded uppercase is also inappropriate and there's lots of other PG casts that we'd need to support. So instead, use @@ -431,7 +431,7 @@ CHANGES fail on recent versions of pysqlite which raise an error when fetchone() called with no rows present. -- postgres +- postgresql - Index reflection won't fail when an index with multiple expressions is encountered. @@ -602,7 +602,7 @@ CHANGES - sql - Improved the methodology to handling percent signs in column names from [ticket:1256]. Added more tests. MySQL and - Postgres dialects still do not issue correct CREATE TABLE + PostgreSQL dialects still do not issue correct CREATE TABLE statements for identifiers with percent signs in them. - schema @@ -985,7 +985,7 @@ CHANGES - Calling alias.execute() in conjunction with server_side_cursors won't raise AttributeError. - - Added Index reflection support to Postgres, using a great + - Added Index reflection support to PostgreSQL, using a great patch we long neglected, submitted by Ken Kuhlman. [ticket:714] @@ -1606,7 +1606,7 @@ CHANGES - simple label names in ORDER BY expressions render as themselves, and not as a re-statement of their corresponding expression. This feature is currently enabled only for - SQLite, MySQL, and Postgres. It can be enabled on other + SQLite, MySQL, and PostgreSQL. It can be enabled on other dialects as each is shown to support this behavior. [ticket:1068] diff --git a/README.unittests b/README.unittests index 05bfa4aa23..ca16788d9b 100644 --- a/README.unittests +++ b/README.unittests @@ -76,7 +76,7 @@ DATABASE TARGETS Tests will target an in-memory SQLite database by default. 
To test against another database, use the --dburi option with any standard SQLAlchemy URL: - --dburi=postgres://user:password@localhost/test + --dburi=postgresql://user:password@localhost/test Use an empty database and a database user with general DBA privileges. The test suite will be creating and dropping many tables and other DDL, and @@ -89,19 +89,19 @@ typing. The --dbs option lists the built-in aliases and their matching URLs: Available --db options (use --dburi to override) mysql mysql://scott:tiger@127.0.0.1:3306/test oracle oracle://scott:tiger@127.0.0.1:1521 - postgres postgres://scott:tiger@127.0.0.1:5432/test + postgresql postgresql://scott:tiger@127.0.0.1:5432/test [...] To run tests against an aliased database: - $ nosetests --db=postgres + $ nosetests --db=postgresql To customize the URLs with your own users or hostnames, make a simple .ini file called `test.cfg` at the top level of the SQLAlchemy source distribution or a `.satest.cfg` in your home directory: [db] - postgres=postgres://myuser:mypass@localhost/mydb + postgresql=postgresql://myuser:mypass@localhost/mydb Your custom entries will override the defaults and you'll see them reflected in the output of --dbs. @@ -168,10 +168,10 @@ IRC! TIPS ---- -Postgres: The tests require an 'alt_schema' and 'alt_schema_2' to be present in +PostgreSQL: The tests require an 'alt_schema' and 'alt_schema_2' to be present in the testing database. -Postgres: When running the tests on postgres, postgres can get slower and +PostgreSQL: When running the tests on postgresql, postgresql can get slower and slower each time you run the tests. This seems to be related to the constant creation/dropping of tables. Running a "VACUUM FULL" on the database will speed it up again. diff --git a/doc/build/dbengine.rst b/doc/build/dbengine.rst index 362526943d..30b3a20da1 100644 --- a/doc/build/dbengine.rst +++ b/doc/build/dbengine.rst @@ -19,9 +19,9 @@ Where above, a :class:`~sqlalchemy.engine.Engine` references both a :class:`~sq Creating an engine is just a matter of issuing a single call, :func:`create_engine()`:: - engine = create_engine('postgres://scott:tiger@localhost:5432/mydatabase') + engine = create_engine('postgresql://scott:tiger@localhost:5432/mydatabase') -The above engine invokes the ``postgres`` dialect and a connection pool which references ``localhost:5432``. +The above engine invokes the ``postgresql`` dialect and a connection pool which references ``localhost:5432``. The engine can be used directly to issue SQL to the database. The most generic way is to use connections, which you get via the ``connect()`` method:: @@ -52,11 +52,11 @@ The ``Engine`` and ``Connection`` can do a lot more than what we illustrated abo Supported Databases ==================== -Recall that the ``Dialect`` is used to describe how to talk to a specific kind of database. Dialects are included with SQLAlchemy for SQLite, Postgres, MySQL, MS-SQL, Firebird, Informix, and Oracle; these can each be seen as a Python module present in the :mod:``~sqlalchemy.databases`` package. Each dialect requires the appropriate DBAPI drivers to be installed separately. +Recall that the ``Dialect`` is used to describe how to talk to a specific kind of database. Dialects are included with SQLAlchemy for SQLite, PostgreSQL, MySQL, MS-SQL, Firebird, Informix, and Oracle; these can each be seen as a Python module present in the :mod:``~sqlalchemy.databases`` package. Each dialect requires the appropriate DBAPI drivers to be installed separately. 
Downloads for each DBAPI at the time of this writing are as follows: -* Postgres: `psycopg2 `_ +* PostgreSQL: `psycopg2 `_ * SQLite: `sqlite3 `_ (included in Python 2.5 or greater) `pysqlite `_ * MySQL: `MySQLDB `_ * Oracle: `cx_Oracle `_ @@ -76,12 +76,12 @@ SQLAlchemy indicates the source of an Engine strictly via `RFC-1738 :@/" + ) + +from sqlalchemy.dialects.postgresql import * \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/postgres/__init__.py b/lib/sqlalchemy/dialects/postgres/__init__.py deleted file mode 100644 index c9ac0e1e5a..0000000000 --- a/lib/sqlalchemy/dialects/postgres/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from sqlalchemy.dialects.postgres import base, psycopg2 - -base.dialect = psycopg2.dialect \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/postgresql/__init__.py b/lib/sqlalchemy/dialects/postgresql/__init__.py new file mode 100644 index 0000000000..77eca4aee5 --- /dev/null +++ b/lib/sqlalchemy/dialects/postgresql/__init__.py @@ -0,0 +1,3 @@ +from sqlalchemy.dialects.postgresql import base, psycopg2 + +base.dialect = psycopg2.dialect \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/postgres/base.py b/lib/sqlalchemy/dialects/postgresql/base.py similarity index 95% rename from lib/sqlalchemy/dialects/postgres/base.py rename to lib/sqlalchemy/dialects/postgresql/base.py index dd0bd80495..1aa96e8524 100644 --- a/lib/sqlalchemy/dialects/postgres/base.py +++ b/lib/sqlalchemy/dialects/postgresql/base.py @@ -1,4 +1,4 @@ -# postgres.py +# postgresql.py # Copyright (C) 2005, 2006, 2007, 2008, 2009 Michael Bayer mike_mp@zzzcomputing.com # # This module is part of SQLAlchemy and is released under @@ -12,7 +12,7 @@ regarding that driver. Sequences/SERIAL ---------------- -Postgres supports sequences, and SQLAlchemy uses these as the default means of creating +PostgreSQL supports sequences, and SQLAlchemy uses these as the default means of creating new primary key values for integer-based primary key columns. When creating tables, SQLAlchemy will issue the ``SERIAL`` datatype for integer-based primary key columns, which generates a sequence corresponding to the column and associated with it based on @@ -32,7 +32,7 @@ that when an :func:`~sqlalchemy.sql.expression.insert()` construct is executed u "executemany" semantics, the sequence is not pre-executed and normal PG SERIAL behavior is used. -Postgres 8.3 supports an ``INSERT...RETURNING`` syntax which SQLAlchemy supports +PostgreSQL 8.3 supports an ``INSERT...RETURNING`` syntax which SQLAlchemy supports as well. A future release of SQLA will use this feature by default in lieu of sequence pre-execution in order to retrieve new primary key values, when available. @@ -43,22 +43,22 @@ The dialect supports PG 8.3's ``INSERT..RETURNING`` and ``UPDATE..RETURNING`` sy but must be explicitly enabled on a per-statement basis:: # INSERT..RETURNING - result = table.insert(postgres_returning=[table.c.col1, table.c.col2]).\\ + result = table.insert(postgresql_returning=[table.c.col1, table.c.col2]).\\ values(name='foo') print result.fetchall() # UPDATE..RETURNING - result = table.update(postgres_returning=[table.c.col1, table.c.col2]).\\ + result = table.update(postgresql_returning=[table.c.col1, table.c.col2]).\\ where(table.c.name=='foo').values(name='bar') print result.fetchall() Indexes ------- -PostgreSQL supports partial indexes. To create them pass a postgres_where +PostgreSQL supports partial indexes. 
To create them pass a postgresql_where option to the Index constructor:: - Index('my_index', my_table.c.id, postgres_where=tbl.c.value > 10) + Index('my_index', my_table.c.id, postgresql_where=tbl.c.value > 10) @@ -225,7 +225,7 @@ class PGCompiler(compiler.SQLCompiler): def post_process_text(self, text): if '%%' in text: - util.warn("The SQLAlchemy postgres dialect now automatically escapes '%' in text() expressions to '%%'.") + util.warn("The SQLAlchemy postgresql dialect now automatically escapes '%' in text() expressions to '%%'.") return text.replace('%', '%%') def visit_sequence(self, seq): @@ -264,7 +264,12 @@ class PGCompiler(compiler.SQLCompiler): return super(PGCompiler, self).for_update_clause(select) def _append_returning(self, text, stmt): - returning_cols = stmt.kwargs['postgres_returning'] + try: + returning_cols = stmt.kwargs['postgresql_returning'] + except KeyError: + returning_cols = stmt.kwargs['postgres_returning'] + util.warn_deprecated("The 'postgres_returning' argument has been renamed 'postgresql_returning'") + def flatten_columnlist(collist): for c in collist: if isinstance(c, expression.Selectable): @@ -278,14 +283,14 @@ class PGCompiler(compiler.SQLCompiler): def visit_update(self, update_stmt): text = super(PGCompiler, self).visit_update(update_stmt) - if 'postgres_returning' in update_stmt.kwargs: + if 'postgresql_returning' in update_stmt.kwargs or 'postgres_returning' in update_stmt.kwargs: return self._append_returning(text, update_stmt) else: return text def visit_insert(self, insert_stmt): text = super(PGCompiler, self).visit_insert(insert_stmt) - if 'postgres_returning' in insert_stmt.kwargs: + if 'postgresql_returning' in insert_stmt.kwargs or 'postgres_returning' in insert_stmt.kwargs: return self._append_returning(text, insert_stmt) else: return text @@ -334,8 +339,15 @@ class PGDDLCompiler(compiler.DDLCompiler): % (preparer.quote(self._validate_identifier(index.name, True), index.quote), preparer.format_table(index.table), ', '.join([preparer.format_column(c) for c in index.columns])) - - whereclause = index.kwargs.get('postgres_where', None) + + if "postgres_where" in index.kwargs: + whereclause = index.kwargs['postgres_where'] + util.warn_deprecated("The 'postgres_where' argument has been renamed to 'postgresql_where'.") + elif 'postgresql_where' in index.kwargs: + whereclause = index.kwargs['postgresql_where'] + else: + whereclause = None + if whereclause is not None: compiler = self._compile(whereclause, None) # this might belong to the compiler class @@ -451,7 +463,7 @@ class PGInspector(reflection.Inspector): class PGDialect(default.DefaultDialect): - name = 'postgres' + name = 'postgresql' supports_alter = True max_identifier_length = 63 supports_sane_rowcount = True diff --git a/lib/sqlalchemy/dialects/postgres/pg8000.py b/lib/sqlalchemy/dialects/postgresql/pg8000.py similarity index 76% rename from lib/sqlalchemy/dialects/postgres/pg8000.py rename to lib/sqlalchemy/dialects/postgresql/pg8000.py index 0dd166a9d8..0c21467966 100644 --- a/lib/sqlalchemy/dialects/postgres/pg8000.py +++ b/lib/sqlalchemy/dialects/postgresql/pg8000.py @@ -3,12 +3,12 @@ Connecting ---------- -URLs are of the form `postgres+pg8000://user@password@host:port/dbname[?key=value&key=value...]`. +URLs are of the form `postgresql+pg8000://user@password@host:port/dbname[?key=value&key=value...]`. 
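As a brief, hedged illustration of the renamed URL scheme described in this patch (the credentials, host and ``test`` database below are placeholders)::

    from sqlalchemy import create_engine

    # canonical dialect name with the pg8000 driver selected explicitly
    engine = create_engine('postgresql+pg8000://scott:tiger@localhost/test')

    # the bare dialect name defaults to the psycopg2 driver
    engine = create_engine('postgresql://scott:tiger@localhost/test')

    # the old scheme still resolves through the dummy "postgres" dialect module
    engine = create_engine('postgres://scott:tiger@localhost/test')

Each call assumes the corresponding DBAPI is installed; no connection is opened until the engine is first used.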
Unicode ------- -pg8000 requires that the postgres client encoding be configured in the postgresql.conf file +pg8000 requires that the postgresql client encoding be configured in the postgresql.conf file in order to use encodings other than ascii. Set this value to the same value as the "encoding" parameter on create_engine(), usually "utf-8". @@ -22,7 +22,7 @@ from sqlalchemy.engine import default import decimal from sqlalchemy import util from sqlalchemy import types as sqltypes -from sqlalchemy.dialects.postgres.base import PGDialect, PGCompiler +from sqlalchemy.dialects.postgresql.base import PGDialect, PGCompiler class _PGNumeric(sqltypes.Numeric): def bind_processor(self, dialect): @@ -39,15 +39,15 @@ class _PGNumeric(sqltypes.Numeric): return value return process -class Postgres_pg8000ExecutionContext(default.DefaultExecutionContext): +class PostgreSQL_pg8000ExecutionContext(default.DefaultExecutionContext): pass -class Postgres_pg8000Compiler(PGCompiler): +class PostgreSQL_pg8000Compiler(PGCompiler): def visit_mod(self, binary, **kw): return self.process(binary.left) + " %% " + self.process(binary.right) -class Postgres_pg8000(PGDialect): +class PostgreSQL_pg8000(PGDialect): driver = 'pg8000' supports_unicode_statements = True @@ -56,8 +56,8 @@ class Postgres_pg8000(PGDialect): default_paramstyle = 'format' supports_sane_multi_rowcount = False - execution_ctx_cls = Postgres_pg8000ExecutionContext - statement_compiler = Postgres_pg8000Compiler + execution_ctx_cls = PostgreSQL_pg8000ExecutionContext + statement_compiler = PostgreSQL_pg8000Compiler colspecs = util.update_copy( PGDialect.colspecs, @@ -81,4 +81,4 @@ class Postgres_pg8000(PGDialect): def is_disconnect(self, e): return "connection is closed" in str(e) -dialect = Postgres_pg8000 +dialect = PostgreSQL_pg8000 diff --git a/lib/sqlalchemy/dialects/postgres/psycopg2.py b/lib/sqlalchemy/dialects/postgresql/psycopg2.py similarity index 91% rename from lib/sqlalchemy/dialects/postgres/psycopg2.py rename to lib/sqlalchemy/dialects/postgresql/psycopg2.py index 9f5ea56868..a428878ae0 100644 --- a/lib/sqlalchemy/dialects/postgres/psycopg2.py +++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py @@ -12,7 +12,7 @@ Note that psycopg1 is **not** supported. Connecting ---------- -URLs are of the form `postgres+psycopg2://user@password@host:port/dbname[?key=value&key=value...]`. +URLs are of the form `postgresql+psycopg2://user@password@host:port/dbname[?key=value&key=value...]`. 
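A minimal, hedged sketch of connecting with this driver; it assumes the ``server_side_cursors`` flag wired into this dialect's execution context, and uses placeholder credentials::

    from sqlalchemy import create_engine

    # ask the psycopg2 dialect to use PG "server side" cursors for SELECT statements
    engine = create_engine(
        'postgresql+psycopg2://scott:tiger@localhost/test',
        server_side_cursors=True)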
psycopg2-specific keyword arguments which are accepted by :func:`~sqlalchemy.create_engine()` are: @@ -42,7 +42,7 @@ from sqlalchemy.engine import base, default from sqlalchemy.sql import expression from sqlalchemy.sql import operators as sql_operators from sqlalchemy import types as sqltypes -from sqlalchemy.dialects.postgres.base import PGDialect, PGCompiler +from sqlalchemy.dialects.postgresql.base import PGDialect, PGCompiler class _PGNumeric(sqltypes.Numeric): def bind_processor(self, dialect): @@ -65,7 +65,7 @@ SERVER_SIDE_CURSOR_RE = re.compile( r'\s*SELECT', re.I | re.UNICODE) -class Postgres_psycopg2ExecutionContext(default.DefaultExecutionContext): +class PostgreSQL_psycopg2ExecutionContext(default.DefaultExecutionContext): def create_cursor(self): # TODO: coverage for server side cursors + select.for_update() is_server_side = \ @@ -93,20 +93,20 @@ class Postgres_psycopg2ExecutionContext(default.DefaultExecutionContext): else: return base.ResultProxy(self) -class Postgres_psycopg2Compiler(PGCompiler): +class PostgreSQL_psycopg2Compiler(PGCompiler): def visit_mod(self, binary, **kw): return self.process(binary.left) + " %% " + self.process(binary.right) def post_process_text(self, text): return text.replace('%', '%%') -class Postgres_psycopg2(PGDialect): +class PostgreSQL_psycopg2(PGDialect): driver = 'psycopg2' supports_unicode_statements = False default_paramstyle = 'pyformat' supports_sane_multi_rowcount = False - execution_ctx_cls = Postgres_psycopg2ExecutionContext - statement_compiler = Postgres_psycopg2Compiler + execution_ctx_cls = PostgreSQL_psycopg2ExecutionContext + statement_compiler = PostgreSQL_psycopg2Compiler colspecs = util.update_copy( PGDialect.colspecs, @@ -143,5 +143,5 @@ class Postgres_psycopg2(PGDialect): else: return False -dialect = Postgres_psycopg2 +dialect = PostgreSQL_psycopg2 diff --git a/lib/sqlalchemy/dialects/postgres/pypostgresql.py b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py similarity index 77% rename from lib/sqlalchemy/dialects/postgres/pypostgresql.py rename to lib/sqlalchemy/dialects/postgresql/pypostgresql.py index b032aa6a6a..975006d927 100644 --- a/lib/sqlalchemy/dialects/postgres/pypostgresql.py +++ b/lib/sqlalchemy/dialects/postgresql/pypostgresql.py @@ -3,7 +3,7 @@ Connecting ---------- -URLs are of the form `postgres+pypostgresql://user@password@host:port/dbname[?key=value&key=value...]`. +URLs are of the form `postgresql+pypostgresql://user@password@host:port/dbname[?key=value&key=value...]`. 
""" @@ -11,7 +11,7 @@ from sqlalchemy.engine import default import decimal from sqlalchemy import util from sqlalchemy import types as sqltypes -from sqlalchemy.dialects.postgres.base import PGDialect, PGDefaultRunner +from sqlalchemy.dialects.postgresql.base import PGDialect, PGDefaultRunner class PGNumeric(sqltypes.Numeric): def bind_processor(self, dialect): @@ -28,14 +28,14 @@ class PGNumeric(sqltypes.Numeric): return value return process -class Postgres_pypostgresqlExecutionContext(default.DefaultExecutionContext): +class PostgreSQL_pypostgresqlExecutionContext(default.DefaultExecutionContext): pass -class Postgres_pypostgresqlDefaultRunner(PGDefaultRunner): +class PostgreSQL_pypostgresqlDefaultRunner(PGDefaultRunner): def execute_string(self, stmt, params=None): return PGDefaultRunner.execute_string(self, stmt, params or ()) -class Postgres_pypostgresql(PGDialect): +class PostgreSQL_pypostgresql(PGDialect): driver = 'pypostgresql' supports_unicode_statements = True @@ -43,7 +43,7 @@ class Postgres_pypostgresql(PGDialect): supports_unicode_binds = True description_encoding = None - defaultrunner = Postgres_pypostgresqlDefaultRunner + defaultrunner = PostgreSQL_pypostgresqlDefaultRunner default_paramstyle = 'format' @@ -51,7 +51,7 @@ class Postgres_pypostgresql(PGDialect): supports_sane_multi_rowcount = False - execution_ctx_cls = Postgres_pypostgresqlExecutionContext + execution_ctx_cls = PostgreSQL_pypostgresqlExecutionContext colspecs = util.update_copy( PGDialect.colspecs, { @@ -77,4 +77,4 @@ class Postgres_pypostgresql(PGDialect): def is_disconnect(self, e): return "connection is closed" in str(e) -dialect = Postgres_pypostgresql +dialect = PostgreSQL_pypostgresql diff --git a/lib/sqlalchemy/dialects/postgres/zxjdbc.py b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py similarity index 56% rename from lib/sqlalchemy/dialects/postgres/zxjdbc.py rename to lib/sqlalchemy/dialects/postgresql/zxjdbc.py index f968ac9851..efef4b6606 100644 --- a/lib/sqlalchemy/dialects/postgres/zxjdbc.py +++ b/lib/sqlalchemy/dialects/postgresql/zxjdbc.py @@ -1,12 +1,12 @@ -from sqlalchemy.dialects.postgres.base import PGDialect +from sqlalchemy.dialects.postgresql.base import PGDialect from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector from sqlalchemy.engine import default -class Postgres_jdbcExecutionContext(default.DefaultExecutionContext): +class PostgreSQL_jdbcExecutionContext(default.DefaultExecutionContext): pass -class Postgres_jdbc(ZxJDBCConnector, PGDialect): - execution_ctx_cls = Postgres_jdbcExecutionContext +class PostgreSQL_jdbc(ZxJDBCConnector, PGDialect): + execution_ctx_cls = PostgreSQL_jdbcExecutionContext jdbc_db_name = 'postgresql' jdbc_driver_name = "org.postgresql.Driver" @@ -15,4 +15,4 @@ class Postgres_jdbc(ZxJDBCConnector, PGDialect): def _get_server_version_info(self, connection): return tuple(int(x) for x in connection.connection.dbversion.split('.')) -dialect = Postgres_jdbc \ No newline at end of file +dialect = PostgreSQL_jdbc \ No newline at end of file diff --git a/lib/sqlalchemy/dialects/type_migration_guidelines.txt b/lib/sqlalchemy/dialects/type_migration_guidelines.txt index 3ee439dd87..8ed1a17975 100644 --- a/lib/sqlalchemy/dialects/type_migration_guidelines.txt +++ b/lib/sqlalchemy/dialects/type_migration_guidelines.txt @@ -28,8 +28,8 @@ is a subclass of an existing generic type and is only provided for bind/result b the current mixed case naming can remain, i.e. _PGNumeric for Numeric - in this case, end users would never need to use _PGNumeric directly. 
However, if a dialect-specific type is specifying a type *or* arguments that are not present generically, it should -match the real name of the type on that backend, in uppercase. E.g. postgres.INET, -mysql.ENUM, postgres.ARRAY. +match the real name of the type on that backend, in uppercase. E.g. postgresql.INET, +mysql.ENUM, postgresql.ARRAY. Or follow this handy flowchart: @@ -92,7 +92,7 @@ Postgresql dialect therefore imports types.DATETIME into its base.py. Ideally one should be able to specify a schema using names imported completely from a dialect, all matching the real name on that backend: - from sqlalchemy.dialects.postgres import base as pg + from sqlalchemy.dialects.postgresql import base as pg t = Table('mytable', metadata, Column('id', pg.INTEGER, primary_key=True), diff --git a/lib/sqlalchemy/engine/__init__.py b/lib/sqlalchemy/engine/__init__.py index 4dd5ea2861..694a2f71fa 100644 --- a/lib/sqlalchemy/engine/__init__.py +++ b/lib/sqlalchemy/engine/__init__.py @@ -110,7 +110,7 @@ def create_engine(*args, **kwargs): The URL is a string in the form ``dialect://user:password@host/dbname[?key=value..]``, where - ``dialect`` is a name such as ``mysql``, ``oracle``, ``postgres``, + ``dialect`` is a name such as ``mysql``, ``oracle``, ``postgresql``, etc. Alternatively, the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`. @@ -175,7 +175,7 @@ def create_engine(*args, **kwargs): :param module=None: used by database implementations which support multiple DBAPI modules, this is a reference to a DBAPI2 module to be used instead of the engine's default module. For - Postgres, the default is psycopg2. For Oracle, it's cx_Oracle. + PostgreSQL, the default is psycopg2. For Oracle, it's cx_Oracle. :param pool=None: an already-constructed instance of :class:`~sqlalchemy.pool.Pool`, such as a diff --git a/lib/sqlalchemy/engine/base.py b/lib/sqlalchemy/engine/base.py index f752182c36..470ca811b1 100644 --- a/lib/sqlalchemy/engine/base.py +++ b/lib/sqlalchemy/engine/base.py @@ -477,7 +477,7 @@ class ExecutionContext(object): """Return a new cursor generated from this ExecutionContext's connection. Some dialects may wish to change the behavior of - connection.cursor(), such as postgres which may return a PG + connection.cursor(), such as postgresql which may return a PG "server side" cursor. """ diff --git a/lib/sqlalchemy/engine/url.py b/lib/sqlalchemy/engine/url.py index 68913dbdb6..b0e21f5f72 100644 --- a/lib/sqlalchemy/engine/url.py +++ b/lib/sqlalchemy/engine/url.py @@ -96,7 +96,7 @@ class URL(object): else: dialect, driver = self.drivername, 'base' - module = __import__('sqlalchemy.dialects.%s.%s' % (dialect, driver)).dialects + module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects module = getattr(module, dialect) module = getattr(module, driver) diff --git a/lib/sqlalchemy/ext/compiler.py b/lib/sqlalchemy/ext/compiler.py index f97dfd5377..05df8d2be6 100644 --- a/lib/sqlalchemy/ext/compiler.py +++ b/lib/sqlalchemy/ext/compiler.py @@ -45,11 +45,11 @@ for the dialect in use:: def visit_alter_column(element, compiler, **kw): return "ALTER COLUMN %s ..." % element.column.name - @compiles(AlterColumn, 'postgres') + @compiles(AlterColumn, 'postgresql') def visit_alter_column(element, compiler, **kw): return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name) -The second ``visit_alter_table`` will be invoked when any ``postgres`` dialect is used. +The second ``visit_alter_table`` will be invoked when any ``postgresql`` dialect is used. 
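A short, hedged sketch of the same dialect-name dispatch using a custom column construct (the ``MyColumn`` class and the bracket/quote renderings are illustrative only, not part of this patch)::

    from sqlalchemy import select
    from sqlalchemy.sql.expression import ColumnClause
    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.dialects.postgresql import base as postgresql

    class MyColumn(ColumnClause):
        pass

    @compiles(MyColumn)
    def compile_mycolumn(element, compiler, **kw):
        # generic rendering, used when no dialect-specific visitor matches
        return "[%s]" % element.name

    @compiles(MyColumn, 'postgresql')
    def compile_mycolumn_pg(element, compiler, **kw):
        # chosen whenever a postgresql dialect performs the compilation
        return '"%s"' % element.name

    # renders: SELECT "some_col"
    print select([MyColumn('some_col')]).compile(dialect=postgresql.dialect())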
The ``compiler`` argument is the :class:`~sqlalchemy.engine.base.Compiled` object in use. This object can be inspected for any information about the in-progress diff --git a/lib/sqlalchemy/ext/declarative.py b/lib/sqlalchemy/ext/declarative.py index e22928b487..43369311b3 100644 --- a/lib/sqlalchemy/ext/declarative.py +++ b/lib/sqlalchemy/ext/declarative.py @@ -386,7 +386,7 @@ only intended as an optional syntax for the regular usage of mappers and Table objects. A typical application setup using :func:`~sqlalchemy.orm.scoped_session` might look like:: - engine = create_engine('postgres://scott:tiger@localhost/test') + engine = create_engine('postgresql://scott:tiger@localhost/test') Session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine)) diff --git a/lib/sqlalchemy/orm/__init__.py b/lib/sqlalchemy/orm/__init__.py index 26eb9ddb8f..41a8550fff 100644 --- a/lib/sqlalchemy/orm/__init__.py +++ b/lib/sqlalchemy/orm/__init__.py @@ -338,7 +338,7 @@ def relation(argument, secondary=None, **kwargs): the foreign key in the database, and that the database will handle propagation of an UPDATE from a source column to dependent rows. Note that with databases which enforce - referential integrity (i.e. Postgres, MySQL with InnoDB tables), + referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables), ON UPDATE CASCADE is required for this operation. The relation() will update the value of the attribute on related items which are locally present in the session during a flush. diff --git a/lib/sqlalchemy/orm/session.py b/lib/sqlalchemy/orm/session.py index a658783c2c..5b83df4ac6 100644 --- a/lib/sqlalchemy/orm/session.py +++ b/lib/sqlalchemy/orm/session.py @@ -109,9 +109,9 @@ def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False, like:: sess = Session(binds={ - SomeMappedClass: create_engine('postgres://engine1'), - somemapper: create_engine('postgres://engine2'), - some_table: create_engine('postgres://engine3'), + SomeMappedClass: create_engine('postgresql://engine1'), + somemapper: create_engine('postgresql://engine2'), + some_table: create_engine('postgresql://engine3'), }) Also see the ``bind_mapper()`` and ``bind_table()`` methods. diff --git a/lib/sqlalchemy/schema.py b/lib/sqlalchemy/schema.py index df39a658fd..252fa8407f 100644 --- a/lib/sqlalchemy/schema.py +++ b/lib/sqlalchemy/schema.py @@ -467,7 +467,7 @@ class Column(SchemaItem, expression.ColumnClause): or :meth:`create_all()`. The flag has no relevance at any other time. * The database supports autoincrementing behavior, such as - Postgres or MySQL, and this behavior can be disabled (which does + PostgreSQL or MySQL, and this behavior can be disabled (which does not include SQLite). :param default: A scalar, Python callable, or :class:`~sqlalchemy.sql.expression.ClauseElement` @@ -1469,7 +1469,7 @@ class Index(SchemaItem): unique Defaults to False: create a unique index. - postgres_where + postgresql_where Defaults to None: create a partial index when using PostgreSQL """ @@ -2027,11 +2027,11 @@ class DDL(DDLElement): predicate. 
If a string, it will be compared to the name of the executing database dialect:: - DDL('something', on='postgres') + DDL('something', on='postgresql') If a tuple, specifies multiple dialect names: - DDL('something', on=('postgres', 'mysql')) + DDL('something', on=('postgresql', 'mysql')) If a callable, it will be invoked with three positional arguments as well as optional keyword arguments: diff --git a/lib/sqlalchemy/test/config.py b/lib/sqlalchemy/test/config.py index 6d60642a59..eec962d807 100644 --- a/lib/sqlalchemy/test/config.py +++ b/lib/sqlalchemy/test/config.py @@ -17,9 +17,10 @@ base_config = """ [db] sqlite=sqlite:///:memory: sqlite_file=sqlite:///querytest.db -postgres=postgres://scott:tiger@127.0.0.1:5432/test -pg8000=postgres+pg8000://scott:tiger@127.0.0.1:5432/test -postgres_jython=postgres+zxjdbc://scott:tiger@127.0.0.1:5432/test +postgresql=postgresql://scott:tiger@127.0.0.1:5432/test +postgres=postgresql://scott:tiger@127.0.0.1:5432/test +pg8000=postgresql+pg8000://scott:tiger@127.0.0.1:5432/test +postgresql_jython=postgresql+zxjdbc://scott:tiger@127.0.0.1:5432/test mysql_jython=mysql+zxjdbc://scott:tiger@127.0.0.1:5432/test mysql=mysql://scott:tiger@127.0.0.1:3306/test oracle=oracle://scott:tiger@127.0.0.1:1521 diff --git a/lib/sqlalchemy/test/requires.py b/lib/sqlalchemy/test/requires.py index b6fbb93dba..bf524e1877 100644 --- a/lib/sqlalchemy/test/requires.py +++ b/lib/sqlalchemy/test/requires.py @@ -59,7 +59,7 @@ def identity(fn): fn, no_support('firebird', 'not supported by database'), no_support('oracle', 'not supported by database'), - no_support('postgres', 'not supported by database'), + no_support('postgresql', 'not supported by database'), no_support('sybase', 'not supported by database'), ) @@ -82,7 +82,7 @@ def row_triggers(fn): exclude('mysql', '<', (5, 0, 10), 'not supported by database'), # huh? 
TODO: implement triggers for PG tests, remove this - no_support('postgres', 'PG triggers need to be implemented for tests'), + no_support('postgresql', 'PG triggers need to be implemented for tests'), ) def correlated_outer_joins(fn): diff --git a/lib/sqlalchemy/test/testing.py b/lib/sqlalchemy/test/testing.py index 57b1405802..8f3fb3c68c 100644 --- a/lib/sqlalchemy/test/testing.py +++ b/lib/sqlalchemy/test/testing.py @@ -436,7 +436,7 @@ def against(*queries): Also supports comparison to database version when provided with one or more 3-tuples of dialect name, operator, and version specification:: - testing.against('mysql', 'postgres') + testing.against('mysql', 'postgresql') testing.against(('mysql', '>=', (5, 0, 0)) """ @@ -626,7 +626,7 @@ class ComparesTables(object): elif against(('mysql', '<', (5, 0))): # ignore reflection of bogus db-generated DefaultClause() pass - elif not c.primary_key or not against('postgres', 'mssql'): + elif not c.primary_key or not against('postgresql', 'mssql'): #print repr(c) assert reflected_c.default is None, reflected_c.default @@ -718,7 +718,7 @@ class AssertsExecutionResults(object): assertsql.asserter.clear_rules() def assert_sql(self, db, callable_, list_, with_sequences=None): - if with_sequences is not None and config.db.name in ('firebird', 'oracle', 'postgres'): + if with_sequences is not None and config.db.name in ('firebird', 'oracle', 'postgresql'): rules = with_sequences else: rules = list_ diff --git a/test/aaa_profiling/test_zoomark.py b/test/aaa_profiling/test_zoomark.py index e9f5146690..d346498c19 100644 --- a/test/aaa_profiling/test_zoomark.py +++ b/test/aaa_profiling/test_zoomark.py @@ -26,7 +26,7 @@ class ZooMarkTest(TestBase): """ - __only_on__ = 'postgres+psycopg2' + __only_on__ = 'postgresql+psycopg2' __skip_if__ = ((lambda: sys.version_info < (2, 4)), ) def test_baseline_0_setup(self): @@ -316,7 +316,7 @@ class ZooMarkTest(TestBase): global metadata player = lambda: dbapi_session.player() - engine = create_engine('postgres:///', creator=player) + engine = create_engine('postgresql:///', creator=player) metadata = MetaData(engine) @profiling.function_call_count(2991, {'2.4': 1796}) diff --git a/test/aaa_profiling/test_zoomark_orm.py b/test/aaa_profiling/test_zoomark_orm.py index b28d067acc..660f478110 100644 --- a/test/aaa_profiling/test_zoomark_orm.py +++ b/test/aaa_profiling/test_zoomark_orm.py @@ -27,7 +27,7 @@ class ZooMarkTest(TestBase): """ - __only_on__ = 'postgres+psycopg2' + __only_on__ = 'postgresql+psycopg2' __skip_if__ = ((lambda: sys.version_info < (2, 5)), ) # TODO: get 2.4 support def test_baseline_0_setup(self): @@ -281,7 +281,7 @@ class ZooMarkTest(TestBase): global metadata, session player = lambda: dbapi_session.player() - engine = create_engine('postgres:///', creator=player) + engine = create_engine('postgresql:///', creator=player) metadata = MetaData(engine) session = sessionmaker()() diff --git a/test/dialect/test_postgres.py b/test/dialect/test_postgresql.py similarity index 89% rename from test/dialect/test_postgres.py rename to test/dialect/test_postgresql.py index 3136cc9c59..e4b66c398d 100644 --- a/test/dialect/test_postgres.py +++ b/test/dialect/test_postgresql.py @@ -3,7 +3,7 @@ import datetime from sqlalchemy import * from sqlalchemy.orm import * from sqlalchemy import exc, schema -from sqlalchemy.dialects.postgres import base as postgres +from sqlalchemy.dialects.postgresql import base as postgresql from sqlalchemy.engine.strategies import MockEngineStrategy from sqlalchemy.test import * from 
sqlalchemy.sql import table, column @@ -12,7 +12,7 @@ from sqlalchemy.test.testing import eq_ class SequenceTest(TestBase, AssertsCompiledSQL): def test_basic(self): seq = Sequence("my_seq_no_schema") - dialect = postgres.PGDialect() + dialect = postgresql.PGDialect() assert dialect.identifier_preparer.format_sequence(seq) == "my_seq_no_schema" seq = Sequence("my_seq", schema="some_schema") @@ -22,50 +22,74 @@ class SequenceTest(TestBase, AssertsCompiledSQL): assert dialect.identifier_preparer.format_sequence(seq) == '"Some_Schema"."My_Seq"' class CompileTest(TestBase, AssertsCompiledSQL): - __dialect__ = postgres.dialect() + __dialect__ = postgresql.dialect() def test_update_returning(self): - dialect = postgres.dialect() + dialect = postgresql.dialect() table1 = table('mytable', column('myid', Integer), column('name', String(128)), column('description', String(128)), ) - u = update(table1, values=dict(name='foo'), postgres_returning=[table1.c.myid, table1.c.name]) + u = update(table1, values=dict(name='foo'), postgresql_returning=[table1.c.myid, table1.c.name]) self.assert_compile(u, "UPDATE mytable SET name=%(name)s RETURNING mytable.myid, mytable.name", dialect=dialect) - u = update(table1, values=dict(name='foo'), postgres_returning=[table1]) + u = update(table1, values=dict(name='foo'), postgresql_returning=[table1]) self.assert_compile(u, "UPDATE mytable SET name=%(name)s "\ "RETURNING mytable.myid, mytable.name, mytable.description", dialect=dialect) - u = update(table1, values=dict(name='foo'), postgres_returning=[func.length(table1.c.name)]) + u = update(table1, values=dict(name='foo'), postgresql_returning=[func.length(table1.c.name)]) self.assert_compile(u, "UPDATE mytable SET name=%(name)s RETURNING length(mytable.name)", dialect=dialect) + def test_insert_returning(self): - dialect = postgres.dialect() + dialect = postgresql.dialect() table1 = table('mytable', column('myid', Integer), column('name', String(128)), column('description', String(128)), ) - i = insert(table1, values=dict(name='foo'), postgres_returning=[table1.c.myid, table1.c.name]) + i = insert(table1, values=dict(name='foo'), postgresql_returning=[table1.c.myid, table1.c.name]) self.assert_compile(i, "INSERT INTO mytable (name) VALUES (%(name)s) RETURNING mytable.myid, mytable.name", dialect=dialect) - i = insert(table1, values=dict(name='foo'), postgres_returning=[table1]) + i = insert(table1, values=dict(name='foo'), postgresql_returning=[table1]) self.assert_compile(i, "INSERT INTO mytable (name) VALUES (%(name)s) "\ "RETURNING mytable.myid, mytable.name, mytable.description", dialect=dialect) - i = insert(table1, values=dict(name='foo'), postgres_returning=[func.length(table1.c.name)]) + i = insert(table1, values=dict(name='foo'), postgresql_returning=[func.length(table1.c.name)]) self.assert_compile(i, "INSERT INTO mytable (name) VALUES (%(name)s) RETURNING length(mytable.name)", dialect=dialect) + + @testing.uses_deprecated(r".*'postgres_returning' argument has been renamed.*") + def test_old_returning_names(self): + dialect = postgresql.dialect() + table1 = table('mytable', + column('myid', Integer), + column('name', String(128)), + column('description', String(128)), + ) + u = update(table1, values=dict(name='foo'), postgres_returning=[table1.c.myid, table1.c.name]) + self.assert_compile(u, "UPDATE mytable SET name=%(name)s RETURNING mytable.myid, mytable.name", dialect=dialect) + + i = insert(table1, values=dict(name='foo'), postgres_returning=[table1.c.myid, table1.c.name]) + self.assert_compile(i, 
"INSERT INTO mytable (name) VALUES (%(name)s) RETURNING mytable.myid, mytable.name", dialect=dialect) + def test_create_partial_index(self): + tbl = Table('testtbl', MetaData(), Column('data',Integer)) + idx = Index('test_idx1', tbl.c.data, postgresql_where=and_(tbl.c.data > 5, tbl.c.data < 10)) + + self.assert_compile(schema.CreateIndex(idx), + "CREATE INDEX test_idx1 ON testtbl (data) WHERE testtbl.data > 5 AND testtbl.data < 10", dialect=postgresql.dialect()) + + @testing.uses_deprecated(r".*'postgres_where' argument has been renamed.*") + def test_old_create_partial_index(self): tbl = Table('testtbl', MetaData(), Column('data',Integer)) idx = Index('test_idx1', tbl.c.data, postgres_where=and_(tbl.c.data > 5, tbl.c.data < 10)) self.assert_compile(schema.CreateIndex(idx), - "CREATE INDEX test_idx1 ON testtbl (data) WHERE testtbl.data > 5 AND testtbl.data < 10", dialect=postgres.dialect()) + "CREATE INDEX test_idx1 ON testtbl (data) WHERE testtbl.data > 5 AND testtbl.data < 10", dialect=postgresql.dialect()) def test_extract(self): t = table('t', column('col1')) @@ -77,9 +101,9 @@ class CompileTest(TestBase, AssertsCompiledSQL): "FROM t" % field) class ReturningTest(TestBase, AssertsExecutionResults): - __only_on__ = 'postgres' + __only_on__ = 'postgresql' - @testing.exclude('postgres', '<', (8, 2), '8.3+ feature') + @testing.exclude('postgresql', '<', (8, 2), '8.3+ feature') def test_update_returning(self): meta = MetaData(testing.db) table = Table('tables', meta, @@ -91,7 +115,7 @@ class ReturningTest(TestBase, AssertsExecutionResults): try: table.insert().execute([{'persons': 5, 'full': False}, {'persons': 3, 'full': False}]) - result = table.update(table.c.persons > 4, dict(full=True), postgres_returning=[table.c.id]).execute() + result = table.update(table.c.persons > 4, dict(full=True), postgresql_returning=[table.c.id]).execute() eq_(result.fetchall(), [(1,)]) result2 = select([table.c.id, table.c.full]).order_by(table.c.id).execute() @@ -99,7 +123,7 @@ class ReturningTest(TestBase, AssertsExecutionResults): finally: table.drop() - @testing.exclude('postgres', '<', (8, 2), '8.3+ feature') + @testing.exclude('postgresql', '<', (8, 2), '8.3+ feature') def test_insert_returning(self): meta = MetaData(testing.db) table = Table('tables', meta, @@ -109,20 +133,20 @@ class ReturningTest(TestBase, AssertsExecutionResults): ) table.create() try: - result = table.insert(postgres_returning=[table.c.id]).execute({'persons': 1, 'full': False}) + result = table.insert(postgresql_returning=[table.c.id]).execute({'persons': 1, 'full': False}) eq_(result.fetchall(), [(1,)]) - @testing.fails_on('postgres', 'Known limitation of psycopg2') + @testing.fails_on('postgresql', 'Known limitation of psycopg2') def test_executemany(): # return value is documented as failing with psycopg2/executemany - result2 = table.insert(postgres_returning=[table]).execute( + result2 = table.insert(postgresql_returning=[table]).execute( [{'persons': 2, 'full': False}, {'persons': 3, 'full': True}]) eq_(result2.fetchall(), [(2, 2, False), (3,3,True)]) test_executemany() - result3 = table.insert(postgres_returning=[(table.c.id*2).label('double_id')]).execute({'persons': 4, 'full': False}) + result3 = table.insert(postgresql_returning=[(table.c.id*2).label('double_id')]).execute({'persons': 4, 'full': False}) eq_([dict(row) for row in result3], [{'double_id':8}]) result4 = testing.db.execute('insert into tables (id, persons, "full") values (5, 10, true) returning persons') @@ -132,7 +156,7 @@ class ReturningTest(TestBase, 
AssertsExecutionResults): class InsertTest(TestBase, AssertsExecutionResults): - __only_on__ = 'postgres' + __only_on__ = 'postgresql' @classmethod def setup_class(cls): @@ -397,7 +421,7 @@ class InsertTest(TestBase, AssertsExecutionResults): class DomainReflectionTest(TestBase, AssertsExecutionResults): "Test PostgreSQL domains" - __only_on__ = 'postgres' + __only_on__ = 'postgresql' @classmethod def setup_class(cls): @@ -453,10 +477,10 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults): assert table.columns.answer.nullable, "Expected reflected column to be nullable." def test_unknown_types(self): - from sqlalchemy.databases import postgres + from sqlalchemy.databases import postgresql - ischema_names = postgres.PGDialect.ischema_names - postgres.PGDialect.ischema_names = {} + ischema_names = postgresql.PGDialect.ischema_names + postgresql.PGDialect.ischema_names = {} try: m2 = MetaData(testing.db) assert_raises(exc.SAWarning, Table, "testtable", m2, autoload=True) @@ -468,11 +492,11 @@ class DomainReflectionTest(TestBase, AssertsExecutionResults): assert t3.c.answer.type.__class__ == sa.types.NullType finally: - postgres.PGDialect.ischema_names = ischema_names + postgresql.PGDialect.ischema_names = ischema_names class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL): - __only_on__ = 'postgres' + __only_on__ = 'postgresql' def test_date_reflection(self): m1 = MetaData(testing.db) @@ -732,20 +756,20 @@ class MiscTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL): class TimezoneTest(TestBase, AssertsExecutionResults): """Test timezone-aware datetimes. - psycopg will return a datetime with a tzinfo attached to it, if postgres + psycopg will return a datetime with a tzinfo attached to it, if postgresql returns it. python then will not let you compare a datetime with a tzinfo to a datetime that doesnt have one. this test illustrates two ways to have datetime types with and without timezone info. 
""" - __only_on__ = 'postgres' + __only_on__ = 'postgresql' @classmethod def setup_class(cls): global tztable, notztable, metadata metadata = MetaData(testing.db) - # current_timestamp() in postgres is assumed to return TIMESTAMP WITH TIMEZONE + # current_timestamp() in postgresql is assumed to return TIMESTAMP WITH TIMEZONE tztable = Table('tztable', metadata, Column("id", Integer, primary_key=True), Column("date", DateTime(timezone=True), onupdate=func.current_timestamp()), @@ -776,7 +800,7 @@ class TimezoneTest(TestBase, AssertsExecutionResults): print notztable.select(tztable.c.id==1).execute().fetchone() class ArrayTest(TestBase, AssertsExecutionResults): - __only_on__ = 'postgres' + __only_on__ = 'postgresql' @classmethod def setup_class(cls): @@ -785,8 +809,8 @@ class ArrayTest(TestBase, AssertsExecutionResults): arrtable = Table('arrtable', metadata, Column('id', Integer, primary_key=True), - Column('intarr', postgres.PGArray(Integer)), - Column('strarr', postgres.PGArray(String(convert_unicode=True)), nullable=False) + Column('intarr', postgresql.PGArray(Integer)), + Column('strarr', postgresql.PGArray(String(convert_unicode=True)), nullable=False) ) metadata.create_all() @@ -800,8 +824,8 @@ class ArrayTest(TestBase, AssertsExecutionResults): def test_reflect_array_column(self): metadata2 = MetaData(testing.db) tbl = Table('arrtable', metadata2, autoload=True) - assert isinstance(tbl.c.intarr.type, postgres.PGArray) - assert isinstance(tbl.c.strarr.type, postgres.PGArray) + assert isinstance(tbl.c.intarr.type, postgresql.PGArray) + assert isinstance(tbl.c.strarr.type, postgresql.PGArray) assert isinstance(tbl.c.intarr.type.item_type, Integer) assert isinstance(tbl.c.strarr.type.item_type, String) @@ -812,7 +836,7 @@ class ArrayTest(TestBase, AssertsExecutionResults): eq_(results[0]['intarr'], [1,2,3]) eq_(results[0]['strarr'], ['abc','def']) - @testing.fails_on('postgres+pg8000', 'pg8000 has poor support for PG arrays') + @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays') def test_array_where(self): arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def']) arrtable.insert().execute(intarr=[4,5,6], strarr='ABC') @@ -820,14 +844,14 @@ class ArrayTest(TestBase, AssertsExecutionResults): eq_(len(results), 1) eq_(results[0]['intarr'], [1,2,3]) - @testing.fails_on('postgres+pg8000', 'pg8000 has poor support for PG arrays') + @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays') def test_array_concat(self): arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def']) results = select([arrtable.c.intarr + [4,5,6]]).execute().fetchall() eq_(len(results), 1) eq_(results[0][0], [1,2,3,4,5,6]) - @testing.fails_on('postgres+pg8000', 'pg8000 has poor support for PG arrays') + @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays') def test_array_subtype_resultprocessor(self): arrtable.insert().execute(intarr=[4,5,6], strarr=[[u'm\xe4\xe4'], [u'm\xf6\xf6']]) arrtable.insert().execute(intarr=[1,2,3], strarr=[u'm\xe4\xe4', u'm\xf6\xf6']) @@ -836,12 +860,12 @@ class ArrayTest(TestBase, AssertsExecutionResults): eq_(results[0]['strarr'], [u'm\xe4\xe4', u'm\xf6\xf6']) eq_(results[1]['strarr'], [[u'm\xe4\xe4'], [u'm\xf6\xf6']]) - @testing.fails_on('postgres+pg8000', 'pg8000 has poor support for PG arrays') + @testing.fails_on('postgresql+pg8000', 'pg8000 has poor support for PG arrays') def test_array_mutability(self): class Foo(object): pass footable = Table('foo', metadata, Column('id', Integer, 
primary_key=True), - Column('intarr', postgres.PGArray(Integer), nullable=True) + Column('intarr', postgresql.PGArray(Integer), nullable=True) ) mapper(Foo, footable) metadata.create_all() @@ -879,7 +903,7 @@ class ArrayTest(TestBase, AssertsExecutionResults): sess.flush() class TimestampTest(TestBase, AssertsExecutionResults): - __only_on__ = 'postgres' + __only_on__ = 'postgresql' def test_timestamp(self): engine = testing.db @@ -890,7 +914,7 @@ class TimestampTest(TestBase, AssertsExecutionResults): eq_(result[0], datetime.datetime(2007, 12, 25, 0, 0)) class ServerSideCursorsTest(TestBase, AssertsExecutionResults): - __only_on__ = 'postgres+psycopg2' + __only_on__ = 'postgresql+psycopg2' @classmethod def setup_class(cls): @@ -935,8 +959,8 @@ class ServerSideCursorsTest(TestBase, AssertsExecutionResults): class SpecialTypesTest(TestBase, ComparesTables): """test DDL and reflection of PG-specific types """ - __only_on__ = 'postgres' - __excluded_on__ = (('postgres', '<', (8, 3, 0)),) + __only_on__ = 'postgresql' + __excluded_on__ = (('postgresql', '<', (8, 3, 0)),) @classmethod def setup_class(cls): @@ -944,11 +968,11 @@ class SpecialTypesTest(TestBase, ComparesTables): metadata = MetaData(testing.db) table = Table('sometable', metadata, - Column('id', postgres.PGUuid, primary_key=True), - Column('flag', postgres.PGBit), - Column('addr', postgres.PGInet), - Column('addr2', postgres.PGMacAddr), - Column('addr3', postgres.PGCidr) + Column('id', postgresql.PGUuid, primary_key=True), + Column('flag', postgresql.PGBit), + Column('addr', postgresql.PGInet), + Column('addr2', postgresql.PGMacAddr), + Column('addr3', postgresql.PGCidr) ) metadata.create_all() @@ -965,8 +989,8 @@ class SpecialTypesTest(TestBase, ComparesTables): class MatchTest(TestBase, AssertsCompiledSQL): - __only_on__ = 'postgres' - __excluded_on__ = (('postgres', '<', (8, 3, 0)),) + __only_on__ = 'postgresql' + __excluded_on__ = (('postgresql', '<', (8, 3, 0)),) @classmethod def setup_class(cls): @@ -1000,11 +1024,11 @@ class MatchTest(TestBase, AssertsCompiledSQL): def teardown_class(cls): metadata.drop_all() - @testing.fails_on('postgres+pg8000', 'uses positional') + @testing.fails_on('postgresql+pg8000', 'uses positional') def test_expression_pyformat(self): self.assert_compile(matchtable.c.title.match('somstr'), "matchtable.title @@ to_tsquery(%(title_1)s)") - @testing.fails_on('postgres+psycopg2', 'uses pyformat') + @testing.fails_on('postgresql+psycopg2', 'uses pyformat') def test_expression_positional(self): self.assert_compile(matchtable.c.title.match('somstr'), "matchtable.title @@ to_tsquery(%s)") diff --git a/test/engine/test_ddlevents.py b/test/engine/test_ddlevents.py index a2cecf89a8..5343fb2c9f 100644 --- a/test/engine/test_ddlevents.py +++ b/test/engine/test_ddlevents.py @@ -236,14 +236,14 @@ class DDLExecutionTest(TestBase): def test_conditional_constraint(self): metadata, users, engine = self.metadata, self.users, self.engine nonpg_mock = engines.mock_engine(dialect_name='sqlite') - pg_mock = engines.mock_engine(dialect_name='postgres') + pg_mock = engines.mock_engine(dialect_name='postgresql') constraint = CheckConstraint('a < b',name="my_test_constraint", table=users) # by placing the constraint in an Add/Drop construct, # the 'inline_ddl' flag is set to False - AddConstraint(constraint, on='postgres').execute_at("after-create", users) - DropConstraint(constraint, on='postgres').execute_at("before-drop", users) + AddConstraint(constraint, on='postgresql').execute_at("after-create", users) + 
DropConstraint(constraint, on='postgresql').execute_at("before-drop", users) metadata.create_all(bind=nonpg_mock) strings = " ".join(str(x) for x in nonpg_mock.mock) diff --git a/test/engine/test_execute.py b/test/engine/test_execute.py index d35f74bb02..981ef51beb 100644 --- a/test/engine/test_execute.py +++ b/test/engine/test_execute.py @@ -40,8 +40,8 @@ class ExecuteTest(TestBase): assert res.fetchall() == [(1, "jack"), (2, "fred"), (3, "ed"), (4, "horse"), (5, "barney"), (6, "donkey"), (7, 'sally')] conn.execute("delete from users") - @testing.fails_on_everything_except('mysql+mysqldb', 'postgres') - @testing.fails_on('postgres+zxjdbc', 'sprintf not supported') + @testing.fails_on_everything_except('mysql+mysqldb', 'postgresql') + @testing.fails_on('postgresql+zxjdbc', 'sprintf not supported') # some psycopg2 versions bomb this. def test_raw_sprintf(self): for conn in (testing.db, testing.db.connect()): @@ -56,7 +56,7 @@ class ExecuteTest(TestBase): # pyformat is supported for mysql, but skipping because a few driver # versions have a bug that bombs out on this test. (1.2.2b3, 1.2.2c1, 1.2.2) @testing.skip_if(lambda: testing.against('mysql+mysqldb'), 'db-api flaky') - @testing.fails_on_everything_except('postgres+psycopg2') + @testing.fails_on_everything_except('postgresql+psycopg2') def test_raw_python(self): for conn in (testing.db, testing.db.connect()): conn.execute("insert into users (user_id, user_name) values (%(id)s, %(name)s)", {'id':1, 'name':'jack'}) diff --git a/test/engine/test_parseconnect.py b/test/engine/test_parseconnect.py index ae5503ba4e..0d887cb649 100644 --- a/test/engine/test_parseconnect.py +++ b/test/engine/test_parseconnect.py @@ -42,7 +42,7 @@ class CreateEngineTest(TestBase): dbapi = MockDBAPI(foober='12', lala='18', fooz='somevalue') e = create_engine( - 'postgres://scott:tiger@somehost/test?foober=12&lala=18&fooz=somevalue', + 'postgresql://scott:tiger@somehost/test?foober=12&lala=18&fooz=somevalue', module=dbapi, _initialize=False ) @@ -52,7 +52,7 @@ class CreateEngineTest(TestBase): dbapi = MockDBAPI(foober=12, lala=18, hoho={'this':'dict'}, fooz='somevalue') e = create_engine( - 'postgres://scott:tiger@somehost/test?fooz=somevalue', + 'postgresql://scott:tiger@somehost/test?fooz=somevalue', connect_args={'foober':12, 'lala':18, 'hoho':{'this':'dict'}}, module=dbapi, _initialize=False @@ -62,7 +62,7 @@ class CreateEngineTest(TestBase): def test_coerce_config(self): raw = r""" [prefixed] -sqlalchemy.url=postgres://scott:tiger@somehost/test?fooz=somevalue +sqlalchemy.url=postgresql://scott:tiger@somehost/test?fooz=somevalue sqlalchemy.convert_unicode=0 sqlalchemy.echo=false sqlalchemy.echo_pool=1 @@ -72,7 +72,7 @@ sqlalchemy.pool_size=2 sqlalchemy.pool_threadlocal=1 sqlalchemy.pool_timeout=10 [plain] -url=postgres://scott:tiger@somehost/test?fooz=somevalue +url=postgresql://scott:tiger@somehost/test?fooz=somevalue convert_unicode=0 echo=0 echo_pool=1 @@ -86,7 +86,7 @@ pool_timeout=10 ini.readfp(StringIO.StringIO(raw)) expected = { - 'url': 'postgres://scott:tiger@somehost/test?fooz=somevalue', + 'url': 'postgresql://scott:tiger@somehost/test?fooz=somevalue', 'convert_unicode': 0, 'echo': False, 'echo_pool': True, @@ -107,14 +107,14 @@ pool_timeout=10 dbapi = MockDBAPI() config = { - 'sqlalchemy.url':'postgres://scott:tiger@somehost/test?fooz=somevalue', + 'sqlalchemy.url':'postgresql://scott:tiger@somehost/test?fooz=somevalue', 'sqlalchemy.pool_recycle':'50', 'sqlalchemy.echo':'true' } e = engine_from_config(config, module=dbapi) assert e.pool._recycle == 
50 - assert e.url == url.make_url('postgres://scott:tiger@somehost/test?fooz=somevalue') + assert e.url == url.make_url('postgresql://scott:tiger@somehost/test?fooz=somevalue') assert e.echo is True def test_custom(self): @@ -123,25 +123,25 @@ pool_timeout=10 def connect(): return dbapi.connect(foober=12, lala=18, fooz='somevalue', hoho={'this':'dict'}) - # start the postgres dialect, but put our mock DBAPI as the module instead of psycopg - e = create_engine('postgres://', creator=connect, module=dbapi, _initialize=False) + # start the postgresql dialect, but put our mock DBAPI as the module instead of psycopg + e = create_engine('postgresql://', creator=connect, module=dbapi, _initialize=False) c = e.connect() def test_recycle(self): dbapi = MockDBAPI(foober=12, lala=18, hoho={'this':'dict'}, fooz='somevalue') - e = create_engine('postgres://', pool_recycle=472, module=dbapi, _initialize=False) + e = create_engine('postgresql://', pool_recycle=472, module=dbapi, _initialize=False) assert e.pool._recycle == 472 def test_badargs(self): assert_raises(ImportError, create_engine, "foobar://", module=MockDBAPI()) # bad arg - assert_raises(TypeError, create_engine, 'postgres://', use_ansi=True, module=MockDBAPI()) + assert_raises(TypeError, create_engine, 'postgresql://', use_ansi=True, module=MockDBAPI()) # bad arg assert_raises(TypeError, create_engine, 'oracle://', lala=5, use_ansi=True, module=MockDBAPI()) - assert_raises(TypeError, create_engine, 'postgres://', lala=5, module=MockDBAPI()) + assert_raises(TypeError, create_engine, 'postgresql://', lala=5, module=MockDBAPI()) assert_raises(TypeError, create_engine,'sqlite://', lala=5) @@ -166,11 +166,11 @@ pool_timeout=10 def test_poolargs(self): """test that connection pool args make it thru""" - e = create_engine('postgres://', creator=None, pool_recycle=50, echo_pool=None, module=MockDBAPI(), _initialize=False) + e = create_engine('postgresql://', creator=None, pool_recycle=50, echo_pool=None, module=MockDBAPI(), _initialize=False) assert e.pool._recycle == 50 # these args work for QueuePool - e = create_engine('postgres://', max_overflow=8, pool_timeout=60, poolclass=tsa.pool.QueuePool, module=MockDBAPI()) + e = create_engine('postgresql://', max_overflow=8, pool_timeout=60, poolclass=tsa.pool.QueuePool, module=MockDBAPI()) # but not SingletonThreadPool assert_raises(TypeError, create_engine, 'sqlite://', max_overflow=8, pool_timeout=60, poolclass=tsa.pool.SingletonThreadPool) diff --git a/test/engine/test_reconnect.py b/test/engine/test_reconnect.py index 252b97b7c0..6afd715155 100644 --- a/test/engine/test_reconnect.py +++ b/test/engine/test_reconnect.py @@ -55,7 +55,7 @@ class MockReconnectTest(TestBase): dbapi = MockDBAPI() # create engine using our current dburi - db = tsa.create_engine('postgres://foo:bar@localhost/test', module=dbapi, _initialize=False) + db = tsa.create_engine('postgresql://foo:bar@localhost/test', module=dbapi, _initialize=False) # monkeypatch disconnect checker db.dialect.is_disconnect = lambda e: isinstance(e, MockDisconnect) diff --git a/test/engine/test_reflection.py b/test/engine/test_reflection.py index 43b427d332..0c2d4340f3 100644 --- a/test/engine/test_reflection.py +++ b/test/engine/test_reflection.py @@ -717,7 +717,7 @@ class UnicodeReflectionTest(TestBase): def test_basic(self): try: # the 'convert_unicode' should not get in the way of the reflection - # process. reflecttable for oracle, postgres (others?) expect non-unicode + # process. reflecttable for oracle, postgresql (others?) 
expect non-unicode # strings in result sets/bind params bind = engines.utf8_engine(options={'convert_unicode':True}) metadata = MetaData(bind) @@ -780,7 +780,7 @@ class SchemaTest(TestBase): if testing.against('mysql+mysqldb'): schema = testing.db.url.database - elif testing.against('postgres'): + elif testing.against('postgresql'): schema = 'public' elif testing.against('sqlite'): # Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc., @@ -1132,7 +1132,7 @@ class ComponentReflectionTest(TestBase): self._test_get_view_definition(schema=get_schema()) def _test_get_table_oid(self, table_name, schema=None): - if testing.against('postgres'): + if testing.against('postgresql'): meta = MetaData(testing.db) (users, addresses) = createTables(meta, schema) meta.create_all() diff --git a/test/engine/test_transaction.py b/test/engine/test_transaction.py index 7d40adf6d0..9d562cdb35 100644 --- a/test/engine/test_transaction.py +++ b/test/engine/test_transaction.py @@ -356,7 +356,7 @@ class AutoRollbackTest(TestBase): conn1.close() # without auto-rollback in the connection pool's return() logic, this - # deadlocks in Postgres, because conn1 is returned to the pool but + # deadlocks in PostgreSQL, because conn1 is returned to the pool but # still has a lock on "deadlock_users". # comment out the rollback in pool/ConnectionFairy._close() to see ! users.drop(conn2) @@ -366,10 +366,10 @@ foo = None class ExplicitAutoCommitTest(TestBase): """test the 'autocommit' flag on select() and text() objects. - Requires Postgres so that we may define a custom function which modifies the database. + Requires PostgreSQL so that we may define a custom function which modifies the database. """ - __only_on__ = 'postgres' + __only_on__ = 'postgresql' @classmethod def setup_class(cls): diff --git a/test/ext/test_compiler.py b/test/ext/test_compiler.py index 6b9cd0d379..3ee94d0271 100644 --- a/test/ext/test_compiler.py +++ b/test/ext/test_compiler.py @@ -36,12 +36,12 @@ class UserDefinedTest(TestBase, AssertsCompiledSQL): def visit_type(type, compiler, **kw): return "SQLITE_FOO" - @compiles(MyType, 'postgres') + @compiles(MyType, 'postgresql') def visit_type(type, compiler, **kw): return "POSTGRES_FOO" from sqlalchemy.dialects.sqlite import base as sqlite - from sqlalchemy.dialects.postgres import base as postgres + from sqlalchemy.dialects.postgresql import base as postgresql self.assert_compile( MyType(), @@ -52,7 +52,7 @@ class UserDefinedTest(TestBase, AssertsCompiledSQL): self.assert_compile( MyType(), "POSTGRES_FOO", - dialect=postgres.dialect() + dialect=postgresql.dialect() ) diff --git a/test/orm/test_mapper.py b/test/orm/test_mapper.py index 5320f18468..4370088278 100644 --- a/test/orm/test_mapper.py +++ b/test/orm/test_mapper.py @@ -2042,7 +2042,7 @@ class CompositeTypesTest(_base.MappedTest): # test pk with one column NULL # TODO: can't seem to get NULL in for a PK value - # in either mysql or postgres, autoincrement=False etc. + # in either mysql or postgresql, autoincrement=False etc. # notwithstanding @testing.fails_on_everything_except("sqlite") def go(): diff --git a/test/orm/test_query.py b/test/orm/test_query.py index d05e6bded8..cb6f9c7665 100644 --- a/test/orm/test_query.py +++ b/test/orm/test_query.py @@ -140,7 +140,7 @@ class GetTest(QueryTest): @testing.requires.unicode_connections def test_unicode(self): """test that Query.get properly sets up the type for the bind parameter. 
using unicode would normally fail - on postgres, mysql and oracle unless it is converted to an encoded string""" + on postgresql, mysql and oracle unless it is converted to an encoded string""" metadata = MetaData(engines.utf8_engine()) table = Table('unicode_data', metadata, @@ -1772,7 +1772,7 @@ class MixedEntitiesTest(QueryTest): @testing.fails_on('mssql', 'FIXME: unknown') @testing.fails_on('oracle', "Oracle doesn't support boolean expressions as columns") - @testing.fails_on('postgres+pg8000', "pg8000 parses the SQL itself before passing on to PG, doesn't parse this") + @testing.fails_on('postgresql+pg8000', "pg8000 parses the SQL itself before passing on to PG, doesn't parse this") def test_values_with_boolean_selects(self): """Tests a values clause that works with select boolean evaluations""" sess = create_session() diff --git a/test/orm/test_relationships.py b/test/orm/test_relationships.py index dac2db6e1d..481deb81b1 100644 --- a/test/orm/test_relationships.py +++ b/test/orm/test_relationships.py @@ -455,7 +455,7 @@ class RelationTest4(_base.MappedTest): @testing.fails_on_everything_except('sqlite', 'mysql') @testing.resolve_artifact_names def test_nullPKsOK_BtoA(self): - # postgres cant handle a nullable PK column...? + # postgresql cant handle a nullable PK column...? tableC = Table('tablec', tableA.metadata, Column('id', Integer, primary_key=True), Column('a_id', Integer, ForeignKey('tableA.id'), diff --git a/test/orm/test_unitofwork.py b/test/orm/test_unitofwork.py index efd00c1f09..98d1dfbbf6 100644 --- a/test/orm/test_unitofwork.py +++ b/test/orm/test_unitofwork.py @@ -862,7 +862,7 @@ class DefaultTest(_base.MappedTest): @classmethod def define_tables(cls, metadata): - use_string_defaults = testing.against('postgres', 'oracle', 'sqlite', 'mssql') + use_string_defaults = testing.against('postgresql', 'oracle', 'sqlite', 'mssql') if use_string_defaults: hohotype = String(30) @@ -887,7 +887,7 @@ class DefaultTest(_base.MappedTest): Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(50))) - if testing.against('postgres', 'oracle'): + if testing.against('postgresql', 'oracle'): dt.append_column( Column('secondary_id', Integer, sa.Sequence('sec_id_seq'), unique=True)) @@ -2350,6 +2350,6 @@ class TransactionTest(_base.MappedTest): # todo: on 8.3 at least, the failed commit seems to close the cursor? # needs investigation. leaving in the DDL above now to help verify # that the new deferrable support on FK isn't involved in this issue. - if testing.against('postgres'): + if testing.against('postgresql'): t1.bind.engine.dispose() diff --git a/test/sql/test_constraints.py b/test/sql/test_constraints.py index bdf3e7d3f5..4ad52604d3 100644 --- a/test/sql/test_constraints.py +++ b/test/sql/test_constraints.py @@ -6,7 +6,7 @@ from sqlalchemy.test import config, engines from sqlalchemy.engine import ddl from sqlalchemy.test.testing import eq_ from sqlalchemy.test.assertsql import AllOf, RegexSQL, ExactSQL, CompiledSQL -from sqlalchemy.dialects.postgres import base as postgres +from sqlalchemy.dialects.postgresql import base as postgresql class ConstraintTest(TestBase, AssertsExecutionResults, AssertsCompiledSQL): @@ -289,7 +289,7 @@ class ConstraintCompilationTest(TestBase, AssertsCompiledSQL): Column('b', Integer, ForeignKey('t.a', name='fk_tb')), # to ensure create ordering ... 
) - e = engines.mock_engine(dialect_name='postgres') + e = engines.mock_engine(dialect_name='postgresql') m.create_all(e) m.drop_all(e) diff --git a/test/sql/test_defaults.py b/test/sql/test_defaults.py index c778ce2487..82d33609a5 100644 --- a/test/sql/test_defaults.py +++ b/test/sql/test_defaults.py @@ -37,7 +37,7 @@ class DefaultTest(testing.TestBase): # since its a "branched" connection conn.close() - use_function_defaults = testing.against('postgres', 'mssql', 'maxdb') + use_function_defaults = testing.against('postgresql', 'mssql', 'maxdb') is_oracle = testing.against('oracle') # select "count(1)" returns different results on different DBs also @@ -356,18 +356,18 @@ class DefaultTest(testing.TestBase): l = l.fetchone() eq_(55, l['col3']) - @testing.fails_on_everything_except('postgres') + @testing.fails_on_everything_except('postgresql') def test_passive_override(self): """ - Primarily for postgres, tests that when we get a primary key column + Primarily for postgresql, tests that when we get a primary key column back from reflecting a table which has a default value on it, we pre-execute that DefaultClause upon insert, even though DefaultClause - says "let the database execute this", because in postgres we must have + says "let the database execute this", because in postgresql we must have all the primary key values in memory before insert; otherwise we can't locate the just inserted row. """ - # TODO: move this to dialect/postgres + # TODO: move this to dialect/postgresql try: meta = MetaData(testing.db) testing.db.execute(""" @@ -537,7 +537,7 @@ class AutoIncrementTest(_base.TablesTest): try: - # postgres + mysql strict will fail on first row, + # postgresql + mysql strict will fail on first row, # mysql in legacy mode fails on second row nonai.insert().execute(data='row 1') nonai.insert().execute(data='row 2') diff --git a/test/sql/test_functions.py b/test/sql/test_functions.py index 0659a2fa70..90da04f60f 100644 --- a/test/sql/test_functions.py +++ b/test/sql/test_functions.py @@ -50,7 +50,7 @@ class CompileTest(TestBase, AssertsCompiledSQL): for ret, dialect in [ ('CURRENT_TIMESTAMP', sqlite.dialect()), - ('now()', postgres.dialect()), + ('now()', postgresql.dialect()), ('now()', mysql.dialect()), ('CURRENT_TIMESTAMP', oracle.dialect()) ]: @@ -62,7 +62,7 @@ class CompileTest(TestBase, AssertsCompiledSQL): for ret, dialect in [ ('random()', sqlite.dialect()), - ('random()', postgres.dialect()), + ('random()', postgresql.dialect()), ('rand()', mysql.dialect()), ('random', oracle.dialect()) ]: @@ -260,7 +260,7 @@ class ExecuteTest(TestBase): finally: meta.drop_all() - @testing.fails_on_everything_except('postgres') + @testing.fails_on_everything_except('postgresql') def test_as_from(self): # TODO: shouldnt this work on oracle too ? 
x = testing.db.func.current_date().execute().scalar() diff --git a/test/sql/test_query.py b/test/sql/test_query.py index f679277049..9d3d785f95 100644 --- a/test/sql/test_query.py +++ b/test/sql/test_query.py @@ -287,7 +287,7 @@ class QueryTest(TestBase): eq_(select([users.c.user_id]).where(users.c.user_name.ilike('TWO')).execute().fetchall(), [(2, )]) - if testing.against('postgres'): + if testing.against('postgresql'): eq_(select([users.c.user_id]).where(users.c.user_name.like('one')).execute().fetchall(), [(1, )]) eq_(select([users.c.user_id]).where(users.c.user_name.like('TWO')).execute().fetchall(), []) @@ -674,7 +674,7 @@ class QueryTest(TestBase): class PercentSchemaNamesTest(TestBase): """tests using percent signs, spaces in table and column names. - Doesn't pass for mysql, postgres, but this is really a + Doesn't pass for mysql, postgresql, but this is really a SQLAlchemy bug - we should be escaping out %% signs for this operation the same way we do for text() and column labels. @@ -682,7 +682,7 @@ class PercentSchemaNamesTest(TestBase): @classmethod @testing.crashes('mysql', 'mysqldb calls name % (params)') - @testing.crashes('postgres', 'postgres calls name % (params)') + @testing.crashes('postgresql', 'postgresql calls name % (params)') def setup_class(cls): global percent_table, metadata metadata = MetaData(testing.db) @@ -695,12 +695,12 @@ class PercentSchemaNamesTest(TestBase): @classmethod @testing.crashes('mysql', 'mysqldb calls name % (params)') - @testing.crashes('postgres', 'postgres calls name % (params)') + @testing.crashes('postgresql', 'postgresql calls name % (params)') def teardown_class(cls): metadata.drop_all() @testing.crashes('mysql', 'mysqldb calls name % (params)') - @testing.crashes('postgres', 'postgres calls name % (params)') + @testing.crashes('postgresql', 'postgresql calls name % (params)') def test_roundtrip(self): percent_table.insert().execute( {'percent%':5, '%(oneofthese)s':7, 'spaces % more spaces':12}, diff --git a/test/sql/test_quote.py b/test/sql/test_quote.py index 64e097b85f..3198a07af4 100644 --- a/test/sql/test_quote.py +++ b/test/sql/test_quote.py @@ -129,7 +129,7 @@ class QuoteTest(TestBase, AssertsCompiledSQL): def testlabels(self): """test the quoting of labels. 
- if labels arent quoted, a query in postgres in particular will fail since it produces: + if labels arent quoted, a query in postgresql in particular will fail since it produces: SELECT LaLa.lowercase, LaLa."UPPERCASE", LaLa."MixedCase", LaLa."ASC" FROM (SELECT DISTINCT "WorstCase1".lowercase AS lowercase, "WorstCase1"."UPPERCASE" AS UPPERCASE, "WorstCase1"."MixedCase" AS MixedCase, "WorstCase1"."ASC" AS ASC \nFROM "WorstCase1") AS LaLa diff --git a/test/sql/test_select.py b/test/sql/test_select.py index c56e811614..a796f2699f 100644 --- a/test/sql/test_select.py +++ b/test/sql/test_select.py @@ -522,12 +522,12 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A (~table1.c.myid.like('somstr', escape='\\'), "mytable.myid NOT LIKE :myid_1 ESCAPE '\\'", None), (table1.c.myid.ilike('somstr', escape='\\'), "lower(mytable.myid) LIKE lower(:myid_1) ESCAPE '\\'", None), (~table1.c.myid.ilike('somstr', escape='\\'), "lower(mytable.myid) NOT LIKE lower(:myid_1) ESCAPE '\\'", None), - (table1.c.myid.ilike('somstr', escape='\\'), "mytable.myid ILIKE %(myid_1)s ESCAPE '\\'", postgres.PGDialect()), - (~table1.c.myid.ilike('somstr', escape='\\'), "mytable.myid NOT ILIKE %(myid_1)s ESCAPE '\\'", postgres.PGDialect()), + (table1.c.myid.ilike('somstr', escape='\\'), "mytable.myid ILIKE %(myid_1)s ESCAPE '\\'", postgresql.PGDialect()), + (~table1.c.myid.ilike('somstr', escape='\\'), "mytable.myid NOT ILIKE %(myid_1)s ESCAPE '\\'", postgresql.PGDialect()), (table1.c.name.ilike('%something%'), "lower(mytable.name) LIKE lower(:name_1)", None), - (table1.c.name.ilike('%something%'), "mytable.name ILIKE %(name_1)s", postgres.PGDialect()), + (table1.c.name.ilike('%something%'), "mytable.name ILIKE %(name_1)s", postgresql.PGDialect()), (~table1.c.name.ilike('%something%'), "lower(mytable.name) NOT LIKE lower(:name_1)", None), - (~table1.c.name.ilike('%something%'), "mytable.name NOT ILIKE %(name_1)s", postgres.PGDialect()), + (~table1.c.name.ilike('%something%'), "mytable.name NOT ILIKE %(name_1)s", postgresql.PGDialect()), ]: self.assert_compile(expr, check, dialect=dialect) @@ -536,7 +536,7 @@ sq.myothertable_othername AS sq_myothertable_othername FROM (" + sqstring + ") A (table1.c.myid.match('somstr'), "mytable.myid MATCH ?", sqlite.SQLiteDialect()), (table1.c.myid.match('somstr'), "MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)", mysql.dialect()), (table1.c.myid.match('somstr'), "CONTAINS (mytable.myid, :myid_1)", mssql.dialect()), - (table1.c.myid.match('somstr'), "mytable.myid @@ to_tsquery(%(myid_1)s)", postgres.dialect()), + (table1.c.myid.match('somstr'), "mytable.myid @@ to_tsquery(%(myid_1)s)", postgresql.dialect()), (table1.c.myid.match('somstr'), "CONTAINS (mytable.myid, :myid_1)", oracle.dialect()), ]: self.assert_compile(expr, check, dialect=dialect) @@ -751,7 +751,7 @@ WHERE mytable.myid = myothertable.otherid) AS t2view WHERE t2view.mytable_myid = params={}, ) - dialect = postgres.dialect() + dialect = postgresql.dialect() self.assert_compile( text("select * from foo where lala=:bar and hoho=:whee", bindparams=[bindparam('bar',4), bindparam('whee',7)]), "select * from foo where lala=%(bar)s and hoho=%(whee)s", @@ -1305,8 +1305,8 @@ UNION SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_2)") else: eq_(str(sel), "SELECT casttest.id, casttest.v1, casttest.v2, casttest.ts, CAST(casttest.v1 AS NUMERIC) AS anon_1 \nFROM casttest") - # first test with Postgres engine - check_results(postgres.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'TEXT', 
'VARCHAR(20)'], '%(param_1)s') + # first test with PostgreSQL engine + check_results(postgresql.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'TEXT', 'VARCHAR(20)'], '%(param_1)s') # then the Oracle engine check_results(oracle.dialect(), ['NUMERIC', 'NUMERIC(12, 9)', 'DATE', 'CLOB', 'VARCHAR(20)'], ':param_1') diff --git a/test/sql/test_types.py b/test/sql/test_types.py index 512ef2e7f9..0845c0ebf9 100644 --- a/test/sql/test_types.py +++ b/test/sql/test_types.py @@ -25,7 +25,7 @@ class AdaptTest(TestBase): for dialect in [ oracle.dialect(), mysql.dialect(), - postgres.dialect(), + postgresql.dialect(), sqlite.dialect(), sybase.dialect(), informix.dialect(), @@ -555,7 +555,7 @@ class DateTest(TestBase, AssertsExecutionResults): Column('user_date', Date), Column('user_time', Time)] - if testing.against('sqlite', 'postgres'): + if testing.against('sqlite', 'postgresql'): insert_data.append( (11, 'historic', datetime.datetime(1850, 11, 10, 11, 52, 35, datetime_micro), diff --git a/test/sql/test_unicode.py b/test/sql/test_unicode.py index bb3e066f25..fe9b88e3ba 100644 --- a/test/sql/test_unicode.py +++ b/test/sql/test_unicode.py @@ -130,7 +130,7 @@ class EscapesDefaultsTest(testing.TestBase): assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), unicode) # now execute, run the sequence. it should run in u"Special_col.nextid" or similar as - # a unicode object; cx_oracle asserts that this is None or a String (postgres lets it pass thru). + # a unicode object; cx_oracle asserts that this is None or a String (postgresql lets it pass thru). # ensure that base.DefaultRunner is encoding. t1.insert().execute(data='foo') finally: -- 2.47.3
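
For illustration only (not part of the commit): a minimal sketch of the renamed dialect name as
the updated tests above exercise it.  The URL reuses the placeholder credentials from the test
suite, it assumes a PostgreSQL DBAPI such as psycopg2 is importable, and nothing actually
connects until the engine is first used.

    from sqlalchemy import create_engine
    from sqlalchemy.engine import url

    # "postgresql://..." is the dialect name used throughout the updated tests;
    # create_engine() resolves the dialect here but defers connecting.
    e = create_engine('postgresql://scott:tiger@somehost/test')
    assert e.dialect.name == 'postgresql'
    assert e.url == url.make_url('postgresql://scott:tiger@somehost/test')

The same 'postgresql' name applies to file-based configuration consumed by engine_from_config(),
e.g. sqlalchemy.url=postgresql://scott:tiger@somehost/test, as shown in the test_parseconnect.py
hunks above.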