"""
# PART I - Imports/Configuration
-from __future__ import print_function
import os
import re
"""
# PART I - Imports/Configuration
-
-from __future__ import print_function
-
import os
import re
from xml.etree import ElementTree
can be used with many common vertical schemas as-is or with minor adaptations.
"""
-from __future__ import unicode_literals
class ProxiedDictMixin:
# the MIT License: https://www.opensource.org/licenses/mit-license.php
import re
+from urllib.parse import unquote_plus
from . import Connector
from .. import util
connect_args[param] = util.asbool(keys.pop(param))
if "odbc_connect" in keys:
- connectors = [util.unquote_plus(keys.pop("odbc_connect"))]
+ connectors = [unquote_plus(keys.pop("odbc_connect"))]
else:
def check_quote(token):
from ...types import SMALLINT
from ...types import TEXT
from ...types import VARCHAR
-from ...util import compat
from ...util import update_wrapper
from ...util.langhelpers import public_factory
def process(value):
if isinstance(value, datetime.datetime):
return value.date()
- elif isinstance(value, util.string_types):
+ elif isinstance(value, str):
m = self._reg.match(value)
if not m:
raise ValueError(
def process(value):
if isinstance(value, datetime.datetime):
return value.time()
- elif isinstance(value, util.string_types):
+ elif isinstance(value, str):
m = self._reg.match(value)
if not m:
raise ValueError(
# handle other included columns
if index.dialect_options["mssql"]["include"]:
inclusions = [
- index.table.c[col]
- if isinstance(col, util.string_types)
- else col
+ index.table.c[col] if isinstance(col, str) else col
for col in index.dialect_options["mssql"]["include"]
]
cdict["identity"] = {}
else:
if isinstance(coltype, sqltypes.BigInteger):
- start = compat.long_type(identity_start)
- increment = compat.long_type(identity_increment)
+ start = int(identity_start)
+ increment = int(identity_increment)
elif isinstance(coltype, sqltypes.Integer):
start = int(identity_start)
increment = int(identity_increment)
def process(value):
if value is None:
return None
- elif isinstance(value, util.string_types):
+ elif isinstance(value, str):
# if a string was passed directly, allow it through
return value
elif not value.tzinfo or (not self.timezone and not self.has_tz):
tup[4],
tup[5],
tup[6] // 1000,
- util.timezone(
+ datetime.timezone(
datetime.timedelta(hours=tup[7], minutes=tup[8])
),
)
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from . import aiomysql # noqa
+from . import asyncmy # noqa
from . import base # noqa
from . import cymysql # noqa
from . import mariadbconnector # noqa
from .expression import match
from ...util import compat
-if compat.py3k:
- from . import aiomysql # noqa
- from . import asyncmy # noqa
-
# default dialect
base.dialect = dialect = mysqldb.dialect
""" # noqa
+from contextlib import asynccontextmanager
+
from .pymysql import MySQLDialect_pymysql
from ... import pool
from ... import util
from ...engine import AdaptedConnection
-from ...util.concurrency import asynccontextmanager
from ...util.concurrency import asyncio
from ...util.concurrency import await_fallback
from ...util.concurrency import await_only
keywords at the start of a SELECT.
"""
- if isinstance(select._distinct, util.string_types):
+ if isinstance(select._distinct, str):
util.warn_deprecated(
"Sending string values for 'distinct' is deprecated in the "
"MySQL dialect and will be removed in a future release. "
raise NotImplementedError()
val = row[0]
cursor.close()
- if util.py3k and isinstance(val, bytes):
+ if isinstance(val, bytes):
val = val.decode()
return val.upper().replace("-", " ")
cursor.execute("SELECT VERSION()")
val = cursor.fetchone()[0]
cursor.close()
- if util.py3k and isinstance(val, bytes):
+ if isinstance(val, bytes):
val = val.decode()
return self._parse_server_version(val)
sql.bindparam("table_name", type_=Unicode),
),
{
- "table_schema": util.text_type(schema),
- "table_name": util.text_type(table_name),
+ "table_schema": str(schema),
+ "table_name": str(table_name),
},
)
return bool(rs.scalar())
"TABLE_SCHEMA=:schema_name"
),
dict(
- name=util.text_type(sequence_name),
- schema_name=util.text_type(schema),
+ name=str(sequence_name),
+ schema_name=str(schema),
),
)
return cursor.first() is not None
if isinstance(item, _array):
item = item.tostring()
- if self.charset and isinstance(item, util.binary_type):
+ if self.charset and isinstance(item, bytes):
return item.decode(self.charset)
else:
return item
item = getattr(self.rowproxy, attr)
if isinstance(item, _array):
item = item.tostring()
- if self.charset and isinstance(item, util.binary_type):
+ if self.charset and isinstance(item, bytes):
return item.decode(self.charset)
else:
return item
def process(value):
if value is not None:
v = 0
- for i in util.iterbytes(value):
+ for i in iter(value):
v = v << 8 | i
return v
return value
super_convert = super(SET, self).result_processor(dialect, coltype)
def process(value):
- if isinstance(value, util.string_types):
+ if isinstance(value, str):
# MySQLdb returns a string, let's parse
if super_convert:
value = super_convert(value)
def process(value):
if value is None:
return None
- elif isinstance(value, util.int_types + util.string_types):
+ elif isinstance(value, (int, str)):
if super_convert:
return super_convert(value)
else:
def process(value):
# accept strings and int (actually bitflag) values directly
- if value is not None and not isinstance(
- value, util.int_types + util.string_types
- ):
+ if value is not None and not isinstance(value, (int, str)):
value = ",".join(value)
if super_convert:
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
-
-from __future__ import absolute_import
-
from ... import types as sqltypes
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
- if self.dialect._mysqlconnector_double_percents:
- return (
- self.process(binary.left, **kw)
- + " %% "
- + self.process(binary.right, **kw)
- )
- else:
- return (
- self.process(binary.left, **kw)
- + " % "
- + self.process(binary.right, **kw)
- )
-
- def post_process_text(self, text):
- if self.dialect._mysqlconnector_double_percents:
- return text.replace("%", "%%")
- else:
- return text
-
- def escape_literal_column(self, text):
- if self.dialect._mysqlconnector_double_percents:
- return text.replace("%", "%%")
- else:
- return text
+ return (
+ self.process(binary.left, **kw)
+ + " % "
+ + self.process(binary.right, **kw)
+ )
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
@property
def _double_percents(self):
- return self.dialect._mysqlconnector_double_percents
+ return False
@_double_percents.setter
def _double_percents(self, value):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
- if self.dialect._mysqlconnector_double_percents:
- return value.replace("%", "%%")
- else:
- return value
+ return value
class _myconnpyBIT(BIT):
if m:
return tuple(int(x) for x in m.group(1, 2, 3) if x is not None)
- @util.memoized_property
- def _mysqlconnector_double_percents(self):
- return not util.py3k and self._mysqlconnector_version_info < (2, 0)
-
def _detect_charset(self, connection):
return connection.connection.charset
from .mysqldb import MySQLDialect_mysqldb
from ...util import langhelpers
-from ...util import py3k
class MySQLDialect_pymysql(MySQLDialect_mysqldb):
else:
return False
- if py3k:
-
- def _extract_error_code(self, exception):
- if isinstance(exception.args[0], Exception):
- exception = exception.args[0]
- return exception.args[0]
+ def _extract_error_code(self, exception):
+ if isinstance(exception.args[0], Exception):
+ exception = exception.args[0]
+ return exception.args[0]
dialect = MySQLDialect_pymysql
from ...types import NVARCHAR
from ...types import TIMESTAMP
from ...types import VARCHAR
-from ...util import compat
RESERVED_WORDS = set(
"SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN "
return (
lc_value in self.reserved_words
or value[0] in self.illegal_initial_characters
- or not self.legal_characters.match(util.text_type(value))
+ or not self.legal_characters.match(str(value))
)
def format_savepoint(self, savepoint):
value = value.strip()
if "START WITH" in option:
- identity["start"] = compat.long_type(value)
+ identity["start"] = int(value)
elif "INCREMENT BY" in option:
- identity["increment"] = compat.long_type(value)
+ identity["increment"] = int(value)
elif "MAX_VALUE" in option:
- identity["maxvalue"] = compat.long_type(value)
+ identity["maxvalue"] = int(value)
elif "MIN_VALUE" in option:
- identity["minvalue"] = compat.long_type(value)
+ identity["minvalue"] = int(value)
elif "CYCLE_FLAG" in option:
identity["cycle"] = value == "Y"
elif "CACHE_SIZE" in option:
- identity["cache"] = compat.long_type(value)
+ identity["cache"] = int(value)
elif "ORDER_FLAG" in option:
identity["order"] = value == "Y"
return identity
as better integration of outputtypehandlers.
""" # noqa
-
-from __future__ import absolute_import
-
import decimal
import random
import re
and default_type is not cx_Oracle.NCLOB
):
return cursor.var(
- util.text_type,
+ str,
size,
cursor.arraysize,
**dialect._cursor_var_unicode_kwargs
opts.setdefault("threaded", self._cx_oracle_threaded)
def convert_cx_oracle_constant(value):
- if isinstance(value, util.string_types):
+ if isinstance(value, str):
try:
int_val = int(value)
except ValueError:
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from . import asyncpg # noqa
from . import base
from . import pg8000 # noqa
from . import psycopg2 # noqa
from .ranges import TSTZRANGE
from ...util import compat
-if compat.py3k:
- from . import asyncpg # noqa
-
base.dialect = dialect = psycopg2.dialect
def process(value):
if value is None:
return value
- # isinstance(value, util.string_types) is required to handle
+ # isinstance(value, str) is required to handle
# the case where a TypeDecorator for and Array of Enum is
# used like was required in sa < 1.3.17
return super_rp(
handle_raw_string(value)
- if isinstance(value, util.string_types)
+ if isinstance(value, str)
else value
)
""" # noqa
import collections
+import collections.abc as collections_abc
import decimal
import json as _py_json
import re
class AsyncpgJSONPathType(json.JSONPathType):
def bind_processor(self, dialect):
def process(value):
- assert isinstance(value, util.collections_abc.Sequence)
- tokens = [util.text_type(elem) for elem in value]
+ assert isinstance(value, collections_abc.Sequence)
+ tokens = [str(elem) for elem in value]
return tokens
return process
def coerce_compared_value(self, op, value):
"""See :meth:`.TypeEngine.coerce_compared_value` for a description."""
- if isinstance(value, util.string_types):
+ if isinstance(value, str):
return self
else:
return super(UUID, self).coerce_compared_value(op, value)
def process(value):
if value is not None:
- value = util.text_type(value)
+ value = str(value)
return value
return process
target_text = "(%s)" % ", ".join(
(
self.preparer.quote(c)
- if isinstance(c, util.string_types)
+ if isinstance(c, str)
else self.process(c, include_table=False, use_schema=False)
)
for c in clause.inferred_target_elements
for k, v in set_parameters.items():
key_text = (
self.preparer.quote(k)
- if isinstance(k, util.string_types)
+ if isinstance(k, str)
else self.process(k, use_schema=False)
)
value_text = self.process(
includeclause = index.dialect_options["postgresql"]["include"]
if includeclause:
inclusions = [
- index.table.c[col]
- if isinstance(col, util.string_types)
- else col
+ index.table.c[col] if isinstance(col, str) else col
for col in includeclause
]
text += " INCLUDE (%s)" % ", ".join(
sql.text(query).bindparams(
sql.bindparam(
"schema",
- util.text_type(schema.lower()),
+ str(schema.lower()),
type_=sqltypes.Unicode,
)
)
).bindparams(
sql.bindparam(
"name",
- util.text_type(table_name),
+ str(table_name),
type_=sqltypes.Unicode,
)
)
).bindparams(
sql.bindparam(
"name",
- util.text_type(table_name),
+ str(table_name),
type_=sqltypes.Unicode,
),
sql.bindparam(
"schema",
- util.text_type(schema),
+ str(schema),
type_=sqltypes.Unicode,
),
)
).bindparams(
sql.bindparam(
"name",
- util.text_type(sequence_name),
+ str(sequence_name),
type_=sqltypes.Unicode,
),
sql.bindparam(
"schema",
- util.text_type(schema),
+ str(schema),
type_=sqltypes.Unicode,
),
)
"""
query = sql.text(query)
query = query.bindparams(
- sql.bindparam(
- "typname", util.text_type(type_name), type_=sqltypes.Unicode
- )
+ sql.bindparam("typname", str(type_name), type_=sqltypes.Unicode)
)
if schema is not None:
query = query.bindparams(
- sql.bindparam(
- "nspname", util.text_type(schema), type_=sqltypes.Unicode
- )
+ sql.bindparam("nspname", str(schema), type_=sqltypes.Unicode)
)
cursor = connection.execute(query)
return bool(cursor.scalar())
)
# Since we're binding to unicode, table_name and schema_name must be
# unicode.
- table_name = util.text_type(table_name)
+ table_name = str(table_name)
if schema is not None:
- schema = util.text_type(schema)
+ schema = str(schema)
s = sql.text(query).bindparams(table_name=sqltypes.Unicode)
s = s.columns(oid=sqltypes.Integer)
if schema:
).bindparams(
sql.bindparam(
"schema",
- util.text_type(schema),
+ str(schema),
type_=sqltypes.Unicode,
),
)
def __init__(self, constraint=None, index_elements=None, index_where=None):
if constraint is not None:
- if not isinstance(constraint, util.string_types) and isinstance(
+ if not isinstance(constraint, str) and isinstance(
constraint,
(schema.Index, schema.Constraint, ext.ExcludeConstraint),
):
"'constraint' and 'index_elements' are mutually exclusive"
)
- if isinstance(constraint, util.string_types):
+ if isinstance(constraint, str):
self.constraint_target = constraint
self.inferred_target_elements = None
self.inferred_target_whereclause = None
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from itertools import zip_longest
from .array import ARRAY
-from ... import util
from ...sql import coercions
from ...sql import elements
from ...sql import expression
name,
operator,
)
- for (expr, name, operator), colexpr in util.zip_longest(
+ for (expr, name, operator), colexpr in zip_longest(
self._render_exprs, self.columns
)
]
from .array import ARRAY
from ... import types as sqltypes
-from ... import util
from ...sql import functions as sqlfunc
from ...sql import operators
def esc(s, position):
if position == "value" and s is None:
return "NULL"
- elif isinstance(s, util.string_types):
+ elif isinstance(s, str):
return '"%s"' % s.replace("\\", "\\\\").replace('"', r"\"")
else:
raise ValueError(
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
-from __future__ import absolute_import
+import collections.abc as collections_abc
from ... import types as sqltypes
-from ... import util
from ...sql import operators
super_proc = self.string_bind_processor(dialect)
def process(value):
- assert isinstance(value, util.collections_abc.Sequence)
- tokens = [util.text_type(elem) for elem in value]
+ assert isinstance(value, collections_abc.Sequence)
+ tokens = [str(elem) for elem in value]
value = "{%s}" % (", ".join(tokens))
if super_proc:
value = super_proc(value)
super_proc = self.string_literal_processor(dialect)
def process(value):
- assert isinstance(value, util.collections_abc.Sequence)
- tokens = [util.text_type(elem) for elem in value]
+ assert isinstance(value, collections_abc.Sequence)
+ tokens = [str(elem) for elem in value]
value = "{%s}" % (", ".join(tokens))
if super_proc:
value = super_proc(value)
fns = []
def on_connect(conn):
- conn.py_types[quoted_name] = conn.py_types[util.text_type]
+ conn.py_types[quoted_name] = conn.py_types[str]
fns.append(on_connect)
which may be more performant.
""" # noqa
-from __future__ import absolute_import
-
+import collections.abc as collections_abc
import decimal
import logging
import re
from ... import types as sqltypes
from ... import util
from ...engine import cursor as _cursor
-from ...util import collections_abc
logger = logging.getLogger("sqlalchemy.dialects.postgresql")
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from . import aiosqlite # noqa
from . import base # noqa
from . import pysqlcipher # noqa
from . import pysqlite # noqa
from .base import VARCHAR
from .dml import Insert
from .dml import insert
-from ...util import compat
-
-if compat.py3k:
- from . import aiosqlite # noqa
# default dialect
base.dialect = dialect = pysqlite.dialect
target_text = "(%s)" % ", ".join(
(
self.preparer.quote(c)
- if isinstance(c, util.string_types)
+ if isinstance(c, str)
else self.process(c, include_table=False, use_schema=False)
)
for c in clause.inferred_target_elements
for k, v in set_parameters.items():
key_text = (
self.preparer.quote(k)
- if isinstance(k, util.string_types)
+ if isinstance(k, str)
else self.process(k, use_schema=False)
)
value_text = self.process(
coltype = self._resolve_type_affinity(type_)
if default is not None:
- default = util.text_type(default)
+ default = str(default)
colspec = {
"name": name,
""" # noqa
-from __future__ import absolute_import
-
from .pysqlite import SQLiteDialect_pysqlite
from ... import pool
-from ... import util
class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
@classmethod
def dbapi(cls):
- if util.py3k:
- try:
- import sqlcipher3 as sqlcipher
- except ImportError:
- pass
- else:
- return sqlcipher
-
- from pysqlcipher3 import dbapi2 as sqlcipher
-
+ try:
+ import sqlcipher3 as sqlcipher
+ except ImportError:
+ pass
else:
- from pysqlcipher import dbapi2 as sqlcipher
+ return sqlcipher
+
+ from pysqlcipher3 import dbapi2 as sqlcipher
return sqlcipher
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
-from __future__ import with_statement
-
import contextlib
import sys
raise
except BaseException as e:
self._handle_dbapi_exception(
- e, util.text_type(statement), parameters, None, None
+ e, str(statement), parameters, None, None
)
return # not reached
else:
yield connection
- @util.contextmanager
+ @contextlib.contextmanager
def begin(self):
"""Return a context manager delivering a :class:`_engine.Connection`
with a :class:`.Transaction` established.
import abc
-from ..util import ABC
-
-class ConnectionCharacteristic(ABC):
+class ConnectionCharacteristic(abc.ABC):
"""An abstract base for an object that can set, get and reset a
per-connection characteristic, typically one that gets reset when the
connection is returned to the connection pool.
"_keymap": {
key: (rec[MD_INDEX], rec[MD_RESULT_MAP_INDEX], _UNPICKLED, key)
for key, rec in self._keymap.items()
- if isinstance(key, util.string_types + util.int_types)
+ if isinstance(key, (str, int))
},
"_keys": self._keys,
"_translated_indexes": self._translated_indexes,
import functools
import random
import re
+from time import perf_counter
import weakref
from . import characteristics
self.execution_options = execution_options
- self.unicode_statement = util.text_type(compiled)
+ self.unicode_statement = str(compiled)
if compiled.schema_translate_map:
schema_translate_map = self.execution_options.get(
"schema_translate_map", {}
if self.compiled is None:
return "raw sql"
- now = util.perf_counter()
+ now = perf_counter()
ch = self.cache_hit
for key in compiled_params
)
return self._execute_scalar(
- util.text_type(compiled), type_, parameters=parameters
+ str(compiled), type_, parameters=parameters
)
current_parameters = None
return fn(self, con, *args, **kw)
key = (
fn.__name__,
- tuple(a for a in args if isinstance(a, util.string_types)),
+ tuple(a for a in args if isinstance(a, str)),
tuple((k, v) for k, v in kw.items() if k != "info_cache"),
)
ret = info_cache.get(key)
"""Define generic result set constructs."""
+import collections.abc as collections_abc
import functools
import itertools
import operator
from ..sql.base import _generative
from ..sql.base import HasMemoized
from ..sql.base import InPlaceGenerative
-from ..util import collections_abc
if _baserow_usecext:
"""Define row constructs including :class:`.Row`."""
+import collections.abc as collections_abc
import operator
from .. import util
from ..sql import util as sql_util
-from ..util.compat import collections_abc
MD_INDEX = 0 # integer index in cursor.description
be used directly and is also accepted directly by ``create_engine()``.
"""
+import collections.abc as collections_abc
import re
+from urllib.parse import parse_qsl
+from urllib.parse import quote_plus
+from urllib.parse import unquote
from .interfaces import Dialect
from .. import exc
from .. import util
from ..dialects import plugins
from ..dialects import registry
-from ..util import collections_abc
-from ..util import compat
class URL(
@classmethod
def _assert_str(cls, v, paramname):
- if not isinstance(v, compat.string_types):
+ if not isinstance(v, str):
raise TypeError("%s must be a string" % paramname)
return v
)
def _assert_str(v):
- if not isinstance(v, compat.string_types):
+ if not isinstance(v, str):
raise TypeError("Query dictionary keys must be strings")
return v
:meth:`_engine.URL.update_query_dict`
""" # noqa: E501
- return self.update_query_pairs(
- util.parse_qsl(query_string), append=append
- )
+ return self.update_query_pairs(parse_qsl(query_string), append=append)
def update_query_pairs(self, key_value_pairs, append=False):
"""Return a new :class:`_engine.URL` object with the
keys = list(self.query)
keys.sort()
s += "?" + "&".join(
- "%s=%s" % (util.quote_plus(k), util.quote_plus(element))
+ "%s=%s" % (quote_plus(k), quote_plus(element))
for k in keys
for element in util.to_list(self.query[k])
)
existing URL object is passed, just returns the object.
"""
- if isinstance(name_or_url, util.string_types):
+ if isinstance(name_or_url, str):
return _parse_rfc1738_args(name_or_url)
else:
return name_or_url
if components["query"] is not None:
query = {}
- for key, value in util.parse_qsl(components["query"]):
+ for key, value in parse_qsl(components["query"]):
if key in query:
query[key] = util.to_list(query[key])
query[key].append(value)
return re.sub(r"[:@/]", lambda m: "%%%X" % ord(m.group(0)), text)
-def _rfc_1738_unquote(text):
- return util.unquote(text)
+_rfc_1738_unquote = unquote
def _parse_keyvalue_args(name):
m = re.match(r"(\w+)://(.*)", name)
if m is not None:
(name, args) = m.group(1, 2)
- opts = dict(util.parse_qsl(args))
+ opts = dict(parse_qsl(args))
return URL(name, *opts)
else:
return None
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+import collections.abc as collections_abc
+
from .. import exc
from .. import util
-from ..util import collections_abc
from ..util import immutabledict
"""Public API functions for the event system.
"""
-from __future__ import absolute_import
-
from .base import _registrars
from .registry import _EventKey
from .. import exc
``Pool`` vs. ``QueuePool``) are all implemented here.
"""
-
-from __future__ import absolute_import
-from __future__ import with_statement
-
import collections
from itertools import chain
import weakref
instances of ``_Dispatch``.
"""
-from __future__ import absolute_import
-
import weakref
from .attr import _ClsLevelDispatch
del _registrars[k]
-class Events(util.with_metaclass(_EventMeta, object)):
+class Events(metaclass=_EventMeta):
"""Define event listening functions for a particular target type."""
@staticmethod
an equivalent :class:`._EventKey`.
"""
-
-from __future__ import absolute_import
-
import collections
import types
import weakref
class SQLAlchemyError(HasDescriptionCode, Exception):
"""Generic error class."""
- def _message(self, as_unicode=compat.py3k):
+ def _message(self):
# rules:
#
- # 1. under py2k, for __str__ return single string arg as it was
- # given without converting to unicode. for __unicode__
- # do a conversion but check that it's not unicode already just in
- # case
- #
- # 2. under py3k, single arg string will usually be a unicode
+ # 1. single arg string will usually be a unicode
# object, but since __str__() must return unicode, check for
# bytestring just in case
#
- # 3. for multiple self.args, this is not a case in current
+ # 2. for multiple self.args, this is not a case in current
# SQLAlchemy though this is happening in at least one known external
# library, call str() which does a repr().
#
if len(self.args) == 1:
text = self.args[0]
- if as_unicode and isinstance(text, compat.binary_types):
+ if isinstance(text, bytes):
text = compat.decode_backslashreplace(text, "utf-8")
# This is for when the argument is not a string of any sort.
# Otherwise, converting this exception to string would fail for
# non-string arguments.
- elif compat.py3k or not as_unicode:
- text = str(text)
else:
- text = compat.text_type(text)
+ text = str(text)
return text
else:
# a repr() of the tuple
return str(self.args)
- def _sql_message(self, as_unicode):
- message = self._message(as_unicode)
+ def _sql_message(self):
+ message = self._message()
if self.code:
message = "%s %s" % (message, self._code_str())
return message
def __str__(self):
- return self._sql_message(compat.py3k)
-
- def __unicode__(self):
- return self._sql_message(as_unicode=True)
+ return self._sql_message()
class ArgumentError(SQLAlchemyError):
)
@_preloaded.preload_module("sqlalchemy.sql.util")
- def _sql_message(self, as_unicode):
+ def _sql_message(self):
util = _preloaded.preloaded.sql_util
- details = [self._message(as_unicode=as_unicode)]
+ details = [self._message()]
if self.statement:
- if not as_unicode and not compat.py3k:
- stmt_detail = "[SQL: %s]" % compat.safe_bytestring(
- self.statement
- )
- else:
- stmt_detail = "[SQL: %s]" % self.statement
+ stmt_detail = "[SQL: %s]" % self.statement
details.append(stmt_detail)
if self.params:
if self.hide_parameters:
col.append(item)
def count(self, value):
- return sum(
- [
- 1
- for _ in util.itertools_filter(
- lambda v: v == value, iter(self)
- )
- ]
- )
+ count = 0
+ for v in self:
+ if v == value:
+ count += 1
+ return count
def extend(self, values):
for v in values:
await self.close()
def _maker_context_manager(self):
- # no @contextlib.asynccontextmanager until python3.7, gr
+ # TODO: can this use asynccontextmanager ??
return _AsyncSessionContextManager(self)
"""
+import collections.abc as collections_abc
import logging
from .. import exc as sa_exc
from ..sql import func
from ..sql import literal_column
from ..sql import util as sql_util
-from ..util import collections_abc
log = logging.getLogger(__name__)
WHERE CAST(person.data ->> %(data_1)s AS INTEGER) < %(param_1)s
""" # noqa
-from __future__ import absolute_import
-
from .. import inspect
from .. import util
from ..ext.hybrid import hybrid_property
"""
+from io import BytesIO
+import pickle
import re
from .. import Column
from ..orm.session import Session
from ..util import b64decode
from ..util import b64encode
-from ..util import byte_buffer
-from ..util import pickle
-from ..util import text_type
__all__ = ["Serializer", "Deserializer", "dumps", "loads"]
pickle.dumps(obj._annotations["parententity"].class_)
)
else:
- id_ = "table:" + text_type(obj.key)
+ id_ = f"table:{obj.key}"
elif isinstance(obj, Column) and isinstance(obj.table, Table):
- id_ = (
- "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
- )
+ id_ = f"column:{obj.table.key}:{obj.key}"
elif isinstance(obj, Session):
id_ = "session:"
elif isinstance(obj, Engine):
return None
def persistent_load(id_):
- m = our_ids.match(text_type(id_))
+ m = our_ids.match(str(id_))
if not m:
return None
else:
def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
    """Serialize *obj* using :class:`.Serializer`; return the bytes payload."""
    buffer = BytesIO()
    Serializer(buffer, protocol).dump(obj)
    return buffer.getvalue()
def loads(data, metadata=None, scoped_session=None, engine=None):
    """Deserialize *data*, resolving references via the given registry objects."""
    unpickler = Deserializer(BytesIO(data), metadata, scoped_session, engine)
    return unpickler.load()
"""
import operator
+from typing import Generic
+from typing import TypeVar
from . import collections
from . import exc as orm_exc
return getattr(entity, key)
-if util.py3k:
- from typing import TypeVar, Generic
-
- _T = TypeVar("_T")
- _Generic_T = Generic[_T]
-else:
- _Generic_T = type("_Generic_T", (), {})
+_T = TypeVar("_T")
+_Generic_T = Generic[_T]
class Mapped(QueryableAttribute, _Generic_T):
if hasattr(iterable, "_sa_iterator"):
iterable = iterable._sa_iterator()
elif setting_type is dict:
- if util.py3k:
- iterable = iterable.values()
- else:
- iterable = getattr(
- iterable, "itervalues", iterable.values
- )()
+ iterable = iterable.values()
else:
iterable = iter(iterable)
new_values = list(iterable)
_set_decorators(),
),
# decorators are required for dicts and object collections.
- dict: ({"iterator": "values"}, _dict_decorators())
- if util.py3k
- else ({"iterator": "itervalues"}, _dict_decorators()),
+ dict: ({"iterator": "values"}, _dict_decorators()),
}
aliased_generation = flags["aliased_generation"]
# do a quick inspect to accommodate for a lambda
- if right is not None and not isinstance(right, util.string_types):
+ if right is not None and not isinstance(right, str):
right = inspect(right)
- if onclause is not None and not isinstance(
- onclause, util.string_types
- ):
+ if onclause is not None and not isinstance(onclause, str):
onclause = inspect(onclause)
# legacy vvvvvvvvvvvvvvvvvvvvvvvvvv
# legacy ^^^^^^^^^^^^^^^^^^^^^^^^^^^
if (
- isinstance(
- right, (interfaces.PropComparator, util.string_types)
- )
+ isinstance(right, (interfaces.PropComparator, str))
and onclause is None
):
onclause = right
else:
of_type = None
- if isinstance(onclause, util.string_types):
+ if isinstance(onclause, str):
# string given, e.g. query(Foo).join("bar").
# we look to the left entity or what we last joined
# towards
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Public API functions and helpers for declarative."""
-from __future__ import absolute_import
-
import itertools
import re
import weakref
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Internal implementation for declarative."""
-from __future__ import absolute_import
-
import collections
import weakref
"""
-from __future__ import absolute_import
-
import collections
from . import exc as orm_exc
as well as some of the attribute loading strategies.
"""
-from __future__ import absolute_import
from . import attributes
from . import exc as orm_exc
available in :class:`~sqlalchemy.orm.`.
"""
-from __future__ import absolute_import
from collections import deque
+from functools import reduce
from itertools import chain
import sys
import weakref
if with_polymorphic == "*":
self.with_polymorphic = ("*", None)
elif isinstance(with_polymorphic, (tuple, list)):
- if isinstance(
- with_polymorphic[0], util.string_types + (tuple, list)
- ):
+ if isinstance(with_polymorphic[0], (str, tuple, list)):
self.with_polymorphic = with_polymorphic
else:
self.with_polymorphic = (with_polymorphic, None)
if self.polymorphic_on is not None:
setter = True
- if isinstance(self.polymorphic_on, util.string_types):
+ if isinstance(self.polymorphic_on, str):
# polymorphic_on specified as a string - link
# it to mapped ColumnProperty
try:
cols = set(table.c)
for m in self.iterate_to_root():
if m._inherits_equated_pairs and cols.intersection(
- util.reduce(
+ reduce(
set.union,
[l.proxy_set for l, r in m._inherits_equated_pairs],
)
"""
+from functools import reduce
from itertools import chain
import logging
@classmethod
def coerce(cls, raw):
- return util.reduce(lambda prev, next: prev[next], raw, cls.root)
+ return reduce(lambda prev, next: prev[next], raw, cls.root)
def token(self, token):
if token.endswith(":" + _WILDCARD_TOKEN):
raise exc.ArgumentError("invalid token: %s" % token)
def __add__(self, other):
- return util.reduce(lambda prev, next: prev[next], other.path, self)
+ return reduce(lambda prev, next: prev[next], other.path, self)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.path)
in unitofwork.py.
"""
-
from itertools import chain
from itertools import groupby
+from itertools import zip_longest
import operator
from . import attributes
last_inserted_params,
inserted_primary_key,
returned_defaults,
- ) in util.zip_longest(
+ ) in zip_longest(
records,
c.context.compiled_parameters,
c.inserted_primary_key_rows,
for k, v in kv_iterator:
k = coercions.expect(roles.DMLColumnRole, k)
- if isinstance(k, util.string_types):
+ if isinstance(k, str):
desc = _entity_namespace_key(mapper, k, default=NO_VALUE)
if desc is NO_VALUE:
values.append(
mapped attributes.
"""
-from __future__ import absolute_import
from . import attributes
from .descriptor_props import CompositeProperty
database to return iterable result sets.
"""
+import collections.abc as collections_abc
import itertools
import operator
import types
from ..sql.selectable import SelectBase
from ..sql.selectable import SelectStatementGrouping
from ..sql.visitors import InternalTraversal
-from ..util import collections_abc
__all__ = ["Query", "QueryContext", "aliased"]
and `secondaryjoin` aspects of :func:`_orm.relationship`.
"""
-from __future__ import absolute_import
-
import collections
import re
import weakref
mapperlib = util.preloaded.orm_mapper
- if isinstance(self.argument, util.string_types):
+ if isinstance(self.argument, str):
argument = self._clsregistry_resolve_name(self.argument)()
elif callable(self.argument) and not isinstance(
):
attr_value = getattr(self, attr)
- if isinstance(attr_value, util.string_types):
+ if isinstance(attr_value, str):
setattr(
self,
attr,
if self.parent.non_primary:
return
if self.backref is not None and not self.back_populates:
- if isinstance(self.backref, util.string_types):
+ if isinstance(self.backref, str):
backref_key, kwargs = self.backref, {}
else:
backref_key, kwargs = self.backref
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Provides the Session class and related utilities."""
-
+import contextlib
import itertools
import sys
import weakref
def __exit__(self, type_, value, traceback):
self.close()
- @util.contextmanager
+ @contextlib.contextmanager
def _maker_context_manager(self):
with self:
with self.begin():
return loading.get_from_identity(self, mapper, key, passive)
@property
- @util.contextmanager
+ @contextlib.contextmanager
def no_autoflush(self):
"""Return a context manager that disables autoflush.
"""sqlalchemy.orm.interfaces.LoaderStrategy
implementations, and related MapperOptions."""
-from __future__ import absolute_import
import collections
import itertools
"refer to a mapped entity" % (path.prop,)
)
- if isinstance(attr, util.string_types):
+ if isinstance(attr, str):
default_token = attr.endswith(_DEFAULT_TOKEN)
attr_str_name = attr
i = -1
for i, (c_token, p_token) in enumerate(zip(to_chop, path.path)):
- if isinstance(c_token, util.string_types):
+ if isinstance(c_token, str):
# TODO: this is approximated from the _UnboundLoad
# version and probably has issues, not fully covered.
cloned.strategy = self.strategy
if self.path:
attr = self.path[-1]
- if isinstance(attr, util.string_types) and attr.endswith(
- _DEFAULT_TOKEN
- ):
+ if isinstance(attr, str) and attr.endswith(_DEFAULT_TOKEN):
attr = attr.split(":")[0] + ":" + _WILDCARD_TOKEN
cloned._generate_path(
parent.path + self.path[0:-1], attr, self.strategy, None
def _generate_path(self, path, attr, for_strategy, wildcard_key):
if (
wildcard_key
- and isinstance(attr, util.string_types)
+ and isinstance(attr, str)
and attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN)
):
if attr == _DEFAULT_TOKEN:
opt = _UnboundLoad()
def _split_key(key):
- if isinstance(key, util.string_types):
+ if isinstance(key, str):
# coerce fooload('*') into "default loader strategy"
if key == _WILDCARD_TOKEN:
return (_DEFAULT_TOKEN,)
for i, (c_token, (p_entity, p_prop)) in enumerate(
zip(to_chop, path.pairs())
):
- if isinstance(c_token, util.string_types):
+ if isinstance(c_token, str):
if i == 0 and c_token.endswith(":" + _DEFAULT_TOKEN):
return to_chop
elif (
# what entity we are referring towards.
token = start_path[0]
- if isinstance(token, util.string_types):
+ if isinstance(token, str):
entity = self._find_entity_basestring(entities, token, raiseerr)
elif isinstance(token, PropComparator):
prop = token.property
)
def __new__(cls, value_list):
- if isinstance(value_list, util.string_types) or value_list is None:
+ if isinstance(value_list, str) or value_list is None:
return cls.from_string(value_list)
values = set(value_list)
if values.difference(cls._allowed_cascades):
# then the "_joined_from_info" concept can go
left_orm_info = getattr(left, "_joined_from_info", left_info)
self._joined_from_info = right_info
- if isinstance(onclause, util.string_types):
+ if isinstance(onclause, str):
onclause = getattr(left_orm_info.entity, onclause)
# ####
.. versionadded:: 1.2
"""
- if isinstance(prop, util.string_types):
+ if isinstance(prop, str):
util.warn_deprecated_20(
"Using strings to indicate relationship names in the ORM "
"with_parent() function is deprecated and will be removed "
"""
+import collections.abc as collections_abc
+from functools import reduce
import itertools
+from itertools import zip_longest
import operator
import re
from ..util import HasMemoized
from ..util import hybridmethod
-
coercions = None
elements = None
type_api = None
)
-class _DialectArgView(util.collections_abc.MutableMapping):
+class _DialectArgView(collections_abc.MutableMapping):
"""A dictionary view of dialect-level arguments in the form
<dialectname>_<argument_name>.
)
-class _DialectArgDict(util.collections_abc.MutableMapping):
+class _DialectArgDict(collections_abc.MutableMapping):
"""A dictionary view of dialect-level arguments for a specific
dialect.
return o1
-class Options(util.with_metaclass(_MetaOptions)):
+class Options(metaclass=_MetaOptions):
"""A cacheable option dictionary with defaults."""
def __init__(self, **kw):
def __eq__(self, other):
# TODO: very inefficient. This is used only in test suites
# right now.
- for a, b in util.zip_longest(self._cache_attrs, other._cache_attrs):
+ for a, b in zip_longest(self._cache_attrs, other._cache_attrs):
if getattr(self, a) != getattr(other, b):
return False
return True
try:
return self._index[key]
except KeyError as err:
- if isinstance(key, util.int_types):
+ if isinstance(key, int):
util.raise_(IndexError(key), replace_context=err)
else:
raise
def __contains__(self, key):
if key not in self._index:
- if not isinstance(key, util.string_types):
+ if not isinstance(key, str):
raise exc.ArgumentError(
"__contains__ requires a string argument"
)
"""Compare this :class:`_expression.ColumnCollection` to another
based on the names of the keys"""
- for l, r in util.zip_longest(self, other):
+ for l, r in zip_longest(self, other):
if l is not r:
return False
else:
def contains_column(self, col):
"""Checks if a column object exists in this collection"""
if col not in self._colset:
- if isinstance(col, util.string_types):
+ if isinstance(col, str):
raise exc.ArgumentError(
"contains_column cannot be used with string arguments. "
"Use ``col_name in table.c`` instead."
# columns that have no reference to the target
# column (also occurs with CompoundSelect)
- col_distance = util.reduce(
+ col_distance = reduce(
operator.add,
[
sc._annotations.get("weight", 1)
if sc.shares_lineage(column)
],
)
- c_distance = util.reduce(
+ c_distance = reduce(
operator.add,
[
sc._annotations.get("weight", 1)
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+import collections.abc as collections_abc
import numbers
import re
from .. import exc
from .. import inspection
from .. import util
-from ..util import collections_abc
elements = None
column = None
resolved = expect(role, expr)
- if isinstance(resolved, util.string_types):
+ if isinstance(resolved, str):
strname = resolved = expr
else:
cols = []
def _implicit_coercions(
self, original_element, resolved, argname=None, **kw
):
- if isinstance(original_element, util.string_types):
+ if isinstance(original_element, str):
return original_element
else:
self._raise_for_expected(original_element, argname, resolved)
__slots__ = ()
def _literal_coercion(self, element, argname=None, **kw):
- if isinstance(element, util.string_types) and issubclass(
+ if isinstance(element, str) and issubclass(
elements.TextClause, self._role_class
):
_no_text_coercion(element, argname)
return _no_text_coercion(element, argname)
def _literal_coercion(self, element, argname=None, **kw):
- if isinstance(element, util.string_types):
+ if isinstance(element, str):
if self._coerce_star and element == "*":
return elements.ColumnClause("*", is_literal=True)
else:
def _literal_coercion(self, element, expr, operator, **kw):
if isinstance(element, collections_abc.Iterable) and not isinstance(
- element, util.string_types
+ element, str
):
non_literal_expressions = {}
element = list(element)
def _implicit_coercions(
self, original_element, resolved, argname=None, **kw
):
- if isinstance(original_element, util.string_types):
+ if isinstance(original_element, str):
return resolved
else:
self._raise_for_expected(original_element, argname, resolved)
def _post_coercion(self, resolved, original_element, argname=None, **kw):
if resolved is not original_element and not isinstance(
- original_element, util.string_types
+ original_element, str
):
# use same method as Connection uses; this will later raise
# ObjectNotExecutableError
:doc:`/ext/compiler`.
"""
-
import collections
+import collections.abc as collections_abc
import contextlib
import itertools
import operator
import re
+from time import perf_counter
from . import base
from . import coercions
self.string = self.preparer._render_schema_translates(
self.string, schema_translate_map
)
- self._gen_time = util.perf_counter()
+ self._gen_time = perf_counter()
def _execute_on_connection(
self, connection, distilled_params, execution_options
return self.construct_params()
-class TypeCompiler(util.with_metaclass(util.EnsureKWArgType, object)):
+class TypeCompiler(metaclass=util.EnsureKWArgType):
"""Produces DDL specification for TypeEngine objects."""
ensure_kwarg = r"visit_\w+"
elif typ_dialect_impl._is_tuple_type or (
typ_dialect_impl._isnull
- and isinstance(values[0], util.collections_abc.Sequence)
- and not isinstance(
- values[0], util.string_types + util.binary_types
- )
+ and isinstance(values[0], collections_abc.Sequence)
+ and not isinstance(values[0], (str, bytes))
):
replacement_expression = (
elif typ_dialect_impl._is_tuple_type or (
typ_dialect_impl._isnull
- and isinstance(values[0], util.collections_abc.Sequence)
- and not isinstance(
- values[0], util.string_types + util.binary_types
- )
+ and isinstance(values[0], collections_abc.Sequence)
+ and not isinstance(values[0], (str, bytes))
):
assert not typ_dialect_impl._is_array
to_update = [
except exc.CompileError as ce:
util.raise_(
exc.CompileError(
- util.u("(in table '%s', column '%s'): %s")
+ "(in table '%s', column '%s'): %s"
% (table.description, column.name, ce.args[0])
),
from_=ce,
def get_column_default_string(self, column):
if isinstance(column.server_default, schema.DefaultClause):
- if isinstance(column.server_default.arg, util.string_types):
+ if isinstance(column.server_default.arg, str):
return self.sql_compiler.render_literal_value(
column.server_default.arg, sqltypes.STRINGTYPE
)
return (
lc_value in self.reserved_words
or value[0] in self.illegal_initial_characters
- or not self.legal_characters.match(util.text_type(value))
+ or not self.legal_characters.match(str(value))
or (lc_value != value)
)
def _requires_quotes_illegal_chars(self, value):
"""Return True if the given identifier requires quoting, but
not taking case convention into account."""
- return not self.legal_characters.match(util.text_type(value))
+ return not self.legal_characters.match(str(value))
def quote_schema(self, schema, force=None):
"""Conditionally quote a schema name.
cols = [
stmt.table.c[key]
for key in parameter_ordering
- if isinstance(key, util.string_types) and key in stmt.table.c
+ if isinstance(key, str) and key in stmt.table.c
] + [c for c in stmt.table.c if c.key not in ordered_keys]
else:
self.state = state
def _should_execute(self, target, bind, **kw):
- if isinstance(self.dialect, util.string_types):
+ if isinstance(self.dialect, str):
if self.dialect != bind.engine.name:
return False
elif isinstance(self.dialect, (tuple, list, set)):
"""
- if not isinstance(statement, util.string_types):
+ if not isinstance(statement, str):
raise exc.ArgumentError(
"Expected a string or unicode SQL statement, got '%r'"
% statement
:class:`_expression.Delete`.
"""
+import collections.abc as collections_abc
+
from sqlalchemy.types import NullType
from . import coercions
from . import roles
from .visitors import InternalTraversal
from .. import exc
from .. import util
-from ..util import collections_abc
class DMLState(CompileState):
"""
-from __future__ import unicode_literals
-
import itertools
import operator
import re
):
# allow compatibility with libraries that
# refer to BinaryExpression directly and pass strings
- if isinstance(operator, util.string_types):
+ if isinstance(operator, str):
operator = operators.custom_op(operator)
self._orig = (left.__hash__(), right.__hash__())
self._propagate_attrs = left._propagate_attrs or right._propagate_attrs
@util.memoized_property
def description(self):
- if util.py3k:
- return self.name
- else:
- return self.name.encode("ascii", "backslashreplace")
+ return self.name
@HasMemoized.memoized_attribute
def _tq_key_label(self):
__visit_name__ = "release_savepoint"
-class quoted_name(util.MemoizedSlots, util.text_type):
+class quoted_name(util.MemoizedSlots, str):
"""Represent a SQL identifier combined with quoting preferences.
:class:`.quoted_name` is a Python unicode/str subclass which
return self
def __reduce__(self):
- return quoted_name, (util.text_type(self), self.quote)
+ return quoted_name, (str(self), self.quote)
def _memoized_method_lower(self):
if self.quote:
return self
else:
- return util.text_type(self).lower()
+ return str(self).lower()
def _memoized_method_upper(self):
if self.quote:
return self
else:
- return util.text_type(self).upper()
+ return str(self).upper()
def _find_columns(clause):
return super(_truncated_label, cls).__new__(cls, value, quote)
def __reduce__(self):
- return self.__class__, (util.text_type(self), self.quote)
+ return self.__class__, (str(self), self.quote)
def apply_map(self, map_):
return self
def __add__(self, other):
if "%" in other and not isinstance(other, _anonymous_label):
- other = util.text_type(other).replace("%", "%%")
+ other = str(other).replace("%", "%%")
else:
- other = util.text_type(other)
+ other = str(other)
return _anonymous_label(
quoted_name(
- util.text_type.__add__(self, other),
+ str.__add__(self, other),
self.quote,
)
)
def __radd__(self, other):
if "%" in other and not isinstance(other, _anonymous_label):
- other = util.text_type(other).replace("%", "%%")
+ other = str(other).replace("%", "%%")
else:
- other = util.text_type(other)
+ other = str(other)
return _anonymous_label(
quoted_name(
- util.text_type.__add__(other, self),
+ str.__add__(other, self),
self.quote,
)
)
"""
reg = _registry[package]
- identifier = util.text_type(identifier).lower()
+ identifier = str(identifier).lower()
# Check if a function with the same identifier is registered.
if identifier in reg:
super(_GenericMeta, cls).__init__(clsname, bases, clsdict)
-class GenericFunction(util.with_metaclass(_GenericMeta, Function)):
+class GenericFunction(Function, metaclass=_GenericMeta):
"""Define a 'generic' function.
A generic function is a pre-established :class:`.Function`
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+import collections.abc as collections_abc
import itertools
import operator
import sys
from .. import exc
from .. import inspection
from .. import util
-from ..util import collections_abc
-from ..util import compat
_closure_per_cache_key = util.LRUCache(1000)
code += " return %s\n" % ", ".join("i%d" % i for i in argrange)
code += " return closure.__closure__"
vars_ = {"o%d" % i: cell_values[i] for i in argrange}
- compat.exec_(code, vars_, vars_)
+ exec(code, vars_, vars_)
closure = vars_["make_cells"]()
func = type(f)(
if escape is None:
escape = "/"
- if not isinstance(other, util.compat.string_types):
+ if not isinstance(other, str):
raise TypeError("String value expected when autoescape=True")
if escape not in ("%", "_"):
as components in SQL expressions.
"""
-from __future__ import absolute_import
-
import collections
import sqlalchemy
type_ = kwargs.pop("type_", None)
args = list(args)
if args:
- if isinstance(args[0], util.string_types):
+ if isinstance(args[0], str):
if name is not None:
raise exc.ArgumentError(
"May not pass name positionally and as a keyword."
)
if self.index:
- if isinstance(self.index, util.string_types):
+ if isinstance(self.index, str):
raise exc.ArgumentError(
"The 'index' keyword argument on Column is boolean only. "
"To create indexes with a specific name, create an "
)
elif self.unique:
- if isinstance(self.unique, util.string_types):
+ if isinstance(self.unique, str):
raise exc.ArgumentError(
"The 'unique' keyword argument on Column is boolean "
"only. To create unique constraints or indexes with a "
self._colspec = coercions.expect(roles.DDLReferredColumnRole, column)
- if isinstance(self._colspec, util.string_types):
+ if isinstance(self._colspec, str):
self._table_column = None
else:
self._table_column = self._colspec
"""
- if isinstance(self._colspec, util.string_types):
+ if isinstance(self._colspec, str):
parenttable, tablekey, colname = self._resolve_col_tokens()
table.foreign_keys.add(self)
# set up remote ".column" attribute, or a note to pick it
# up when the other Table/Column shows up
- if isinstance(self._colspec, util.string_types):
+ if isinstance(self._colspec, str):
parenttable, table_key, colname = self._resolve_col_tokens()
fk_key = (table_key, colname)
if table_key in parenttable.metadata.tables:
has_argument = True
def __init__(self, arg, for_update=False, _reflected=False):
- util.assert_arg_type(
- arg, (util.string_types[0], ClauseElement, TextClause), "arg"
- )
+ util.assert_arg_type(arg, (str, ClauseElement, TextClause), "arg")
super(DefaultClause, self).__init__(for_update)
self.arg = arg
self.reflected = _reflected
def _col_expressions(self, table):
return [
- table.c[col] if isinstance(col, util.string_types) else col
+ table.c[col] if isinstance(col, str) else col
for col in self._pending_colargs
]
return "MetaData()"
def __contains__(self, table_or_key):
- if not isinstance(table_or_key, util.string_types):
+ if not isinstance(table_or_key, str):
table_or_key = table_or_key.key
return table_or_key in self.tables
def _bind_to(self, bind):
"""Bind this MetaData to an Engine, Connection, string or URL."""
url = util.preloaded.engine_url
- if isinstance(bind, util.string_types + (url.URL,)):
+ if isinstance(bind, (str, url.URL)):
self._bind = sqlalchemy.create_engine(bind)
else:
self._bind = bind
def _bind_to(self, bind):
"""Bind to a Connectable in the caller's thread."""
url = util.preloaded.engine_url
- if isinstance(bind, util.string_types + (url.URL,)):
+ if isinstance(bind, (str, url.URL)):
try:
self.context._engine = self.__engines[bind]
except KeyError:
if isinstance(name, _anonymous_label):
name = "anon_1"
- if util.py3k:
- return name
- else:
- return name.encode("ascii", "backslashreplace")
+ return name
@property
def original(self):
@util.memoized_property
def description(self):
- if util.py3k:
- return self.name
- else:
- return self.name.encode("ascii", "backslashreplace")
+ return self.name
def append_column(self, c, **kw):
existing = c.table
isinstance(args[0], list)
or (
hasattr(args[0], "__iter__")
- and not isinstance(
- args[0], util.string_types + (ClauseElement,)
- )
+ and not isinstance(args[0], (str, ClauseElement))
and inspect(args[0], raiseerr=False) is None
and not hasattr(args[0], "__clause_element__")
)
"""
+import collections.abc as collections_abc
import datetime as dt
import decimal
import json
+import pickle
from . import coercions
from . import elements
from .. import inspection
from .. import processors
from .. import util
-from ..util import compat
from ..util import langhelpers
from ..util import OrderedDict
-from ..util import pickle
class _LookupExpressionAdapter:
@property
def python_type(self):
- return util.text_type
+ return str
def get_dbapi_type(self, dbapi):
return dbapi.STRING
@property
def python_type(self):
- return util.binary_type
+ return bytes
# Python 3 - sqlite3 doesn't need the `Binary` conversion
# here, though pg8000 does to indicate "bytea"
def coerce_compared_value(self, op, value):
"""See :meth:`.TypeEngine.coerce_compared_value` for a description."""
- if isinstance(value, util.string_types):
+ if isinstance(value, str):
return self
else:
return super(_Binary, self).coerce_compared_value(op, value)
# here between an INSERT statement and a criteria used in a SELECT,
# for now we're staying conservative w/ behavioral changes (perhaps
# someone has a trigger that handles strings on INSERT)
- if not self.validate_strings and isinstance(
- elem, compat.string_types
- ):
+ if not self.validate_strings and isinstance(elem, str):
return elem
else:
util.raise_(
:param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
- :param pickler: defaults to cPickle.pickle or pickle.pickle if
- cPickle is not available. May be any object with
+ :param pickler: defaults to pickle. May be any object with
pickle-compatible ``dumps`` and ``loads`` methods.
:param comparator: a 2-arg callable predicate used
def process(value):
if int_processor and isinstance(value, int):
value = int_processor(value)
- elif string_processor and isinstance(value, util.string_types):
+ elif string_processor and isinstance(value, str):
value = string_processor(value)
return value
def process(value):
if int_processor and isinstance(value, int):
value = int_processor(value)
- elif string_processor and isinstance(value, util.string_types):
+ elif string_processor and isinstance(value, str):
value = string_processor(value)
return value
"""Define comparison operations for :class:`_types.JSON`."""
def _setup_getitem(self, index):
- if not isinstance(index, util.string_types) and isinstance(
- index, compat.collections_abc.Sequence
+ if not isinstance(index, str) and isinstance(
+ index, collections_abc.Sequence
):
index = coercions.expect(
roles.BinaryElementRole,
dt.time: Time(),
dt.timedelta: Interval(),
util.NoneType: NULLTYPE,
+ bytes: LargeBinary(),
+ str: Unicode(),
}
-if util.py3k:
- _type_map[bytes] = LargeBinary() # noqa
- _type_map[str] = Unicode()
-else:
- _type_map[unicode] = Unicode() # noqa
- _type_map[str] = String()
-
_type_map_get = _type_map.get
from collections import deque
from collections import namedtuple
+import collections.abc as collections_abc
import itertools
+from itertools import zip_longest
import operator
from . import operators
from .visitors import InternalTraversal
from .. import util
from ..inspection import inspect
-from ..util import collections_abc
from ..util import HasMemoized
-from ..util import py37
SKIP_TRAVERSE = util.symbol("skip_traverse")
COMPARE_FAILED = False
s1 = s1[idx]
s2 = s2[idx]
- for idx, (e1, e2) in enumerate(util.zip_longest(s1, s2)):
+ for idx, (e1, e2) in enumerate(zip_longest(s1, s2)):
if idx < pickup_index:
continue
if e1 != e2:
)
def visit_dml_values(self, attrname, obj, parent, anon_map, bindparams):
- if py37:
- # in py37 we can assume two dictionaries created in the same
- # insert ordering will retain that sorting
- return (
- attrname,
- tuple(
- (
- k._gen_cache_key(anon_map, bindparams)
- if hasattr(k, "__clause_element__")
- else k,
- obj[k]._gen_cache_key(anon_map, bindparams),
- )
- for k in obj
- ),
- )
- else:
- expr_values = {k for k in obj if hasattr(k, "__clause_element__")}
- if expr_values:
- # expr values can't be sorted deterministically right now,
- # so no cache
- anon_map[NO_CACHE] = True
- return ()
-
- str_values = expr_values.symmetric_difference(obj)
-
- return (
- attrname,
- tuple(
- (k, obj[k]._gen_cache_key(anon_map, bindparams))
- for k in sorted(str_values)
- ),
- )
+ # dictionaries preserve insertion order on all supported Python
+ # versions (3.7+), so two dicts built in the same insert order
+ # will iterate their keys identically
+ return (
+ attrname,
+ tuple(
+ (
+ k._gen_cache_key(anon_map, bindparams)
+ if hasattr(k, "__clause_element__")
+ else k,
+ obj[k]._gen_cache_key(anon_map, bindparams),
+ )
+ for k in obj
+ ),
+ )
def visit_dml_multi_values(
self, attrname, obj, parent, anon_map, bindparams
for (
(left_attrname, left_visit_sym),
(right_attrname, right_visit_sym),
- ) in util.zip_longest(
+ ) in zip_longest(
left._traverse_internals,
right._traverse_internals,
fillvalue=(None, None),
def visit_has_cache_key_list(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for l, r in util.zip_longest(left, right, fillvalue=None):
+ for l, r in zip_longest(left, right, fillvalue=None):
if l._gen_cache_key(self.anon_map[0], []) != r._gen_cache_key(
self.anon_map[1], []
):
def visit_fromclause_canonical_column_collection(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for lcol, rcol in util.zip_longest(left, right, fillvalue=None):
+ for lcol, rcol in zip_longest(left, right, fillvalue=None):
self.stack.append((lcol, rcol))
def visit_fromclause_derived_column_collection(
def visit_string_clauseelement_dict(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for lstr, rstr in util.zip_longest(
+ for lstr, rstr in zip_longest(
sorted(left), sorted(right), fillvalue=None
):
if lstr != rstr:
def visit_clauseelement_tuples(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for ltup, rtup in util.zip_longest(left, right, fillvalue=None):
+ for ltup, rtup in zip_longest(left, right, fillvalue=None):
if ltup is None or rtup is None:
return COMPARE_FAILED
- for l, r in util.zip_longest(ltup, rtup, fillvalue=None):
+ for l, r in zip_longest(ltup, rtup, fillvalue=None):
self.stack.append((l, r))
def visit_clauseelement_list(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for l, r in util.zip_longest(left, right, fillvalue=None):
+ for l, r in zip_longest(left, right, fillvalue=None):
self.stack.append((l, r))
def visit_clauseelement_tuple(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for l, r in util.zip_longest(left, right, fillvalue=None):
+ for l, r in zip_longest(left, right, fillvalue=None):
self.stack.append((l, r))
def _compare_unordered_sequences(self, seq1, seq2, **kw):
def visit_fromclause_ordered_set(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for l, r in util.zip_longest(left, right, fillvalue=None):
+ for l, r in zip_longest(left, right, fillvalue=None):
self.stack.append((l, r))
def visit_string(
def visit_prefix_sequence(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for (l_clause, l_str), (r_clause, r_str) in util.zip_longest(
+ for (l_clause, l_str), (r_clause, r_str) in zip_longest(
left, right, fillvalue=(None, None)
):
if l_str != r_str:
for (
(l_target, l_onclause, l_from, l_flags),
(r_target, r_onclause, r_from, r_flags),
- ) in util.zip_longest(left, right, fillvalue=(None, None, None, None)):
+ ) in zip_longest(left, right, fillvalue=(None, None, None, None)):
if l_flags != r_flags:
return COMPARE_FAILED
self.stack.append((l_target, r_target))
right_keys = sorted(
right, key=lambda elem: (elem[0].fullname, elem[1])
)
- for (ltable, ldialect), (rtable, rdialect) in util.zip_longest(
+ for (ltable, ldialect), (rtable, rdialect) in zip_longest(
left_keys, right_keys, fillvalue=(None, None)
):
if ldialect != rdialect:
):
# sequence of tuple pairs
- for (lk, lv), (rk, rv) in util.zip_longest(
+ for (lk, lv), (rk, rv) in zip_longest(
left, right, fillvalue=(None, None)
):
if not self._compare_dml_values_or_ce(lk, rk, **kw):
return COMPARE_FAILED
elif isinstance(right, collections_abc.Sequence):
return COMPARE_FAILED
- elif py37:
+ else:
# dictionaries guaranteed to support insert ordering in
# py37 so that we can compare the keys in order. without
# this, we can't compare SQL expression keys because we don't
return COMPARE_FAILED
if not self._compare_dml_values_or_ce(lv, rv, **kw):
return COMPARE_FAILED
- else:
- for lk in left:
- lv = left[lk]
-
- if lk not in right:
- return COMPARE_FAILED
- rv = right[lk]
-
- if not self._compare_dml_values_or_ce(lv, rv, **kw):
- return COMPARE_FAILED
def visit_dml_multi_values(
self, attrname, left_parent, left, right_parent, right, **kw
):
- for lseq, rseq in util.zip_longest(left, right, fillvalue=None):
+ for lseq, rseq in zip_longest(left, right, fillvalue=None):
if lseq is None or rseq is None:
return COMPARE_FAILED
- for ld, rd in util.zip_longest(lseq, rseq, fillvalue=None):
+ for ld, rd in zip_longest(lseq, rseq, fillvalue=None):
if (
self.visit_dml_values(
attrname, left_parent, ld, right_parent, rd, **kw
return NO_CACHE
-class UserDefinedType(
- util.with_metaclass(VisitableCheckKWArg, ExternalType, TypeEngine)
-):
+class UserDefinedType(ExternalType, TypeEngine, metaclass=VisitableCheckKWArg):
"""Base for user defined types.
This should be the base of new types. Note that
def _quote_ddl_expr(element):
- if isinstance(element, util.string_types):
+ if isinstance(element, str):
element = element.replace("'", "''")
return "'%s'" % element
else:
cls._original_compiler_dispatch = cls._compiler_dispatch
return
- if not isinstance(visit_name, util.compat.string_types):
+ if not isinstance(visit_name, str):
raise exc.InvalidRequestError(
"__visit_name__ on class %s must be a string at the class level"
% cls.__name__
super(TraversibleType, cls).__init__(clsname, bases, clsdict)
-class Traversible(util.with_metaclass(TraversibleType)):
+class Traversible(metaclass=TraversibleType):
"""Base class for visitable objects, applies the
:class:`.visitors.TraversibleType` metaclass.
return langhelpers._exec_code_in_env(meth_text, {}, method_name)
-class InternalTraversal(util.with_metaclass(_InternalTraversalType, object)):
+class InternalTraversal(metaclass=_InternalTraversalType):
r"""Defines visitor symbols used for internal traversal.
The :class:`.InternalTraversal` class is used in two ways. One is that
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
+from unittest import mock
from . import config
-from . import mock
from .assertions import assert_raises
from .assertions import assert_raises_context_ok
from .assertions import assert_raises_message
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
-from __future__ import absolute_import
-
import contextlib
+from itertools import filterfalse
import re
import sys
import warnings
from ..engine import default
from ..engine import url
from ..sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
-from ..util import compat
from ..util import decorator
"""
spec = db_spec(db)
- if isinstance(db, util.string_types) and not spec(config._current):
+ if isinstance(db, str) and not spec(config._current):
yield
else:
with expect_warnings(*messages, **kw):
messages,
regex=True,
assert_=True,
- py2konly=False,
raise_on_any_unexpected=False,
):
finally:
_SEEN = _FILTERS = _EXC_CLS = None
- if assert_ and (not py2konly or not compat.py3k):
+ if assert_:
assert not seen, "Warnings were not seen: %s" % ", ".join(
"%r" % (s.pattern if regex else s) for s in seen
)
"""
- if not util.py3k:
- return
-
if (
exception.__context__ is not exception.__cause__
and not exception.__suppress_context__
if msg is not None:
# I'm often pdbing here, and "err" above isn't
# in scope, so assign the string explicitly
- error_as_string = util.text_type(err)
+ error_as_string = str(err)
assert re.search(msg, error_as_string, re.UNICODE), "%r !~ %s" % (
msg,
error_as_string,
)
if check_context and not are_we_already_in_a_traceback:
_assert_proper_exception_context(err)
- print(util.text_type(err).encode("utf-8"))
+ print(str(err).encode("utf-8"))
# it's generally a good idea to not carry traceback objects outside
# of the except: block, but in this case especially we seem to have
dialect.supports_default_metavalue = supports_default_metavalue
elif dialect == "default_enhanced":
dialect = default.StrCompileDialect()
- elif isinstance(dialect, util.string_types):
+ elif isinstance(dialect, str):
dialect = url.URL.create(dialect).get_dialect()()
if default_schema_name:
c = CheckCompilerAccess(clause).compile(dialect=dialect, **kw)
param_str = repr(getattr(c, "params", {}))
- if util.py3k:
- param_str = param_str.encode("utf-8").decode("ascii", "ignore")
- print(
- ("\nSQL String:\n" + util.text_type(c) + param_str).encode(
- "utf-8"
- )
- )
- else:
- print(
- "\nSQL String:\n"
- + util.text_type(c).encode("utf-8")
- + param_str
- )
+ param_str = param_str.encode("utf-8").decode("ascii", "ignore")
+ print(("\nSQL String:\n" + str(c) + param_str).encode("utf-8"))
- cc = re.sub(r"[\n\t]", "", util.text_type(c))
+ cc = re.sub(r"[\n\t]", "", str(c))
eq_(cc, result, "%r != %r on dialect %r" % (cc, result, dialect))
found = util.IdentitySet(result)
expected = {immutabledict(e) for e in expected}
- for wrong in util.itertools_filterfalse(
- lambda o: isinstance(o, cls), found
- ):
+ for wrong in filterfalse(lambda o: isinstance(o, cls), found):
fail(
'Unexpected type "%s", expected "%s"'
% (type(wrong).__name__, cls.__name__)
import re
from .. import event
-from .. import util
from ..engine import url
from ..engine.default import DefaultDialect
from ..schema import _DDLCompiles
for_executemany=context.compiled.for_executemany,
schema_translate_map=map_,
)
- _received_statement = re.sub(r"[\n\t]", "", util.text_type(compiled))
+ _received_statement = re.sub(r"[\n\t]", "", str(compiled))
parameters = execute_observed.parameters
if not parameters:
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
-from __future__ import absolute_import
-
import collections
import re
import warnings
import sqlalchemy as sa
from .. import exc as sa_exc
-from ..util import compat
_repr_stack = set()
except (AttributeError, sa_exc.UnboundExecutionError):
return False
- if hasattr(value, "__iter__") and not isinstance(
- value, compat.string_types
- ):
+ if hasattr(value, "__iter__") and not isinstance(value, str):
if hasattr(value, "__getitem__") and not hasattr(
value, "keys"
):
)
elif isinstance(predicate, tuple):
return SpecPredicate(*predicate)
- elif isinstance(predicate, util.string_types):
+ elif isinstance(predicate, str):
tokens = re.match(
r"([\+\w]+)\s*(?:(>=|==|!=|<=|<|>)\s*([\d\.]+))?", predicate
)
try:
conn.execute(table.delete())
except sa.exc.DBAPIError as ex:
- util.print_(
+ print(
("Error emptying table %s: %r" % (table, ex)),
file=sys.stderr,
)
for table, data in cls.fixtures().items():
if len(data) < 2:
continue
- if isinstance(table, util.string_types):
+ if isinstance(table, str):
table = cls.tables[table]
headers[table] = data[0]
rows[table] = data[1:]
cls_registry[classname] = cls
type.__init__(cls, classname, bases, dict_)
- class _Base(util.with_metaclass(FindFixture, object)):
+ class _Base(metaclass=FindFixture):
pass
class Basic(BasicEntity, _Base):
+++ /dev/null
-# testing/mock.py
-# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
-# <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: https://www.opensource.org/licenses/mit-license.php
-
-"""Import stub for mock library.
-"""
-from __future__ import absolute_import
-
-from ..util import py3k
-
-
-if py3k:
- from unittest.mock import MagicMock
- from unittest.mock import Mock
- from unittest.mock import call
- from unittest.mock import patch
- from unittest.mock import ANY
-else:
- try:
- from mock import MagicMock # noqa
- from mock import Mock # noqa
- from mock import call # noqa
- from mock import patch # noqa
- from mock import ANY # noqa
- except ImportError:
- raise ImportError(
- "SQLAlchemy's test suite requires the "
- "'mock' library as of 0.8.2."
- )
is pytest.
"""
-
-from __future__ import absolute_import
-
import abc
+import configparser
import logging
import re
import sys
+from sqlalchemy.testing import asyncio
+
# flag which indicates we are in the SQLAlchemy testing suite,
# and not that of Alembic or a third party dialect.
bootstrapped_as_sqlalchemy = False
log = logging.getLogger("sqlalchemy.testing.plugin_base")
-
-py3k = sys.version_info >= (3, 0)
-
-if py3k:
- import configparser
-
- ABC = abc.ABC
-else:
- import ConfigParser as configparser
- import collections as collections_abc # noqa
-
- class ABC:
- __metaclass__ = abc.ABCMeta
-
-
# late imports
fixtures = None
engines = None
@post
def _set_disable_asyncio(opt, file_config):
- if opt.disable_asyncio or not py3k:
- from sqlalchemy.testing import asyncio
+ if opt.disable_asyncio:
asyncio.ENABLE_ASYNCIO = False
config._current.push(config_obj, testing)
-class FixtureFunctions(ABC):
+class FixtureFunctions(abc.ABC):
@abc.abstractmethod
def skip_test_exception(self, *arg, **kw):
raise NotImplementedError()
def pytest_runtest_setup(item):
from sqlalchemy.testing import asyncio
- from sqlalchemy.util import string_types
if not isinstance(item, pytest.Function):
return
"__Original test failure__:\n"
+ _current_report.longreprtext,
)
- elif e.args[-1] and isinstance(e.args[-1], string_types):
+ elif e.args[-1] and isinstance(e.args[-1], str):
args = list(e.args)
args[-1] += (
"\n__Original test failure__:\n"
# ended = time.time()
pr.disable()
- # s = compat.StringIO()
+ # s = StringIO()
stats = pstats.Stats(pr, stream=sys.stdout)
# timespent = ended - began
from ..engine import url as sa_url
from ..sql import ddl
from ..sql import schema
-from ..util import compat
log = logging.getLogger(__name__)
return decorate
def __call__(self, cfg, *arg):
- if isinstance(cfg, compat.string_types):
+ if isinstance(cfg, str):
url = sa_url.make_url(cfg)
elif isinstance(cfg, sa_url.URL):
url = cfg
"""
import platform
-import sys
from . import exclusions
from . import only_on
@property
def threading_with_mock(self):
"""Mark tests that use threading and mock at the same time - stability
- issues have been observed with coverage + python 3.3
+ issues have been observed with coverage
"""
return exclusions.skip_if(
- lambda config: util.py3k and config.options.has_coverage,
- "Stability issues with coverage + py3k",
+ lambda config: config.options.has_coverage,
+ "Stability issues with coverage",
)
@property
return exclusions.only_if(check_lib, "patch library needed")
- @property
- def non_broken_pickle(self):
- from sqlalchemy.util import pickle
-
- return exclusions.only_if(
- lambda: util.cpython
- and pickle.__name__ == "cPickle"
- or sys.version_info >= (3, 2),
- "Needs cPickle+cPython or newer Python 3 pickle",
- )
-
@property
def predictable_gc(self):
"""target platform must remove all cycles unconditionally when
sequence. This should be false only for oracle.
"""
return exclusions.open()
-
- @property
- def generic_classes(self):
- "If X[Y] can be implemented with ``__class_getitem__``. py3.7+"
- return exclusions.only_if(lambda: util.py37)
from ... import String
from ... import testing
from ... import text
-from ... import util
class RowFetchTest(fixtures.TablesTest):
):
engine = self._fixture(engine_ss_arg)
with engine.begin() as conn:
- if isinstance(statement, util.string_types):
+ if isinstance(statement, str):
result = conn.exec_driver_sql(statement)
else:
result = conn.execute(statement)
+import collections.abc as collections_abc
import itertools
from .. import AssertsCompiledSQL
from ... import tuple_
from ... import TupleType
from ... import union
-from ... import util
from ... import values
from ...exc import DatabaseError
from ...exc import ProgrammingError
-from ...util import collections_abc
class CollateTest(fixtures.TablesTest):
ly = (func.lower(table.c.q) + table.c.p).label("ly")
self._assert_result(
select(lx, ly).order_by(lx, ly.desc()),
- [(3, util.u("q1p3")), (5, util.u("q2p2")), (7, util.u("q3p1"))],
+ [(3, "q1p3"), (5, "q2p2"), (7, "q3p1")],
)
def test_plain_desc(self):
from ... import TypeDecorator
from ... import Unicode
from ... import UnicodeText
-from ... import util
from ...orm import declarative_base
from ...orm import Session
-from ...util import u
class _LiteralRoundTripFixture:
class _UnicodeFixture(_LiteralRoundTripFixture, fixtures.TestBase):
__requires__ = ("unicode_data",)
- data = u(
+ data = (
"Alors vous imaginez ma 🐍 surprise, au lever du jour, "
"quand une drôle de petite 🐍 voix m’a réveillé. Elle "
"disait: « S’il vous plaît… dessine-moi 🐍 un mouton! »"
row = connection.execute(select(unicode_table.c.unicode_data)).first()
eq_(row, (self.data,))
- assert isinstance(row[0], util.text_type)
+ assert isinstance(row[0], str)
def test_round_trip_executemany(self, connection):
unicode_table = self.tables.unicode_table
).fetchall()
eq_(rows, [(self.data,) for i in range(1, 4)])
for row in rows:
- assert isinstance(row[0], util.text_type)
+ assert isinstance(row[0], str)
def _test_null_strings(self, connection):
unicode_table = self.tables.unicode_table
unicode_table = self.tables.unicode_table
connection.execute(
- unicode_table.insert(), {"id": 1, "unicode_data": u("")}
+ unicode_table.insert(), {"id": 1, "unicode_data": ""}
)
row = connection.execute(select(unicode_table.c.unicode_data)).first()
- eq_(row, (u(""),))
+ eq_(row, ("",))
def test_literal(self, literal_round_trip):
literal_round_trip(self.datatype, [self.data], [self.data])
def test_literal_non_ascii(self, literal_round_trip):
- literal_round_trip(
- self.datatype, [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
- )
+ literal_round_trip(self.datatype, ["réve🐍 illé"], ["réve🐍 illé"])
class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest):
literal_round_trip(Text, ["some text"], ["some text"])
def test_literal_non_ascii(self, literal_round_trip):
- literal_round_trip(
- Text, [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
- )
+ literal_round_trip(Text, ["réve🐍 illé"], ["réve🐍 illé"])
def test_literal_quoting(self, literal_round_trip):
data = """some 'text' hey "hi there" that's text"""
literal_round_trip(String(40), ["some text"], ["some text"])
def test_literal_non_ascii(self, literal_round_trip):
- literal_round_trip(
- String(40), [util.u("réve🐍 illé")], [util.u("réve🐍 illé")]
- )
+ literal_round_trip(String(40), ["réve🐍 illé"], ["réve🐍 illé"])
def test_literal_quoting(self, literal_round_trip):
data = """some 'text' hey "hi there" that's text"""
eq_(row, (data,))
- if util.py3k:
- assert isinstance(row[0], int)
- else:
- assert isinstance(row[0], (long, int)) # noqa
+ assert isinstance(row[0], int)
return run
("boolean", None),
("string", "some string"),
("string", None),
- ("string", util.u("réve illé")),
+ ("string", "réve illé"),
(
"string",
- util.u("réve🐍 illé"),
+ "réve🐍 illé",
testing.requires.json_index_supplementary_unicode_element,
),
("integer", 15),
(-1.0,),
(15.052,),
("a string",),
- (util.u("réve illé"),),
- (util.u("réve🐍 illé"),),
+ ("réve illé",),
+ ("réve🐍 illé",),
)
def test_single_element_round_trip(self, element):
data_table = self.tables.data_table
{
"name": "r1",
"data": {
- util.u("réve🐍 illé"): util.u("réve🐍 illé"),
- "data": {"k1": util.u("drôl🐍e")},
+ "réve🐍 illé": "réve🐍 illé",
+ "data": {"k1": "drôl🐍e"},
},
},
)
eq_(
conn.scalar(select(self.tables.data_table.c.data)),
{
- util.u("réve🐍 illé"): util.u("réve🐍 illé"),
- "data": {"k1": util.u("drôl🐍e")},
+ "réve🐍 illé": "réve🐍 illé",
+ "data": {"k1": "drôl🐍e"},
},
)
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
-from sqlalchemy.util import u
-from sqlalchemy.util import ue
class UnicodeSchemaTest(fixtures.TablesTest):
global t1, t2, t3
t1 = Table(
- u("unitable1"),
+ "unitable1",
metadata,
- Column(u("méil"), Integer, primary_key=True),
- Column(ue("\u6e2c\u8a66"), Integer),
+ Column("méil", Integer, primary_key=True),
+ Column("\u6e2c\u8a66", Integer),
test_needs_fk=True,
)
t2 = Table(
- u("Unitéble2"),
+ "Unitéble2",
metadata,
- Column(u("méil"), Integer, primary_key=True, key="a"),
+ Column("méil", Integer, primary_key=True, key="a"),
Column(
- ue("\u6e2c\u8a66"),
+ "\u6e2c\u8a66",
Integer,
- ForeignKey(u("unitable1.méil")),
+ ForeignKey("unitable1.méil"),
key="b",
),
test_needs_fk=True,
# Few DBs support Unicode foreign keys
if testing.against("sqlite"):
t3 = Table(
- ue("\u6e2c\u8a66"),
+ "\u6e2c\u8a66",
metadata,
Column(
- ue("\u6e2c\u8a66_id"),
+ "\u6e2c\u8a66_id",
Integer,
primary_key=True,
autoincrement=False,
),
Column(
- ue("unitable1_\u6e2c\u8a66"),
+ "unitable1_\u6e2c\u8a66",
Integer,
- ForeignKey(ue("unitable1.\u6e2c\u8a66")),
+ ForeignKey("unitable1.\u6e2c\u8a66"),
),
+ Column("Unitéble2_b", Integer, ForeignKey("Unitéble2.b")),
Column(
- u("Unitéble2_b"), Integer, ForeignKey(u("Unitéble2.b"))
- ),
- Column(
- ue("\u6e2c\u8a66_self"),
+ "\u6e2c\u8a66_self",
Integer,
- ForeignKey(ue("\u6e2c\u8a66.\u6e2c\u8a66_id")),
+ ForeignKey("\u6e2c\u8a66.\u6e2c\u8a66_id"),
),
test_needs_fk=True,
)
else:
t3 = Table(
- ue("\u6e2c\u8a66"),
+ "\u6e2c\u8a66",
metadata,
Column(
- ue("\u6e2c\u8a66_id"),
+ "\u6e2c\u8a66_id",
Integer,
primary_key=True,
autoincrement=False,
),
- Column(ue("unitable1_\u6e2c\u8a66"), Integer),
- Column(u("Unitéble2_b"), Integer),
- Column(ue("\u6e2c\u8a66_self"), Integer),
+ Column("unitable1_\u6e2c\u8a66", Integer),
+ Column("Unitéble2_b", Integer),
+ Column("\u6e2c\u8a66_self", Integer),
test_needs_fk=True,
)
def test_insert(self, connection):
- connection.execute(t1.insert(), {u("méil"): 1, ue("\u6e2c\u8a66"): 5})
- connection.execute(t2.insert(), {u("a"): 1, u("b"): 1})
+ connection.execute(t1.insert(), {"méil": 1, "\u6e2c\u8a66": 5})
+ connection.execute(t2.insert(), {"a": 1, "b": 1})
connection.execute(
t3.insert(),
{
- ue("\u6e2c\u8a66_id"): 1,
- ue("unitable1_\u6e2c\u8a66"): 5,
- u("Unitéble2_b"): 1,
- ue("\u6e2c\u8a66_self"): 1,
+ "\u6e2c\u8a66_id": 1,
+ "unitable1_\u6e2c\u8a66": 5,
+ "Unitéble2_b": 1,
+ "\u6e2c\u8a66_self": 1,
},
)
eq_(connection.execute(t3.select()).fetchall(), [(1, 5, 1, 1)])
def test_col_targeting(self, connection):
- connection.execute(t1.insert(), {u("méil"): 1, ue("\u6e2c\u8a66"): 5})
- connection.execute(t2.insert(), {u("a"): 1, u("b"): 1})
+ connection.execute(t1.insert(), {"méil": 1, "\u6e2c\u8a66": 5})
+ connection.execute(t2.insert(), {"a": 1, "b": 1})
connection.execute(
t3.insert(),
{
- ue("\u6e2c\u8a66_id"): 1,
- ue("unitable1_\u6e2c\u8a66"): 5,
- u("Unitéble2_b"): 1,
- ue("\u6e2c\u8a66_self"): 1,
+ "\u6e2c\u8a66_id": 1,
+ "unitable1_\u6e2c\u8a66": 5,
+ "Unitéble2_b": 1,
+ "\u6e2c\u8a66_self": 1,
},
)
row = connection.execute(t1.select()).first()
- eq_(row._mapping[t1.c[u("méil")]], 1)
- eq_(row._mapping[t1.c[ue("\u6e2c\u8a66")]], 5)
+ eq_(row._mapping[t1.c["méil"]], 1)
+ eq_(row._mapping[t1.c["\u6e2c\u8a66"]], 5)
row = connection.execute(t2.select()).first()
- eq_(row._mapping[t2.c[u("a")]], 1)
- eq_(row._mapping[t2.c[u("b")]], 1)
+ eq_(row._mapping[t2.c["a"]], 1)
+ eq_(row._mapping[t2.c["b"]], 1)
row = connection.execute(t3.select()).first()
- eq_(row._mapping[t3.c[ue("\u6e2c\u8a66_id")]], 1)
- eq_(row._mapping[t3.c[ue("unitable1_\u6e2c\u8a66")]], 5)
- eq_(row._mapping[t3.c[u("Unitéble2_b")]], 1)
- eq_(row._mapping[t3.c[ue("\u6e2c\u8a66_self")]], 1)
+ eq_(row._mapping[t3.c["\u6e2c\u8a66_id"]], 1)
+ eq_(row._mapping[t3.c["unitable1_\u6e2c\u8a66"]], 5)
+ eq_(row._mapping[t3.c["Unitéble2_b"]], 1)
+ eq_(row._mapping[t3.c["\u6e2c\u8a66_self"]], 1)
def test_reflect(self, connection):
- connection.execute(t1.insert(), {u("méil"): 2, ue("\u6e2c\u8a66"): 7})
- connection.execute(t2.insert(), {u("a"): 2, u("b"): 2})
+ connection.execute(t1.insert(), {"méil": 2, "\u6e2c\u8a66": 7})
+ connection.execute(t2.insert(), {"a": 2, "b": 2})
connection.execute(
t3.insert(),
{
- ue("\u6e2c\u8a66_id"): 2,
- ue("unitable1_\u6e2c\u8a66"): 7,
- u("Unitéble2_b"): 2,
- ue("\u6e2c\u8a66_self"): 2,
+ "\u6e2c\u8a66_id": 2,
+ "unitable1_\u6e2c\u8a66": 7,
+ "Unitéble2_b": 2,
+ "\u6e2c\u8a66_self": 2,
},
)
tt2 = Table(t2.name, meta, autoload_with=connection)
tt3 = Table(t3.name, meta, autoload_with=connection)
- connection.execute(tt1.insert(), {u("méil"): 1, ue("\u6e2c\u8a66"): 5})
- connection.execute(tt2.insert(), {u("méil"): 1, ue("\u6e2c\u8a66"): 1})
+ connection.execute(tt1.insert(), {"méil": 1, "\u6e2c\u8a66": 5})
+ connection.execute(tt2.insert(), {"méil": 1, "\u6e2c\u8a66": 1})
connection.execute(
tt3.insert(),
{
- ue("\u6e2c\u8a66_id"): 1,
- ue("unitable1_\u6e2c\u8a66"): 5,
- u("Unitéble2_b"): 1,
- ue("\u6e2c\u8a66_self"): 1,
+ "\u6e2c\u8a66_id": 1,
+ "unitable1_\u6e2c\u8a66": 5,
+ "Unitéble2_b": 1,
+ "\u6e2c\u8a66_self": 1,
},
)
eq_(
- connection.execute(
- tt1.select().order_by(desc(u("méil")))
- ).fetchall(),
+ connection.execute(tt1.select().order_by(desc("méil"))).fetchall(),
[(2, 7), (1, 5)],
)
eq_(
- connection.execute(
- tt2.select().order_by(desc(u("méil")))
- ).fetchall(),
+ connection.execute(tt2.select().order_by(desc("méil"))).fetchall(),
[(2, 2), (1, 1)],
)
eq_(
connection.execute(
- tt3.select().order_by(desc(ue("\u6e2c\u8a66_id")))
+ tt3.select().order_by(desc("\u6e2c\u8a66_id"))
).fetchall(),
[(2, 7, 2, 2), (1, 5, 1, 1)],
)
def test_repr(self):
meta = MetaData()
- t = Table(
- ue("\u6e2c\u8a66"), meta, Column(ue("\u6e2c\u8a66_id"), Integer)
- )
+ t = Table("\u6e2c\u8a66", meta, Column("\u6e2c\u8a66_id", Integer))
eq_(
repr(t),
(
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
-
-from __future__ import absolute_import
-
import warnings
from . import assertions
from collections import defaultdict
-from contextlib import contextmanager
from functools import partial
from functools import update_wrapper
from ._collections import coerce_generator_arg
from ._collections import coerce_to_immutabledict
-from ._collections import collections_abc
from ._collections import column_dict
from ._collections import column_set
from ._collections import EMPTY_DICT
from ._collections import WeakSequence
from ._preloaded import preload_module
from ._preloaded import preloaded
-from .compat import ABC
from .compat import arm
from .compat import b
from .compat import b64decode
from .compat import b64encode
-from .compat import binary_type
-from .compat import binary_types
-from .compat import byte_buffer
-from .compat import callable
from .compat import cmp
from .compat import cpython
from .compat import dataclass_fields
from .compat import dottedgetter
from .compat import has_refcount_gc
from .compat import inspect_getfullargspec
-from .compat import int_types
-from .compat import iterbytes
-from .compat import itertools_filter
-from .compat import itertools_filterfalse
from .compat import local_dataclass_fields
from .compat import namedtuple
from .compat import next
-from .compat import nullcontext
from .compat import osx
-from .compat import parse_qsl
-from .compat import perf_counter
-from .compat import pickle
-from .compat import print_
-from .compat import py37
from .compat import py38
from .compat import py39
-from .compat import py3k
from .compat import pypy
-from .compat import quote_plus
from .compat import raise_
from .compat import raise_from_cause
-from .compat import reduce
from .compat import reraise
-from .compat import string_types
-from .compat import StringIO
-from .compat import text_type
from .compat import threading
-from .compat import timezone
-from .compat import TYPE_CHECKING
-from .compat import u
-from .compat import ue
-from .compat import unquote
-from .compat import unquote_plus
from .compat import win32
-from .compat import with_metaclass
-from .compat import zip_longest
from .concurrency import asyncio
from .concurrency import await_fallback
from .concurrency import await_only
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Collection classes and helpers."""
-
-from __future__ import absolute_import
-
+import collections.abc as collections_abc
+from itertools import filterfalse
import operator
import types
import weakref
-from .compat import binary_types
-from .compat import collections_abc
-from .compat import itertools_filterfalse
-from .compat import py37
-from .compat import string_types
from .compat import threading
-
EMPTY_SET = frozenset()
d.update(items)
-if py37:
- OrderedDict = dict
- sort_dictionary = _ordered_dictionary_sort
-
-else:
- # prevent sort_dictionary from being used against a plain dictionary
- # for Python < 3.7
-
- def sort_dictionary(d, key=None):
- """Sort an OrderedDict in place."""
-
- d._ordered_dictionary_sort(key=key)
-
- class OrderedDict(dict):
- """Dictionary that maintains insertion order.
-
- Superseded by Python dict as of Python 3.7
-
- """
-
- __slots__ = ("_list",)
-
- def _ordered_dictionary_sort(self, key=None):
- _ordered_dictionary_sort(self, key=key)
-
- def __reduce__(self):
- return OrderedDict, (self.items(),)
-
- def __init__(self, ____sequence=None, **kwargs):
- self._list = []
- if ____sequence is None:
- if kwargs:
- self.update(**kwargs)
- else:
- self.update(____sequence, **kwargs)
-
- def clear(self):
- self._list = []
- dict.clear(self)
-
- def copy(self):
- return self.__copy__()
-
- def __copy__(self):
- return OrderedDict(self)
-
- def update(self, ____sequence=None, **kwargs):
- if ____sequence is not None:
- if hasattr(____sequence, "keys"):
- for key in ____sequence.keys():
- self.__setitem__(key, ____sequence[key])
- else:
- for key, value in ____sequence:
- self[key] = value
- if kwargs:
- self.update(kwargs)
-
- def setdefault(self, key, value):
- if key not in self:
- self.__setitem__(key, value)
- return value
- else:
- return self.__getitem__(key)
-
- def __iter__(self):
- return iter(self._list)
-
- def keys(self):
- return list(self)
-
- def values(self):
- return [self[key] for key in self._list]
-
- def items(self):
- return [(key, self[key]) for key in self._list]
-
- def __setitem__(self, key, obj):
- if key not in self:
- try:
- self._list.append(key)
- except AttributeError:
- # work around Python pickle loads() with
- # dict subclass (seems to ignore __setstate__?)
- self._list = [key]
- dict.__setitem__(self, key, obj)
-
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- self._list.remove(key)
-
- def pop(self, key, *default):
- present = key in self
- value = dict.pop(self, key, *default)
- if present:
- self._list.remove(key)
- return value
-
- def popitem(self):
- item = dict.popitem(self)
- self._list.remove(item[0])
- return item
+OrderedDict = dict
+sort_dictionary = _ordered_dictionary_sort
class OrderedSet(set):
if len(self) > len(other):
return False
- for m in itertools_filterfalse(
+ for m in filterfalse(
other._members.__contains__, iter(self._members.keys())
):
return False
if len(self) < len(other):
return False
- for m in itertools_filterfalse(
+ for m in filterfalse(
self._members.__contains__, iter(other._members.keys())
):
return False
if x is None:
return default
if not isinstance(x, collections_abc.Iterable) or isinstance(
- x, string_types + binary_types
+ x, (str, bytes)
):
return [x]
elif isinstance(x, list):
+++ /dev/null
-# util/_compat_py3k.py
-# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
-# <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: https://www.opensource.org/licenses/mit-license.php
-
-from functools import wraps
-
-# vendored from py3.7
-
-
-class _AsyncGeneratorContextManager:
- """Helper for @asynccontextmanager."""
-
- def __init__(self, func, args, kwds):
- self.gen = func(*args, **kwds)
- self.func, self.args, self.kwds = func, args, kwds
- doc = getattr(func, "__doc__", None)
- if doc is None:
- doc = type(self).__doc__
- self.__doc__ = doc
-
- async def __aenter__(self):
- try:
- return await self.gen.__anext__()
- except StopAsyncIteration:
- raise RuntimeError("generator didn't yield") from None
-
- async def __aexit__(self, typ, value, traceback):
- if typ is None:
- try:
- await self.gen.__anext__()
- except StopAsyncIteration:
- return
- else:
- raise RuntimeError("generator didn't stop")
- else:
- if value is None:
- value = typ()
- # See _GeneratorContextManager.__exit__ for comments on subtleties
- # in this implementation
- try:
- await self.gen.athrow(typ, value, traceback)
- raise RuntimeError("generator didn't stop after athrow()")
- except StopAsyncIteration as exc:
- return exc is not value
- except RuntimeError as exc:
- if exc is value:
- return False
- if isinstance(value, (StopIteration, StopAsyncIteration)):
- if exc.__cause__ is value:
- return False
- raise
- except BaseException as exc:
- if exc is not value:
- raise
-
-
-# using the vendored version in all cases at the moment to establish
-# full test coverage
-def asynccontextmanager(func):
- @wraps(func)
- def helper(*args, **kwds):
- return _AsyncGeneratorContextManager(func, args, kwds)
-
- return helper
# the MIT License: https://www.opensource.org/licenses/mit-license.php
import asyncio
+from contextvars import copy_context as _copy_context
import sys
from typing import Any
from typing import Callable
import greenlet
-from . import compat
from .langhelpers import memoized_property
from .. import exc
-if compat.py37:
- try:
- from contextvars import copy_context as _copy_context
+try:
- # If greenlet.gr_context is present in current version of greenlet,
- # it will be set with a copy of the current context on creation.
- # Refs: https://github.com/python-greenlet/greenlet/pull/198
- getattr(greenlet.greenlet, "gr_context")
- except (ImportError, AttributeError):
- _copy_context = None
-else:
- _copy_context = None
+ # If greenlet.gr_context is present in current version of greenlet,
+ # it will be set with a copy of the current context on creation.
+ # Refs: https://github.com/python-greenlet/greenlet/pull/198
+ getattr(greenlet.greenlet, "gr_context")
+except (ImportError, AttributeError):
+ _copy_context = None # noqa
def is_exit_exception(e):
Python 3.10 deprecates get_event_loop() as a standalone.
"""
- if compat.py37:
- try:
- return asyncio.get_running_loop()
- except RuntimeError:
- return asyncio.get_event_loop_policy().get_event_loop()
- else:
- return asyncio.get_event_loop()
+ try:
+ return asyncio.get_running_loop()
+ except RuntimeError:
+ return asyncio.get_event_loop_policy().get_event_loop()
import sys
-from . import compat
-
class _ModuleRegistry:
"""Registry of modules to load in a package init file.
if (
not path or module.startswith(path)
) and key not in self.__dict__:
- compat.import_(module, globals(), locals())
+ __import__(module, globals(), locals())
self.__dict__[key] = sys.modules[module]
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Handle Python version/platform incompatibilities."""
-
+import base64
import collections
-import contextlib
+import dataclasses
import inspect
import operator
import platform
py39 = sys.version_info >= (3, 9)
py38 = sys.version_info >= (3, 8)
-py37 = sys.version_info >= (3, 7)
-py3k = sys.version_info >= (3, 0)
pypy = platform.python_implementation() == "PyPy"
-
-
cpython = platform.python_implementation() == "CPython"
+
win32 = sys.platform.startswith("win")
osx = sys.platform.startswith("darwin")
arm = "aarch" in platform.machine().lower()
has_refcount_gc = bool(cpython)
-contextmanager = contextlib.contextmanager
dottedgetter = operator.attrgetter
namedtuple = collections.namedtuple
next = next # noqa
)
-class nullcontext:
- """Context manager that does no additional processing.
-
- Vendored from Python 3.7.
-
- """
-
- def __init__(self, enter_result=None):
- self.enter_result = enter_result
-
- def __enter__(self):
- return self.enter_result
-
- def __exit__(self, *excinfo):
- pass
-
-
try:
import threading
except ImportError:
nargs = co.co_argcount
names = co.co_varnames
- nkwargs = co.co_kwonlyargcount if py3k else 0
+ nkwargs = co.co_kwonlyargcount
args = list(names[:nargs])
kwonlyargs = list(names[nargs : nargs + nkwargs])
varkw,
func.__defaults__,
kwonlyargs,
- func.__kwdefaults__ if py3k else None,
- func.__annotations__ if py3k else {},
+ func.__kwdefaults__,
+ func.__annotations__,
)
return ep.get(group, ())
-if py3k:
- import base64
- import builtins
- import configparser
- import itertools
- import pickle
-
- from functools import reduce
- from io import BytesIO as byte_buffer
- from io import StringIO
- from itertools import zip_longest
- from time import perf_counter
- from urllib.parse import (
- quote_plus,
- unquote_plus,
- parse_qsl,
- quote,
- unquote,
- )
-
- string_types = (str,)
- binary_types = (bytes,)
- binary_type = bytes
- text_type = str
- int_types = (int,)
- iterbytes = iter
- long_type = int
-
- itertools_filterfalse = itertools.filterfalse
- itertools_filter = filter
- itertools_imap = map
-
- exec_ = getattr(builtins, "exec")
- import_ = getattr(builtins, "__import__")
- print_ = getattr(builtins, "print")
-
- def b(s):
- return s.encode("latin-1")
+def b(s):
+    # Py3-only replacement for the former compat shim: str in, bytes out.
+    # latin-1 maps code points 0-255 one-to-one onto single bytes.
+    return s.encode("latin-1")
- def b64decode(x):
- return base64.b64decode(x.encode("ascii"))
- def b64encode(x):
- return base64.b64encode(x).decode("ascii")
+def b64decode(x):
+    # Accepts base64 text as a str (ASCII) and returns the decoded bytes;
+    # ``base64`` is imported at the top of this module.
+    return base64.b64decode(x.encode("ascii"))
- def decode_backslashreplace(text, encoding):
- return text.decode(encoding, errors="backslashreplace")
- def cmp(a, b):
- return (a > b) - (a < b)
+def b64encode(x):
+    # Inverse of b64decode(): bytes in, base64 text out as an ASCII str.
+    return base64.b64encode(x).decode("ascii")
- def raise_(
- exception, with_traceback=None, replace_context=None, from_=False
- ):
- r"""implement "raise" with cause support.
- :param exception: exception to raise
- :param with_traceback: will call exception.with_traceback()
- :param replace_context: an as-yet-unsupported feature. This is
- an exception object which we are "replacing", e.g., it's our
- "cause" but we don't want it printed. Basically just what
- ``__suppress_context__`` does but we don't want to suppress
- the enclosing context, if any. So for now we make it the
- cause.
- :param from\_: the cause. this actually sets the cause and doesn't
- hope to hide it someday.
+def decode_backslashreplace(text, encoding):
+    # Decode bytes whose encoding may be wrong or unknown: undecodable
+    # bytes are rendered as \xNN escapes instead of raising
+    # UnicodeDecodeError.
+    return text.decode(encoding, errors="backslashreplace")
- """
- if with_traceback is not None:
- exception = exception.with_traceback(with_traceback)
- if from_ is not False:
- exception.__cause__ = from_
- elif replace_context is not None:
- # no good solution here, we would like to have the exception
- # have only the context of replace_context.__context__ so that the
- # intermediary exception does not change, but we can't figure
- # that out.
- exception.__cause__ = replace_context
+def cmp(a, b):
+    # Reimplementation of the removed Python 2 builtin cmp():
+    # returns -1, 0, or 1 for a < b, a == b, a > b respectively.
+    return (a > b) - (a < b)
- try:
- raise exception
- finally:
- # credit to
- # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
- # as the __traceback__ object creates a cycle
- del exception, replace_context, from_, with_traceback
- def u(s):
- return s
+def raise_(exception, with_traceback=None, replace_context=None, from_=False):
+ r"""implement "raise" with cause support.
- def ue(s):
- return s
+ :param exception: exception to raise
+ :param with_traceback: will call exception.with_traceback()
+ :param replace_context: an as-yet-unsupported feature. This is
+ an exception object which we are "replacing", e.g., it's our
+ "cause" but we don't want it printed. Basically just what
+ ``__suppress_context__`` does but we don't want to suppress
+ the enclosing context, if any. So for now we make it the
+ cause.
+ :param from\_: the cause. this actually sets the cause and doesn't
+ hope to hide it someday.
- from typing import TYPE_CHECKING
-
- # Unused. Kept for backwards compatibility.
- callable = callable # noqa
-
- from abc import ABC
-
- def _qualname(fn):
- return fn.__qualname__
-
-
-else:
- import base64
- import ConfigParser as configparser # noqa
- import itertools
-
- from StringIO import StringIO # noqa
- from cStringIO import StringIO as byte_buffer # noqa
- from itertools import izip_longest as zip_longest # noqa
- from time import clock as perf_counter # noqa
- from urllib import quote # noqa
- from urllib import quote_plus # noqa
- from urllib import unquote # noqa
- from urllib import unquote_plus # noqa
- from urlparse import parse_qsl # noqa
-
- from abc import ABCMeta
-
- class ABC:
- __metaclass__ = ABCMeta
+ """
+ if with_traceback is not None:
+ exception = exception.with_traceback(with_traceback)
+
+ if from_ is not False:
+ exception.__cause__ = from_
+ elif replace_context is not None:
+ # no good solution here, we would like to have the exception
+ # have only the context of replace_context.__context__ so that the
+ # intermediary exception does not change, but we can't figure
+ # that out.
+ exception.__cause__ = replace_context
try:
- import cPickle as pickle
- except ImportError:
- import pickle # noqa
-
- string_types = (basestring,) # noqa
- binary_types = (bytes,)
- binary_type = str
- text_type = unicode # noqa
- int_types = int, long # noqa
- long_type = long # noqa
-
- callable = callable # noqa
- cmp = cmp # noqa
- reduce = reduce # noqa
-
- b64encode = base64.b64encode
- b64decode = base64.b64decode
-
- itertools_filterfalse = itertools.ifilterfalse
- itertools_filter = itertools.ifilter
- itertools_imap = itertools.imap
-
- def b(s):
- return s
-
- def exec_(func_text, globals_, lcl=None):
- if lcl is None:
- exec("exec func_text in globals_")
- else:
- exec("exec func_text in globals_, lcl")
-
- def iterbytes(buf):
- return (ord(byte) for byte in buf)
-
- def import_(*args):
- if len(args) == 4:
- args = args[0:3] + ([str(arg) for arg in args[3]],)
- return __import__(*args)
-
- def print_(*args, **kwargs):
- fp = kwargs.pop("file", sys.stdout)
- if fp is None:
- return
- for arg in enumerate(args):
- if not isinstance(arg, basestring): # noqa
- arg = str(arg)
- fp.write(arg)
-
- def u(s):
- # this differs from what six does, which doesn't support non-ASCII
- # strings - we only use u() with
- # literal source strings, and all our source files with non-ascii
- # in them (all are tests) are utf-8 encoded.
- return unicode(s, "utf-8") # noqa
-
- def ue(s):
- return unicode(s, "unicode_escape") # noqa
-
- def decode_backslashreplace(text, encoding):
- try:
- return text.decode(encoding)
- except UnicodeDecodeError:
- # regular "backslashreplace" for an incompatible encoding raises:
- # "TypeError: don't know how to handle UnicodeDecodeError in
- # error callback"
- return repr(text)[1:-1].decode()
-
- def safe_bytestring(text):
- # py2k only
- if not isinstance(text, string_types):
- return unicode(text).encode( # noqa: F821
- "ascii", errors="backslashreplace"
- )
- elif isinstance(text, unicode): # noqa: F821
- return text.encode("ascii", errors="backslashreplace")
- else:
- return text
-
- exec(
- "def raise_(exception, with_traceback=None, replace_context=None, "
- "from_=False):\n"
- " if with_traceback:\n"
- " raise type(exception), exception, with_traceback\n"
- " else:\n"
- " raise exception\n"
- )
-
- TYPE_CHECKING = False
-
- def _qualname(meth):
- """return __qualname__ equivalent for a method on a class"""
-
- for cls in meth.im_class.__mro__:
- if meth.__name__ in cls.__dict__:
- break
- else:
- return meth.__name__
-
- return "%s.%s" % (cls.__name__, meth.__name__)
-
-
-if py3k:
+ raise exception
+ finally:
+ # credit to
+ # https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
+ # as the __traceback__ object creates a cycle
+ del exception, replace_context, from_, with_traceback
+
+
+def _formatannotation(annotation, base_module=None):
+    """vendored from python 3.7
+
+    Render a type annotation for display: ``typing`` constructs drop the
+    ``typing.`` prefix, plain classes use their qualified name (module
+    prefix omitted for builtins or classes from *base_module*), and
+    anything else falls back to ``repr()``.
+    """
+
+    if getattr(annotation, "__module__", None) == "typing":
+        return repr(annotation).replace("typing.", "")
+    if isinstance(annotation, type):
+        if annotation.__module__ in ("builtins", base_module):
+            return annotation.__qualname__
+        return annotation.__module__ + "." + annotation.__qualname__
+    return repr(annotation)
+
+
+def inspect_formatargspec(
+ args,
+ varargs=None,
+ varkw=None,
+ defaults=None,
+ kwonlyargs=(),
+ kwonlydefaults={},
+ annotations={},
+ formatarg=str,
+ formatvarargs=lambda name: "*" + name,
+ formatvarkw=lambda name: "**" + name,
+ formatvalue=lambda value: "=" + repr(value),
+ formatreturns=lambda text: " -> " + text,
+ formatannotation=_formatannotation,
+):
+ """Copy formatargspec from python 3.7 standard library.
+
+ Python 3 has deprecated formatargspec and requested that Signature
+ be used instead, however this requires a full reimplementation
+ of formatargspec() in terms of creating Parameter objects and such.
+ Instead of introducing all the object-creation overhead and having
+ to reinvent from scratch, just copy their compatibility routine.
+
+    Ultimately we would need to rewrite our "decorator" routine completely
+    which is not really worth it right now; Python 2.x support has been
+    dropped, but this vendored copy is retained to avoid that rewrite.
- def _formatannotation(annotation, base_module=None):
- """vendored from python 3.7"""
-
- if getattr(annotation, "__module__", None) == "typing":
- return repr(annotation).replace("typing.", "")
- if isinstance(annotation, type):
- if annotation.__module__ in ("builtins", base_module):
- return annotation.__qualname__
- return annotation.__module__ + "." + annotation.__qualname__
- return repr(annotation)
-
- def inspect_formatargspec(
- args,
- varargs=None,
- varkw=None,
- defaults=None,
- kwonlyargs=(),
- kwonlydefaults={},
- annotations={},
- formatarg=str,
- formatvarargs=lambda name: "*" + name,
- formatvarkw=lambda name: "**" + name,
- formatvalue=lambda value: "=" + repr(value),
- formatreturns=lambda text: " -> " + text,
- formatannotation=_formatannotation,
- ):
- """Copy formatargspec from python 3.7 standard library.
-
- Python 3 has deprecated formatargspec and requested that Signature
- be used instead, however this requires a full reimplementation
- of formatargspec() in terms of creating Parameter objects and such.
- Instead of introducing all the object-creation overhead and having
- to reinvent from scratch, just copy their compatibility routine.
-
- Ultimately we would need to rewrite our "decorator" routine completely
- which is not really worth it right now, until all Python 2.x support
- is dropped.
-
- """
-
- kwonlydefaults = kwonlydefaults or {}
- annotations = annotations or {}
-
- def formatargandannotation(arg):
- result = formatarg(arg)
- if arg in annotations:
- result += ": " + formatannotation(annotations[arg])
- return result
-
- specs = []
- if defaults:
- firstdefault = len(args) - len(defaults)
- for i, arg in enumerate(args):
- spec = formatargandannotation(arg)
- if defaults and i >= firstdefault:
- spec = spec + formatvalue(defaults[i - firstdefault])
- specs.append(spec)
+ """
- if varargs is not None:
- specs.append(formatvarargs(formatargandannotation(varargs)))
- else:
- if kwonlyargs:
- specs.append("*")
+ kwonlydefaults = kwonlydefaults or {}
+ annotations = annotations or {}
- if kwonlyargs:
- for kwonlyarg in kwonlyargs:
- spec = formatargandannotation(kwonlyarg)
- if kwonlydefaults and kwonlyarg in kwonlydefaults:
- spec += formatvalue(kwonlydefaults[kwonlyarg])
- specs.append(spec)
-
- if varkw is not None:
- specs.append(formatvarkw(formatargandannotation(varkw)))
-
- result = "(" + ", ".join(specs) + ")"
- if "return" in annotations:
- result += formatreturns(formatannotation(annotations["return"]))
+ def formatargandannotation(arg):
+ result = formatarg(arg)
+ if arg in annotations:
+ result += ": " + formatannotation(annotations[arg])
return result
+ specs = []
+ if defaults:
+ firstdefault = len(args) - len(defaults)
+ for i, arg in enumerate(args):
+ spec = formatargandannotation(arg)
+ if defaults and i >= firstdefault:
+ spec = spec + formatvalue(defaults[i - firstdefault])
+ specs.append(spec)
+
+ if varargs is not None:
+ specs.append(formatvarargs(formatargandannotation(varargs)))
+ else:
+ if kwonlyargs:
+ specs.append("*")
-else:
- from inspect import formatargspec as _inspect_formatargspec
-
- def inspect_formatargspec(*spec, **kw):
- # convert for a potential FullArgSpec from compat.getfullargspec()
- return _inspect_formatargspec(*spec[0:4], **kw) # noqa
-
-
-# Fix deprecation of accessing ABCs straight from collections module
-# (which will stop working in 3.8).
-if py3k:
- import collections.abc as collections_abc
-else:
- import collections as collections_abc # noqa
-
-
-if py37:
- import dataclasses
+ if kwonlyargs:
+ for kwonlyarg in kwonlyargs:
+ spec = formatargandannotation(kwonlyarg)
+ if kwonlydefaults and kwonlyarg in kwonlydefaults:
+ spec += formatvalue(kwonlydefaults[kwonlyarg])
+ specs.append(spec)
- def dataclass_fields(cls):
- """Return a sequence of all dataclasses.Field objects associated
- with a class."""
+ if varkw is not None:
+ specs.append(formatvarkw(formatargandannotation(varkw)))
- if dataclasses.is_dataclass(cls):
- return dataclasses.fields(cls)
- else:
- return []
+ result = "(" + ", ".join(specs) + ")"
+ if "return" in annotations:
+ result += formatreturns(formatannotation(annotations["return"]))
+ return result
- def local_dataclass_fields(cls):
- """Return a sequence of all dataclasses.Field objects associated with
- a class, excluding those that originate from a superclass."""
- if dataclasses.is_dataclass(cls):
- super_fields = set()
- for sup in cls.__bases__:
- super_fields.update(dataclass_fields(sup))
- return [
- f for f in dataclasses.fields(cls) if f not in super_fields
- ]
- else:
- return []
+def dataclass_fields(cls):
+    """Return a sequence of all dataclasses.Field objects associated
+    with a class."""
+    # Non-dataclass classes report no fields rather than raising, so
+    # callers may probe arbitrary classes.
+    if dataclasses.is_dataclass(cls):
+        return dataclasses.fields(cls)
+    else:
+        return []
-else:
- def dataclass_fields(cls):
- return []
+def local_dataclass_fields(cls):
+ """Return a sequence of all dataclasses.Field objects associated with
+ a class, excluding those that originate from a superclass."""
- def local_dataclass_fields(cls):
+ if dataclasses.is_dataclass(cls):
+ super_fields = set()
+ for sup in cls.__bases__:
+ super_fields.update(dataclass_fields(sup))
+ return [f for f in dataclasses.fields(cls) if f not in super_fields]
+ else:
return []
r"""legacy. use raise\_()"""
raise_(value, with_traceback=tb, from_=cause)
-
-
-def with_metaclass(meta, *bases, **kw):
- """Create a base class with a metaclass.
-
- Drops the middle class upon creation.
-
- Source: https://lucumr.pocoo.org/2013/5/21/porting-to-python-3-redux/
-
- """
-
- class metaclass(meta):
- __call__ = type.__call__
- __init__ = type.__init__
-
- def __new__(cls, name, this_bases, d):
- if this_bases is None:
- cls = type.__new__(cls, name, (), d)
- else:
- cls = meta(name, bases, d)
-
- if hasattr(cls, "__init_subclass__") and hasattr(
- cls.__init_subclass__, "__func__"
- ):
- cls.__init_subclass__.__func__(cls, **kw)
- return cls
-
- return metaclass("temporary_class", None, {})
-
-
-if py3k:
- from datetime import timezone
-else:
- from datetime import datetime
- from datetime import timedelta
- from datetime import tzinfo
-
- class timezone(tzinfo):
- """Minimal port of python 3 timezone object"""
-
- __slots__ = "_offset"
-
- def __init__(self, offset):
- if not isinstance(offset, timedelta):
- raise TypeError("offset must be a timedelta")
- if not self._minoffset <= offset <= self._maxoffset:
- raise ValueError(
- "offset must be a timedelta "
- "strictly between -timedelta(hours=24) and "
- "timedelta(hours=24)."
- )
- self._offset = offset
-
- def __eq__(self, other):
- if type(other) != timezone:
- return False
- return self._offset == other._offset
-
- def __hash__(self):
- return hash(self._offset)
-
- def __repr__(self):
- return "sqlalchemy.util.%s(%r)" % (
- self.__class__.__name__,
- self._offset,
- )
-
- def __str__(self):
- return self.tzname(None)
-
- def utcoffset(self, dt):
- return self._offset
-
- def tzname(self, dt):
- return self._name_from_offset(self._offset)
-
- def dst(self, dt):
- return None
-
- def fromutc(self, dt):
- if isinstance(dt, datetime):
- if dt.tzinfo is not self:
- raise ValueError("fromutc: dt.tzinfo " "is not self")
- return dt + self._offset
- raise TypeError(
- "fromutc() argument must be a datetime instance" " or None"
- )
-
- @staticmethod
- def _timedelta_to_microseconds(timedelta):
- """backport of timedelta._to_microseconds()"""
- return (
- timedelta.days * (24 * 3600) + timedelta.seconds
- ) * 1000000 + timedelta.microseconds
-
- @staticmethod
- def _divmod_timedeltas(a, b):
- """backport of timedelta.__divmod__"""
-
- q, r = divmod(
- timezone._timedelta_to_microseconds(a),
- timezone._timedelta_to_microseconds(b),
- )
- return q, timedelta(0, 0, r)
-
- @staticmethod
- def _name_from_offset(delta):
- if not delta:
- return "UTC"
- if delta < timedelta(0):
- sign = "-"
- delta = -delta
- else:
- sign = "+"
- hours, rest = timezone._divmod_timedeltas(
- delta, timedelta(hours=1)
- )
- minutes, rest = timezone._divmod_timedeltas(
- rest, timedelta(minutes=1)
- )
- result = "UTC%s%02d:%02d" % (sign, hours, minutes)
- if rest.seconds:
- result += ":%02d" % (rest.seconds,)
- if rest.microseconds:
- result += ".%06d" % (rest.microseconds,)
- return result
-
- _maxoffset = timedelta(hours=23, minutes=59)
- _minoffset = -_maxoffset
-
- timezone.utc = timezone(timedelta(0))
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
-from . import compat
have_greenlet = False
-if compat.py3k:
- try:
- import greenlet # noqa F401
- except ImportError:
- pass
- else:
- have_greenlet = True
- from ._concurrency_py3k import await_only
- from ._concurrency_py3k import await_fallback
- from ._concurrency_py3k import greenlet_spawn
- from ._concurrency_py3k import is_exit_exception
- from ._concurrency_py3k import AsyncAdaptedLock
- from ._concurrency_py3k import _util_async_run # noqa F401
- from ._concurrency_py3k import (
- _util_async_run_coroutine_function,
- ) # noqa F401, E501
- from ._concurrency_py3k import asyncio # noqa F401
-
- # does not need greennlet, just Python 3
- from ._compat_py3k import asynccontextmanager # noqa F401
+# greenlet is an optional dependency: when the import fails we leave
+# ``have_greenlet`` False and the fallback stubs guarded by
+# ``if not have_greenlet`` later in this module are used instead.
+try:
+    import greenlet  # noqa F401
+except ImportError:
+    pass
+else:
+    have_greenlet = True
+    from ._concurrency_py3k import await_only
+    from ._concurrency_py3k import await_fallback
+    from ._concurrency_py3k import greenlet_spawn
+    from ._concurrency_py3k import is_exit_exception
+    from ._concurrency_py3k import AsyncAdaptedLock
+    from ._concurrency_py3k import _util_async_run  # noqa F401
+    from ._concurrency_py3k import (
+        _util_async_run_coroutine_function,
+    )  # noqa F401, E501
+    from ._concurrency_py3k import asyncio  # noqa F401
+    # NOTE(review): the removed py3k branch also re-exported
+    # ``asynccontextmanager`` from ._compat_py3k; that re-export is not
+    # preserved here -- verify no remaining importers rely on it.
if not have_greenlet:
if have_greenlet:
return None
- if not compat.py3k:
- raise ValueError("Cannot use this function in py2.")
- else:
- raise ValueError(
- "the greenlet library is required to use this function."
- )
+ raise ValueError(
+ "the greenlet library is required to use this function."
+ )
def is_exit_exception(e): # noqa F811
return not isinstance(e, Exception)
def md5_hex(x):
- if compat.py3k:
- x = x.encode("utf-8")
+ x = x.encode("utf-8")
m = hashlib.md5()
m.update(x)
return m.hexdigest()
with_traceback=exc_tb,
)
else:
- if not compat.py3k and self._exc_info and self._exc_info[1]:
- # emulate Py3K's behavior of telling us when an exception
- # occurs in an exception handler.
- warn(
- "An exception has occurred during handling of a "
- "previous exception. The previous exception "
- "is:\n %s %s\n" % (self._exc_info[0], self._exc_info[1])
- )
self._exc_info = None # remove potential circular references
compat.raise_(value, with_traceback=traceback)
def string_or_unprintable(element):
- if isinstance(element, compat.string_types):
+ if isinstance(element, str):
return element
else:
try:
for base in bases:
pool = itertools.chain(
(base,),
- compat.itertools_imap(lambda i: base + str(i), range(1000)),
+ map(lambda i: base + str(i), range(1000)),
)
for sym in pool:
if sym not in used:
def register(self, name, modulepath, objname):
def load():
- mod = compat.import_(modulepath)
+ mod = __import__(modulepath)
for token in modulepath.split(".")[1:]:
mod = getattr(mod, token)
return getattr(mod, objname)
'apply_pos': '(self, a, b, c, **d)'}
"""
- if compat.callable(fn):
+ if callable(fn):
spec = compat.inspect_getfullargspec(fn)
else:
spec = fn
process.append(b)
hier.add(b)
- if compat.py3k:
- if c.__module__ == "builtins" or not hasattr(c, "__subclasses__"):
- continue
- else:
- if c.__module__ == "__builtin__" or not hasattr(
- c, "__subclasses__"
- ):
- continue
+ if c.__module__ == "builtins" or not hasattr(c, "__subclasses__"):
+ continue
for s in [_ for _ in c.__subclasses__() if _ not in hier]:
process.append(s)
)
env = from_instance is not None and {name: from_instance} or {}
- compat.exec_(py, env)
+ exec(py, env)
try:
env[method].__defaults__ = fn.__defaults__
except AttributeError:
for method, impl in dictlike_iteritems(obj):
if method not in interface:
raise TypeError("%r: unknown in this interface" % method)
- if not compat.callable(impl):
+ if not callable(impl):
raise TypeError("%r=%r is not callable" % (method, impl))
setattr(AnonymousInterface, method, staticmethod(impl))
found.add(method)
# from paste.deploy.converters
def asbool(obj):
- if isinstance(obj, compat.string_types):
+ if isinstance(obj, str):
obj = obj.strip().lower()
if obj in ["true", "yes", "on", "y", "t", "1"]:
return True
def dictlike_iteritems(dictlike):
"""Return a (key, value) iterator for almost any dict-like object."""
- if compat.py3k:
- if hasattr(dictlike, "items"):
- return list(dictlike.items())
- else:
- if hasattr(dictlike, "iteritems"):
- return dictlike.iteritems()
- elif hasattr(dictlike, "items"):
- return iter(dictlike.items())
+ if hasattr(dictlike, "items"):
+ return list(dictlike.items())
getter = getattr(dictlike, "__getitem__", getattr(dictlike, "get", None))
if getter is None:
class _symbol(int):
def __new__(self, name, doc=None, canonical=None):
"""Construct a new named symbol."""
- assert isinstance(name, compat.string_types)
+ assert isinstance(name, str)
if canonical is None:
canonical = hash(name)
v = int.__new__(_symbol, canonical)
return value
-class _hash_limit_string(compat.text_type):
+class _hash_limit_string(str):
"""A string subclass that can only be hashed on a maximum amount
of unique values.
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import profiling
-from sqlalchemy.util import u
NUM_FIELDS = 10
t.insert(),
[
dict(
- ("field%d" % fnum, u("value%d" % fnum))
+ ("field%d" % fnum, "value%d" % fnum)
for fnum in range(NUM_FIELDS)
)
for r_num in range(NUM_RECORDS)
t2.insert(),
[
dict(
- ("field%d" % fnum, u("value%d" % fnum))
+ ("field%d" % fnum, "value%d" % fnum)
for fnum in range(NUM_FIELDS)
)
for r_num in range(NUM_RECORDS)
"""Test event registration and listening."""
+from unittest.mock import call
+from unittest.mock import Mock
+
from sqlalchemy import event
from sqlalchemy import exc
from sqlalchemy import testing
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import is_not
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.testing import combinations_list
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
-from sqlalchemy.util import compat
-from sqlalchemy.util import u
class Error(Exception):
def test_wrap_unicode_arg(self):
# this is not supported by the API but oslo_db is doing it
orig = sa_exceptions.DBAPIError(False, False, False)
- orig.args = [u("méil")]
+ orig.args = ["méil"]
eq_(
- compat.text_type(orig),
- compat.u(
- "méil\n(Background on this error at: "
- "https://sqlalche.me/e/%s/dbapi)"
- % sa_exceptions._version_token
- ),
+ str(orig),
+ "méil\n(Background on this error at: "
+ "https://sqlalche.me/e/%s/dbapi)" % sa_exceptions._version_token,
)
- eq_(orig.args, (u("méil"),))
+ eq_(orig.args, ("méil",))
def test_tostring_large_dict(self):
try:
from sqlalchemy import Column
from sqlalchemy.testing import fixtures
-from sqlalchemy.testing import requires
class TestGenerics(fixtures.TestBase):
- @requires.generic_classes
def test_traversible_is_generic(self):
col = Column[int]
assert col is Column
-from __future__ import print_function
-
import doctest
import logging
import os
checker=_get_unicode_checker(),
)
parser = doctest.DocTestParser()
- globs = {"print_function": print_function}
+ globs = {"print_function": print}
for fname in fnames:
path = os.path.join(sqla_base, "doc/build", fname)
#! coding: utf-8
import copy
-import datetime
import inspect
+import pickle
import sys
from sqlalchemy import exc
from sqlalchemy.util import compat
from sqlalchemy.util import get_callable_argspec
from sqlalchemy.util import langhelpers
-from sqlalchemy.util import timezone
from sqlalchemy.util import WeakSequence
return super_, sub_, twin1, twin2, unique1, unique2
def _assert_unorderable_types(self, callable_):
- if util.py3k:
- assert_raises_message(
- TypeError, "not supported between instances of", callable_
- )
- else:
- assert_raises_message(
- TypeError, "cannot compare sets using cmp()", callable_
- )
+ assert_raises_message(
+ TypeError, "not supported between instances of", callable_
+ )
def test_basic_sanity(self):
IdentitySet = util.IdentitySet
assert sym1 is sym2
# default
- s = util.pickle.dumps(sym1)
- util.pickle.loads(s)
+ s = pickle.dumps(sym1)
+ pickle.loads(s)
for protocol in 0, 1, 2:
print(protocol)
- serial = util.pickle.dumps(sym1)
- rt = util.pickle.loads(serial)
+ serial = pickle.dumps(sym1)
+ rt = pickle.loads(serial)
assert rt is sym1
assert rt is sym2
class _Py3KFixtures:
- def _kw_only_fixture(self):
+ def _kw_only_fixture(self, a, *, b, c):
pass
- def _kw_plus_posn_fixture(self):
+ def _kw_plus_posn_fixture(self, a, *args, b, c):
pass
- def _kw_opt_fixture(self):
+ def _kw_opt_fixture(self, a, *, b, c="c"):
pass
-if util.py3k:
- _locals = {}
- exec(
- """
-def _kw_only_fixture(self, a, *, b, c):
- pass
-
-def _kw_plus_posn_fixture(self, a, *args, b, c):
- pass
-
-def _kw_opt_fixture(self, a, *, b, c="c"):
- pass
-""",
- _locals,
- )
- for k in _locals:
- setattr(_Py3KFixtures, k, _locals[k])
-
py3k_fixtures = _Py3KFixtures()
def test_ascii_to_utf8(self):
eq_(
compat.decode_backslashreplace(util.b("hello world"), "utf-8"),
- util.u("hello world"),
+ "hello world",
)
def test_utf8_to_utf8(self):
eq_(
compat.decode_backslashreplace(
- util.u("some message méil").encode("utf-8"), "utf-8"
+ "some message méil".encode("utf-8"), "utf-8"
),
- util.u("some message méil"),
+ "some message méil",
)
def test_latin1_to_utf8(self):
eq_(
compat.decode_backslashreplace(
- util.u("some message méil").encode("latin-1"), "utf-8"
+ "some message méil".encode("latin-1"), "utf-8"
),
- util.u("some message m\\xe9il"),
+ "some message m\\xe9il",
)
eq_(
compat.decode_backslashreplace(
- util.u("some message méil").encode("latin-1"), "latin-1"
+ "some message méil".encode("latin-1"), "latin-1"
),
- util.u("some message méil"),
+ "some message méil",
)
def test_cp1251_to_utf8(self):
- message = util.u("some message П").encode("cp1251")
+ message = "some message П".encode("cp1251")
eq_(message, b"some message \xcf")
eq_(
compat.decode_backslashreplace(message, "utf-8"),
- util.u("some message \\xcf"),
+ "some message \\xcf",
)
eq_(
compat.decode_backslashreplace(message, "cp1251"),
- util.u("some message П"),
- )
-
-
-class TimezoneTest(fixtures.TestBase):
- """test the python 2 backport of the "timezone" class.
-
- Note under python 3, these tests work against the builtin timezone,
- thereby providing confirmation that the tests are correct.
-
- """
-
- @testing.combinations(
- (datetime.timedelta(0), "UTC"),
- (datetime.timedelta(hours=5), "UTC+05:00"),
- (datetime.timedelta(hours=5, minutes=10), "UTC+05:10"),
- (
- datetime.timedelta(hours=5, minutes=10, seconds=27),
- "UTC+05:10:27",
- testing.requires.granular_timezone,
- ),
- (datetime.timedelta(hours=-3, minutes=10), "UTC-02:50"),
- (
- datetime.timedelta(
- hours=5, minutes=10, seconds=27, microseconds=550
- ),
- "UTC+05:10:27.000550",
- testing.requires.granular_timezone,
- ),
- )
- def test_tzname(self, td, expected):
- eq_(timezone(td).tzname(None), expected)
-
- def test_utcoffset(self):
- eq_(
- timezone(datetime.timedelta(hours=5)).utcoffset(None),
- datetime.timedelta(hours=5),
- )
-
- def test_fromutc(self):
- tzinfo = timezone(datetime.timedelta(hours=5))
- dt = datetime.datetime(2017, 10, 5, 12, 55, 38, tzinfo=tzinfo)
- eq_(
- dt.astimezone(timezone.utc),
- datetime.datetime(2017, 10, 5, 7, 55, 38, tzinfo=timezone.utc),
- )
-
- # this is the same as hours=-3
- del_ = datetime.timedelta(days=-1, seconds=75600)
- eq_(
- dt.astimezone(timezone(datetime.timedelta(hours=-3))),
- datetime.datetime(2017, 10, 5, 4, 55, 38, tzinfo=timezone(del_)),
- )
-
- def test_repr(self):
- eq_(
- repr(timezone(datetime.timedelta(hours=5))),
- "datetime.timezone(%r)" % (datetime.timedelta(hours=5)),
+ "some message П",
)
collect_ignore_glob = []
-# minimum version for a py3k only test is at
-# 3.6 because these are asyncio tests anyway
-if sys.version_info[0:2] < (3, 6):
- collect_ignore_glob.append("*_py3k.py")
-
pytest.register_assert_rewrite("sqlalchemy.testing.assertions")
# -*- encoding: utf-8
+from unittest.mock import Mock
+
from sqlalchemy import Column
from sqlalchemy import engine_from_config
from sqlalchemy import Integer
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
-from sqlalchemy.testing.mock import Mock
def _legacy_schema_aliasing_warning():
# -*- encoding: utf-8
from decimal import Decimal
+from unittest.mock import Mock
from sqlalchemy import Column
from sqlalchemy import event
from sqlalchemy.testing import expect_warnings
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
-from sqlalchemy.testing.mock import Mock
class ParseConnectTest(fixtures.TestBase):
from sqlalchemy import testing
from sqlalchemy import types
from sqlalchemy import types as sqltypes
-from sqlalchemy import util
from sqlalchemy.dialects import mssql
from sqlalchemy.dialects.mssql import base
from sqlalchemy.dialects.mssql.information_schema import tables
eq_(type(col["identity"]["increment"]), int)
elif col["name"] == "id3":
eq_(col["identity"], {"start": 1, "increment": 1})
- eq_(type(col["identity"]["start"]), util.compat.long_type)
- eq_(type(col["identity"]["increment"]), util.compat.long_type)
+ eq_(type(col["identity"]["start"]), int)
+ eq_(type(col["identity"]["increment"]), int)
elif col["name"] == "id4":
eq_(col["identity"], {"start": 1, "increment": 1})
eq_(type(col["identity"]["start"]), int)
from sqlalchemy import types
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
-from sqlalchemy import util
from sqlalchemy.dialects.mssql import base as mssql
from sqlalchemy.dialects.mssql import ROWVERSION
from sqlalchemy.dialects.mssql import TIMESTAMP
2,
32,
123456,
- util.timezone(datetime.timedelta(hours=-5)),
+ datetime.timezone(datetime.timedelta(hours=-5)),
)
return t, (d1, t1, d2, d3)
11,
2,
32,
- tzinfo=util.timezone(datetime.timedelta(hours=-5)),
+ tzinfo=datetime.timezone(datetime.timedelta(hours=-5)),
),
),
(datetime.datetime(2007, 10, 30, 11, 2, 32)),
).first()
if not date.tzinfo:
- eq_(row, (date, date.replace(tzinfo=util.timezone.utc)))
+ eq_(row, (date, date.replace(tzinfo=datetime.timezone.utc)))
else:
eq_(row, (date.replace(tzinfo=None), date))
2,
32,
123456,
- util.timezone(datetime.timedelta(hours=1)),
+ datetime.timezone(datetime.timedelta(hours=1)),
),
1,
False,
2,
32,
123456,
- util.timezone(datetime.timedelta(hours=-5)),
+ datetime.timezone(datetime.timedelta(hours=-5)),
),
-5,
False,
2,
32,
123456,
- util.timezone(datetime.timedelta(seconds=4000)),
+ datetime.timezone(datetime.timedelta(seconds=4000)),
),
None,
True,
2,
32,
123456,
- util.timezone(
+ datetime.timezone(
datetime.timedelta(hours=expected_offset_hours)
),
),
def test_string_text_literal_binds_explicit_unicode_right(self):
self.assert_compile(
- column("x", String()) == util.u("foo"),
+ column("x", String()) == "foo",
"x = 'foo'",
literal_binds=True,
)
# Unicode on Python 3 for plain string, test with unicode
# string just to confirm literal is doing this
self.assert_compile(
- column("x", String()) == literal(util.u("foo")),
+ column("x", String()) == literal("foo"),
"x = N'foo'",
literal_binds=True,
)
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy import UniqueConstraint
-from sqlalchemy import util
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.dialects.mysql import reflection as _reflection
from sqlalchemy.schema import CreateIndex
},
]
ischema_casing_1 = [
- (util.u("Test"), util.u("Track"), "TrackID"),
- (util.u("Test_Schema"), util.u("Track"), "TrackID"),
+ ("Test", "Track", "TrackID"),
+ ("Test_Schema", "Track", "TrackID"),
]
return fkeys_casing_1, ischema_casing_1
from sqlalchemy import TypeDecorator
from sqlalchemy import types as sqltypes
from sqlalchemy import UnicodeText
-from sqlalchemy import util
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import eq_regex
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
-from sqlalchemy.util import u
class TypeCompileTest(fixtures.TestBase, AssertsCompiledSQL):
# in order to test the condition here, need to use
# MySQLdb 1.2.3 and also need to pass either use_unicode=1
# or charset=utf8 to the URL.
- connection.execute(t.insert(), dict(id=1, data=u("some text")))
- assert isinstance(connection.scalar(select(t.c.data)), util.text_type)
+ connection.execute(t.insert(), dict(id=1, data="some text"))
+ assert isinstance(connection.scalar(select(t.c.data)), str)
@testing.metadata_fixture(ddl="class")
def bit_table(self, metadata):
"t",
metadata,
Column("id", Integer, primary_key=True),
- Column("data", mysql.SET(u("réveillé"), u("drôle"), u("S’il"))),
+ Column("data", mysql.SET("réveillé", "drôle", "S’il")),
)
set_table.create(connection)
connection.execute(
- set_table.insert(), {"data": set([u("réveillé"), u("drôle")])}
+ set_table.insert(), {"data": set(["réveillé", "drôle"])}
)
row = connection.execute(set_table.select()).first()
- eq_(row, (1, set([u("réveillé"), u("drôle")])))
+ eq_(row, (1, set(["réveillé", "drôle"])))
def test_int_roundtrip(self, metadata, connection):
set_table = self._set_fixture_one(metadata)
"table",
metadata,
Column("id", Integer, primary_key=True),
- Column("value", Enum(u("réveillé"), u("drôle"), u("S’il"))),
- Column("value2", mysql.ENUM(u("réveillé"), u("drôle"), u("S’il"))),
+ Column("value", Enum("réveillé", "drôle", "S’il")),
+ Column("value2", mysql.ENUM("réveillé", "drôle", "S’il")),
)
metadata.create_all(connection)
connection.execute(
t1.insert(),
[
- dict(value=u("drôle"), value2=u("drôle")),
- dict(value=u("réveillé"), value2=u("réveillé")),
- dict(value=u("S’il"), value2=u("S’il")),
+ dict(value="drôle", value2="drôle"),
+ dict(value="réveillé", value2="réveillé"),
+ dict(value="S’il", value2="S’il"),
],
)
eq_(
connection.execute(t1.select().order_by(t1.c.id)).fetchall(),
[
- (1, u("drôle"), u("drôle")),
- (2, u("réveillé"), u("réveillé")),
- (3, u("S’il"), u("S’il")),
+ (1, "drôle", "drôle"),
+ (2, "réveillé", "réveillé"),
+ (3, "S’il", "S’il"),
],
)
# latin-1 stuff forcing its way in ?
eq_(
- t2.c.value.type.enums[0:2], [u("réveillé"), u("drôle")]
+ t2.c.value.type.enums[0:2], ["réveillé", "drôle"]
) # u'S’il') # eh ?
eq_(
- t2.c.value2.type.enums[0:2], [u("réveillé"), u("drôle")]
+ t2.c.value2.type.enums[0:2], ["réveillé", "drôle"]
) # u'S’il') # eh ?
def test_enum_compile(self):
# coding: utf-8
import re
+from unittest.mock import Mock
from sqlalchemy import bindparam
from sqlalchemy import Computed
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import mock
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.suite import test_select
-from sqlalchemy.util import u
-from sqlalchemy.util import ue
class DialectTest(fixtures.TestBase):
)
def _assert_errorhandler(self, outconverter, has_errorhandler):
- data = ue("\uee2c\u9a66") # this is u"\uee2c\u9a66"
+        data = "\uee2c\u9a66"  # non-ASCII sample (formerly util.ue("\uee2c\u9a66"))
utf8_w_errors = data.encode("utf-16")
)
metadata.create_all(connection)
- connection.execute(table.insert(), {"_underscorecolumn": u("’é")})
+ connection.execute(table.insert(), {"_underscorecolumn": "’é"})
result = connection.execute(
- table.select().where(table.c._underscorecolumn == u("’é"))
+ table.select().where(table.c._underscorecolumn == "’é")
).scalar()
- eq_(result, u("’é"))
+ eq_(result, "’é")
def test_quoted_column_unicode(self, metadata, connection):
table = Table(
"atable",
metadata,
- Column(u("méil"), Unicode(255), primary_key=True),
+ Column("méil", Unicode(255), primary_key=True),
)
metadata.create_all(connection)
- connection.execute(table.insert(), {u("méil"): u("’é")})
+ connection.execute(table.insert(), {"méil": "’é"})
result = connection.execute(
- table.select().where(table.c[u("méil")] == u("’é"))
+ table.select().where(table.c["méil"] == "’é")
).scalar()
- eq_(result, u("’é"))
+ eq_(result, "’é")
class CXOracleConnectArgsTest(fixtures.TestBase):
from sqlalchemy import types as sqltypes
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
-from sqlalchemy import util
from sqlalchemy import VARCHAR
from sqlalchemy.dialects.oracle import base as oracle
from sqlalchemy.dialects.oracle import cx_oracle
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.util import b
-from sqlalchemy.util import u
def exec_sql(conn, sql, *args, **kwargs):
cx_oracle._OracleNChar,
)
- data = u("m’a réveillé.")
+ data = "m’a réveillé."
connection.execute(t2.insert(), dict(nv_data=data, c_data=data))
nv_data, c_data = connection.execute(t2.select()).first()
eq_(nv_data, data)
eq_(c_data, data + (" " * 7)) # char is space padded
- assert isinstance(nv_data, util.text_type)
- assert isinstance(c_data, util.text_type)
+ assert isinstance(nv_data, str)
+ assert isinstance(c_data, str)
def test_reflect_unicode_no_nvarchar(self, metadata, connection):
Table("tnv", metadata, Column("data", sqltypes.Unicode(255)))
cx_oracle._OracleString,
)
- data = u("m’a réveillé.")
+ data = "m’a réveillé."
connection.execute(t2.insert(), {"data": data})
res = connection.execute(t2.select()).first().data
eq_(res, data)
- assert isinstance(res, util.text_type)
+ assert isinstance(res, str)
def test_char_length(self, metadata, connection):
t1 = Table(
{},
),
]:
- if isinstance(stmt, util.string_types):
+ if isinstance(stmt, str):
test_exp = conn.exec_driver_sql(stmt, kw).scalar()
else:
test_exp = conn.scalar(stmt, **kw)
(oracle.BINARY_DOUBLE, 25.34534, "NATIVE_FLOAT", False),
(oracle.BINARY_FLOAT, 25.34534, "NATIVE_FLOAT", False),
(oracle.DOUBLE_PRECISION, 25.34534, None, False),
- (Unicode(30), u("test"), "NCHAR", True),
- (UnicodeText(), u("test"), "NCLOB", True),
- (Unicode(30), u("test"), None, False),
- (UnicodeText(), u("test"), "CLOB", False),
+ (Unicode(30), "test", "NCHAR", True),
+ (UnicodeText(), "test", "NCLOB", True),
+ (Unicode(30), "test", None, False),
+ (UnicodeText(), "test", "CLOB", False),
(String(30), "test", None, False),
(CHAR(30), "test", "FIXED_CHAR", False),
- (NCHAR(30), u("test"), "FIXED_NCHAR", False),
+ (NCHAR(30), "test", "FIXED_NCHAR", False),
(oracle.LONG(), "test", None, False),
argnames="datatype, value, sis_value_text, set_nchar_flag",
)
from sqlalchemy.testing.assertions import expect_warnings
from sqlalchemy.testing.assertions import is_
from sqlalchemy.util import OrderedDict
-from sqlalchemy.util import u
class SequenceTest(fixtures.TestBase, AssertsCompiledSQL):
def test_create_drop_enum(self):
# test escaping and unicode within CREATE TYPE for ENUM
- typ = postgresql.ENUM(
- "val1", "val2", "val's 3", u("méil"), name="myname"
- )
+ typ = postgresql.ENUM("val1", "val2", "val's 3", "méil", name="myname")
self.assert_compile(
postgresql.CreateEnumType(typ),
- u(
- "CREATE TYPE myname AS "
- "ENUM ('val1', 'val2', 'val''s 3', 'méil')"
- ),
+ "CREATE TYPE myname AS ENUM ('val1', 'val2', 'val''s 3', 'méil')",
)
typ = postgresql.ENUM("val1", "val2", "val's 3", name="PleaseQuoteMe")
from sqlalchemy.testing.assertions import eq_
from sqlalchemy.testing.assertions import eq_regex
from sqlalchemy.testing.assertions import ne_
-from sqlalchemy.util import u
-from sqlalchemy.util import ue
if True:
from sqlalchemy.dialects.postgresql.psycopg2 import (
)
Table(
- u("Unitéble2"),
+ "Unitéble2",
metadata,
- Column(u("méil"), Integer, primary_key=True),
- Column(ue("\u6e2c\u8a66"), Integer),
+ Column("méil", Integer, primary_key=True),
+ Column("\u6e2c\u8a66", Integer),
)
def test_insert(self, connection):
)
def test_insert_unicode_keys(self, connection):
- table = self.tables[u("Unitéble2")]
+ table = self.tables["Unitéble2"]
stmt = table.insert()
connection.execute(
stmt,
[
- {u("méil"): 1, ue("\u6e2c\u8a66"): 1},
- {u("méil"): 2, ue("\u6e2c\u8a66"): 2},
- {u("méil"): 3, ue("\u6e2c\u8a66"): 3},
+ {"méil": 1, "\u6e2c\u8a66": 1},
+ {"méil": 2, "\u6e2c\u8a66": 2},
+ {"méil": 3, "\u6e2c\u8a66": 3},
],
)
# coding: utf-8
import datetime
import decimal
+from enum import Enum as _PY_Enum
import re
import uuid
Column(
"value",
Enum(
- util.u("réveillé"),
- util.u("drôle"),
- util.u("S’il"),
+ "réveillé",
+ "drôle",
+ "S’il",
name="onetwothreetype",
),
),
)
metadata.create_all(connection)
- connection.execute(t1.insert(), dict(value=util.u("drôle")))
- connection.execute(t1.insert(), dict(value=util.u("réveillé")))
- connection.execute(t1.insert(), dict(value=util.u("S’il")))
+ connection.execute(t1.insert(), dict(value="drôle"))
+ connection.execute(t1.insert(), dict(value="réveillé"))
+ connection.execute(t1.insert(), dict(value="S’il"))
eq_(
connection.execute(t1.select().order_by(t1.c.id)).fetchall(),
[
- (1, util.u("drôle")),
- (2, util.u("réveillé")),
- (3, util.u("S’il")),
+ (1, "drôle"),
+ (2, "réveillé"),
+ (3, "S’il"),
],
)
m2 = MetaData()
t2 = Table("table", m2, autoload_with=connection)
eq_(
t2.c.value.type.enums,
- [util.u("réveillé"), util.u("drôle"), util.u("S’il")],
+ ["réveillé", "drôle", "S’il"],
)
def test_non_native_enum(self, metadata, connection):
"bar",
Enum(
"B",
- util.u("Ü"),
+ "Ü",
name="myenum",
create_constraint=True,
native_enum=False,
go,
[
(
- util.u(
- "CREATE TABLE foo (\tbar "
- "VARCHAR(1), \tCONSTRAINT myenum CHECK "
- "(bar IN ('B', 'Ü')))"
- ),
+ "CREATE TABLE foo (\tbar "
+ "VARCHAR(1), \tCONSTRAINT myenum CHECK "
+ "(bar IN ('B', 'Ü')))",
{},
)
],
)
- connection.execute(t1.insert(), {"bar": util.u("Ü")})
- eq_(connection.scalar(select(t1.c.bar)), util.u("Ü"))
+ connection.execute(t1.insert(), {"bar": "Ü"})
+ eq_(connection.scalar(select(t1.c.bar)), "Ü")
def test_disable_create(self, metadata, connection):
metadata = self.metadata
arrtable.insert(),
dict(
intarr=[1, 2, 3],
- strarr=[util.u("abc"), util.u("def")],
+ strarr=["abc", "def"],
),
)
results = connection.execute(arrtable.select()).fetchall()
eq_(len(results), 1)
eq_(results[0].intarr, [1, 2, 3])
- eq_(results[0].strarr, [util.u("abc"), util.u("def")])
+ eq_(results[0].strarr, ["abc", "def"])
def test_insert_array_w_null(self, connection):
arrtable = self.tables.arrtable
arrtable.insert(),
dict(
intarr=[1, None, 3],
- strarr=[util.u("abc"), None],
+ strarr=["abc", None],
),
)
results = connection.execute(arrtable.select()).fetchall()
eq_(len(results), 1)
eq_(results[0].intarr, [1, None, 3])
- eq_(results[0].strarr, [util.u("abc"), None])
+ eq_(results[0].strarr, ["abc", None])
def test_array_where(self, connection):
arrtable = self.tables.arrtable
arrtable.insert(),
dict(
intarr=[1, 2, 3],
- strarr=[util.u("abc"), util.u("def")],
+ strarr=["abc", "def"],
),
)
connection.execute(
- arrtable.insert(), dict(intarr=[4, 5, 6], strarr=util.u("ABC"))
+ arrtable.insert(), dict(intarr=[4, 5, 6], strarr="ABC")
)
results = connection.execute(
arrtable.select().where(arrtable.c.intarr == [1, 2, 3])
arrtable = self.tables.arrtable
connection.execute(
arrtable.insert(),
- dict(intarr=[1, 2, 3], strarr=[util.u("abc"), util.u("def")]),
+ dict(intarr=[1, 2, 3], strarr=["abc", "def"]),
)
results = connection.execute(
select(arrtable.c.intarr + [4, 5, 6])
arrtable = self.tables.arrtable
connection.execute(
arrtable.insert(),
- dict(
- id=5, intarr=[1, 2, 3], strarr=[util.u("abc"), util.u("def")]
- ),
+ dict(id=5, intarr=[1, 2, 3], strarr=["abc", "def"]),
)
results = connection.execute(
select(arrtable.c.id).where(arrtable.c.intarr < [4, 5, 6])
arrtable.insert(),
dict(
intarr=[4, 5, 6],
- strarr=[[util.ue("m\xe4\xe4")], [util.ue("m\xf6\xf6")]],
+ strarr=[["m\xe4\xe4"], ["m\xf6\xf6"]],
),
)
connection.execute(
arrtable.insert(),
dict(
intarr=[1, 2, 3],
- strarr=[util.ue("m\xe4\xe4"), util.ue("m\xf6\xf6")],
+ strarr=["m\xe4\xe4", "m\xf6\xf6"],
),
)
results = connection.execute(
arrtable.select().order_by(arrtable.c.intarr)
).fetchall()
eq_(len(results), 2)
- eq_(results[0].strarr, [util.ue("m\xe4\xe4"), util.ue("m\xf6\xf6")])
+ eq_(results[0].strarr, ["m\xe4\xe4", "m\xf6\xf6"])
eq_(
results[1].strarr,
- [[util.ue("m\xe4\xe4")], [util.ue("m\xf6\xf6")]],
+ [["m\xe4\xe4"], ["m\xf6\xf6"]],
)
def test_array_literal_roundtrip(self, connection):
arrtable.insert(),
dict(
intarr=[4, 5, 6],
- strarr=[util.u("abc"), util.u("def")],
+ strarr=["abc", "def"],
),
)
eq_(connection.scalar(select(arrtable.c.intarr[2:3])), [5, 6])
def unicode_values(x):
return [
- util.u("réveillé"),
- util.u("drôle"),
- util.u("S’il %s" % x),
- util.u("🐍 %s" % x),
- util.u("« S’il vous"),
+ "réveillé",
+ "drôle",
+ "S’il %s" % x,
+ "🐍 %s" % x,
+ "« S’il vous",
]
def json_values(x):
@testing.fixture
def array_of_enum_fixture(self, metadata, connection):
def go(array_cls, enum_cls):
+ class MyEnum(_PY_Enum):
+ a = "aaa"
+ b = "bbb"
+ c = "ccc"
+
tbl = Table(
"enum_table",
metadata,
"enum_col",
array_cls(enum_cls("foo", "bar", "baz", name="an_enum")),
),
+ Column(
+ "pyenum_col",
+ array_cls(enum_cls(MyEnum)),
+ ),
)
- if util.py3k:
- from enum import Enum
-
- class MyEnum(Enum):
- a = "aaa"
- b = "bbb"
- c = "ccc"
-
- tbl.append_column(
- Column(
- "pyenum_col",
- array_cls(enum_cls(MyEnum)),
- ),
- )
- else:
- MyEnum = None
metadata.create_all(connection)
connection.execute(
def _test_unicode_round_trip(self, connection):
s = select(
hstore(
- array([util.u("réveillé"), util.u("drôle"), util.u("S’il")]),
- array([util.u("réveillé"), util.u("drôle"), util.u("S’il")]),
+ array(["réveillé", "drôle", "S’il"]),
+ array(["réveillé", "drôle", "S’il"]),
)
)
eq_(
connection.scalar(s),
{
- util.u("réveillé"): util.u("réveillé"),
- util.u("drôle"): util.u("drôle"),
- util.u("S’il"): util.u("S’il"),
+ "réveillé": "réveillé",
+ "drôle": "drôle",
+ "S’il": "S’il",
},
)
result = connection.execute(
select(data_table.c.data["k1"].astext)
).first()
- assert isinstance(result[0], util.text_type)
+ assert isinstance(result[0], str)
def test_query_returned_as_int(self, connection):
self._fixture_data(connection)
s = select(
cast(
{
- util.u("réveillé"): util.u("réveillé"),
- "data": {"k1": util.u("drôle")},
+ "réveillé": "réveillé",
+ "data": {"k1": "drôle"},
},
self.data_type,
)
eq_(
connection.scalar(s),
{
- util.u("réveillé"): util.u("réveillé"),
- "data": {"k1": util.u("drôle")},
+ "réveillé": "réveillé",
+ "data": {"k1": "drôle"},
},
)
from sqlalchemy import tuple_
from sqlalchemy import types as sqltypes
from sqlalchemy import UniqueConstraint
-from sqlalchemy import util
from sqlalchemy.dialects.sqlite import base as sqlite
from sqlalchemy.dialects.sqlite import insert
from sqlalchemy.dialects.sqlite import provision
from sqlalchemy.types import Integer
from sqlalchemy.types import String
from sqlalchemy.types import Time
-from sqlalchemy.util import u
-from sqlalchemy.util import ue
def exec_sql(engine, sql, *args, **kwargs):
),
)
r = conn.execute(func.current_date()).scalar()
- assert isinstance(r, util.string_types)
+ assert isinstance(r, str)
@testing.provide_metadata
def test_custom_datetime(self, connection):
sqltypes.UnicodeText(),
):
bindproc = t.dialect_impl(dialect).bind_processor(dialect)
- assert not bindproc or isinstance(
- bindproc(util.u("some string")), util.text_type
- )
+ assert not bindproc or isinstance(bindproc("some string"), str)
class JSONTest(fixtures.TestBase):
t = Table(
"x",
self.metadata,
- Column(u("méil"), Integer, primary_key=True),
- Column(ue("\u6e2c\u8a66"), Integer),
+ Column("méil", Integer, primary_key=True),
+ Column("\u6e2c\u8a66", Integer),
)
self.metadata.create_all(testing.db)
result = connection.execute(t.select())
- assert u("méil") in result.keys()
- assert ue("\u6e2c\u8a66") in result.keys()
+ assert "méil" in result.keys()
+ assert "\u6e2c\u8a66" in result.keys()
def test_pool_class(self):
e = create_engine("sqlite+pysqlite://")
import re
+from unittest.mock import Mock
import sqlalchemy as tsa
import sqlalchemy as sa
from sqlalchemy.testing.assertions import expect_deprecated
from sqlalchemy.testing.assertions import expect_raises_message
from sqlalchemy.testing.engines import testing_engine
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
# coding: utf-8
+import collections.abc as collections_abc
from contextlib import contextmanager
+from contextlib import nullcontext
+from io import StringIO
import re
import threading
+from unittest.mock import call
+from unittest.mock import Mock
+from unittest.mock import patch
import weakref
import sqlalchemy as tsa
from sqlalchemy.testing import mock
from sqlalchemy.testing.assertions import expect_deprecated
from sqlalchemy.testing.assertsql import CompiledSQL
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
-from sqlalchemy.testing.mock import patch
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.testing.util import picklers
-from sqlalchemy.util import collections_abc
class SomeException(Exception):
return "foo"
def __unicode__(self):
- return util.u("fóó")
+ return "fóó"
class ExecuteTest(fixtures.TablesTest):
eq_(conn.execute(obj).scalar(), 1)
def test_stmt_exception_bytestring_raised(self):
- name = util.u("méil")
+ name = "méil"
users = self.tables.users
with testing.db.connect() as conn:
assert_raises_message(
def test_stmt_exception_bytestring_utf8(self):
# uncommon case for Py3K, bytestring object passed
# as the error message
- message = util.u("some message méil").encode("utf-8")
+ message = "some message méil".encode("utf-8")
err = tsa.exc.SQLAlchemyError(message)
- eq_(str(err), util.u("some message méil"))
+ eq_(str(err), "some message méil")
def test_stmt_exception_bytestring_latin1(self):
# uncommon case for Py3K, bytestring object passed
# as the error message
- message = util.u("some message méil").encode("latin-1")
+ message = "some message méil".encode("latin-1")
err = tsa.exc.SQLAlchemyError(message)
- eq_(str(err), util.u("some message m\\xe9il"))
+ eq_(str(err), "some message m\\xe9il")
def test_stmt_exception_unicode_hook_unicode(self):
    # plain str object passed
    # as the error message
- message = util.u("some message méil")
+ message = "some message méil"
err = tsa.exc.SQLAlchemyError(message)
- eq_(str(err), util.u("some message méil"))
+ eq_(str(err), "some message méil")
def test_stmt_exception_object_arg(self):
err = tsa.exc.SQLAlchemyError(Foo())
eq_(str(err), "('some message', 206)")
def test_stmt_exception_str_multi_args_bytestring(self):
- message = util.u("some message méil").encode("utf-8")
+ message = "some message méil".encode("utf-8")
err = tsa.exc.SQLAlchemyError(message, 206)
eq_(str(err), str((message, 206)))
def test_stmt_exception_str_multi_args_unicode(self):
- message = util.u("some message méil")
+ message = "some message méil"
err = tsa.exc.SQLAlchemyError(message, 206)
eq_(str(err), str((message, 206)))
class MockStrategyTest(fixtures.TestBase):
def _engine_fixture(self):
- buf = util.StringIO()
+ buf = StringIO()
def dump(sql, *multiparams, **params):
- buf.write(util.text_type(sql.compile(dialect=engine.dialect)))
+ buf.write(str(sql.compile(dialect=engine.dialect)))
engine = create_mock_engine("postgresql+psycopg2://", executor=dump)
return engine, buf
lambda self: None,
)
else:
- patcher = util.nullcontext()
+ patcher = nullcontext()
with patcher:
e1 = testing_engine(config.db_url)
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
-from sqlalchemy import util
from sqlalchemy.sql import util as sql_util
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
% (largeparam[0:149], largeparam[-149:]),
)
- if util.py3k:
- eq_(
- self.buf.buffer[5].message,
- "Row ('%s ... (4702 characters truncated) ... %s',)"
- % (largeparam[0:149], largeparam[-149:]),
- )
- else:
- eq_(
- self.buf.buffer[5].message,
- "Row (u'%s ... (4703 characters truncated) ... %s',)"
- % (largeparam[0:148], largeparam[-149:]),
- )
+ eq_(
+ self.buf.buffer[5].message,
+ "Row ('%s ... (4702 characters truncated) ... %s',)"
+ % (largeparam[0:149], largeparam[-149:]),
+ )
- if util.py3k:
- eq_(
- repr(row),
- "('%s ... (4702 characters truncated) ... %s',)"
- % (largeparam[0:149], largeparam[-149:]),
- )
- else:
- eq_(
- repr(row),
- "(u'%s ... (4703 characters truncated) ... %s',)"
- % (largeparam[0:148], largeparam[-149:]),
- )
+ eq_(
+ repr(row),
+ "('%s ... (4702 characters truncated) ... %s',)"
+ % (largeparam[0:149], largeparam[-149:]),
+ )
def test_error_large_dict(self):
assert_raises_message(
+from unittest.mock import call
+from unittest.mock import MagicMock
+from unittest.mock import Mock
+
import sqlalchemy as tsa
from sqlalchemy import create_engine
from sqlalchemy import engine_from_config
from sqlalchemy.testing import ne_
from sqlalchemy.testing.assertions import expect_deprecated
from sqlalchemy.testing.assertions import expect_raises_message
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import MagicMock
-from sqlalchemy.testing.mock import Mock
dialect = None
import random
import threading
import time
+from unittest.mock import ANY
+from unittest.mock import call
+from unittest.mock import Mock
+from unittest.mock import patch
import weakref
import sqlalchemy as tsa
from sqlalchemy.testing import is_true
from sqlalchemy.testing import mock
from sqlalchemy.testing.engines import testing_engine
-from sqlalchemy.testing.mock import ANY
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
-from sqlalchemy.testing.mock import patch
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.testing.util import lazy_gc
import time
+from unittest.mock import call
+from unittest.mock import Mock
import sqlalchemy as tsa
from sqlalchemy import create_engine
from sqlalchemy.testing import mock
from sqlalchemy.testing import ne_
from sqlalchemy.testing.engines import testing_engine
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
from sqlalchemy.testing import skip
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
-from sqlalchemy.util import ue
class ReflectionTest(fixtures.TestBase, ComparesTables):
no_has_table = [
(
"no_has_table_1",
- ue("col_Unit\u00e9ble"),
- ue("ix_Unit\u00e9ble"),
+ "col_Unit\u00e9ble",
+ "ix_Unit\u00e9ble",
),
- ("no_has_table_2", ue("col_\u6e2c\u8a66"), ue("ix_\u6e2c\u8a66")),
+ ("no_has_table_2", "col_\u6e2c\u8a66", "ix_\u6e2c\u8a66"),
]
no_case_sensitivity = [
(
- ue("\u6e2c\u8a66"),
- ue("col_\u6e2c\u8a66"),
- ue("ix_\u6e2c\u8a66"),
+ "\u6e2c\u8a66",
+ "col_\u6e2c\u8a66",
+ "ix_\u6e2c\u8a66",
),
(
- ue("unit\u00e9ble"),
- ue("col_unit\u00e9ble"),
- ue("ix_unit\u00e9ble"),
+ "unit\u00e9ble",
+ "col_unit\u00e9ble",
+ "ix_unit\u00e9ble",
),
]
full = [
(
- ue("Unit\u00e9ble"),
- ue("col_Unit\u00e9ble"),
- ue("ix_Unit\u00e9ble"),
+ "Unit\u00e9ble",
+ "col_Unit\u00e9ble",
+ "ix_Unit\u00e9ble",
),
(
- ue("\u6e2c\u8a66"),
- ue("col_\u6e2c\u8a66"),
- ue("ix_\u6e2c\u8a66"),
+ "\u6e2c\u8a66",
+ "col_\u6e2c\u8a66",
+ "ix_\u6e2c\u8a66",
),
]
savepoint = savepoint[0]
assert not savepoint.is_active
- if util.py3k:
- # ensure cause comes from the DBAPI
- assert isinstance(exc_.__cause__, testing.db.dialect.dbapi.Error)
+ # ensure cause comes from the DBAPI
+ assert isinstance(exc_.__cause__, testing.db.dialect.dbapi.Error)
def test_retains_through_options(self, local_connection):
connection = local_connection
from sqlalchemy import Column
from sqlalchemy import Enum
-from sqlalchemy.orm import declarative_base, Mapped
+from sqlalchemy.orm import declarative_base
+from sqlalchemy.orm import Mapped
from . import enum_col_import1
-from .enum_col_import1 import IntEnum, StrEnum
+from .enum_col_import1 import IntEnum
+from .enum_col_import1 import StrEnum
Base = declarative_base()
import copy
import pickle
+from unittest.mock import call
+from unittest.mock import Mock
from sqlalchemy import cast
from sqlalchemy import exc
from sqlalchemy.testing import is_
from sqlalchemy.testing import is_false
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
import random
import threading
import time
+from unittest.mock import Mock
+from unittest.mock import patch
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
-from sqlalchemy.testing.mock import Mock
-from sqlalchemy.testing.mock import patch
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from ..orm._fixtures import FixtureTest
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import testing
-from sqlalchemy import util
from sqlalchemy.ext.mutable import MutableComposite
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.ext.mutable import MutableList
# print(repr(pickles))
# return
- if util.py3k:
- pickles = [
- b"\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\x16"
- b"sqlalchemy.ext.mutable\x94\x8c\x0bMutableList\x94\x93\x94)"
- b"\x81\x94(K\x01K\x02e]\x94(K\x01K\x02eb.",
- b"ccopy_reg\n_reconstructor\np0\n(csqlalchemy.ext.mutable\n"
- b"MutableList\np1\nc__builtin__\nlist\np2\n(lp3\nI1\naI2\n"
- b"atp4\nRp5\n(lp6\nI1\naI2\nab.",
- b"ccopy_reg\n_reconstructor\nq\x00(csqlalchemy.ext.mutable\n"
- b"MutableList\nq\x01c__builtin__\nlist\nq\x02]q\x03(K\x01K"
- b"\x02etq\x04Rq\x05]q\x06(K\x01K\x02eb.",
- b"\x80\x02csqlalchemy.ext.mutable\nMutableList\nq\x00)\x81q"
- b"\x01(K\x01K\x02e]q\x02(K\x01K\x02eb.",
- b"\x80\x03csqlalchemy.ext.mutable\nMutableList\nq\x00)\x81q"
- b"\x01(K\x01K\x02e]q\x02(K\x01K\x02eb.",
- b"\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\x16"
- b"sqlalchemy.ext.mutable\x94\x8c\x0bMutableList\x94\x93\x94)"
- b"\x81\x94(K\x01K\x02e]\x94(K\x01K\x02eb.",
- ]
- else:
- pickles = [
- "\x80\x02csqlalchemy.ext.mutable\nMutableList\nq\x00]q\x01"
- "(K\x01K\x02e\x85q\x02Rq\x03.",
- "\x80\x02csqlalchemy.ext.mutable\nMutableList"
- "\nq\x00]q\x01(K\x01K\x02e\x85q\x02Rq\x03.",
- "csqlalchemy.ext.mutable\nMutableList\np0\n"
- "((lp1\nI1\naI2\natp2\nRp3\n.",
- "csqlalchemy.ext.mutable\nMutableList\nq\x00(]"
- "q\x01(K\x01K\x02etq\x02Rq\x03.",
- "\x80\x02csqlalchemy.ext.mutable\nMutableList"
- "\nq\x01]q\x02(K\x01K\x02e\x85Rq\x03.",
- "\x80\x02csqlalchemy.ext.mutable\nMutableList\n"
- "q\x01]q\x02(K\x01K\x02e\x85Rq\x03.",
- "csqlalchemy.ext.mutable\nMutableList\np1\n"
- "((lp2\nI1\naI2\natRp3\n.",
- "csqlalchemy.ext.mutable\nMutableList\nq\x01"
- "(]q\x02(K\x01K\x02etRq\x03.",
- ]
+ pickles = [
+ b"\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\x16"
+ b"sqlalchemy.ext.mutable\x94\x8c\x0bMutableList\x94\x93\x94)"
+ b"\x81\x94(K\x01K\x02e]\x94(K\x01K\x02eb.",
+ b"ccopy_reg\n_reconstructor\np0\n(csqlalchemy.ext.mutable\n"
+ b"MutableList\np1\nc__builtin__\nlist\np2\n(lp3\nI1\naI2\n"
+ b"atp4\nRp5\n(lp6\nI1\naI2\nab.",
+ b"ccopy_reg\n_reconstructor\nq\x00(csqlalchemy.ext.mutable\n"
+ b"MutableList\nq\x01c__builtin__\nlist\nq\x02]q\x03(K\x01K"
+ b"\x02etq\x04Rq\x05]q\x06(K\x01K\x02eb.",
+ b"\x80\x02csqlalchemy.ext.mutable\nMutableList\nq\x00)\x81q"
+ b"\x01(K\x01K\x02e]q\x02(K\x01K\x02eb.",
+ b"\x80\x03csqlalchemy.ext.mutable\nMutableList\nq\x00)\x81q"
+ b"\x01(K\x01K\x02e]q\x02(K\x01K\x02eb.",
+ b"\x80\x04\x95<\x00\x00\x00\x00\x00\x00\x00\x8c\x16"
+ b"sqlalchemy.ext.mutable\x94\x8c\x0bMutableList\x94\x93\x94)"
+ b"\x81\x94(K\x01K\x02e]\x94(K\x01K\x02eb.",
+ ]
for pickle_ in pickles:
obj = pickle.loads(pickle_)
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
-from sqlalchemy.util import ue
def pickle_protocols():
],
)
- @testing.requires.non_broken_pickle
def test_query_two(self):
q = (
Session.query(User)
eq_(q2.all(), [User(name="fred")])
eq_(list(q2.with_entities(User.id, User.name)), [(9, "fred")])
- @testing.requires.non_broken_pickle
def test_query_three(self):
ua = aliased(User)
q = (
pickled_failing = serializer.dumps(j, prot)
serializer.loads(pickled_failing, users.metadata, None)
- @testing.requires.non_broken_pickle
def test_orm_join(self):
from sqlalchemy.orm.util import join
[(u7, u8), (u7, u9), (u7, u10), (u8, u9), (u8, u10)],
)
- @testing.requires.non_broken_pickle
def test_any(self):
r = User.addresses.any(Address.email == "x")
ser = serializer.dumps(r, -1)
def test_unicode(self):
m = MetaData()
- t = Table(
- ue("\u6e2c\u8a66"), m, Column(ue("\u6e2c\u8a66_id"), Integer)
- )
+ t = Table("\u6e2c\u8a66", m, Column("\u6e2c\u8a66_id", Integer))
- expr = select(t).where(t.c[ue("\u6e2c\u8a66_id")] == 5)
+ expr = select(t).where(t.c["\u6e2c\u8a66_id"] == 5)
expr2 = serializer.loads(serializer.dumps(expr, -1), m)
self.assert_compile(
expr2,
- ue(
- 'SELECT "\u6e2c\u8a66"."\u6e2c\u8a66_id" FROM "\u6e2c\u8a66" '
- 'WHERE "\u6e2c\u8a66"."\u6e2c\u8a66_id" = :\u6e2c\u8a66_id_1'
- ),
+ 'SELECT "\u6e2c\u8a66"."\u6e2c\u8a66_id" FROM "\u6e2c\u8a66" '
+ 'WHERE "\u6e2c\u8a66"."\u6e2c\u8a66_id" = :\u6e2c\u8a66_id_1',
dialect="default",
)
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import UniqueConstraint
-from sqlalchemy import util
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import backref
from sqlalchemy.orm import class_mapper
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
-from sqlalchemy.util import with_metaclass
Base = None
elif self.base_style == "explicit":
mapper_registry = registry()
- class Base(with_metaclass(DeclarativeMeta)):
+ class Base(metaclass=DeclarativeMeta):
__abstract__ = True
registry = mapper_registry
metadata = mapper_registry.metadata
"id", Integer, primary_key=True, test_needs_autoincrement=True
)
name = Column("name", String(50))
- addresses = relationship(util.u("Address"), backref="user")
+ addresses = relationship("Address", backref="user")
class Address(Base, fixtures.ComparableEntity):
__tablename__ = "addresses"
)
user = relationship(
User,
- backref=backref("addresses", order_by=util.u("Address.email")),
+ backref=backref("addresses", order_by="Address.email"),
)
assert Address.user.property.mapper.class_ is User
# even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
# fails
- class BrokenParent(with_metaclass(BrokenMeta)):
+ class BrokenParent(metaclass=BrokenMeta):
xyzzy = "magic"
# _as_declarative() inspects obj.__class__.__bases__
r"registry is not a sqlalchemy.orm.registry\(\) object",
):
- class Base(with_metaclass(DeclarativeMeta)):
+ class Base(metaclass=DeclarativeMeta):
metadata = sa.MetaData()
def test_shared_class_registry(self):
try:
hasattr(User.addresses, "property")
except exc.InvalidRequestError:
- assert sa.util.compat.py3k
+ assert True
# the exception is preserved. Remains the
# same through repeated calls.
+from contextlib import nullcontext
+
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import Integer
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import testing
-from sqlalchemy import util
from sqlalchemy.orm import aliased
from sqlalchemy.orm import backref
from sqlalchemy.orm import configure_mappers
with _aliased_join_warning(
"Manager->managers"
- ) if autoalias else util.nullcontext():
+ ) if autoalias else nullcontext():
self.assert_compile(
q,
"SELECT people.type AS people_type, engineers.id AS "
+from contextlib import nullcontext
+
from sqlalchemy import and_
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import true
-from sqlalchemy import util
from sqlalchemy.orm import aliased
from sqlalchemy.orm import Bundle
from sqlalchemy.orm import joinedload
with _aliased_join_warning(
"Engineer->engineer"
- ) if autoalias else util.nullcontext():
+ ) if autoalias else nullcontext():
self.assert_compile(
q,
"SELECT manager.id AS manager_id, employee.id AS employee_id, "
with _aliased_join_warning(
"Boss->manager"
- ) if autoalias else util.nullcontext():
+ ) if autoalias else nullcontext():
self.assert_compile(
q,
"SELECT engineer.id AS engineer_id, "
import pickle
+from unittest.mock import call
+from unittest.mock import Mock
from sqlalchemy import event
from sqlalchemy import exc as sa_exc
from sqlalchemy.testing import is_not
from sqlalchemy.testing import is_true
from sqlalchemy.testing import not_in
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.util import all_partial_orderings
from sqlalchemy.testing.util import gc_collect
+from unittest.mock import Mock
+
import sqlalchemy as sa
from sqlalchemy import delete
from sqlalchemy import ForeignKey
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from test.orm import _fixtures
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import testing
-from sqlalchemy import util
from sqlalchemy.orm import attributes
from sqlalchemy.orm import backref
from sqlalchemy.orm import class_mapper
Address = self.classes.Address
rel = relationship(Address)
- rel.cascade = util.u("save-update, merge, expunge")
+ rel.cascade = "save-update, merge, expunge"
eq_(rel.cascade, set(["save-update", "merge", "expunge"]))
import contextlib
+from functools import reduce
from operator import and_
from sqlalchemy import event
control[0] = e
assert_eq()
- if util.reduce(
+ if reduce(
and_,
[
hasattr(direct, a)
+from contextlib import nullcontext
+import pickle
+from unittest.mock import call
+from unittest.mock import Mock
+
import sqlalchemy as sa
from sqlalchemy import and_
from sqlalchemy import cast
from sqlalchemy import testing
from sqlalchemy import text
from sqlalchemy import true
-from sqlalchemy import util
from sqlalchemy.engine import default
from sqlalchemy.engine import result_tuple
from sqlalchemy.orm import aliased
from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing.fixtures import ComparableEntity
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import resolve_lambda
-from sqlalchemy.util import pickle
from . import _fixtures
from .inheritance import _poly_fixtures
from .inheritance._poly_fixtures import _Polymorphic
sess.expunge_all()
return sess, User, Address, Dingaling
- @testing.requires.non_broken_pickle
def test_became_bound_options(self):
sess, User, Address, Dingaling = self._option_test_fixture()
u1 = sess.query(User).options(opt).first()
pickle.loads(pickle.dumps(u1))
- @testing.requires.non_broken_pickle
@testing.combinations(
lambda: sa.orm.joinedload("addresses"),
lambda: sa.orm.defer("name"),
u1 = sess.query(User).options(opt).first()
pickle.loads(pickle.dumps(u1))
- @testing.requires.non_broken_pickle
@testing.combinations(
lambda User: sa.orm.Load(User).joinedload("addresses"),
lambda User: sa.orm.Load(User).joinedload("addresses").raiseload("*"),
b3 = B(key="b3")
if future:
- dep_ctx = util.nullcontext
+ dep_ctx = nullcontext
else:
def dep_ctx():
+from unittest.mock import ANY
+from unittest.mock import call
+from unittest.mock import Mock
+
import sqlalchemy as sa
from sqlalchemy import delete
from sqlalchemy import event
from sqlalchemy.testing import is_not
from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.testing.mock import ANY
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import MetaData
-from sqlalchemy import util
from sqlalchemy.orm import attributes
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm import clear_mappers
assert_raises(TypeError, cls, "a", "b", c="c")
+ def _kw_only_fixture(self):
+        class A:
+ def __init__(self, a, *, b, c):
+ self.a = a
+ self.b = b
+ self.c = c
-if util.py3k:
- _locals = {}
- exec(
- """
-def _kw_only_fixture(self):
- class A:
- def __init__(self, a, *, b, c):
- self.a = a
- self.b = b
- self.c = c
- return self._instrument(A)
-
-def _kw_plus_posn_fixture(self):
- class A:
- def __init__(self, a, *args, b, c):
- self.a = a
- self.b = b
- self.c = c
- return self._instrument(A)
-
-def _kw_opt_fixture(self):
- class A:
- def __init__(self, a, *, b, c="c"):
- self.a = a
- self.b = b
- self.c = c
- return self._instrument(A)
-""",
- _locals,
- )
- for k in _locals:
- setattr(Py3KFunctionInstTest, k, _locals[k])
+ return self._instrument(A)
+
+ def _kw_plus_posn_fixture(self):
+        class A:
+ def __init__(self, a, *args, b, c):
+ self.a = a
+ self.b = b
+ self.c = c
+
+ return self._instrument(A)
+
+ def _kw_opt_fixture(self):
+        class A:
+ def __init__(self, a, *, b, c="c"):
+ self.a = a
+ self.b = b
+ self.c = c
+
+ return self._instrument(A)
class MiscTest(fixtures.MappedTest):
try:
hasattr(Address.user, "property")
except sa.orm.exc.UnmappedClassError:
- assert util.compat.py3k
+ assert True
for i in range(3):
assert_raises_message(
self.mapper(
User,
users,
- properties={
- util.u("addresses"): relationship(
- Address, backref=util.u("user")
- )
- },
+ properties={"addresses": relationship(Address, backref="user")},
)
u1 = User()
a1 = Address()
import copy
+import pickle
import sqlalchemy as sa
from sqlalchemy import ForeignKey
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import picklers
-from sqlalchemy.util import pickle
from test.orm import _fixtures
from .inheritance._poly_fixtures import _Polymorphic
from .inheritance._poly_fixtures import Company
u2.addresses.append(Address())
eq_(len(u2.addresses), 2)
- @testing.requires.non_broken_pickle
def test_instance_deferred_cols(self):
users, addresses = (self.tables.users, self.tables.addresses)
u2 = state.obj()
eq_(sa.inspect(u2).info["some_key"], "value")
- @testing.requires.non_broken_pickle
@testing.combinations(
lambda User: sa.orm.joinedload(User.addresses),
lambda User: sa.orm.defer(User.name),
u1 = sess.query(User).options(opt).first()
pickle.loads(pickle.dumps(u1))
- @testing.requires.non_broken_pickle
@testing.combinations(
lambda User: sa.orm.Load(User).joinedload(User.addresses),
lambda User: sa.orm.Load(User)
u1 = sess.query(User).options(opt).first()
pickle.loads(pickle.dumps(u1))
- @testing.requires.non_broken_pickle
def test_became_bound_options(self):
sess, User, Address, Dingaling = self._option_test_fixture()
class OptionsTest(_Polymorphic):
- @testing.requires.non_broken_pickle
def test_options_of_type(self):
with_poly = with_polymorphic(Person, [Engineer, Manager], flat=True)
+import collections.abc as collections_abc
import contextlib
import functools
from sqlalchemy.testing.schema import Table
from sqlalchemy.types import NullType
from sqlalchemy.types import TypeDecorator
-from sqlalchemy.util import collections_abc
from test.orm import _fixtures
+from unittest.mock import Mock
+
import sqlalchemy as sa
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
-from sqlalchemy.testing.mock import Mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
import inspect as _py_inspect
+import pickle
import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
-from sqlalchemy.util import pickle
from sqlalchemy.util.compat import inspect_getfullargspec
from test.orm import _fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.util import OrderedDict
-from sqlalchemy.util import u
-from sqlalchemy.util import ue
from test.orm import _fixtures
self.mapper_registry.map_imperatively(Test, uni_t1)
- txt = ue("\u0160\u0110\u0106\u010c\u017d")
+ txt = "\u0160\u0110\u0106\u010c\u017d"
t1 = Test(id=1, txt=txt)
self.assert_(t1.txt == txt)
)
self.mapper_registry.map_imperatively(Test2, uni_t2)
- txt = ue("\u0160\u0110\u0106\u010c\u017d")
+ txt = "\u0160\u0110\u0106\u010c\u017d"
t1 = Test(txt=txt)
t1.t2s.append(Test2())
t1.t2s.append(Test2())
"unitable1",
metadata,
Column(
- u("méil"),
+ "méil",
Integer,
primary_key=True,
key="a",
test_needs_autoincrement=True,
),
- Column(ue("\u6e2c\u8a66"), Integer, key="b"),
+ Column("\u6e2c\u8a66", Integer, key="b"),
Column("type", String(20)),
test_needs_fk=True,
test_needs_autoincrement=True,
)
t2 = Table(
- u("Unitéble2"),
+ "Unitéble2",
metadata,
Column(
- u("méil"),
+ "méil",
Integer,
primary_key=True,
key="cc",
test_needs_autoincrement=True,
),
Column(
- ue("\u6e2c\u8a66"), Integer, ForeignKey("unitable1.a"), key="d"
+ "\u6e2c\u8a66", Integer, ForeignKey("unitable1.a"), key="d"
),
- Column(ue("\u6e2c\u8a66_2"), Integer, key="e"),
+ Column("\u6e2c\u8a66_2", Integer, key="e"),
test_needs_fk=True,
test_needs_autoincrement=True,
)
a.data = "bar"
b.data = "foo"
- if sa.util.py3k:
- message = (
- r"Could not sort objects by primary key; primary key "
- r"values must be sortable in Python \(was: '<' not "
- r"supported between instances of 'MyNotSortableEnum'"
- r" and 'MyNotSortableEnum'\)"
- )
+ message = (
+ r"Could not sort objects by primary key; primary key "
+ r"values must be sortable in Python \(was: '<' not "
+ r"supported between instances of 'MyNotSortableEnum'"
+ r" and 'MyNotSortableEnum'\)"
+ )
- assert_raises_message(
- sa.exc.InvalidRequestError,
- message,
- s.flush,
- )
- else:
- s.flush()
+ assert_raises_message(
+ sa.exc.InvalidRequestError,
+ message,
+ s.flush,
+ )
s.close()
def test_persistent_flush_sortable(self):
+from unittest.mock import Mock
+from unittest.mock import patch
+
from sqlalchemy import cast
from sqlalchemy import DateTime
from sqlalchemy import event
from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing.assertsql import Conditional
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.testing.mock import Mock
-from sqlalchemy.testing.mock import patch
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from test.orm import _fixtures
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.util import compat
from test.orm import _fixtures
from .inheritance import _poly_fixtures
return cls.x
raise AttributeError(key)
- class Point(compat.with_metaclass(MetaPoint)):
+ class Point(metaclass=MetaPoint):
pass
self._fixture(Point)
return cls._impl_double_x
raise AttributeError(key)
- class Point(compat.with_metaclass(MetaPoint)):
+ class Point(metaclass=MetaPoint):
@hybrid_property
def _impl_double_x(self):
return self.x * 2
return double_x.__get__(None, cls)
raise AttributeError(key)
- class Point(compat.with_metaclass(MetaPoint)):
+ class Point(metaclass=MetaPoint):
pass
self._fixture(Point)
+from unittest.mock import call
+from unittest.mock import Mock
+
from sqlalchemy import exc
from sqlalchemy.orm import collections
from sqlalchemy.orm import relationship
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import ne_
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.testing.mock import call
-from sqlalchemy.testing.mock import Mock
from test.orm import _fixtures
import contextlib
import datetime
+from unittest.mock import patch
import uuid
import sqlalchemy as sa
from sqlalchemy.testing import is_true
from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing.fixtures import fixture_session
-from sqlalchemy.testing.mock import patch
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
-from __future__ import print_function
-
import logging
import random
import sys
.where(table_b_b.c.c > 10),
)
- if util.py37:
- fixtures.append(_update_dml_w_dicts)
+ fixtures.append(_update_dml_w_dicts)
def _lambda_fixtures():
def one():
from sqlalchemy.testing import is_true
from sqlalchemy.testing import mock
from sqlalchemy.testing import ne_
-from sqlalchemy.util import u
table1 = table(
"mytable",
def test_reraise_of_column_spec_issue_unicode(self):
MyType = self._illegal_type_fixture()
- t1 = Table("t", MetaData(), Column(u("méil"), MyType()))
+ t1 = Table("t", MetaData(), Column("méil", MyType()))
assert_raises_message(
exc.CompileError,
- u(r"\(in table 't', column 'méil'\): Couldn't compile type"),
+ r"\(in table 't', column 'méil'\): Couldn't compile type",
schema.CreateTable(t1).compile,
)
+from unittest.mock import Mock
+
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy.sql.ddl import SchemaDropper
from sqlalchemy.sql.ddl import SchemaGenerator
from sqlalchemy.testing import fixtures
-from sqlalchemy.testing.mock import Mock
class EmitDDLTest(fixtures.TestBase):
from sqlalchemy.testing import mock
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
-from sqlalchemy.util import compat
from .test_update import _UpdateFromTestBase
obj = cls.__new__(cls)
with mock.patch.object(cls, "_copy") as _copy:
with testing.expect_deprecated(
- r"The %s\(\) method is deprecated" % compat._qualname(cls.copy)
+ r"The %s\(\) method is deprecated" % cls.copy.__qualname__
):
obj.copy(schema="s", target_table="tt", arbitrary="arb")
+import pickle
import re
from sqlalchemy import and_
from sqlalchemy.testing import is_
from sqlalchemy.testing import is_not
from sqlalchemy.testing.schema import eq_clause_element
-from sqlalchemy.util import pickle
A = B = t1 = t2 = t3 = table1 = table2 = table3 = table4 = None
from copy import deepcopy
import datetime
import decimal
+import pickle
from sqlalchemy import ARRAY
from sqlalchemy import bindparam
from sqlalchemy import Text
from sqlalchemy import true
from sqlalchemy import types as sqltypes
-from sqlalchemy import util
from sqlalchemy.dialects import mysql
from sqlalchemy.dialects import oracle
from sqlalchemy.dialects import postgresql
# test pickling
self.assert_compile(
- util.pickle.loads(util.pickle.dumps(f1)),
+ pickle.loads(pickle.dumps(f1)),
"my_func(:my_func_1, :my_func_2, NULL, :my_func_3)",
)
f1 = func.row_number().over()
self.assert_compile(
- util.pickle.loads(util.pickle.dumps(f1)),
+ pickle.loads(pickle.dumps(f1)),
"row_number() OVER ()",
)
f1 = func.percentile_cont(literal(1)).within_group()
self.assert_compile(
- util.pickle.loads(util.pickle.dumps(f1)),
+ pickle.loads(pickle.dumps(f1)),
"percentile_cont(:param_1) WITHIN GROUP (ORDER BY )",
)
column("q"), column("p").desc()
)
self.assert_compile(
- util.pickle.loads(util.pickle.dumps(f1)),
+ pickle.loads(pickle.dumps(f1)),
"percentile_cont(:param_1) WITHIN GROUP (ORDER BY q, p DESC)",
)
+import collections.abc as collections_abc
import datetime
import operator
+import pickle
from sqlalchemy import and_
from sqlalchemy import between
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import text
-from sqlalchemy import util
from sqlalchemy.dialects import mssql
from sqlalchemy.dialects import mysql
from sqlalchemy.dialects import oracle
return self.op("->")(index)
col = Column("x", MyType())
- assert not isinstance(col, util.collections_abc.Iterable)
+ assert not isinstance(col, collections_abc.Iterable)
def test_lshift(self):
class MyType(UserDefinedType):
& self.table1.c.myid.between(15, 20)
& self.table1.c.myid.like("hoho")
)
- eq_(str(clause), str(util.pickle.loads(util.pickle.dumps(clause))))
+ eq_(str(clause), str(pickle.loads(pickle.dumps(clause))))
def test_pickle_operators_two(self):
clause = tuple_(1, 2, 3)
- eq_(str(clause), str(util.pickle.loads(util.pickle.dumps(clause))))
+ eq_(str(clause), str(pickle.loads(pickle.dumps(clause))))
@testing.combinations(
(operator.lt, "<", ">"),
import collections
+import collections.abc as collections_abc
from contextlib import contextmanager
import csv
+from io import StringIO
import operator
+import pickle
+from unittest.mock import Mock
+from unittest.mock import patch
from sqlalchemy import CHAR
from sqlalchemy import column
from sqlalchemy import tuple_
from sqlalchemy import type_coerce
from sqlalchemy import TypeDecorator
-from sqlalchemy import util
from sqlalchemy import VARCHAR
from sqlalchemy.engine import cursor as _cursor
from sqlalchemy.engine import default
from sqlalchemy.testing import mock
from sqlalchemy.testing import ne_
from sqlalchemy.testing import not_in
-from sqlalchemy.testing.mock import Mock
-from sqlalchemy.testing.mock import patch
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
-from sqlalchemy.util import collections_abc
class CursorResultTest(fixtures.TablesTest):
],
)
- for pickle in False, True:
+ for use_pickle in False, True:
for use_labels in False, True:
result = connection.execute(
users.select()
)
).fetchall()
- if pickle:
- result = util.pickle.loads(util.pickle.dumps(result))
+ if use_pickle:
+ result = pickle.loads(pickle.dumps(result))
eq_(result, [(7, "jack"), (8, "ed"), (9, "fred")])
if use_labels:
# previously would warn
- if pickle:
+ if use_pickle:
with expect_raises_message(
exc.NoSuchColumnError,
"Row was unpickled; lookup by ColumnElement is "
else:
eq_(result[0]._mapping[users.c.user_id], 7)
- if pickle:
+ if use_pickle:
with expect_raises_message(
exc.NoSuchColumnError,
"Row was unpickled; lookup by ColumnElement is "
lambda: r._mapping[fake_table.c.user_id],
)
- r = util.pickle.loads(util.pickle.dumps(r))
+ r = pickle.loads(pickle.dumps(r))
assert_raises_message(
exc.InvalidRequestError,
"Ambiguous column name",
users.select().where(users.c.user_id == 1)
).fetchone()
- s = util.StringIO()
+ s = StringIO()
writer = csv.writer(s)
# csv performs PySequenceCheck call
writer.writerow(row)
"""Test various algorithmic properties of selectables."""
+from itertools import zip_longest
+
from sqlalchemy import and_
from sqlalchemy import bindparam
from sqlalchemy import Boolean
"""tests for #6808"""
s1 = select(*cols_expr).select_from(*select_from)
- for ff, efp in util.zip_longest(s1.get_final_froms(), exp_final_froms):
+ for ff, efp in zip_longest(s1.get_final_froms(), exp_final_froms):
assert ff.compare(efp)
eq_(s1.columns_clause_froms, exp_cc_froms)
from sqlalchemy import Sequence
from sqlalchemy import String
from sqlalchemy import testing
-from sqlalchemy import util
from sqlalchemy.dialects import sqlite
from sqlalchemy.schema import CreateSequence
from sqlalchemy.schema import DropSequence
def _assert_seq_result(self, ret):
"""asserts return of next_value is an int"""
- assert isinstance(ret, util.int_types)
+ assert isinstance(ret, int)
assert ret >= testing.db.dialect.default_sequence_base
def test_execute(self, connection):
eq_(types.Integer().python_type, int)
eq_(types.Numeric().python_type, decimal.Decimal)
eq_(types.Numeric(asdecimal=False).python_type, float)
- eq_(types.LargeBinary().python_type, util.binary_type)
+ eq_(types.LargeBinary().python_type, bytes)
eq_(types.Float().python_type, float)
eq_(types.Interval().python_type, datetime.timedelta)
eq_(types.Date().python_type, datetime.date)
eq_(types.DateTime().python_type, datetime.datetime)
eq_(types.String().python_type, str)
- eq_(types.Unicode().python_type, util.text_type)
+ eq_(types.Unicode().python_type, str)
eq_(types.Enum("one", "two", "three").python_type, str)
assert_raises(
user_id=2,
goofy="jack",
goofy2="jack",
- goofy4=util.u("jack"),
- goofy7=util.u("jack"),
+ goofy4="jack",
+ goofy7="jack",
goofy8=12,
goofy9=12,
goofy10=12,
user_id=3,
goofy="lala",
goofy2="lala",
- goofy4=util.u("lala"),
- goofy7=util.u("lala"),
+ goofy4="lala",
+ goofy7="lala",
goofy8=15,
goofy9=15,
goofy10=15,
user_id=4,
goofy="fred",
goofy2="fred",
- goofy4=util.u("fred"),
- goofy7=util.u("fred"),
+ goofy4="fred",
+ goofy7="fred",
goofy8=9,
goofy9=9,
goofy10=9,
user_id=2,
goofy="jack",
goofy2="jack",
- goofy4=util.u("jack"),
- goofy7=util.u("jack"),
+ goofy4="jack",
+ goofy7="jack",
goofy8=12,
goofy9=12,
goofy10=12,
user_id=3,
goofy="lala",
goofy2="lala",
- goofy4=util.u("lala"),
- goofy7=util.u("lala"),
+ goofy4="lala",
+ goofy7="lala",
goofy8=15,
goofy9=15,
goofy10=15,
user_id=4,
goofy="fred",
goofy2="fred",
- goofy4=util.u("fred"),
- goofy7=util.u("fred"),
+ goofy4="fred",
+ goofy7="fred",
goofy8=9,
goofy9=9,
goofy10=9,
metadata = self.metadata
self._fixture(connection, metadata, Integer, 45)
val = connection.exec_driver_sql("select val from t").scalar()
- assert isinstance(val, util.int_types)
+ assert isinstance(val, int)
eq_(val, 45)
@testing.provide_metadata
+from itertools import zip_longest
+
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
-from sqlalchemy import util
from sqlalchemy.sql import base as sql_base
from sqlalchemy.sql import coercions
from sqlalchemy.sql import column
unwrapped = sql_util.unwrap_order_by(expr)
- for a, b in util.zip_longest(unwrapped, expected):
+ for a, b in zip_longest(unwrapped, expected):
assert a is not None and a.compare(b)