diff --git a/lib/sqlalchemy/ext/serializer.py b/lib/sqlalchemy/ext/serializer.py
from ..util import b64encode
from ..util import byte_buffer
from ..util import pickle
+from ..util import py2k
from ..util import text_type
-
__all__ = ["Serializer", "Deserializer", "dumps", "loads"]
-def Serializer(*args, **kw):
- pickler = pickle.Pickler(*args, **kw)
-
- def persistent_id(obj):
- # print "serializing:", repr(obj)
- if isinstance(obj, Mapper) and not obj.non_primary:
- id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
- elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
- id_ = (
- "mapperprop:"
- + b64encode(pickle.dumps(obj.parent.class_))
- + ":"
- + obj.key
- )
- elif isinstance(obj, Table):
- if "parententity" in obj._annotations:
- id_ = "mapper_selectable:" + b64encode(
- pickle.dumps(obj._annotations["parententity"].class_)
+if py2k:
+
+ def Serializer(*args, **kw):
+ pickler = pickle.Pickler(*args, **kw)
+
+ def persistent_id(obj):
+ # print "serializing:", repr(obj)
+ if isinstance(obj, Mapper) and not obj.non_primary:
+ id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
+ elif (
+ isinstance(obj, MapperProperty) and not obj.parent.non_primary
+ ):
+ id_ = (
+ "mapperprop:"
+ + b64encode(pickle.dumps(obj.parent.class_))
+ + ":"
+ + obj.key
)
+ elif isinstance(obj, Table):
+ if "parententity" in obj._annotations:
+ id_ = "mapper_selectable:" + b64encode(
+ pickle.dumps(obj._annotations["parententity"].class_)
+ )
+ else:
+ id_ = "table:" + text_type(obj.key)
+ elif isinstance(obj, Column) and isinstance(obj.table, Table):
+ id_ = (
+ "column:"
+ + text_type(obj.table.key)
+ + ":"
+ + text_type(obj.key)
+ )
+ elif isinstance(obj, Session):
+ id_ = "session:"
+ elif isinstance(obj, Engine):
+ id_ = "engine:"
else:
- id_ = "table:" + text_type(obj.key)
- elif isinstance(obj, Column) and isinstance(obj.table, Table):
- id_ = (
- "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
- )
- elif isinstance(obj, Session):
- id_ = "session:"
- elif isinstance(obj, Engine):
- id_ = "engine:"
- else:
- return None
- return id_
-
- pickler.persistent_id = persistent_id
- return pickler
-
-
-our_ids = re.compile(
- r"(mapperprop|mapper|mapper_selectable|table|column|"
- r"session|attribute|engine):(.*)"
-)
-
-
-def Deserializer(file, metadata=None, scoped_session=None, engine=None):
- unpickler = pickle.Unpickler(file)
-
- def get_engine():
- if engine:
- return engine
- elif scoped_session and scoped_session().bind:
- return scoped_session().bind
- elif metadata and metadata.bind:
- return metadata.bind
- else:
- return None
-
- def persistent_load(id_):
- m = our_ids.match(text_type(id_))
- if not m:
- return None
- else:
- type_, args = m.group(1, 2)
- if type_ == "attribute":
- key, clsarg = args.split(":")
- cls = pickle.loads(b64decode(clsarg))
- return getattr(cls, key)
- elif type_ == "mapper":
- cls = pickle.loads(b64decode(args))
- return class_mapper(cls)
- elif type_ == "mapper_selectable":
- cls = pickle.loads(b64decode(args))
- return class_mapper(cls).__clause_element__()
- elif type_ == "mapperprop":
- mapper, keyname = args.split(":")
- cls = pickle.loads(b64decode(mapper))
- return class_mapper(cls).attrs[keyname]
- elif type_ == "table":
- return metadata.tables[args]
- elif type_ == "column":
- table, colname = args.split(":")
- return metadata.tables[table].c[colname]
- elif type_ == "session":
- return scoped_session()
- elif type_ == "engine":
- return get_engine()
+ return None
+ return id_
+
+ pickler.persistent_id = persistent_id
+ return pickler
+
+ our_ids = re.compile(
+ r"(mapperprop|mapper|mapper_selectable|table|column|"
+ r"session|attribute|engine):(.*)"
+ )
+
+ def Deserializer(file, metadata=None, scoped_session=None, engine=None):
+ unpickler = pickle.Unpickler(file)
+
+ def get_engine():
+ if engine:
+ return engine
+ elif scoped_session and scoped_session().bind:
+ return scoped_session().bind
+ elif metadata and metadata.bind:
+ return metadata.bind
else:
- raise Exception("Unknown token: %s" % type_)
+ return None
- unpickler.persistent_load = persistent_load
- return unpickler
+ def persistent_load(id_):
+ m = our_ids.match(text_type(id_))
+ if not m:
+ return None
+ else:
+ type_, args = m.group(1, 2)
+ if type_ == "attribute":
+ key, clsarg = args.split(":")
+ cls = pickle.loads(b64decode(clsarg))
+ return getattr(cls, key)
+ elif type_ == "mapper":
+ cls = pickle.loads(b64decode(args))
+ return class_mapper(cls)
+ elif type_ == "mapper_selectable":
+ cls = pickle.loads(b64decode(args))
+ return class_mapper(cls).__clause_element__()
+ elif type_ == "mapperprop":
+ mapper, keyname = args.split(":")
+ cls = pickle.loads(b64decode(mapper))
+ return class_mapper(cls).attrs[keyname]
+ elif type_ == "table":
+ return metadata.tables[args]
+ elif type_ == "column":
+ table, colname = args.split(":")
+ return metadata.tables[table].c[colname]
+ elif type_ == "session":
+ return scoped_session()
+ elif type_ == "engine":
+ return get_engine()
+ else:
+ raise Exception("Unknown token: %s" % type_)
+
+ unpickler.persistent_load = persistent_load
+ return unpickler
+
+
+else:
+
+ class Serializer(pickle.Pickler):
+ def persistent_id(self, obj):
+ # print "serializing:", repr(obj)
+ if isinstance(obj, Mapper) and not obj.non_primary:
+ id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
+ elif (
+ isinstance(obj, MapperProperty) and not obj.parent.non_primary
+ ):
+ id_ = (
+ "mapperprop:"
+ + b64encode(pickle.dumps(obj.parent.class_))
+ + ":"
+ + obj.key
+ )
+ elif isinstance(obj, Table):
+ if "parententity" in obj._annotations:
+ id_ = "mapper_selectable:" + b64encode(
+ pickle.dumps(obj._annotations["parententity"].class_)
+ )
+ else:
+ id_ = "table:" + text_type(obj.key)
+ elif isinstance(obj, Column) and isinstance(obj.table, Table):
+ id_ = (
+ "column:"
+ + text_type(obj.table.key)
+ + ":"
+ + text_type(obj.key)
+ )
+ elif isinstance(obj, Session):
+ id_ = "session:"
+ elif isinstance(obj, Engine):
+ id_ = "engine:"
+ else:
+ return None
+ return id_
+
+ our_ids = re.compile(
+ r"(mapperprop|mapper|mapper_selectable|table|column|"
+ r"session|attribute|engine):(.*)"
+ )
+
+ class Deserializer(pickle.Unpickler):
+ def __init__(
+ self, file, metadata=None, scoped_session=None, engine=None
+ ):
+ super().__init__(file)
+ self.metadata = metadata
+ self.scoped_session = scoped_session
+ self.engine = engine
+
+ def get_engine(self):
+ if self.engine:
+ return self.engine
+ elif self.scoped_session and self.scoped_session().bind:
+ return self.scoped_session().bind
+ elif self.metadata and self.metadata.bind:
+ return self.metadata.bind
+ else:
+ return None
+
+ def persistent_load(self, id_):
+ m = our_ids.match(text_type(id_))
+ if not m:
+ return None
+ else:
+ type_, args = m.group(1, 2)
+ if type_ == "attribute":
+ key, clsarg = args.split(":")
+ cls = pickle.loads(b64decode(clsarg))
+ return getattr(cls, key)
+ elif type_ == "mapper":
+ cls = pickle.loads(b64decode(args))
+ return class_mapper(cls)
+ elif type_ == "mapper_selectable":
+ cls = pickle.loads(b64decode(args))
+ return class_mapper(cls).__clause_element__()
+ elif type_ == "mapperprop":
+ mapper, keyname = args.split(":")
+ cls = pickle.loads(b64decode(mapper))
+ return class_mapper(cls).attrs[keyname]
+ elif type_ == "table":
+ return self.metadata.tables[args]
+ elif type_ == "column":
+ table, colname = args.split(":")
+ return self.metadata.tables[table].c[colname]
+ elif type_ == "session":
+ return self.scoped_session()
+ elif type_ == "engine":
+ return self.get_engine()
+ else:
+ raise Exception("Unknown token: %s" % type_)
def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
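Both branches of the diff above implement the same round trip: pickle's
persistent-ID hooks replace ORM constructs with string tokens on the way out,
and resolve those tokens against live metadata on the way back in. The py2k
branch assigns persistent_id/persistent_load as attributes on pickler
instances, while the Python 3 branch defines them as methods on
Pickler/Unpickler subclasses. Below is a minimal, self-contained sketch of the
pattern; Thing, REGISTRY, RegistrySerializer and RegistryDeserializer are
hypothetical stand-ins for mapped classes, MetaData and the classes added in
this diff.

import io
import pickle

class Thing:
    """Hypothetical stand-in for an object that should not be pickled directly."""

    def __init__(self, key):
        self.key = key

# stand-in for the MetaData.tables / class_mapper() lookups used above
REGISTRY = {"t1": Thing("t1")}

class RegistrySerializer(pickle.Pickler):
    def persistent_id(self, obj):
        # emit a string token instead of pickling the object itself
        if isinstance(obj, Thing):
            return "thing:" + obj.key
        return None  # everything else pickles normally

class RegistryDeserializer(pickle.Unpickler):
    def persistent_load(self, pid):
        # resolve the token back to the live object on this side
        type_, _, key = pid.partition(":")
        if type_ == "thing":
            return REGISTRY[key]
        raise pickle.UnpicklingError("unknown persistent id %r" % pid)

buf = io.BytesIO()
RegistrySerializer(buf).dump({"payload": REGISTRY["t1"]})
buf.seek(0)
restored = RegistryDeserializer(buf).load()
assert restored["payload"] is REGISTRY["t1"]

In the extension itself the tokens carry prefixes such as "mapper:" or
"column:", and the documented entry points are the dumps(query) /
loads(serialized, metadata, scoped_session) pair, with loads resolving table
and column tokens against the given MetaData.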
diff --git a/tox.ini b/tox.ini
[tox]
envlist = py
+[greenletextras]
+extras=
+ asyncio
+ sqlite: aiosqlite
+ sqlite_file: aiosqlite
+ postgresql: postgresql-asyncpg
+ mysql: asyncmy
+ mysql: aiomysql
+
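The "factor: value" lines above (and throughout [testenv] below) are tox's
factor-conditional settings: a line applies only when every factor in its
prefix appears in the environment name, and {[greenletextras]extras} pulls
this whole list in by ini-section substitution. A simplified, hypothetical
Python sketch of the selection rule (real tox also expands brace patterns
such as py{38,39}, which this ignores):

def resolve(lines, envname):
    # keep a line only if all of its "-"-joined factors occur in envname
    active = set(envname.split("-"))
    selected = []
    for line in lines:
        cond, sep, value = line.partition(": ")
        if not sep:
            selected.append(line)  # unconditional line, always kept
        elif set(cond.split("-")) <= active:
            selected.append(value)  # every factor matched
    return selected

extras = ["asyncio", "sqlite: aiosqlite", "postgresql: postgresql-asyncpg"]
print(resolve(extras, "py38-sqlite"))      # ['asyncio', 'aiosqlite']
print(resolve(extras, "py38-postgresql"))  # ['asyncio', 'postgresql-asyncpg']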
[testenv]
# note that we have a .coveragerc file that points coverage specifically
# at ./lib/sqlalchemy, and *not* at the build that tox might create under .tox.
usedevelop=
cov: True
+extras=
+ py{3,38,39,310,311,312}: {[greenletextras]extras}
+
+ postgresql: postgresql
+ postgresql: postgresql-pg8000
+
+ mysql: mysql
+ mysql: pymysql
+ mysql: mariadb-connector
+
+ oracle: oracle
+ mssql: mssql
+
deps=
pytest>=4.6.11,<5.0; python_version < '3'
pytest>=6.2,<8; python_version >= '3'
pytest-xdist
mock; python_version < '3.3'
- sqlite: .[aiosqlite]
- sqlite_file: .[aiosqlite]
- sqlite_file: .[sqlcipher]; python_version >= '3' and python_version < '3.10'
- postgresql: .[postgresql]
- py3{,7,8,9,10,11}-postgresql: .[postgresql_asyncpg]; python_version >= '3'
- postgresql: .[postgresql_pg8000]; python_version >= '3'
- mysql: .[mysql]
- mysql: .[pymysql]
- mysql: .[asyncmy]; python_version >= '3'
- mysql: .[aiomysql]; python_version >= '3'
- # mysql: .[mariadb_connector]; python_version >= '3'
+ py313: git+https://github.com/vstinner/greenlet@py313\#egg=greenlet
- oracle: .[oracle]
-
- mssql: .[mssql]
-
- py312: greenlet>=3.0.0a1
-
- dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git#egg=aiosqlite
- dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git#egg=sqlcipher3
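+    # the "\#" below escapes the "#" so tox's ini parser does not read the
+    # URL fragment as an inline comment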
+ dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git\#egg=aiosqlite
+ dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git\#egg=sqlcipher3
dbapimain-postgresql: git+https://github.com/psycopg/psycopg2.git#egg=psycopg2
dbapimain-postgresql: git+https://github.com/MagicStack/asyncpg.git#egg=asyncpg
dbapimain-oracle: git+https://github.com/oracle/python-cx_Oracle.git#egg=cx_Oracle
- dbapimain-mssql: git+https://github.com/mkleehammer/pyodbc.git#egg=pyodbc
+ py313-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc
+ dbapimain-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc
cov: pytest-cov
WORKERS={env:TOX_WORKERS:-n4 --max-worker-restart=5}
-
-
nocext: DISABLE_SQLALCHEMY_CEXT=1
cext: REQUIRE_SQLALCHEMY_CEXT=1
cov: COVERAGE={[testenv]cov_args}
sqlite: SQLITE={env:TOX_SQLITE:--db sqlite}
sqlite_file: SQLITE={env:TOX_SQLITE_FILE:--db sqlite_file}
- py3{,5,6,7,8,9,10,11}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite}
- py3{,5,6,7,8,9}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite --dbdriver pysqlcipher}
+
+ sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric --dbdriver aiosqlite}
+ py{313,314}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric}
+
+ sqlite-nogreenlet: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric}
+
+ py{37,38,39}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite --dbdriver pysqlcipher}
+
# omit pysqlcipher for Python 3.10
py3{,10,11}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite}
oracle,mssql,sqlite_file: IDENTS=--write-idents db_idents.txt
oracle,mssql,sqlite_file: MEMUSAGE=--nomemory
+
+
# tox as of 2.0 blocks all environment variables from the
# outside, unless they are here (or in TOX_TESTENV_PASSENV,
# wildcards OK). Need at least these
# thanks to https://julien.danjou.info/the-best-flake8-extensions/
[testenv:pep8]
basepython = python3
+
+extras=
+ {[greenletextras]extras}
+
deps=
flake8
#flake8-import-order
# command run in the github action when the C extensions (cext) are active.
[testenv:github-cext]
+extras=
+ {[greenletextras]extras}
+
deps = {[testenv]deps}
.[aiosqlite]
commands=
# command run in the github action when the C extensions (cext) are not active.
[testenv:github-nocext]
+extras=
+ {[greenletextras]extras}
+
deps = {[testenv]deps}
.[aiosqlite]
commands=