]> git.ipfire.org Git - thirdparty/sqlalchemy/sqlalchemy.git/commitdiff
backport of applicable 3.13 fixes
authorMike Bayer <mike_mp@zzzcomputing.com>
Sun, 26 May 2024 15:34:27 +0000 (11:34 -0400)
committerMike Bayer <mike_mp@zzzcomputing.com>
Mon, 17 Jun 2024 20:36:42 +0000 (16:36 -0400)
Fixes: #11417
Change-Id: Ib2ceccd9583d8776700f0da5b591906efcfe6e6f
(cherry picked from commit 754804635bc922c20d0b0075e0ed2da0add38742)

doc/build/changelog/unreleased_14/11417.rst [new file with mode: 0644]
lib/sqlalchemy/ext/serializer.py
setup.cfg
test/orm/test_mapper.py
tox.ini

diff --git a/doc/build/changelog/unreleased_14/11417.rst b/doc/build/changelog/unreleased_14/11417.rst
new file mode 100644 (file)
index 0000000..5182c03
--- /dev/null
@@ -0,0 +1,11 @@
+.. change::
+    :tags: bug, general
+    :tickets: 11417
+
+    Set up full Python 3.13 support to the extent currently possible, repairing
+    issues within internal language helpers as well as the serializer extension
+    module.
+
+    For version 1.4, this also modernizes the "extras" names in setup.cfg
+    to use dashes and not underscores for two-word names.  Underscore names
+    are still present to accommodate potential compatibility issues.
index 987ec1f544de1aef8f8c31f3f9c408ec7c2f65d8..689acae7e2b86ebc38f603cd8e52d845890f5fd7 100644 (file)
@@ -71,102 +71,202 @@ from ..util import b64decode
 from ..util import b64encode
 from ..util import byte_buffer
 from ..util import pickle
+from ..util import py2k
 from ..util import text_type
 
-
 __all__ = ["Serializer", "Deserializer", "dumps", "loads"]
 
 
-def Serializer(*args, **kw):
-    pickler = pickle.Pickler(*args, **kw)
-
-    def persistent_id(obj):
-        # print "serializing:", repr(obj)
-        if isinstance(obj, Mapper) and not obj.non_primary:
-            id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
-        elif isinstance(obj, MapperProperty) and not obj.parent.non_primary:
-            id_ = (
-                "mapperprop:"
-                + b64encode(pickle.dumps(obj.parent.class_))
-                + ":"
-                + obj.key
-            )
-        elif isinstance(obj, Table):
-            if "parententity" in obj._annotations:
-                id_ = "mapper_selectable:" + b64encode(
-                    pickle.dumps(obj._annotations["parententity"].class_)
+if py2k:
+
+    def Serializer(*args, **kw):
+        pickler = pickle.Pickler(*args, **kw)
+
+        def persistent_id(obj):
+            # print "serializing:", repr(obj)
+            if isinstance(obj, Mapper) and not obj.non_primary:
+                id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
+            elif (
+                isinstance(obj, MapperProperty) and not obj.parent.non_primary
+            ):
+                id_ = (
+                    "mapperprop:"
+                    + b64encode(pickle.dumps(obj.parent.class_))
+                    + ":"
+                    + obj.key
                 )
+            elif isinstance(obj, Table):
+                if "parententity" in obj._annotations:
+                    id_ = "mapper_selectable:" + b64encode(
+                        pickle.dumps(obj._annotations["parententity"].class_)
+                    )
+                else:
+                    id_ = "table:" + text_type(obj.key)
+            elif isinstance(obj, Column) and isinstance(obj.table, Table):
+                id_ = (
+                    "column:"
+                    + text_type(obj.table.key)
+                    + ":"
+                    + text_type(obj.key)
+                )
+            elif isinstance(obj, Session):
+                id_ = "session:"
+            elif isinstance(obj, Engine):
+                id_ = "engine:"
             else:
-                id_ = "table:" + text_type(obj.key)
-        elif isinstance(obj, Column) and isinstance(obj.table, Table):
-            id_ = (
-                "column:" + text_type(obj.table.key) + ":" + text_type(obj.key)
-            )
-        elif isinstance(obj, Session):
-            id_ = "session:"
-        elif isinstance(obj, Engine):
-            id_ = "engine:"
-        else:
-            return None
-        return id_
-
-    pickler.persistent_id = persistent_id
-    return pickler
-
-
-our_ids = re.compile(
-    r"(mapperprop|mapper|mapper_selectable|table|column|"
-    r"session|attribute|engine):(.*)"
-)
-
-
-def Deserializer(file, metadata=None, scoped_session=None, engine=None):
-    unpickler = pickle.Unpickler(file)
-
-    def get_engine():
-        if engine:
-            return engine
-        elif scoped_session and scoped_session().bind:
-            return scoped_session().bind
-        elif metadata and metadata.bind:
-            return metadata.bind
-        else:
-            return None
-
-    def persistent_load(id_):
-        m = our_ids.match(text_type(id_))
-        if not m:
-            return None
-        else:
-            type_, args = m.group(1, 2)
-            if type_ == "attribute":
-                key, clsarg = args.split(":")
-                cls = pickle.loads(b64decode(clsarg))
-                return getattr(cls, key)
-            elif type_ == "mapper":
-                cls = pickle.loads(b64decode(args))
-                return class_mapper(cls)
-            elif type_ == "mapper_selectable":
-                cls = pickle.loads(b64decode(args))
-                return class_mapper(cls).__clause_element__()
-            elif type_ == "mapperprop":
-                mapper, keyname = args.split(":")
-                cls = pickle.loads(b64decode(mapper))
-                return class_mapper(cls).attrs[keyname]
-            elif type_ == "table":
-                return metadata.tables[args]
-            elif type_ == "column":
-                table, colname = args.split(":")
-                return metadata.tables[table].c[colname]
-            elif type_ == "session":
-                return scoped_session()
-            elif type_ == "engine":
-                return get_engine()
+                return None
+            return id_
+
+        pickler.persistent_id = persistent_id
+        return pickler
+
+    our_ids = re.compile(
+        r"(mapperprop|mapper|mapper_selectable|table|column|"
+        r"session|attribute|engine):(.*)"
+    )
+
+    def Deserializer(file, metadata=None, scoped_session=None, engine=None):
+        unpickler = pickle.Unpickler(file)
+
+        def get_engine():
+            if engine:
+                return engine
+            elif scoped_session and scoped_session().bind:
+                return scoped_session().bind
+            elif metadata and metadata.bind:
+                return metadata.bind
             else:
-                raise Exception("Unknown token: %s" % type_)
+                return None
 
-    unpickler.persistent_load = persistent_load
-    return unpickler
+        def persistent_load(id_):
+            m = our_ids.match(text_type(id_))
+            if not m:
+                return None
+            else:
+                type_, args = m.group(1, 2)
+                if type_ == "attribute":
+                    key, clsarg = args.split(":")
+                    cls = pickle.loads(b64decode(clsarg))
+                    return getattr(cls, key)
+                elif type_ == "mapper":
+                    cls = pickle.loads(b64decode(args))
+                    return class_mapper(cls)
+                elif type_ == "mapper_selectable":
+                    cls = pickle.loads(b64decode(args))
+                    return class_mapper(cls).__clause_element__()
+                elif type_ == "mapperprop":
+                    mapper, keyname = args.split(":")
+                    cls = pickle.loads(b64decode(mapper))
+                    return class_mapper(cls).attrs[keyname]
+                elif type_ == "table":
+                    return metadata.tables[args]
+                elif type_ == "column":
+                    table, colname = args.split(":")
+                    return metadata.tables[table].c[colname]
+                elif type_ == "session":
+                    return scoped_session()
+                elif type_ == "engine":
+                    return get_engine()
+                else:
+                    raise Exception("Unknown token: %s" % type_)
+
+        unpickler.persistent_load = persistent_load
+        return unpickler
+
+
+else:
+
+    class Serializer(pickle.Pickler):
+        def persistent_id(self, obj):
+            # print "serializing:", repr(obj)
+            if isinstance(obj, Mapper) and not obj.non_primary:
+                id_ = "mapper:" + b64encode(pickle.dumps(obj.class_))
+            elif (
+                isinstance(obj, MapperProperty) and not obj.parent.non_primary
+            ):
+                id_ = (
+                    "mapperprop:"
+                    + b64encode(pickle.dumps(obj.parent.class_))
+                    + ":"
+                    + obj.key
+                )
+            elif isinstance(obj, Table):
+                if "parententity" in obj._annotations:
+                    id_ = "mapper_selectable:" + b64encode(
+                        pickle.dumps(obj._annotations["parententity"].class_)
+                    )
+                else:
+                    id_ = "table:" + text_type(obj.key)
+            elif isinstance(obj, Column) and isinstance(obj.table, Table):
+                id_ = (
+                    "column:"
+                    + text_type(obj.table.key)
+                    + ":"
+                    + text_type(obj.key)
+                )
+            elif isinstance(obj, Session):
+                id_ = "session:"
+            elif isinstance(obj, Engine):
+                id_ = "engine:"
+            else:
+                return None
+            return id_
+
+    our_ids = re.compile(
+        r"(mapperprop|mapper|mapper_selectable|table|column|"
+        r"session|attribute|engine):(.*)"
+    )
+
+    class Deserializer(pickle.Unpickler):
+        def __init__(
+            self, file, metadata=None, scoped_session=None, engine=None
+        ):
+            super().__init__(file)
+            self.metadata = metadata
+            self.scoped_session = scoped_session
+            self.engine = engine
+
+        def get_engine(self):
+            if self.engine:
+                return self.engine
+            elif self.scoped_session and self.scoped_session().bind:
+                return self.scoped_session().bind
+            elif self.metadata and self.metadata.bind:
+                return self.metadata.bind
+            else:
+                return None
+
+        def persistent_load(self, id_):
+            m = our_ids.match(text_type(id_))
+            if not m:
+                return None
+            else:
+                type_, args = m.group(1, 2)
+                if type_ == "attribute":
+                    key, clsarg = args.split(":")
+                    cls = pickle.loads(b64decode(clsarg))
+                    return getattr(cls, key)
+                elif type_ == "mapper":
+                    cls = pickle.loads(b64decode(args))
+                    return class_mapper(cls)
+                elif type_ == "mapper_selectable":
+                    cls = pickle.loads(b64decode(args))
+                    return class_mapper(cls).__clause_element__()
+                elif type_ == "mapperprop":
+                    mapper, keyname = args.split(":")
+                    cls = pickle.loads(b64decode(mapper))
+                    return class_mapper(cls).attrs[keyname]
+                elif type_ == "table":
+                    return self.metadata.tables[args]
+                elif type_ == "column":
+                    table, colname = args.split(":")
+                    return self.metadata.tables[table].c[colname]
+                elif type_ == "session":
+                    return self.scoped_session()
+                elif type_ == "engine":
+                    return self.get_engine()
+                else:
+                    raise Exception("Unknown token: %s" % type_)
 
 
 def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
index 2347114286020da3b23f0d81391c18bf5e859c67..e4cee11058d4eecc8a9da515203f5de793ddbc9f 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -52,25 +52,28 @@ mypy =
     mypy >= 0.910;python_version>="3"
     sqlalchemy2-stubs
 mssql = pyodbc
-mssql_pymssql = pymssql
-mssql_pyodbc = pyodbc
+
+mssql-pymssql = pymssql
+mssql-pyodbc = pyodbc
+
 mysql =
     mysqlclient>=1.4.0,<2;python_version<"3"
     mysqlclient>=1.4.0;python_version>="3"
-mysql_connector =
+mysql-connector =
     mysql-connector-python
-mariadb_connector =
+mariadb-connector =
     mariadb>=1.0.1,!=1.1.2;python_version>="3"
 oracle =
     cx_oracle>=7,<8;python_version<"3"
     cx_oracle>=7;python_version>="3"
 postgresql = psycopg2>=2.7
-postgresql_pg8000 = pg8000>=1.16.6,!=1.29.0
-postgresql_asyncpg =
+postgresql-pg8000 =
+    pg8000>=1.16.6,!=1.29.0;python_version>="3"
+
+postgresql-asyncpg =
     %(asyncio)s
     asyncpg;python_version>="3"
-postgresql_psycopg2binary = psycopg2-binary
-postgresql_psycopg2cffi = psycopg2cffi
+
 pymysql =
     pymysql;python_version>="3"
     pymysql<1;python_version<"3"
@@ -87,6 +90,31 @@ aiosqlite =
 sqlcipher =
     sqlcipher3_binary;python_version>="3"
 
+# legacy underscore names
+# it appears there may be dual passes through this file
+# when tox installs extras, sometimes looking for dashed names and sometimes
+# looking first for underscore names. so the dash/underscore names here are
+# kept entirely independent of each other, or else things don't seem
+# to install
+mssql_pymssql = pymssql
+mssql_pyodbc = pyodbc
+
+mysql_connector =
+    mysql-connector-python
+
+mariadb_connector =
+    mariadb>=1.0.1,!=1.1.2;python_version>="3"
+
+postgresql_pg8000 =
+    pg8000>=1.16.6,!=1.29.0;python_version>="3"
+
+postgresql_asyncpg =
+    %(asyncio)s
+    asyncpg;python_version>="3"
+
+postgresql_psycopg2binary = psycopg2-binary
+postgresql_psycopg2cffi = psycopg2cffi
+
 [egg_info]
 tag_build = dev
 
index c8a87cf5b7daef13d13ea03a3493e921a3b2e7b3..1c46f316931482ba60121b18f7cad03be5bc17eb 100644 (file)
@@ -1794,12 +1794,12 @@ class MapperTest(_fixtures.FixtureTest, AssertsCompiledSQL):
         )
 
         # object gracefully handles this condition
-        assert not hasattr(User.x, "__name__")
+        assert not hasattr(User.x, "foobar")
         assert not hasattr(User.x, "comparator")
 
         m.add_property("some_attr", column_property(users.c.name))
 
-        assert not hasattr(User.x, "__name__")
+        assert not hasattr(User.x, "foobar")
         assert hasattr(User.x, "comparator")
 
     def test_synonym_of_non_property_raises(self):
diff --git a/tox.ini b/tox.ini
index 9a198e77d7672edc22e7ec3f5b789fa4cf29e5a3..5372dbb34a76d9e8f83bb9c642bb8bb1a6ec60f9 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -2,6 +2,15 @@
 [tox]
 envlist = py
 
+[greenletextras]
+extras=
+     asyncio
+     sqlite: aiosqlite
+     sqlite_file: aiosqlite
+     postgresql: postgresql-asyncpg
+     mysql: asyncmy
+     mysql: aiomysql
+
 [testenv]
 # note that we have a .coveragerc file that points coverage specifically
 # at ./lib/sqlalchemy, and *not* at the build that tox might create under .tox.
@@ -18,33 +27,30 @@ constrain_package_deps=false
 usedevelop=
      cov: True
 
+extras=
+     py{3,38,39,310,311,312}: {[greenletextras]extras}
+
+     postgresql: postgresql
+     postgresql: postgresql-pg8000
+
+     mysql: mysql
+     mysql: pymysql
+     mysql: mariadb-connector
+
+     oracle: oracle
+     mssql: mssql
+
 deps=
      pytest>=4.6.11,<5.0; python_version < '3'
      pytest>=6.2,<8; python_version >= '3'
      pytest-xdist
      mock; python_version < '3.3'
 
-     sqlite: .[aiosqlite]
-     sqlite_file: .[aiosqlite]
-     sqlite_file: .[sqlcipher]; python_version >= '3' and python_version < '3.10'
-     postgresql: .[postgresql]
-     py3{,7,8,9,10,11}-postgresql: .[postgresql_asyncpg]; python_version >= '3'
-     postgresql: .[postgresql_pg8000]; python_version >= '3'
 
-     mysql: .[mysql]
-     mysql: .[pymysql]
-     mysql: .[asyncmy]; python_version >= '3'
-     mysql: .[aiomysql]; python_version >= '3'
-     # mysql: .[mariadb_connector]; python_version >= '3'
+     py313: git+https://github.com/vstinner/greenlet@py313\#egg=greenlet
 
-     oracle: .[oracle]
-
-     mssql: .[mssql]
-
-     py312: greenlet>=3.0.0a1
-
-     dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git#egg=aiosqlite
-     dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git#egg=sqlcipher3
+     dbapimain-sqlite: git+https://github.com/omnilib/aiosqlite.git\#egg=aiosqlite
+     dbapimain-sqlite: git+https://github.com/coleifer/sqlcipher3.git\#egg=sqlcipher3
 
      dbapimain-postgresql: git+https://github.com/psycopg/psycopg2.git#egg=psycopg2
      dbapimain-postgresql: git+https://github.com/MagicStack/asyncpg.git#egg=asyncpg
@@ -56,7 +62,8 @@ deps=
 
      dbapimain-oracle: git+https://github.com/oracle/python-cx_Oracle.git#egg=cx_Oracle
 
-     dbapimain-mssql: git+https://github.com/mkleehammer/pyodbc.git#egg=pyodbc
+     py313-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc
+     dbapimain-mssql: git+https://github.com/mkleehammer/pyodbc.git\#egg=pyodbc
 
      cov: pytest-cov
 
@@ -91,8 +98,6 @@ setenv=
 
     WORKERS={env:TOX_WORKERS:-n4  --max-worker-restart=5}
 
-
-
     nocext: DISABLE_SQLALCHEMY_CEXT=1
     cext: REQUIRE_SQLALCHEMY_CEXT=1
     cov: COVERAGE={[testenv]cov_args}
@@ -104,8 +109,14 @@ setenv=
 
     sqlite: SQLITE={env:TOX_SQLITE:--db sqlite}
     sqlite_file: SQLITE={env:TOX_SQLITE_FILE:--db sqlite_file}
-    py3{,5,6,7,8,9,10,11}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite}
-    py3{,5,6,7,8,9}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite --dbdriver pysqlcipher}
+
+    sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric --dbdriver aiosqlite}
+    py{313,314}-sqlite: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric}
+
+    sqlite-nogreenlet: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver pysqlite_numeric}
+
+    py{37,38,39}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite --dbdriver pysqlcipher}
+
     # omit pysqlcipher for Python 3.10
     py3{,10,11}-sqlite_file: EXTRA_SQLITE_DRIVERS={env:EXTRA_SQLITE_DRIVERS:--dbdriver sqlite --dbdriver aiosqlite}
 
@@ -134,6 +145,8 @@ setenv=
     oracle,mssql,sqlite_file: IDENTS=--write-idents db_idents.txt
     oracle,mssql,sqlite_file: MEMUSAGE=--nomemory
 
+
+
 # tox as of 2.0 blocks all environment variables from the
 # outside, unless they are here (or in TOX_TESTENV_PASSENV,
 # wildcards OK).  Need at least these
@@ -177,6 +190,10 @@ commands =
 # thanks to https://julien.danjou.info/the-best-flake8-extensions/
 [testenv:pep8]
 basepython = python3
+
+extras=
+     {[greenletextras]extras}
+
 deps=
       flake8
       #flake8-import-order
@@ -196,6 +213,9 @@ commands =
 
 # command run in the github action when cext are active.
 [testenv:github-cext]
+extras=
+     {[greenletextras]extras}
+
 deps = {[testenv]deps}
        .[aiosqlite]
 commands=
@@ -204,6 +224,9 @@ commands=
 
 # command run in the github action when cext are not active.
 [testenv:github-nocext]
+extras=
+     {[greenletextras]extras}
+
 deps = {[testenv]deps}
        .[aiosqlite]
 commands=